From b4b15181b4727c328530b381e91f27be56dd45f6 Mon Sep 17 00:00:00 2001 From: Guillem Barba Date: Thu, 31 Jul 2014 18:28:47 +0200 Subject: [PATCH 001/293] Add env-read-docs event to allow modify docnames list before processing --- doc/extdev/appapi.rst | 8 ++++++++ sphinx/application.py | 1 + sphinx/environment.py | 9 ++++++++- 3 files changed, 17 insertions(+), 1 deletion(-) diff --git a/doc/extdev/appapi.rst b/doc/extdev/appapi.rst index 1a0b95bf2..c6f5ee4c0 100644 --- a/doc/extdev/appapi.rst +++ b/doc/extdev/appapi.rst @@ -422,6 +422,14 @@ handlers to the events. Example: .. versionadded:: 0.5 +.. event:: env-read-docs (app, env, docnames) + + Emited after get the list of all added and changed files and just before + read them. It allow extension author modify docnames list before processing; + reordering, append and remove. + + .. versionadded:: 1.3.0 + .. event:: source-read (app, docname, source) Emitted when a source file has been read. The *source* argument is a list diff --git a/sphinx/application.py b/sphinx/application.py index dc4563e0c..7f2c382e9 100644 --- a/sphinx/application.py +++ b/sphinx/application.py @@ -48,6 +48,7 @@ events = { 'builder-inited': '', 'env-get-outdated': 'env, added, changed, removed', 'env-purge-doc': 'env, docname', + 'env-read-docs': 'env, docnames', 'source-read': 'docname, source text', 'doctree-read': 'the doctree before being pickled', 'missing-reference': 'env, node, contnode', diff --git a/sphinx/environment.py b/sphinx/environment.py index 5b92c0376..a4901a746 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -469,7 +469,14 @@ class BuildEnvironment: self.clear_doc(docname) # read all new and changed files - for docname in sorted(added | changed): + docnames = sorted(added | changed) + if app: + new_docnames = [] + for mod_docnames in app.emit('env-read-docs', self, docnames): + new_docnames.extend(mod_docnames) + if new_docnames: + docnames = new_docnames + for docname in docnames: yield docname self.read_doc(docname, app=app) From f974aca3565992d6e011452a071be69b1fab74e8 Mon Sep 17 00:00:00 2001 From: tk0miya Date: Fri, 26 Sep 2014 22:40:24 +0900 Subject: [PATCH 002/293] Assign figure numbers automatically --- sphinx/builders/html.py | 1 + sphinx/builders/websupport.py | 1 + sphinx/environment.py | 66 ++++++++++++++++++++++++++++++++--- sphinx/transforms.py | 23 ++++++++++++ sphinx/writers/html.py | 10 ++++++ 5 files changed, 97 insertions(+), 4 deletions(-) diff --git a/sphinx/builders/html.py b/sphinx/builders/html.py index c2c308937..d4bbefe49 100644 --- a/sphinx/builders/html.py +++ b/sphinx/builders/html.py @@ -424,6 +424,7 @@ class StandaloneHTMLBuilder(Builder): doctree.settings = self.docsettings self.secnumbers = self.env.toc_secnumbers.get(docname, {}) + self.fignumbers = self.env.toc_fignumbers.get(docname, {}) self.imgpath = relative_uri(self.get_target_uri(docname), '_images') self.dlpath = relative_uri(self.get_target_uri(docname), '_downloads') self.current_docname = docname diff --git a/sphinx/builders/websupport.py b/sphinx/builders/websupport.py index 7b0e6f724..ed913945c 100644 --- a/sphinx/builders/websupport.py +++ b/sphinx/builders/websupport.py @@ -58,6 +58,7 @@ class WebSupportBuilder(PickleHTMLBuilder): doctree.settings = self.docsettings self.secnumbers = self.env.toc_secnumbers.get(docname, {}) + self.fignumbers = self.env.toc_fignumbers.get(docname, {}) self.imgpath = '/' + posixpath.join(self.virtual_staticdir, '_images') self.dlpath = '/' + posixpath.join(self.virtual_staticdir, '_downloads') 
self.current_docname = docname diff --git a/sphinx/environment.py b/sphinx/environment.py index 1ce284f64..84149718e 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -48,7 +48,7 @@ from sphinx.errors import SphinxError, ExtensionError from sphinx.locale import _ from sphinx.versioning import add_uids, merge_doctrees from sphinx.transforms import DefaultSubstitutions, MoveModuleTargets, \ - HandleCodeBlocks, SortIds, CitationReferences, Locale, \ + HandleCodeBlocks, AutoNumbering, SortIds, CitationReferences, Locale, \ RemoveTranslatableInline, SphinxContentsFilter @@ -98,7 +98,7 @@ class SphinxStandaloneReader(standalone.Reader): Add our own transforms. """ transforms = [Locale, CitationReferences, DefaultSubstitutions, - MoveModuleTargets, HandleCodeBlocks, SortIds, + MoveModuleTargets, HandleCodeBlocks, AutoNumbering, SortIds, RemoveTranslatableInline] def get_transforms(self): @@ -234,6 +234,7 @@ class BuildEnvironment: # used to determine when to show the TOC # in a sidebar (don't show if it's only one item) self.toc_secnumbers = {} # docname -> dict of sectionid -> number + self.toc_fignumbers = {} # docname -> dict of figureid -> number self.toctree_includes = {} # docname -> list of toctree includefiles self.files_to_rebuild = {} # docname -> set of files @@ -635,8 +636,8 @@ class BuildEnvironment: self._warnfunc(*warning) def check_dependents(self, already): - to_rewrite = self.assign_section_numbers() - for docname in to_rewrite: + to_rewrite = self.assign_section_numbers() + self.assign_figure_numbers() + for docname in set(to_rewrite): if docname not in already: yield docname @@ -1693,6 +1694,63 @@ class BuildEnvironment: return rewrite_needed + def assign_figure_numbers(self): + """Assign a figure number to each figure under a numbered toctree.""" + + rewrite_needed = [] + + old_fignumbers = self.toc_fignumbers + self.toc_fignumbers = {} + fignum_counter = {} + + def get_section_number(docname, section): + anchorname = '#' + section['ids'][0] + secnumbers = self.toc_secnumbers.get(docname, {}) + if anchorname in secnumbers: + secnum = secnumbers.get(anchorname) + else: + secnum = secnumbers.get('') + + return secnum or tuple() + + def get_next_figure_number(secnum): + secnum = secnum[:1] + fignum_counter[secnum] = fignum_counter.get(secnum, 0) + 1 + return secnum + (fignum_counter[secnum],) + + def _walk_doctree(docname, doctree, secnum): + fignums = self.toc_fignumbers.setdefault(docname, {}) + for subnode in doctree.children: + if isinstance(subnode, nodes.section): + next_secnum = get_section_number(docname, subnode) + if next_secnum: + _walk_doctree(docname, subnode, next_secnum) + else: + _walk_doctree(docname, subnode, secnum) + continue + elif isinstance(subnode, addnodes.toctree): + for title, subdocname in subnode['entries']: + _walk_doc(subdocname, secnum) + + continue + + if isinstance(subnode, nodes.figure): + figure_id = subnode['ids'][0] + fignums[figure_id] = get_next_figure_number(secnum) + + _walk_doctree(docname, subnode, secnum) + + def _walk_doc(docname, secnum): + doctree = self.get_doctree(docname) + _walk_doctree(docname, doctree, secnum) + + _walk_doc(self.config.master_doc, tuple()) + for docname, fignums in iteritems(self.toc_fignumbers): + if fignums != old_fignumbers.get(docname): + rewrite_needed.append(docname) + + return rewrite_needed + def create_index(self, builder, group_entries=True, _fixre=re.compile(r'(.*) ([(][^()]*[)])')): """Create the real index from the collected index entries.""" diff --git a/sphinx/transforms.py 
b/sphinx/transforms.py index 42abea588..e4806092f 100644 --- a/sphinx/transforms.py +++ b/sphinx/transforms.py @@ -101,6 +101,29 @@ class HandleCodeBlocks(Transform): # del node.parent[parindex+1] +class AutoNumbering(Transform): + """ + Register IDs of tables, figures and literal_blocks to assign numbers. + """ + default_priority = 210 + + def apply(self): + def has_child(node, cls): + for child in node: + if isinstance(child, cls): + return True + + return False + + for node in self.document.traverse(nodes.Element): + if isinstance(node, nodes.figure): + if has_child(node, nodes.caption): + self.document.note_implicit_target(node) + elif isinstance(node, nodes.image): + if has_child(node.parent, nodes.caption): + self.document.note_implicit_target(node.parent) + + class SortIds(Transform): """ Sort secion IDs so that the "id[0-9]+" one comes last. diff --git a/sphinx/writers/html.py b/sphinx/writers/html.py index 56657ee72..dea591922 100644 --- a/sphinx/writers/html.py +++ b/sphinx/writers/html.py @@ -250,6 +250,15 @@ class HTMLTranslator(BaseTranslator): self.body.append('.'.join(map(str, numbers)) + self.secnumber_suffix) + def add_fignumber(self, node): + if node.get('fignumber'): + self.body.append('Fig.' + '.'.join(map(str, node['fignumber']))) + elif isinstance(node.parent, nodes.figure): + figure_id = node.parent['ids'][0] + if self.builder.fignumbers.get(figure_id): + numbers = self.builder.fignumbers[figure_id] + self.body.append('Fig.' + '.'.join(map(str, numbers))) + # overwritten to avoid emitting empty def visit_bullet_list(self, node): if len(node) == 1 and node[0].tagname == 'toctree': @@ -291,6 +300,7 @@ class HTMLTranslator(BaseTranslator): self.body.append(self.starttag(node, 'div', '', CLASS='code-block-caption')) else: BaseTranslator.visit_caption(self, node) + self.add_fignumber(node) def depart_caption(self, node): if isinstance(node.parent, nodes.container) and node.parent.get('literal_block'): From 2f5ea82b37eac433cca7e48973bc35f47cd0f96d Mon Sep 17 00:00:00 2001 From: tk0miya Date: Sat, 16 Aug 2014 09:39:01 +0900 Subject: [PATCH 003/293] Add config entries for numfig --- sphinx/config.py | 3 +++ sphinx/environment.py | 11 ++++++----- sphinx/writers/html.py | 7 +++---- 3 files changed, 12 insertions(+), 9 deletions(-) diff --git a/sphinx/config.py b/sphinx/config.py index 40536ae5a..60626cc7d 100644 --- a/sphinx/config.py +++ b/sphinx/config.py @@ -70,6 +70,9 @@ class Config(object): needs_extensions = ({}, None), nitpicky = (False, 'env'), nitpick_ignore = ([], 'html'), + numfig = (False, 'env'), + numfig_secnum_depth = (1, 'env'), + numfig_prefix = ({'figure': 'Fig.'}, 'env'), # HTML options html_theme = ('default', 'html'), diff --git a/sphinx/environment.py b/sphinx/environment.py index 84149718e..a6e06993f 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -1714,7 +1714,7 @@ class BuildEnvironment: return secnum or tuple() def get_next_figure_number(secnum): - secnum = secnum[:1] + secnum = secnum[:self.config.numfig_secnum_depth] fignum_counter[secnum] = fignum_counter.get(secnum, 0) + 1 return secnum + (fignum_counter[secnum],) @@ -1744,10 +1744,11 @@ class BuildEnvironment: doctree = self.get_doctree(docname) _walk_doctree(docname, doctree, secnum) - _walk_doc(self.config.master_doc, tuple()) - for docname, fignums in iteritems(self.toc_fignumbers): - if fignums != old_fignumbers.get(docname): - rewrite_needed.append(docname) + if self.config.numfig: + _walk_doc(self.config.master_doc, tuple()) + for docname, fignums in 
iteritems(self.toc_fignumbers): + if fignums != old_fignumbers.get(docname): + rewrite_needed.append(docname) return rewrite_needed diff --git a/sphinx/writers/html.py b/sphinx/writers/html.py index dea591922..de384ba1e 100644 --- a/sphinx/writers/html.py +++ b/sphinx/writers/html.py @@ -251,13 +251,12 @@ class HTMLTranslator(BaseTranslator): self.secnumber_suffix) def add_fignumber(self, node): - if node.get('fignumber'): - self.body.append('Fig.' + '.'.join(map(str, node['fignumber']))) - elif isinstance(node.parent, nodes.figure): + if isinstance(node.parent, nodes.figure): figure_id = node.parent['ids'][0] if self.builder.fignumbers.get(figure_id): + prefix = self.builder.config.numfig_prefix.get('figure') numbers = self.builder.fignumbers[figure_id] - self.body.append('Fig.' + '.'.join(map(str, numbers))) + self.body.append(prefix + '.'.join(map(str, numbers)) + " ") # overwritten to avoid emitting empty
    def visit_bullet_list(self, node): From ba5a42c504c519e811d6aabc2d4cb0a1304156af Mon Sep 17 00:00:00 2001 From: tk0miya Date: Thu, 21 Aug 2014 09:54:57 +0900 Subject: [PATCH 004/293] Ignore "self" and URL in toctree --- sphinx/environment.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sphinx/environment.py b/sphinx/environment.py index a6e06993f..4a6829340 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -1730,6 +1730,10 @@ class BuildEnvironment: continue elif isinstance(subnode, addnodes.toctree): for title, subdocname in subnode['entries']: + if url_re.match(subdocname) or subdocname == 'self': + # don't mess with those + continue + _walk_doc(subdocname, secnum) continue From 2ccdebf5d91ccf7f5b2cbad5fbc6a8333611f8fb Mon Sep 17 00:00:00 2001 From: tk0miya Date: Thu, 21 Aug 2014 09:54:57 +0900 Subject: [PATCH 005/293] Fix RuntimeError with numbered circular toctree --- sphinx/environment.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/sphinx/environment.py b/sphinx/environment.py index 4a6829340..aa76ea411 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -1699,6 +1699,7 @@ class BuildEnvironment: rewrite_needed = [] + assigned = set() old_fignumbers = self.toc_fignumbers self.toc_fignumbers = {} fignum_counter = {} @@ -1745,8 +1746,10 @@ class BuildEnvironment: _walk_doctree(docname, subnode, secnum) def _walk_doc(docname, secnum): - doctree = self.get_doctree(docname) - _walk_doctree(docname, doctree, secnum) + if docname not in assigned: + assigned.add(docname) + doctree = self.get_doctree(docname) + _walk_doctree(docname, doctree, secnum) if self.config.numfig: _walk_doc(self.config.master_doc, tuple()) From b17320ee1e3881ebe042eac0c11123610a415aa7 Mon Sep 17 00:00:00 2001 From: tk0miya Date: Sun, 24 Aug 2014 14:33:56 +0900 Subject: [PATCH 006/293] Fix cannot capture title of literal_block node by xref --- sphinx/domains/std.py | 5 +++++ tests/root/markup.txt | 4 ++++ 2 files changed, 9 insertions(+) diff --git a/sphinx/domains/std.py b/sphinx/domains/std.py index bb044e304..b6dc3cc9b 100644 --- a/sphinx/domains/std.py +++ b/sphinx/domains/std.py @@ -563,6 +563,11 @@ class StandardDomain(Domain): break else: continue + elif node.tagname == 'literal_block': + if 'caption' in node: + sectname = node['caption'] + else: + continue else: # anonymous-only labels continue diff --git a/tests/root/markup.txt b/tests/root/markup.txt index f6f955e24..4bd5efec7 100644 --- a/tests/root/markup.txt +++ b/tests/root/markup.txt @@ -142,6 +142,7 @@ Adding \n to test unescaping. * :ref:`here ` * :ref:`my-figure` * :ref:`my-table` +* :ref:`my-code-block` * :doc:`subdir/includes` * ``:download:`` is tested in includes.txt * :option:`Python -c option ` @@ -228,8 +229,11 @@ Version markup Code blocks ----------- +.. _my-code-block: + .. code-block:: ruby :linenos: + :caption: my ruby code def ruby? 
false From 24c5eb6e9cebf038675543b675135303ef3365c0 Mon Sep 17 00:00:00 2001 From: Guillem Barba Date: Mon, 1 Sep 2014 09:19:43 +0200 Subject: [PATCH 007/293] Add unit test for new env-read-docs event --- tests/test_env_read_docs.py | 54 +++++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 tests/test_env_read_docs.py diff --git a/tests/test_env_read_docs.py b/tests/test_env_read_docs.py new file mode 100644 index 000000000..0b57815bd --- /dev/null +++ b/tests/test_env_read_docs.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +""" + test_env_read_docs + ~~~~~~~~~~~~~~~~~~ + + Test docnames read order modification using the env-read-docs event. + + :copyright: Copyright 2014 by the Sphinx team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import pickle + +from docutils.parsers.rst.directives.html import MetaBody + +from sphinx import addnodes +from sphinx.versioning import add_uids, merge_doctrees, get_ratio + +from util import test_root, TestApp + +def setup_module(): + pass + +def test_default_docnames_order(): + """By default, docnames are read in alphanumeric order""" + def on_env_read_docs(app, env, docnames): + return docnames + + app = TestApp(srcdir='(temp)', freshenv=True) + env = app.env + app.connect('env-read-docs', on_env_read_docs) + + msg, num, it = env.update(app.config, app.srcdir, app.doctreedir, app) + read_docnames = [docname for docname in it] + assert len(read_docnames) > 1 and read_docnames == sorted(read_docnames) + +def test_inverse_docnames_order(): + """By default, docnames are read in alphanumeric order""" + def on_env_read_docs(app, env, docnames): + docnames.reverse() + return docnames + + app = TestApp(srcdir='(temp)', freshenv=True) + env = app.env + app.connect('env-read-docs', on_env_read_docs) + + msg, num, it = env.update(app.config, app.srcdir, app.doctreedir, app) + read_docnames = [docname for docname in it] + reversed_read_docnames = sorted(read_docnames)[:] + reversed_read_docnames.reverse() + assert len(read_docnames) > 1 and read_docnames == reversed_read_docnames + +def teardown_module(): + (test_root / '_build').rmtree(True) From 7b9c9586bd14cce9b876be5d441799738ef6290a Mon Sep 17 00:00:00 2001 From: Takayuki Shimizukawa Date: Tue, 9 Sep 2014 17:27:00 +0900 Subject: [PATCH 008/293] sphinx.application.Sphinx.add_search_language raises AssertionError for correct type of argument. Closes #1563 --- CHANGES | 6 ++++++ sphinx/application.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGES b/CHANGES index fa1d9fd83..b965e7923 100644 --- a/CHANGES +++ b/CHANGES @@ -1,6 +1,12 @@ Release 1.2.4 (in development) ============================== +Bugs fixed +---------- + +* #1563: :meth:`~sphinx.application.Sphinx.add_search_language` raises + AssertionError for correct type of argument. Thanks to rikoman. 
+ Release 1.2.3 (released Sep 1, 2014) ==================================== diff --git a/sphinx/application.py b/sphinx/application.py index 4fb9eb384..cce6de8df 100644 --- a/sphinx/application.py +++ b/sphinx/application.py @@ -615,7 +615,7 @@ class Sphinx(object): def add_search_language(self, cls): self.debug('[app] adding search language: %r', cls) from sphinx.search import languages, SearchLanguage - assert isinstance(cls, SearchLanguage) + assert issubclass(cls, SearchLanguage) languages[cls.lang] = cls From 7a2e5a0c931e919db7a935739d35f9b18bd18bff Mon Sep 17 00:00:00 2001 From: Nathan Goldbaum Date: Fri, 12 Sep 2014 19:31:44 -0700 Subject: [PATCH 009/293] Test for subclasses of docutils node types when screening searchindex --- sphinx/search/__init__.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py index bd95ecc4c..d57efd9ed 100644 --- a/sphinx/search/__init__.py +++ b/sphinx/search/__init__.py @@ -143,9 +143,10 @@ class WordCollector(NodeVisitor): self.lang = lang def dispatch_visit(self, node): - if node.__class__ is comment: + nodetype = type(node) + if issubclass(nodetype, comment): raise SkipNode - if node.__class__ is raw: + if issubclass(nodetype, raw): # Some people might put content in raw HTML that should be searched, # so we just amateurishly strip HTML tags and index the remaining # content @@ -154,9 +155,9 @@ class WordCollector(NodeVisitor): nodetext = re.sub(r'<[^<]+?>', '', nodetext) self.found_words.extend(self.lang.split(nodetext)) raise SkipNode - if node.__class__ is Text: + if issubclass(nodetype, Text): self.found_words.extend(self.lang.split(node.astext())) - elif node.__class__ is title: + elif issubclass(nodetype, title): self.found_title_words.extend(self.lang.split(node.astext())) From 580458fce1e42349397edd372f2c0afd1a36ea6f Mon Sep 17 00:00:00 2001 From: Matt Hickford Date: Sun, 14 Sep 2014 17:13:52 +0000 Subject: [PATCH 010/293] Explain how to install Sphinx with pip (rather than easy_install). Fixes #1447 --- doc/_templates/indexsidebar.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/_templates/indexsidebar.html b/doc/_templates/indexsidebar.html index 4a350ae47..019b20fc1 100644 --- a/doc/_templates/indexsidebar.html +++ b/doc/_templates/indexsidebar.html @@ -14,7 +14,7 @@

    {%trans%}Current version: {{ version }}{%endtrans%}
    {%trans%}Get Sphinx from the Python Package Index, or install it with:{%endtrans%}
-   easy_install -U Sphinx
+   pip install -U Sphinx
    {%trans%}Latest development version docs are also available.{%endtrans%}

    {% endif %} From f5567e03face18af00ee9407f1c2514f68da906e Mon Sep 17 00:00:00 2001 From: Matt Hickford Date: Sun, 14 Sep 2014 17:19:32 +0000 Subject: [PATCH 011/293] Add instructions how to install Sphinx to top of tutorial --- doc/tutorial.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/doc/tutorial.rst b/doc/tutorial.rst index 777f115c2..dcfc40958 100644 --- a/doc/tutorial.rst +++ b/doc/tutorial.rst @@ -9,6 +9,12 @@ while using Sphinx. The green arrows designate "more info" links leading to advanced sections about the described task. +Install Sphinx +-------------- + +Install Sphinx from `PyPI `_ :: + + $ pip install Sphinx Setting up the documentation sources ------------------------------------ From bf6f5acd2c57b89ac164571586427cbe3445c1bf Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 14 Sep 2014 17:26:13 +0000 Subject: [PATCH 012/293] add distribution possibility --- doc/tutorial.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/tutorial.rst b/doc/tutorial.rst index dcfc40958..0a12a27a0 100644 --- a/doc/tutorial.rst +++ b/doc/tutorial.rst @@ -9,13 +9,16 @@ while using Sphinx. The green arrows designate "more info" links leading to advanced sections about the described task. + Install Sphinx -------------- -Install Sphinx from `PyPI `_ :: +Install Sphinx, either from a distribution package or from +`PyPI `_ with :: $ pip install Sphinx + Setting up the documentation sources ------------------------------------ From 873efc53580b04c4f4a6bfeb9d3164cbb2dfbb7b Mon Sep 17 00:00:00 2001 From: tk0miya Date: Mon, 15 Sep 2014 22:10:13 +0900 Subject: [PATCH 013/293] Fix #1512 env.record_dependency crashes on multibyte directories --- sphinx/environment.py | 4 ++-- sphinx/util/pycompat.py | 3 +++ tests/test_env.py | 27 ++++++++++++++++++++++++++- 3 files changed, 31 insertions(+), 3 deletions(-) diff --git a/sphinx/environment.py b/sphinx/environment.py index 69a8b57fd..bc3df2c08 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -39,7 +39,7 @@ from sphinx.util import url_re, get_matching_docs, docname_join, split_into, \ from sphinx.util.nodes import clean_astext, make_refnode, WarningStream from sphinx.util.osutil import SEP, fs_encoding, find_catalog_files from sphinx.util.matching import compile_matchers -from sphinx.util.pycompat import class_types +from sphinx.util.pycompat import class_types, getcwd from sphinx.util.compat import docutils_version from sphinx.util.websupport import is_commentable from sphinx.errors import SphinxError, ExtensionError @@ -760,7 +760,7 @@ class BuildEnvironment: def process_dependencies(self, docname, doctree): """Process docutils-generated dependency info.""" - cwd = os.getcwd() + cwd = getcwd() frompath = path.join(path.normpath(self.srcdir), 'dummy') deps = doctree.settings.record_dependencies if not deps: diff --git a/sphinx/util/pycompat.py b/sphinx/util/pycompat.py index 17f8871e8..c4598142f 100644 --- a/sphinx/util/pycompat.py +++ b/sphinx/util/pycompat.py @@ -25,6 +25,8 @@ if sys.version_info >= (3, 0): bytes = bytes # prefix for Unicode strings u = '' + # getcwd function + from os import getcwd # StringIO/BytesIO classes from io import StringIO, BytesIO, TextIOWrapper # safely encode a string for printing to the terminal @@ -58,6 +60,7 @@ else: b = str bytes = str u = 'u' + from os import getcwdu as getcwd from StringIO import StringIO BytesIO = StringIO # no need to refactor on 2.x versions diff --git a/tests/test_env.py b/tests/test_env.py index eaaa212f8..c5a091cda 100644 --- 
a/tests/test_env.py +++ b/tests/test_env.py @@ -8,9 +8,11 @@ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. """ +import os import sys +import tempfile -from util import TestApp, remove_unicode_literals, path +from util import TestApp, test_root, remove_unicode_literals, path from sphinx.builders.html import StandaloneHTMLBuilder from sphinx.builders.latex import LaTeXBuilder @@ -95,6 +97,29 @@ def test_second_update(): assert 'autodoc' not in env.all_docs assert 'autodoc' not in env.found_docs + +def test_record_dependency_on_multibyte_directory(): + app = None + olddir = os.getcwd() + try: + tmproot = path(os.path.abspath(tempfile.mkdtemp())) + tmpdir = tmproot / u'テスト' + test_root.copytree(tmpdir) + os.chdir(tmpdir) + tmpdir = path(os.getcwd()) # for MacOSX; tmpdir is based on symlinks + + app = TestApp(srcdir=tmpdir, freshenv=True) + (app.srcdir / 'test.txt').write_text('.. include:: test.inc') + (app.srcdir / 'test.inc').write_text('hello sphinx') + _, _, it = app.env.update(app.config, app.srcdir, app.doctreedir, app) + list(it) # take all from iterator + finally: + tmproot.rmtree(ignore_errors=True) + os.chdir(olddir) + if app: + app.cleanup() + + def test_object_inventory(): refs = env.domaindata['py']['objects'] From 424a6b572bdb946a9acaf6238ecc3edd4e5c876c Mon Sep 17 00:00:00 2001 From: Gerard Marull Paretas Date: Wed, 17 Sep 2014 10:14:25 +0000 Subject: [PATCH 014/293] Added "libreoffice" extension to the extension list --- doc/develop.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/develop.rst b/doc/develop.rst index ab24c0557..5110aa3a2 100644 --- a/doc/develop.rst +++ b/doc/develop.rst @@ -55,6 +55,7 @@ This is the current list of contributed extensions in that repository: - hyphenator: client-side hyphenation of HTML using hyphenator_ - inlinesyntaxhighlight_: inline syntax highlighting - lassodomain: a domain for documenting Lasso_ source code +- libreoffice: an extension to include any drawing supported by LibreOffice (e.g. odg, vsd...). - lilypond: an extension inserting music scripts from Lilypond_ in PNG format. - makedomain_: a domain for `GNU Make`_ - matlabdomain: document MATLAB_ code. From 9f0afa534dd099bc5f05879d15a7f50284af2473 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Wed, 17 Sep 2014 12:22:14 +0200 Subject: [PATCH 015/293] Exception logs now contain the last 10 messages emitted by Sphinx. --- CHANGES | 5 +++++ sphinx/application.py | 9 +++++++-- sphinx/builders/__init__.py | 6 +++--- sphinx/util/__init__.py | 12 ++++++++++-- sphinx/util/console.py | 3 +++ 5 files changed, 28 insertions(+), 7 deletions(-) diff --git a/CHANGES b/CHANGES index fa1d9fd83..e87bd806a 100644 --- a/CHANGES +++ b/CHANGES @@ -1,6 +1,11 @@ Release 1.2.4 (in development) ============================== +Features added +-------------- + +* Exception logs now contain the last 10 messages emitted by Sphinx. 
+ Release 1.2.3 (released Sep 1, 2014) ==================================== diff --git a/sphinx/application.py b/sphinx/application.py index 4fb9eb384..8a1c65630 100644 --- a/sphinx/application.py +++ b/sphinx/application.py @@ -17,17 +17,18 @@ import types import posixpath from os import path from cStringIO import StringIO +from collections import deque from docutils import nodes from docutils.parsers.rst import convert_directive_function, \ - directives, roles + directives, roles import sphinx from sphinx import package_dir, locale from sphinx.roles import XRefRole from sphinx.config import Config from sphinx.errors import SphinxError, SphinxWarning, ExtensionError, \ - VersionRequirementError, ConfigError + VersionRequirementError, ConfigError from sphinx.domains import ObjType, BUILTIN_DOMAINS from sphinx.domains.std import GenericObject, Target, StandardDomain from sphinx.builders import BUILTIN_BUILDERS @@ -95,6 +96,9 @@ class Sphinx(object): self._events = events.copy() + # keep last few messages for traceback + self.messagelog = deque(maxlen=10) + # say hello to the world self.info(bold('Running Sphinx v%s' % sphinx.__version__)) @@ -241,6 +245,7 @@ class Sphinx(object): wfile.write('\n') if hasattr(wfile, 'flush'): wfile.flush() + self.messagelog.append(message) def warn(self, message, location=None, prefix='WARNING: '): """Emit a warning. diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py index 44c76fafb..c02ecb53f 100644 --- a/sphinx/builders/__init__.py +++ b/sphinx/builders/__init__.py @@ -277,8 +277,8 @@ class Builder(object): # finish (write static files etc.) self.finish() - status = (self.app.statuscode == 0 and 'succeeded' - or 'finished with problems') + status = (self.app.statuscode == 0 + and 'succeeded' or 'finished with problems') if self.app._warncount: self.info(bold('build %s, %s warning%s.' % (status, self.app._warncount, @@ -387,7 +387,7 @@ class Builder(object): threads.append(t) # make sure all threads have finished - self.info(bold('waiting for workers... '))#, nonl=True) + self.info(bold('waiting for workers... 
')) for t in threads: t.join() diff --git a/sphinx/util/__init__.py b/sphinx/util/__init__.py index cf3ae3279..91b7111ca 100644 --- a/sphinx/util/__init__.py +++ b/sphinx/util/__init__.py @@ -12,7 +12,6 @@ import os import re import sys -import shutil import fnmatch import tempfile import posixpath @@ -30,6 +29,7 @@ import jinja2 import sphinx from sphinx.errors import PycodeError from sphinx.util.pycompat import bytes +from sphinx.util.console import strip_colors # import other utilities; partly for backwards compatibility, so don't # prune unused ones indiscriminately @@ -176,6 +176,8 @@ _DEBUG_HEADER = '''\ # Python version: %s # Docutils version: %s %s # Jinja2 version: %s +# Last messages: +%s # Loaded extensions: ''' @@ -184,11 +186,17 @@ def save_traceback(app): import platform exc = traceback.format_exc() fd, path = tempfile.mkstemp('.log', 'sphinx-err-') + last_msgs = '' + if app is not None: + last_msgs = '\n'.join( + '# %s' % strip_colors(force_decode(s, 'utf-8')).strip() + for s in app.messagelog) os.write(fd, (_DEBUG_HEADER % (sphinx.__version__, platform.python_version(), docutils.__version__, docutils.__version_details__, - jinja2.__version__)).encode('utf-8')) + jinja2.__version__, + last_msgs)).encode('utf-8')) if app is not None: for extname, extmod in app._extensions.iteritems(): os.write(fd, ('# %s from %s\n' % ( diff --git a/sphinx/util/console.py b/sphinx/util/console.py index c2330102d..24a22d754 100644 --- a/sphinx/util/console.py +++ b/sphinx/util/console.py @@ -63,6 +63,9 @@ def coloron(): def colorize(name, text): return codes.get(name, '') + text + codes.get('reset', '') +def strip_colors(s): + return re.compile('\x1b.*?m').sub('', s) + def create_color_func(name): def inner(text): return colorize(name, text) From d2b955b7f9421f431a8e48f613651468d33e7304 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Wed, 17 Sep 2014 13:37:05 +0200 Subject: [PATCH 016/293] Closes #1568: since "centered" directly contains text children, it should be a TextElement. --- CHANGES | 1 + sphinx/addnodes.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGES b/CHANGES index 0e3f4c802..a0aa269d7 100644 --- a/CHANGES +++ b/CHANGES @@ -11,6 +11,7 @@ Bugs fixed * #1563: :meth:`~sphinx.application.Sphinx.add_search_language` raises AssertionError for correct type of argument. Thanks to rikoman. +* #1568: fix a crash when a "centered" directive contains a reference. Release 1.2.3 (released Sep 1, 2014) diff --git a/sphinx/addnodes.py b/sphinx/addnodes.py index 365efa798..2806a059c 100644 --- a/sphinx/addnodes.py +++ b/sphinx/addnodes.py @@ -107,7 +107,7 @@ class index(nodes.Invisible, nodes.Inline, nodes.TextElement): *entrytype* is one of "single", "pair", "double", "triple". 
""" -class centered(nodes.Part, nodes.Element): +class centered(nodes.Part, nodes.TextElement): """Deprecated.""" class acks(nodes.Element): From 447c8c1d377374e59f3a2a2549fa400de486f4f8 Mon Sep 17 00:00:00 2001 From: tk0miya Date: Fri, 19 Sep 2014 11:08:07 +0900 Subject: [PATCH 017/293] Wrap float environment on writing literal_block to latex --- sphinx/texinputs/sphinx.sty | 6 ++++++ sphinx/writers/latex.py | 7 ++++--- tests/test_directive_code.py | 6 ++---- 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/sphinx/texinputs/sphinx.sty b/sphinx/texinputs/sphinx.sty index ee0a923d0..d617c62bc 100644 --- a/sphinx/texinputs/sphinx.sty +++ b/sphinx/texinputs/sphinx.sty @@ -522,3 +522,9 @@ \gdef\@chappos{} } \fi + +% Define literal-block environment +\RequirePackage{float} +\floatstyle{plaintop} +\newfloat{literal-block}{htbp}{loc}[chapter] +\floatname{literal-block}{List} diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py index a7459c924..59a174c1f 100644 --- a/sphinx/writers/latex.py +++ b/sphinx/writers/latex.py @@ -1347,9 +1347,8 @@ class LaTeXTranslator(nodes.NodeVisitor): linenos = node['linenos'] caption = node.get('caption') if caption: - self.body.append('\n{\\colorbox[rgb]{0.9,0.9,0.9}' - '{\\makebox[\\textwidth][l]' - '{\\small\\texttt{%s}}}}\n' % (caption,)) + self.body.append('\n\\begin{literal-block}\caption{%s}\n' % + (caption,)) def warner(msg): self.builder.warn(msg, (self.curfilestack[-1], node.line)) hlcode = self.highlighter.highlight_block(code, lang, warn=warner, @@ -1367,6 +1366,8 @@ class LaTeXTranslator(nodes.NodeVisitor): hlcode = hlcode.rstrip() + '\n' self.body.append('\n' + hlcode + '\\end{%sVerbatim}\n' % (self.table and 'Original' or '')) + if caption: + self.body.append('\n\\end{literal-block}\n') raise nodes.SkipNode def depart_literal_block(self, node): self.body.append('\n\\end{alltt}\n') diff --git a/tests/test_directive_code.py b/tests/test_directive_code.py index 4dbdff881..f2f64970b 100644 --- a/tests/test_directive_code.py +++ b/tests/test_directive_code.py @@ -93,8 +93,7 @@ def test_code_block_caption_html(app): def test_code_block_caption_latex(app): app.builder.build('index') latex = (app.outdir / 'Python.tex').text() - caption = ('{\\colorbox[rgb]{0.9,0.9,0.9}{\\makebox[\\textwidth][l]' - '{\\small\\texttt{caption-test.rb}}}}') + caption = '\\caption{caption-test.rb}' assert caption in latex @@ -168,6 +167,5 @@ def test_literalinclude_caption_html(app): def test_literalinclude_caption_latex(app): app.builder.build('index') latex = (app.outdir / 'Python.tex').text() - caption = ('{\\colorbox[rgb]{0.9,0.9,0.9}{\\makebox[\\textwidth][l]' - '{\\small\\texttt{caption-test.py}}}}') + caption = '\\caption{caption-test.py}' assert caption in latex From d0764521996ffdae0fa9e8dba120fcbd182fafbd Mon Sep 17 00:00:00 2001 From: tk0miya Date: Fri, 19 Sep 2014 12:04:25 +0900 Subject: [PATCH 018/293] do nested_parse() :caption: of code-block directive --- sphinx/directives/code.py | 38 ++++++++++++++++----- sphinx/domains/std.py | 5 +++ sphinx/themes/basic/static/basic.css_t | 3 +- sphinx/writers/html.py | 15 ++++++-- sphinx/writers/latex.py | 23 +++++++++---- tests/roots/test-directive-code/caption.rst | 4 +-- tests/test_directive_code.py | 11 +++--- 7 files changed, 70 insertions(+), 29 deletions(-) diff --git a/sphinx/directives/code.py b/sphinx/directives/code.py index 6ea525b0f..855437594 100644 --- a/sphinx/directives/code.py +++ b/sphinx/directives/code.py @@ -13,6 +13,7 @@ from difflib import unified_diff from docutils import nodes 
from docutils.parsers.rst import Directive, directives +from docutils.statemachine import ViewList from six import string_types @@ -47,7 +48,6 @@ class Highlight(Directive): linenothreshold=linenothreshold)] - def dedent_lines(lines, dedent): if not dedent: return lines @@ -62,6 +62,21 @@ def dedent_lines(lines, dedent): return new_lines +def container_wrapper(directive, literal_node, caption): + container_node = nodes.container('', literal_block=True) + + parsed = nodes.Element() + directive.state.nested_parse(ViewList([caption], source=''), + directive.content_offset, parsed) + caption_node = nodes.caption(parsed[0].rawsource, '', + *parsed[0].children) + caption_node.source = parsed[0].source + caption_node.line = parsed[0].line + container_node += caption_node + container_node += literal_node + return container_node + + class CodeBlock(Directive): """ Directive for a code block with special highlighting or line numbering @@ -101,9 +116,6 @@ class CodeBlock(Directive): literal = nodes.literal_block(code, code) literal['language'] = self.arguments[0] - caption = self.options.get('caption') - if caption: - literal['caption'] = caption literal['linenos'] = 'linenos' in self.options or \ 'lineno-start' in self.options extra_args = literal['highlight_args'] = {} @@ -112,6 +124,11 @@ class CodeBlock(Directive): if 'lineno-start' in self.options: extra_args['linenostart'] = self.options['lineno-start'] set_source_info(self, literal) + + caption = self.options.get('caption') + if caption: + literal = container_wrapper(self, literal, caption) + return [literal] @@ -269,17 +286,20 @@ class LiteralInclude(Directive): retnode['language'] = self.options['language'] retnode['linenos'] = 'linenos' in self.options or \ 'lineno-start' in self.options - caption = self.options.get('caption') - if caption is not None: - if not caption: - caption = self.arguments[0] - retnode['caption'] = caption extra_args = retnode['highlight_args'] = {} if hl_lines is not None: extra_args['hl_lines'] = hl_lines if 'lineno-start' in self.options: extra_args['linenostart'] = self.options['lineno-start'] env.note_dependency(rel_filename) + + caption = self.options.get('caption') + if caption is not None: + if caption: + retnode = container_wrapper(self, retnode, caption) + else: + retnode = container_wrapper(self, retnode, self.arguments[0]) + return [retnode] diff --git a/sphinx/domains/std.py b/sphinx/domains/std.py index bb044e304..7a769221e 100644 --- a/sphinx/domains/std.py +++ b/sphinx/domains/std.py @@ -563,6 +563,11 @@ class StandardDomain(Domain): break else: continue + elif node.tagname == 'container' and node.get('literal_block'): + for n in node: + if n.tagname == 'caption': + sectname = clean_astext(n) + break else: # anonymous-only labels continue diff --git a/sphinx/themes/basic/static/basic.css_t b/sphinx/themes/basic/static/basic.css_t index 17547d0fd..3616288c8 100644 --- a/sphinx/themes/basic/static/basic.css_t +++ b/sphinx/themes/basic/static/basic.css_t @@ -484,8 +484,7 @@ div.code-block-filename code { background-color: transparent; } -div.code-block-caption + pre, -div.code-block-caption + div.highlight > pre { +div.code-block-caption + div > div.highlight > pre { margin-top: 0; } diff --git a/sphinx/writers/html.py b/sphinx/writers/html.py index 14b11fcff..56657ee72 100644 --- a/sphinx/writers/html.py +++ b/sphinx/writers/html.py @@ -283,12 +283,21 @@ class HTMLTranslator(BaseTranslator): **highlight_args) starttag = self.starttag(node, 'div', suffix='', CLASS='highlight-%s' % lang) - if 'caption' in 
node: - starttag += '
    %s
    ' % ( - node['caption'],) self.body.append(starttag + highlighted + '\n') raise nodes.SkipNode + def visit_caption(self, node): + if isinstance(node.parent, nodes.container) and node.parent.get('literal_block'): + self.body.append(self.starttag(node, 'div', '', CLASS='code-block-caption')) + else: + BaseTranslator.visit_caption(self, node) + + def depart_caption(self, node): + if isinstance(node.parent, nodes.container) and node.parent.get('literal_block'): + self.body.append('\n') + else: + BaseTranslator.depart_caption(self, node) + def visit_doctest_block(self, node): self.visit_literal_block(node) diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py index a7459c924..11ea8c87f 100644 --- a/sphinx/writers/latex.py +++ b/sphinx/writers/latex.py @@ -266,6 +266,7 @@ class LaTeXTranslator(nodes.NodeVisitor): self.next_section_ids = set() self.next_figure_ids = set() self.next_table_ids = set() + self.next_literal_ids = set() # flags self.in_title = 0 self.in_production_list = 0 @@ -1109,6 +1110,12 @@ class LaTeXTranslator(nodes.NodeVisitor): self.next_table_ids.add(node['refid']) self.next_table_ids.update(node['ids']) return + elif isinstance(next, nodes.container) and next.get('literal_block'): + # same for literal_block, but only if they have a caption + if node.get('refid'): + self.next_literal_ids.add(node['refid']) + self.next_literal_ids.update(node['ids']) + return except IndexError: pass if 'refuri' in node: @@ -1345,11 +1352,6 @@ class LaTeXTranslator(nodes.NodeVisitor): highlight_args['force'] = True if 'linenos' in node: linenos = node['linenos'] - caption = node.get('caption') - if caption: - self.body.append('\n{\\colorbox[rgb]{0.9,0.9,0.9}' - '{\\makebox[\\textwidth][l]' - '{\\small\\texttt{%s}}}}\n' % (caption,)) def warner(msg): self.builder.warn(msg, (self.curfilestack[-1], node.line)) hlcode = self.highlighter.highlight_block(code, lang, warn=warner, @@ -1494,9 +1496,16 @@ class LaTeXTranslator(nodes.NodeVisitor): pass def visit_container(self, node): - pass + if node.get('literal_block'): + ids = '' + for id in self.next_literal_ids: + ids += self.hypertarget(id, anchor=False) + self.next_figure_ids.clear() + self.body.append('\n\\begin{literal-block}' + ids) + def depart_container(self, node): - pass + if node.get('literal_block'): + self.body.append('\\end{literal-block}\n') def visit_decoration(self, node): pass diff --git a/tests/roots/test-directive-code/caption.rst b/tests/roots/test-directive-code/caption.rst index 274d0f19d..5a2fe4a1f 100644 --- a/tests/roots/test-directive-code/caption.rst +++ b/tests/roots/test-directive-code/caption.rst @@ -5,7 +5,7 @@ Code blocks ----------- .. code-block:: ruby - :caption: caption-test.rb + :caption: caption *test* rb def ruby? false @@ -17,5 +17,5 @@ Literal Include .. literalinclude:: literal.inc :language: python - :caption: caption-test.py + :caption: caption **test** py :lines: 10-11 diff --git a/tests/test_directive_code.py b/tests/test_directive_code.py index 4dbdff881..140483bf4 100644 --- a/tests/test_directive_code.py +++ b/tests/test_directive_code.py @@ -83,7 +83,8 @@ def test_code_block_dedent(app): def test_code_block_caption_html(app): app.builder.build('index') html = (app.outdir / 'caption.html').text() - caption = '
    caption-test.rb
    ' + caption = '
    caption test rb
    ' + print caption, html assert caption in html @@ -93,8 +94,7 @@ def test_code_block_caption_html(app): def test_code_block_caption_latex(app): app.builder.build('index') latex = (app.outdir / 'Python.tex').text() - caption = ('{\\colorbox[rgb]{0.9,0.9,0.9}{\\makebox[\\textwidth][l]' - '{\\small\\texttt{caption-test.rb}}}}') + caption = '\\caption{caption \\emph{test} rb}' assert caption in latex @@ -158,7 +158,7 @@ def test_literal_include_dedent(app): def test_literalinclude_caption_html(app): app.builder.build('index') html = (app.outdir / 'caption.html').text() - caption = '
    caption-test.py
    ' + caption = '
    caption test py
    ' assert caption in html @@ -168,6 +168,5 @@ def test_literalinclude_caption_html(app): def test_literalinclude_caption_latex(app): app.builder.build('index') latex = (app.outdir / 'Python.tex').text() - caption = ('{\\colorbox[rgb]{0.9,0.9,0.9}{\\makebox[\\textwidth][l]' - '{\\small\\texttt{caption-test.py}}}}') + caption = '\\caption{caption \\textbf{test} py}' assert caption in latex From e895676d82ea8a841e8908967dd2b71376ac8ef3 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 10:34:29 +0200 Subject: [PATCH 019/293] CPP domain: fix whitespace. --- sphinx/domains/cpp.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/sphinx/domains/cpp.py b/sphinx/domains/cpp.py index 23bd469fd..778a36bff 100644 --- a/sphinx/domains/cpp.py +++ b/sphinx/domains/cpp.py @@ -7,11 +7,11 @@ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. - + See http://www.nongnu.org/hcb/ for the grammar. See http://mentorembedded.github.io/cxx-abi/abi.html#mangling for the inspiration for the id generation. - + common grammar things: simple-declaration -> attribute-specifier-seq[opt] decl-specifier-seq[opt] @@ -20,7 +20,7 @@ # Use at most 1 init-declerator. -> decl-specifier-seq init-declerator -> decl-specifier-seq declerator initializer - + decl-specifier -> storage-class-specifier -> "static" (only for member_object and function_object) @@ -76,8 +76,8 @@ constant-expression | type-specifier-seq abstract-declerator | id-expression - - + + declerator -> ptr-declerator | noptr-declarator parameters-and-qualifiers trailing-return-type @@ -108,11 +108,11 @@ memberFunctionInit -> "=" "0" # (note: only "0" is allowed as the value, according to the standard, # right?) - - + + We additionally add the possibility for specifying the visibility as the first thing. - + type_object: goal: either a single type (e.g., "MyClass:Something_T" or a typedef-like @@ -126,14 +126,14 @@ -> decl-specifier-seq abstract-declarator[opt] grammar, typedef-like: no initilizer decl-specifier-seq declerator - - + + member_object: goal: as a type_object which must have a declerator, and optionally with a initializer grammar: decl-specifier-seq declerator initializer - + function_object: goal: a function declaration, TODO: what about templates? for now: skip grammar: no initializer @@ -1875,4 +1875,4 @@ class CPPDomain(Domain): def get_objects(self): for refname, (docname, type, theid) in iteritems(self.data['objects']): - yield (refname, refname, type, docname, refname, 1) \ No newline at end of file + yield (refname, refname, type, docname, refname, 1) From c683bd750482796171961b2820d63b12b77c3bbf Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 10:35:43 +0200 Subject: [PATCH 020/293] Std domain: don't expect "refprogram" attribute on an xref node. This would lead to an exception if we pass in some generic x-ref node that tries to resolve as an option. 
--- sphinx/domains/std.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sphinx/domains/std.py b/sphinx/domains/std.py index 43341e42b..9a7937aad 100644 --- a/sphinx/domains/std.py +++ b/sphinx/domains/std.py @@ -600,7 +600,7 @@ class StandardDomain(Domain): return make_refnode(builder, fromdocname, docname, labelid, contnode) elif typ == 'option': - progname = node['refprogram'] + progname = node.get('refprogram', '') docname, labelid = self.data['progoptions'].get((progname, target), ('', '')) if not docname: From 01f413108e1af67b00c95ffe7e2038547cd3a5dc Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 10:36:25 +0200 Subject: [PATCH 021/293] Ignore .tags file. --- .hgignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.hgignore b/.hgignore index 45ecebc31..1154332f7 100644 --- a/.hgignore +++ b/.hgignore @@ -11,6 +11,7 @@ ^Sphinx.egg-info/ ^doc/_build/ ^TAGS +^\.tags ^\.ropeproject/ ^env/ \.DS_Store$ From edcff75e32cc47c0ccd217de8e4c360c3b414e55 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 10:38:38 +0200 Subject: [PATCH 022/293] sphinx.environment: PEP8 cleanup. --- sphinx/environment.py | 39 +++++++++++++++++++++------------------ 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/sphinx/environment.py b/sphinx/environment.py index d51e7a168..42f4da0ef 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -36,23 +36,25 @@ from docutils.writers import UnfilteredWriter from sphinx import addnodes from sphinx.util import url_re, get_matching_docs, docname_join, split_into, \ - FilenameUniqDict + FilenameUniqDict from sphinx.util.nodes import clean_astext, make_refnode, WarningStream -from sphinx.util.osutil import SEP, fs_encoding, find_catalog_files +from sphinx.util.osutil import SEP, find_catalog_files from sphinx.util.matching import compile_matchers from sphinx.util.websupport import is_commentable from sphinx.errors import SphinxError, ExtensionError from sphinx.locale import _ from sphinx.versioning import add_uids, merge_doctrees from sphinx.transforms import DefaultSubstitutions, MoveModuleTargets, \ - HandleCodeBlocks, SortIds, CitationReferences, Locale, \ - RemoveTranslatableInline, SphinxContentsFilter + HandleCodeBlocks, SortIds, CitationReferences, Locale, \ + RemoveTranslatableInline, SphinxContentsFilter orig_role_function = roles.role orig_directive_function = directives.directive -class ElementLookupError(Exception): pass + +class ElementLookupError(Exception): + pass default_settings = { @@ -138,9 +140,9 @@ class BuildEnvironment: # remove potentially pickling-problematic values from config for key, val in list(vars(self.config).items()): if key.startswith('_') or \ - isinstance(val, types.ModuleType) or \ - isinstance(val, types.FunctionType) or \ - isinstance(val, class_types): + isinstance(val, types.ModuleType) or \ + isinstance(val, types.FunctionType) or \ + isinstance(val, class_types): del self.config[key] try: pickle.dump(self, picklefile, pickle.HIGHEST_PROTOCOL) @@ -646,7 +648,7 @@ class BuildEnvironment: # therefore time.time() can be older than filesystem's timestamp. # For example, FAT32 has 2sec timestamp resolution.) self.all_docs[docname] = max( - time.time(), path.getmtime(self.doc2path(docname))) + time.time(), path.getmtime(self.doc2path(docname))) if self.versioning_condition: # get old doctree @@ -846,7 +848,7 @@ class BuildEnvironment: # nodes are multiply inherited... 
if isinstance(node, nodes.authors): md['authors'] = [author.astext() for author in node] - elif isinstance(node, nodes.TextElement): # e.g. author + elif isinstance(node, nodes.TextElement): # e.g. author md[node.__class__.__name__] = node.astext() else: name, body = node @@ -976,7 +978,7 @@ class BuildEnvironment: def build_toc_from(self, docname, document): """Build a TOC from the doctree and store it in the inventory.""" - numentries = [0] # nonlocal again... + numentries = [0] # nonlocal again... def traverse_in_section(node, cls): """Like traverse(), but stay within the same section.""" @@ -1102,7 +1104,6 @@ class BuildEnvironment: stream=WarningStream(self._warnfunc)) return doctree - def get_and_resolve_doctree(self, docname, builder, doctree=None, prune_toctrees=True, includehidden=False): """Read the doctree from the pickle, resolve cross-references and @@ -1117,7 +1118,8 @@ class BuildEnvironment: # now, resolve all toctree nodes for toctreenode in doctree.traverse(addnodes.toctree): result = self.resolve_toctree(docname, builder, toctreenode, - prune=prune_toctrees, includehidden=includehidden) + prune=prune_toctrees, + includehidden=includehidden) if result is None: toctreenode.replace_self([]) else: @@ -1174,7 +1176,7 @@ class BuildEnvironment: else: # cull sub-entries whose parents aren't 'current' if (collapse and depth > 1 and - 'iscurrent' not in subnode.parent): + 'iscurrent' not in subnode.parent): subnode.parent.remove(subnode) else: # recurse on visible children @@ -1256,7 +1258,7 @@ class BuildEnvironment: child = toc.children[0] for refnode in child.traverse(nodes.reference): if refnode['refuri'] == ref and \ - not refnode['anchorname']: + not refnode['anchorname']: refnode.children = [nodes.Text(title)] if not toc.children: # empty toc means: no titles will show up in the toctree @@ -1595,7 +1597,7 @@ class BuildEnvironment: # prefixes match: add entry as subitem of the # previous entry oldsubitems.setdefault(m.group(2), [[], {}])[0].\ - extend(targets) + extend(targets) del newlist[i] continue oldkey = m.group(1) @@ -1622,6 +1624,7 @@ class BuildEnvironment: def collect_relations(self): relations = {} getinc = self.toctree_includes.get + def collect(parents, parents_set, docname, previous, next): # circular relationship? if docname in parents_set: @@ -1661,8 +1664,8 @@ class BuildEnvironment: # same for children if includes: for subindex, args in enumerate(zip(includes, - [None] + includes, - includes[1:] + [None])): + [None] + includes, + includes[1:] + [None])): collect([(docname, subindex)] + parents, parents_set.union([docname]), *args) relations[docname] = [parents[0][0], previous, next] From 6477151f4fb16b4ee182f6f8722de000729a79f7 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 11:57:06 +0200 Subject: [PATCH 023/293] Fix URL in docu for opensearch. --- doc/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/config.rst b/doc/config.rst index cc35a7578..a11254eaa 100644 --- a/doc/config.rst +++ b/doc/config.rst @@ -707,7 +707,7 @@ that use Sphinx's HTMLWriter class. .. confval:: html_use_opensearch - If nonempty, an `OpenSearch ` description file will be + If nonempty, an `OpenSearch `_ description file will be output, and all pages will contain a ```` tag referring to it. 
Since OpenSearch doesn't support relative URLs for its search page location, the value of this option must be the base URL from which these documents are From b9469b9013eca8ba4df2df527a9a9fdf3666c458 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 12:21:38 +0200 Subject: [PATCH 024/293] Changing the default role document-locally with the docutils ".. default-role::" directive is now supported. --- CHANGES | 2 ++ sphinx/directives/__init__.py | 32 +++++++++++++++++++++++++++++++- sphinx/environment.py | 1 + sphinx/roles.py | 7 ++++++- 4 files changed, 40 insertions(+), 2 deletions(-) diff --git a/CHANGES b/CHANGES index 6279c4467..114e27706 100644 --- a/CHANGES +++ b/CHANGES @@ -31,6 +31,8 @@ Features added can be shown in the traceback log files). Version requirements for extensions can be specified in projects using the new :confval:`needs_extensions` config value. +* Changing the default role within a document with the :rst:dir:`default-role` + directive is now supported. * PR#214: Added stemming support for 14 languages, so that the built-in document search can now handle these. Thanks to Shibukawa Yoshiki. * PR#202: Allow "." and "~" prefixed references in ``:param:`` doc fields diff --git a/sphinx/directives/__init__.py b/sphinx/directives/__init__.py index 52b638fe0..969426bc1 100644 --- a/sphinx/directives/__init__.py +++ b/sphinx/directives/__init__.py @@ -11,7 +11,8 @@ import re -from docutils.parsers.rst import Directive, directives +from docutils import nodes +from docutils.parsers.rst import Directive, directives, roles from sphinx import addnodes from sphinx.util.docfields import DocFieldTransformer @@ -162,6 +163,34 @@ class ObjectDescription(Directive): DescDirective = ObjectDescription +class DefaultRole(Directive): + """ + Set the default interpreted text role. Overridden from docutils. + """ + + optional_arguments = 1 + final_argument_whitespace = False + + def run(self): + if not self.arguments: + if '' in roles._roles: + # restore the "default" default role + del roles._roles[''] + return [] + role_name = self.arguments[0] + role, messages = roles.role(role_name, self.state_machine.language, + self.lineno, self.state.reporter) + if role is None: + error = self.state.reporter.error( + 'Unknown interpreted text role "%s".' % role_name, + nodes.literal_block(self.block_text, self.block_text), + line=self.lineno) + return messages + [error] + roles._roles[''] = role + self.state.document.settings.env.temp_data['default_role'] = role_name + return messages + + class DefaultDomain(Directive): """ Directive to (re-)set the default domain for this source file. 
@@ -186,6 +215,7 @@ class DefaultDomain(Directive): return [] +directives.register_directive('default-role', DefaultRole) directives.register_directive('default-domain', DefaultDomain) directives.register_directive('describe', ObjectDescription) # new, more consistent, name diff --git a/sphinx/environment.py b/sphinx/environment.py index 42f4da0ef..7838a5d1c 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -681,6 +681,7 @@ class BuildEnvironment: # cleanup self.temp_data.clear() + roles._roles.pop('', None) # if a document has set a local default role if save_parsed: # save the parsed doctree diff --git a/sphinx/roles.py b/sphinx/roles.py index aaf6272bd..b99b22795 100644 --- a/sphinx/roles.py +++ b/sphinx/roles.py @@ -17,6 +17,7 @@ from docutils.parsers.rst import roles from sphinx import addnodes from sphinx.locale import _ +from sphinx.errors import SphinxError from sphinx.util import ws_re from sphinx.util.nodes import split_explicit_title, process_index_entry, \ set_role_source_info @@ -96,7 +97,11 @@ class XRefRole(object): options={}, content=[]): env = inliner.document.settings.env if not typ: - typ = env.config.default_role + typ = env.temp_data.get('default_role') + if not typ: + typ = env.config.default_role + if not typ: + raise SphinxError('cannot determine default role!') else: typ = typ.lower() if ':' not in typ: From 8e3a8feeddd8231758d8f871d20bb87e8a058a88 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 12:36:21 +0200 Subject: [PATCH 025/293] clean up default-role usage in CHANGES --- CHANGES | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/CHANGES b/CHANGES index 114e27706..57d285add 100644 --- a/CHANGES +++ b/CHANGES @@ -12,8 +12,8 @@ Incompatible changes * A new node, ``sphinx.addnodes.literal_strong``, has been added, for text that should appear literally (i.e. no smart quotes) in strong font. Custom writers will have to be adapted to handle this node. -* PR#269, #1476: replace `` tag by ``. User customized stylesheets - should be updated If the css contain some styles for `` tag. +* PR#269, #1476: replace ```` tag by ````. User customized stylesheets + should be updated If the css contain some styles for ``tt>`` tag. Thanks to Takeshi Komiya. * #1543: :confval:`templates_path` is automatically added to :confval:`exclude_patterns` to avoid reading autosummary rst templates in the @@ -169,7 +169,7 @@ Bugs fixed * #1370: Ignore "toctree" nodes in text writer, instead of raising. * #1364: Fix 'make gettext' fails when the '.. todolist::' directive is present. * #1367: Fix a change of PR#96 that break sphinx.util.docfields.Field.make_field - interface/behavior for `item` argument usage. + interface/behavior for *item* argument usage. * #1363: Fix i18n: missing python domain's cross-references with currentmodule directive or currentclass directive. * #1419: Generated i18n sphinx.js files are missing message catalog entries @@ -179,14 +179,14 @@ Bugs fixed Thanks to solos. * #1466,PR#241: Fix failure of the cpp domain parser to parse C+11 "variadic templates" declarations. Thanks to Victor Zverovich. -* #1459,PR#244: Fix default mathjax js path point to `http://` that cause +* #1459,PR#244: Fix default mathjax js path point to ``http://`` that cause mixed-content error on HTTPS server. Thanks to sbrandtb and robo9k. * PR#157: autodoc remove spurious signatures from @property decorated attributes. Thanks to David Ham. 
* PR#159: Add coverage targets to quickstart generated Makefile and make.bat. Thanks to Matthias Troffaes. * #1251: When specifying toctree :numbered: option and :tocdepth: metadata, - sub section number that is larger depth than `:tocdepth:` is shrinked. + sub section number that is larger depth than ``:tocdepth:`` is shrunk. * PR#260: Encode underscore in citation labels for latex export. Thanks to Lennart Fricke. * PR#264: Fix could not resolve xref for figure node with :name: option. @@ -225,7 +225,7 @@ Bugs fixed :rst:dir:`c:function`. Thanks to Takeshi Komiya. * PR#278: Fix section entries were shown twice if toctree has been put under only directive. Thanks to Takeshi Komiya. -* #1547: pgen2 tokenizer doesn't recognize `...` literal (Ellipsis for py3). +* #1547: pgen2 tokenizer doesn't recognize ``...`` literal (Ellipsis for py3). Documentation ------------- @@ -241,7 +241,7 @@ Release 1.2.3 (released Sep 1, 2014) Features added -------------- -* #1518: `sphinx-apidoc` command now have a `--version` option to show version +* #1518: ``sphinx-apidoc`` command now has a ``--version`` option to show version information and exit * New locales: Hebrew, European Portuguese, Vietnamese. @@ -259,14 +259,14 @@ Bugs fixed Thanks to Jorge_C. * #1467: Exception on Python3 if nonexistent method is specified by automethod * #1441: autosummary can't handle nested classes correctly. -* #1499: With non-callable `setup` in a conf.py, now sphinx-build emits - user-friendly error message. +* #1499: With non-callable ``setup`` in a conf.py, now sphinx-build emits + a user-friendly error message. * #1502: In autodoc, fix display of parameter defaults containing backslashes. * #1226: autodoc, autosummary: importing setup.py by automodule will invoke - setup process and execute `sys.exit()`. Now sphinx avoids SystemExit + setup process and execute ``sys.exit()``. Now sphinx avoids SystemExit exception and emits warnings without unexpected termination. * #1503: py:function directive generate incorrectly signature when specifying - a default parameter with an empty list `[]`. Thanks to Geert Jansen. + a default parameter with an empty list ``[]``. Thanks to Geert Jansen. * #1508: Non-ASCII filename raise exception on make singlehtml, latex, man, texinfo and changes. * #1531: On Python3 environment, docutils.conf with 'source_link=true' in the @@ -276,11 +276,11 @@ Bugs fixed * PR#281, PR#282, #1509: TODO extension not compatible with websupport. Thanks to Takeshi Komiya. * #1477: gettext does not extract nodes.line in a table or list. -* #1544: `make text` generate wrong table when it has empty table cells. +* #1544: ``make text`` generates wrong table when it has empty table cells. * #1522: Footnotes from table get displayed twice in LaTeX. This problem has been appeared from Sphinx-1.2.1 by #949. * #508: Sphinx every time exit with zero when is invoked from setup.py command. - ex. `python setup.py build_sphinx -b doctest` return zero even if doctest + ex. ``python setup.py build_sphinx -b doctest`` return zero even if doctest failed. Release 1.2.2 (released Mar 2, 2014) @@ -307,7 +307,7 @@ Bugs fixed * #1370: Ignore "toctree" nodes in text writer, instead of raising. * #1364: Fix 'make gettext' fails when the '.. todolist::' directive is present. * #1367: Fix a change of PR#96 that break sphinx.util.docfields.Field.make_field - interface/behavior for `item` argument usage. + interface/behavior for ``item`` argument usage. 
Documentation ------------- @@ -426,7 +426,7 @@ Bugs fixed * Restore ``versionmodified`` CSS class for versionadded/changed and deprecated directives. -* PR#181: Fix `html_theme_path=['.']` is a trigger of rebuild all documents +* PR#181: Fix ``html_theme_path = ['.']`` is a trigger of rebuild all documents always (This change keeps the current "theme changes cause a rebuild" feature). From 430be0496ad68af31c052cf2367c0c8197a19cd6 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 12:37:58 +0200 Subject: [PATCH 026/293] Clean up reference resolution a bit; factor out :doc: and citations into their own methods. --- sphinx/environment.py | 81 +++++++++++++++++++++++-------------------- 1 file changed, 44 insertions(+), 37 deletions(-) diff --git a/sphinx/environment.py b/sphinx/environment.py index 7838a5d1c..fbc56b3da 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -1349,49 +1349,21 @@ class BuildEnvironment: domain = self.domains[node['refdomain']] except KeyError: raise NoUri - newnode = domain.resolve_xref(self, fromdocname, builder, + newnode = domain.resolve_xref(self, refdoc, builder, typ, target, node, contnode) # really hardwired reference types elif typ == 'doc': - # directly reference to document by source name; - # can be absolute or relative - docname = docname_join(refdoc, target) - if docname in self.all_docs: - if node['refexplicit']: - # reference with explicit title - caption = node.astext() - else: - caption = clean_astext(self.titles[docname]) - innernode = nodes.emphasis(caption, caption) - newnode = nodes.reference('', '', internal=True) - newnode['refuri'] = builder.get_relative_uri( - fromdocname, docname) - newnode.append(innernode) + newnode = self._resolve_doc_reference(builder, node, contnode) elif typ == 'citation': - docname, labelid = self.citations.get(target, ('', '')) - if docname: - try: - newnode = make_refnode(builder, fromdocname, - docname, labelid, contnode) - except NoUri: - # remove the ids we added in the CitationReferences - # transform since they can't be transfered to - # the contnode (if it's a Text node) - if not isinstance(contnode, nodes.Element): - del node['ids'][:] - raise - elif 'ids' in node: - # remove ids attribute that annotated at - # transforms.CitationReference.apply. - del node['ids'][:] + newnode = self._resolve_citation(builder, node, contnode) # no new node found? try the missing-reference event if newnode is None: newnode = builder.app.emit_firstresult( 'missing-reference', self, node, contnode) - # still not found? warn if in nit-picky mode + # still not found? 
warn if node wishes to be warned about or + # we are in nit-picky mode if newnode is None: - self._warn_missing_reference( - fromdocname, typ, target, node, domain) + self._warn_missing_reference(refdoc, typ, target, node, domain) except NoUri: newnode = contnode node.replace_self(newnode or contnode) @@ -1402,7 +1374,7 @@ class BuildEnvironment: # allow custom references to be resolved builder.app.emit('doctree-resolved', doctree, fromdocname) - def _warn_missing_reference(self, fromdoc, typ, target, node, domain): + def _warn_missing_reference(self, refdoc, typ, target, node, domain): warn = node.get('refwarn') if self.config.nitpicky: warn = True @@ -1421,13 +1393,48 @@ class BuildEnvironment: msg = 'unknown document: %(target)s' elif typ == 'citation': msg = 'citation not found: %(target)s' - elif node.get('refdomain', 'std') != 'std': + elif node.get('refdomain', 'std') not in ('', 'std'): msg = '%s:%s reference target not found: %%(target)s' % \ (node['refdomain'], typ) else: - msg = '%s reference target not found: %%(target)s' % typ + msg = '%r reference target not found: %%(target)s' % typ self.warn_node(msg % {'target': target}, node) + def _resolve_doc_reference(self, builder, node, contnode): + # directly reference to document by source name; + # can be absolute or relative + docname = docname_join(node['refdoc'], node['reftarget']) + if docname in self.all_docs: + if node['refexplicit']: + # reference with explicit title + caption = node.astext() + else: + caption = clean_astext(self.titles[docname]) + innernode = nodes.emphasis(caption, caption) + newnode = nodes.reference('', '', internal=True) + newnode['refuri'] = builder.get_relative_uri(node['refdoc'], docname) + newnode.append(innernode) + return newnode + + def _resolve_citation(self, builder, node, contnode): + docname, labelid = self.citations.get(node['reftarget'], ('', '')) + if docname: + try: + newnode = make_refnode(builder, node['refdoc'], + docname, labelid, contnode) + return newnode + except NoUri: + # remove the ids we added in the CitationReferences + # transform since they can't be transfered to + # the contnode (if it's a Text node) + if not isinstance(contnode, nodes.Element): + del node['ids'][:] + raise + elif 'ids' in node: + # remove ids attribute that annotated at + # transforms.CitationReference.apply. + del node['ids'][:] + def process_only_nodes(self, doctree, builder, fromdocname=None): # A comment on the comment() nodes being inserted: replacing by [] would # result in a "Losing ids" exception if there is a target node before From c3eb669f8aa67afff8aaf6f069b186869bd31158 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 12:59:18 +0200 Subject: [PATCH 027/293] Added the `any` role that can be used to find a cross-reference of *any* type in *any* domain. Custom domains should implement the new `~Domain.resolve_any_xref` method to make this work properly. 
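The new ``Domain.resolve_any_xref`` contract is small: given a target of unknown type, return a list of ``('domain:role', newnode)`` pairs, where ``newnode`` is what ``resolve_xref`` would have produced for that role. A rough sketch for a hypothetical ``recipe`` domain whose ``self.data['objects']`` maps names to ``(docname, objtype)`` pairs (the domain and its data layout are illustrative, not taken from this patch)::

    from sphinx.domains import Domain
    from sphinx.util.nodes import make_refnode

    class RecipeDomain(Domain):
        name = 'recipe'

        def resolve_any_xref(self, env, fromdocname, builder, target,
                             node, contnode):
            # Collect every plausible match; the environment warns when more
            # than one domain/role claims the same target.
            results = []
            if target in self.data['objects']:
                docname, objtype = self.data['objects'][target]
                results.append(('recipe:' + self.role_for_objtype(objtype),
                                make_refnode(builder, fromdocname, docname,
                                             objtype + '-' + target,
                                             contnode, target)))
            return results

The concrete implementations for the built-in c, cpp, javascript, python, rst and std domains follow in the diff below.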
--- CHANGES | 5 ++++ doc/markup/inline.rst | 47 +++++++++++++++++++++++++++++++-- sphinx/domains/__init__.py | 19 ++++++++++++++ sphinx/domains/c.py | 11 ++++++++ sphinx/domains/cpp.py | 34 +++++++++++++++++------- sphinx/domains/javascript.py | 12 ++++++++- sphinx/domains/python.py | 51 +++++++++++++++++++++++++++--------- sphinx/domains/rst.py | 13 +++++++++ sphinx/domains/std.py | 17 ++++++++++++ sphinx/environment.py | 45 +++++++++++++++++++++++++++++++ sphinx/roles.py | 2 ++ 11 files changed, 230 insertions(+), 26 deletions(-) diff --git a/CHANGES b/CHANGES index 57d285add..7146251c2 100644 --- a/CHANGES +++ b/CHANGES @@ -18,6 +18,8 @@ Incompatible changes * #1543: :confval:`templates_path` is automatically added to :confval:`exclude_patterns` to avoid reading autosummary rst templates in the templates directory. +* Custom domains should implement the new :meth:`~Domain.resolve_any_xref` + method to make the :rst:role:`any` role work properly. Features added -------------- @@ -26,6 +28,9 @@ Features added * Add support for docutils 0.12 * Added ``sphinx.ext.napoleon`` extension for NumPy and Google style docstring support. +* Added the :rst:role:`any` role that can be used to find a cross-reference of + *any* type in *any* domain. Custom domains should implement the new + :meth:`~Domain.resolve_any_xref` method to make this work properly. * Exception logs now contain the last 10 messages emitted by Sphinx. * Added support for extension versions (a string returned by ``setup()``, these can be shown in the traceback log files). Version requirements for extensions diff --git a/doc/markup/inline.rst b/doc/markup/inline.rst index 0cc97f43f..7d83e3178 100644 --- a/doc/markup/inline.rst +++ b/doc/markup/inline.rst @@ -12,7 +12,9 @@ They are written as ``:rolename:`content```. The default role (```content```) has no special meaning by default. You are free to use it for anything you like, e.g. variable names; use the - :confval:`default_role` config value to set it to a known role. + :confval:`default_role` config value to set it to a known role -- the + :rst:role:`any` role to find anything or the :rst:role:`py:obj` role to find + Python objects are very useful for this. See :ref:`domains` for roles added by domains. @@ -38,12 +40,53 @@ more versatile: * If you prefix the content with ``~``, the link text will only be the last component of the target. For example, ``:py:meth:`~Queue.Queue.get``` will - refer to ``Queue.Queue.get`` but only display ``get`` as the link text. + refer to ``Queue.Queue.get`` but only display ``get`` as the link text. This + does not work with all cross-reference roles, but is domain specific. In HTML output, the link's ``title`` attribute (that is e.g. shown as a tool-tip on mouse-hover) will always be the full target name. +.. _any-role: + +Cross-referencing anything +-------------------------- + +.. rst:role:: any + + .. versionadded:: 1.3 + + This convenience role tries to do its best to find a valid target for its + reference text. + + * First, it tries standard cross-reference targets that would be referenced + by :rst:role:`doc`, :rst:role:`ref` or :rst:role:`option`. + + Custom objects added to the standard domain by extensions (see + :meth:`.add_object_type`) are also searched. + + * Then, it looks for objects (targets) in all loaded domains. It is up to + the domains how specific a match must be. For example, in the Python + domain a reference of ``:any:`Builder``` would match the + ``sphinx.builders.Builder`` class. 
+ + If none or multiple targets are found, a warning will be emitted. In the + case of multiple targets, you can change "any" to a specific role. + + This role is a good candidate for setting :confval:`default_role`. If you + do, you can write cross-references without a lot of markup overhead. For + example, in this Python function documentation :: + + .. function:: install() + + This function installs a `handler` for every signal known by the + `signal` module. See the section `about-signals` for more information. + + there could be references to a glossary term (usually ``:term:`handler```), a + Python module (usually ``:py:mod:`signal``` or ``:mod:`signal```) and a + section (usually ``:ref:`about-signals```). + + Cross-referencing objects ------------------------- diff --git a/sphinx/domains/__init__.py b/sphinx/domains/__init__.py index 51b886fdf..cfba9e913 100644 --- a/sphinx/domains/__init__.py +++ b/sphinx/domains/__init__.py @@ -155,10 +155,13 @@ class Domain(object): self._role_cache = {} self._directive_cache = {} self._role2type = {} + self._type2role = {} for name, obj in iteritems(self.object_types): for rolename in obj.roles: self._role2type.setdefault(rolename, []).append(name) + self._type2role[name] = obj.roles[0] if obj.roles else '' self.objtypes_for_role = self._role2type.get + self.role_for_objtype = self._type2role.get def role(self, name): """Return a role adapter function that always gives the registered @@ -220,6 +223,22 @@ class Domain(object): """ pass + def resolve_any_xref(self, env, fromdocname, builder, target, node, contnode): + """Resolve the pending_xref *node* with the given *target*. + + The reference comes from an "any" or similar role, which means that we + don't know the type. Otherwise, the arguments are the same as for + :meth:`resolve_xref`. + + The method must return a list (potentially empty) of tuples + ``('domain:role', newnode)``, where ``'domain:role'`` is the name of a + role that could have created the same reference, e.g. ``'py:func'``. + ``newnode`` is what :meth:`resolve_xref` would return. + + .. versionadded:: 1.3 + """ + raise NotImplementedError + def get_objects(self): """Return an iterable of "object descriptions", which are tuples with five items: diff --git a/sphinx/domains/c.py b/sphinx/domains/c.py index 4d12c141a..a9938c256 100644 --- a/sphinx/domains/c.py +++ b/sphinx/domains/c.py @@ -279,6 +279,17 @@ class CDomain(Domain): return make_refnode(builder, fromdocname, obj[0], 'c.' + target, contnode, target) + def resolve_any_xref(self, env, fromdocname, builder, target, + node, contnode): + # strip pointer asterisk + target = target.rstrip(' *') + if target not in self.data['objects']: + return [] + obj = self.data['objects'][target] + return [('c:' + self.role_for_objtype(obj[1]), + make_refnode(builder, fromdocname, obj[0], 'c.' + target, + contnode, target))] + def get_objects(self): for refname, (docname, type) in list(self.data['objects'].items()): yield (refname, refname, type, docname, 'c.' 
+ refname, 1) diff --git a/sphinx/domains/cpp.py b/sphinx/domains/cpp.py index 778a36bff..8992991d3 100644 --- a/sphinx/domains/cpp.py +++ b/sphinx/domains/cpp.py @@ -1838,18 +1838,18 @@ class CPPDomain(Domain): if data[0] == docname: del self.data['objects'][fullname] - def resolve_xref(self, env, fromdocname, builder, - typ, target, node, contnode): + def _resolve_xref_inner(self, env, fromdocname, builder, + target, node, contnode, warn=True): def _create_refnode(nameAst): name = text_type(nameAst) if name not in self.data['objects']: # try dropping the last template name = nameAst.get_name_no_last_template() if name not in self.data['objects']: - return None + return None, None docname, objectType, id = self.data['objects'][name] return make_refnode(builder, fromdocname, docname, id, contnode, - name) + name), objectType parser = DefinitionParser(target) try: @@ -1858,20 +1858,34 @@ class CPPDomain(Domain): if not parser.eof: raise DefinitionError('') except DefinitionError: - env.warn_node('unparseable C++ definition: %r' % target, node) - return None + if warn: + env.warn_node('unparseable C++ definition: %r' % target, node) + return None, None # try as is the name is fully qualified - refNode = _create_refnode(nameAst) - if refNode: - return refNode + res = _create_refnode(nameAst) + if res[0]: + return res # try qualifying it with the parent parent = node.get('cpp:parent', None) if parent and len(parent) > 0: return _create_refnode(nameAst.prefix_nested_name(parent[-1])) else: - return None + return None, None + + def resolve_xref(self, env, fromdocname, builder, + typ, target, node, contnode): + return self._resolve_xref_inner(env, fromdocname, builder, target, node, + contnode)[0] + + def resolve_any_xref(self, env, fromdocname, builder, target, + node, contnode): + node, objtype = self._resolve_xref_inner(env, fromdocname, builder, + target, node, contnode, warn=False) + if node: + return [('cpp:' + self.role_for_objtype(objtype), node)] + return [] def get_objects(self): for refname, (docname, type, theid) in iteritems(self.data['objects']): diff --git a/sphinx/domains/javascript.py b/sphinx/domains/javascript.py index 2718b8727..dc65b2a39 100644 --- a/sphinx/domains/javascript.py +++ b/sphinx/domains/javascript.py @@ -179,7 +179,7 @@ class JavaScriptDomain(Domain): 'attr': JSXRefRole(), } initial_data = { - 'objects': {}, # fullname -> docname, objtype + 'objects': {}, # fullname -> docname, objtype } def clear_doc(self, docname): @@ -214,6 +214,16 @@ class JavaScriptDomain(Domain): return make_refnode(builder, fromdocname, obj[0], name.replace('$', '_S_'), contnode, name) + def resolve_any_xref(self, env, fromdocname, builder, target, node, + contnode): + objectname = node.get('js:object') # not likely + name, obj = self.find_obj(env, objectname, target, None, 1) + if not obj: + return [] + return [('js:' + self.role_for_objtype(obj[1]), + make_refnode(builder, fromdocname, obj[0], + name.replace('$', '_S_'), contnode, name))] + def get_objects(self): for refname, (docname, type) in list(self.data['objects'].items()): yield refname, refname, type, docname, \ diff --git a/sphinx/domains/python.py b/sphinx/domains/python.py index a7a93cb1e..17609692d 100644 --- a/sphinx/domains/python.py +++ b/sphinx/domains/python.py @@ -643,7 +643,10 @@ class PythonDomain(Domain): newname = None if searchmode == 1: - objtypes = self.objtypes_for_role(type) + if type is None: + objtypes = list(self.object_types) + else: + objtypes = self.objtypes_for_role(type) if objtypes is not None: if 
modname and classname: fullname = modname + '.' + classname + '.' + name @@ -704,22 +707,44 @@ class PythonDomain(Domain): name, obj = matches[0] if obj[1] == 'module': - # get additional info for modules - docname, synopsis, platform, deprecated = self.data['modules'][name] - assert docname == obj[0] - title = name - if synopsis: - title += ': ' + synopsis - if deprecated: - title += _(' (deprecated)') - if platform: - title += ' (' + platform + ')' - return make_refnode(builder, fromdocname, docname, - 'module-' + name, contnode, title) + return self._make_module_refnode(builder, fromdocname, name, + contnode) else: return make_refnode(builder, fromdocname, obj[0], name, contnode, name) + def resolve_any_xref(self, env, fromdocname, builder, target, + node, contnode): + modname = node.get('py:module') # it is not likely we have these + clsname = node.get('py:class') + results = [] + + # always search in "refspecific" mode with the :any: role + matches = self.find_obj(env, modname, clsname, target, None, 1) + for name, obj in matches: + if obj[1] == 'module': + results.append(('py:mod', + self._make_module_refnode(builder, fromdocname, + name, contnode))) + else: + results.append(('py:' + self.role_for_objtype(obj[1]), + make_refnode(builder, fromdocname, obj[0], name, + contnode, name))) + return results + + def _make_module_refnode(self, builder, fromdocname, name, contnode): + # get additional info for modules + docname, synopsis, platform, deprecated = self.data['modules'][name] + title = name + if synopsis: + title += ': ' + synopsis + if deprecated: + title += _(' (deprecated)') + if platform: + title += ' (' + platform + ')' + return make_refnode(builder, fromdocname, docname, + 'module-' + name, contnode, title) + def get_objects(self): for modname, info in iteritems(self.data['modules']): yield (modname, modname, 'module', info[0], 'module-' + modname, 0) diff --git a/sphinx/domains/rst.py b/sphinx/domains/rst.py index e213211ab..6a4e390f1 100644 --- a/sphinx/domains/rst.py +++ b/sphinx/domains/rst.py @@ -134,6 +134,19 @@ class ReSTDomain(Domain): objtype + '-' + target, contnode, target + ' ' + objtype) + def resolve_any_xref(self, env, fromdocname, builder, target, + node, contnode): + objects = self.data['objects'] + results = [] + for objtype in self.object_types: + if (objtype, target) in self.data['objects']: + results.append(('rst:' + self.role_for_objtype(objtype), + make_refnode(builder, fromdocname, + objects[objtype, target], + objtype + '-' + target, + contnode, target + ' ' + objtype))) + return results + def get_objects(self): for (typ, name), docname in iteritems(self.data['objects']): yield name, name, typ, docname, typ + '-' + name, 1 diff --git a/sphinx/domains/std.py b/sphinx/domains/std.py index b5cc81b93..4910a52f8 100644 --- a/sphinx/domains/std.py +++ b/sphinx/domains/std.py @@ -628,6 +628,23 @@ class StandardDomain(Domain): return make_refnode(builder, fromdocname, docname, labelid, contnode) + def resolve_any_xref(self, env, fromdocname, builder, target, + node, contnode): + results = [] + for role in ('ref', 'option'): # do not try "keyword" + res = self.resolve_xref(env, fromdocname, builder, target, + role, node, contnode) + if res: + results.append(('std:ref', res)) + # all others + for objtype in self.object_types: + if (objtype, target) in self.data['objects']: + docname, labelid = self.data['objects'][objtype, target] + results.append(('std:' + self.role_for_objtype(objtype), + make_refnode(builder, fromdocname, docname, + labelid, contnode))) + 
return results + def get_objects(self): for (prog, option), info in iteritems(self.data['progoptions']): yield (option, option, 'option', info[0], info[1], 1) diff --git a/sphinx/environment.py b/sphinx/environment.py index fbc56b3da..6a3f1c68d 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -1352,6 +1352,8 @@ class BuildEnvironment: newnode = domain.resolve_xref(self, refdoc, builder, typ, target, node, contnode) # really hardwired reference types + elif typ == 'any': + newnode = self._resolve_any_reference(builder, node, contnode) elif typ == 'doc': newnode = self._resolve_doc_reference(builder, node, contnode) elif typ == 'citation': @@ -1435,6 +1437,49 @@ class BuildEnvironment: # transforms.CitationReference.apply. del node['ids'][:] + def _resolve_any_reference(self, builder, node, contnode): + """Resolve reference generated by the "any" role.""" + refdoc = node['refdoc'] + target = node['reftarget'] + results = [] + # first, try resolving as :doc: + doc_ref = self._resolve_doc_reference(builder, node, contnode) + if doc_ref: + results.append(('doc', doc_ref)) + # next, do the standard domain (makes this a priority) + results.extend(self.domains['std'].resolve_any_xref( + self, refdoc, builder, target, node, contnode)) + for domain in self.domains.values(): + if domain.name == 'std': + continue # we did this one already + try: + results.extend(domain.resolve_any_xref(self, refdoc, builder, + target, node, contnode)) + except NotImplementedError: + # the domain doesn't yet support the new interface + # we have to manually collect possible references (SLOW) + for role in domain.roles: + res = domain.resolve_xref(self, refdoc, builder, role, target, + node, contnode) + if res: + results.append(('%s:%s' % (domain.name, role), res)) + # now, see how many matches we got... + if not results: + return None + if len(results) > 1: + nice_results = ' or '.join(':%s:' % r[0] for r in results) + self.warn_node('more than one target found for \'any\' cross-' + 'reference %r: could be %s' % (target, nice_results), + node) + res_role, newnode = results[0] + # Override "any" class with the actual role type to get the styling + # approximately correct. + res_domain = res_role.split(':')[0] + if newnode and newnode[0].get('classes'): + newnode[0]['classes'].append(res_domain) + newnode[0]['classes'].append(res_role.replace(':', '-')) + return newnode + def process_only_nodes(self, doctree, builder, fromdocname=None): # A comment on the comment() nodes being inserted: replacing by [] would # result in a "Losing ids" exception if there is a target node before diff --git a/sphinx/roles.py b/sphinx/roles.py index b99b22795..b253097e3 100644 --- a/sphinx/roles.py +++ b/sphinx/roles.py @@ -316,6 +316,8 @@ specific_docroles = { 'download': XRefRole(nodeclass=addnodes.download_reference), # links to documents 'doc': XRefRole(warn_dangling=True), + # links to anything + 'any': XRefRole(warn_dangling=True), 'pep': indexmarkup_role, 'rfc': indexmarkup_role, From 109cb873de79504d9c9d6034a82a8bc47de5e08b Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 13:18:29 +0200 Subject: [PATCH 028/293] Use "any" role in changelog. --- CHANGES | 180 ++++++++++++++++++++++---------------------- doc/changes.rst | 2 + doc/markup/code.rst | 22 ++++-- 3 files changed, 107 insertions(+), 97 deletions(-) diff --git a/CHANGES b/CHANGES index 7146251c2..b959a251f 100644 --- a/CHANGES +++ b/CHANGES @@ -15,11 +15,11 @@ Incompatible changes * PR#269, #1476: replace ```` tag by ````. 
User customized stylesheets should be updated If the css contain some styles for ``tt>`` tag. Thanks to Takeshi Komiya. -* #1543: :confval:`templates_path` is automatically added to - :confval:`exclude_patterns` to avoid reading autosummary rst templates in the +* #1543: `templates_path` is automatically added to + `exclude_patterns` to avoid reading autosummary rst templates in the templates directory. -* Custom domains should implement the new :meth:`~Domain.resolve_any_xref` - method to make the :rst:role:`any` role work properly. +* Custom domains should implement the new `Domain.resolve_any_xref` + method to make the `any` role work properly. Features added -------------- @@ -28,27 +28,27 @@ Features added * Add support for docutils 0.12 * Added ``sphinx.ext.napoleon`` extension for NumPy and Google style docstring support. -* Added the :rst:role:`any` role that can be used to find a cross-reference of +* Added the `any` role that can be used to find a cross-reference of *any* type in *any* domain. Custom domains should implement the new - :meth:`~Domain.resolve_any_xref` method to make this work properly. + `Domain.resolve_any_xref` method to make this work properly. * Exception logs now contain the last 10 messages emitted by Sphinx. * Added support for extension versions (a string returned by ``setup()``, these can be shown in the traceback log files). Version requirements for extensions - can be specified in projects using the new :confval:`needs_extensions` config + can be specified in projects using the new `needs_extensions` config value. -* Changing the default role within a document with the :rst:dir:`default-role` +* Changing the default role within a document with the :dudir:`default-role` directive is now supported. * PR#214: Added stemming support for 14 languages, so that the built-in document search can now handle these. Thanks to Shibukawa Yoshiki. * PR#202: Allow "." and "~" prefixed references in ``:param:`` doc fields for Python. -* PR#184: Add :confval:`autodoc_mock_imports`, allowing to mock imports of +* PR#184: Add `autodoc_mock_imports`, allowing to mock imports of external modules that need not be present when autodocumenting. * #925: Allow list-typed config values to be provided on the command line, like ``-D key=val1,val2``. -* #668: Allow line numbering of ``code-block`` and ``literalinclude`` directives +* #668: Allow line numbering of `code-block` and `literalinclude` directives to start at an arbitrary line number, with a new ``lineno-start`` option. -* PR#172, PR#266: The :rst:dir:`code-block` and :rst:dir:`literalinclude` +* PR#172, PR#266: The `code-block` and `literalinclude` directives now can have a ``caption`` option that shows a filename before the code in the output. Thanks to Nasimul Haque, Takeshi Komiya. * Prompt for the document language in sphinx-quickstart. @@ -63,15 +63,15 @@ Features added for the ids defined on the node. Thanks to Olivier Heurtier. * PR#229: Allow registration of other translators. Thanks to Russell Sim. * Add app.set_translator() API to register or override a Docutils translator - class like :confval:`html_translator_class`. + class like `html_translator_class`. * PR#267, #1134: add 'diff' parameter to literalinclude. Thanks to Richard Wall and WAKAYAMA shirou. * PR#272: Added 'bizstyle' theme. Thanks to Shoji KUMAGAI. 
* Automatically compile ``*.mo`` files from ``*.po`` files when - :confval:`gettext_auto_build` is True (default) and ``*.po`` is newer than + `gettext_auto_build` is True (default) and ``*.po`` is newer than ``*.mo`` file. -* #623: :mod:`~sphinx.ext.viewcode` supports imported function/class aliases. -* PR#275: :mod:`~sphinx.ext.intersphinx` supports multiple target for the +* #623: `sphinx.ext.viewcode` supports imported function/class aliases. +* PR#275: `sphinx.ext.intersphinx` supports multiple target for the inventory. Thanks to Brigitta Sipocz. Bugs fixed @@ -81,7 +81,7 @@ Bugs fixed * #1563: :meth:`~sphinx.application.Sphinx.add_search_language` raises AssertionError for correct type of argument. Thanks to rikoman. * #1174: Fix smart quotes being applied inside roles like :rst:role:`program` or - :rst:role:`makevar`. + `makevar`. * #1335: Fix autosummary template overloading with exclamation prefix like ``{% extends "!autosummary/class.rst" %}`` cause infinite recursive function call. This was caused by PR#181. @@ -90,7 +90,7 @@ Bugs fixed This was caused by a change for #1138. * #1340: Can't search alphabetical words on the HTML quick search generated with language='ja'. -* #1319: Do not crash if the :confval:`html_logo` file does not exist. +* #1319: Do not crash if the `html_logo` file does not exist. * #603: Do not use the HTML-ized title for building the search index (that resulted in "literal" being found on every page with a literal in the title). @@ -107,7 +107,7 @@ Bugs fixed if they contain uppercase letters. * #923: Take the entire LaTeX document into account when caching pngmath-generated images. This rebuilds them correctly when - :confval:`pngmath_latex_preamble` changes. + `pngmath_latex_preamble` changes. * #901: Emit a warning when using docutils' new "math" markup without a Sphinx math extension active. * #845: In code blocks, when the selected lexer fails, display line numbers @@ -124,14 +124,14 @@ Bugs fixed * #1155: Fix autodocumenting C-defined methods as attributes in Python 3. * #1233: Allow finding both Python classes and exceptions with the "class" and "exc" roles in intersphinx. -* #1198: Allow "image" for the "figwidth" option of the :rst:dir:`figure` +* #1198: Allow "image" for the "figwidth" option of the :dudir:`figure` directive as documented by docutils. * #1152: Fix pycode parsing errors of Python 3 code by including two grammar versions for Python 2 and 3, and loading the appropriate version for the running Python version. * #1017: Be helpful and tell the user when the argument to :rst:dir:`option` does not match the required format. -* #1345: Fix two bugs with :confval:`nitpick_ignore`; now you don't have to +* #1345: Fix two bugs with `nitpick_ignore`; now you don't have to remove the store environment for changes to have effect. * #1072: In the JS search, fix issues searching for upper-cased words by lowercasing words before stemming. @@ -154,12 +154,12 @@ Bugs fixed * #1300: Fix references not working in translated documents in some instances. * #1283: Fix a bug in the detection of changed files that would try to access doctrees of deleted documents. -* #1330: Fix :confval:`exclude_patterns` behavior with subdirectories in the - :confval:`html_static_path`. +* #1330: Fix `exclude_patterns` behavior with subdirectories in the + `html_static_path`. * #1323: Fix emitting empty ``
      `` tags in the HTML writer, which is not valid HTML. * #1147: Don't emit a sidebar search box in the "singlehtml" builder. -* PR#211: When checking for existence of the :confval:`html_logo` file, check +* PR#211: When checking for existence of the `html_logo` file, check the full relative path and not the basename. * #1357: Option names documented by :rst:dir:`option` are now again allowed to not start with a dash or slash, and referencing them will work correctly. @@ -215,8 +215,8 @@ Bugs fixed qualified name. It should be rather easy to change this behaviour and potentially index by namespaces/classes as well. -* PR#258, #939: Add dedent option for :rst:dir:`code-block` and - :rst:dir:`literal-include`. Thanks to Zafar Siddiqui. +* PR#258, #939: Add dedent option for `code-block` and + `literalinclude`. Thanks to Zafar Siddiqui. * PR#268: Fix numbering section does not work at singlehtml mode. It still ad-hoc fix because there is a issue that section IDs are conflicted. Thanks to Takeshi Komiya. @@ -224,10 +224,10 @@ Bugs fixed Takeshi Komiya. * PR#274: Set its URL as a default title value if URL appears in toctree. Thanks to Takeshi Komiya. -* PR#276, #1381: :rst:role:`rfc` and :rst:role:`pep` roles support custom link +* PR#276, #1381: `rfc` and `pep` roles support custom link text. Thanks to Takeshi Komiya. * PR#277, #1513: highlights for function pointers in argument list of - :rst:dir:`c:function`. Thanks to Takeshi Komiya. + `c:function`. Thanks to Takeshi Komiya. * PR#278: Fix section entries were shown twice if toctree has been put under only directive. Thanks to Takeshi Komiya. * #1547: pgen2 tokenizer doesn't recognize ``...`` literal (Ellipsis for py3). @@ -294,7 +294,7 @@ Release 1.2.2 (released Mar 2, 2014) Bugs fixed ---------- -* PR#211: When checking for existence of the :confval:`html_logo` file, check +* PR#211: When checking for existence of the `html_logo` file, check the full relative path and not the basename. * PR#212: Fix traceback with autodoc and ``__init__`` methods without docstring. * PR#213: Fix a missing import in the setup command. @@ -334,7 +334,7 @@ Bugs fixed This was caused by a change for #1138. * #1340: Can't search alphabetical words on the HTML quick search generated with language='ja'. -* #1319: Do not crash if the :confval:`html_logo` file does not exist. +* #1319: Do not crash if the `html_logo` file does not exist. * #603: Do not use the HTML-ized title for building the search index (that resulted in "literal" being found on every page with a literal in the title). @@ -351,7 +351,7 @@ Bugs fixed if they contain uppercase letters. * #923: Take the entire LaTeX document into account when caching pngmath-generated images. This rebuilds them correctly when - :confval:`pngmath_latex_preamble` changes. + `pngmath_latex_preamble` changes. * #901: Emit a warning when using docutils' new "math" markup without a Sphinx math extension active. * #845: In code blocks, when the selected lexer fails, display line numbers @@ -368,14 +368,14 @@ Bugs fixed * #1155: Fix autodocumenting C-defined methods as attributes in Python 3. * #1233: Allow finding both Python classes and exceptions with the "class" and "exc" roles in intersphinx. -* #1198: Allow "image" for the "figwidth" option of the :rst:dir:`figure` +* #1198: Allow "image" for the "figwidth" option of the :dudir:`figure` directive as documented by docutils. 
* #1152: Fix pycode parsing errors of Python 3 code by including two grammar versions for Python 2 and 3, and loading the appropriate version for the running Python version. * #1017: Be helpful and tell the user when the argument to :rst:dir:`option` does not match the required format. -* #1345: Fix two bugs with :confval:`nitpick_ignore`; now you don't have to +* #1345: Fix two bugs with `nitpick_ignore`; now you don't have to remove the store environment for changes to have effect. * #1072: In the JS search, fix issues searching for upper-cased words by lowercasing words before stemming. @@ -398,8 +398,8 @@ Bugs fixed * #1300: Fix references not working in translated documents in some instances. * #1283: Fix a bug in the detection of changed files that would try to access doctrees of deleted documents. -* #1330: Fix :confval:`exclude_patterns` behavior with subdirectories in the - :confval:`html_static_path`. +* #1330: Fix `exclude_patterns` behavior with subdirectories in the + `html_static_path`. * #1323: Fix emitting empty ``
        `` tags in the HTML writer, which is not valid HTML. * #1147: Don't emit a sidebar search box in the "singlehtml" builder. @@ -498,7 +498,7 @@ Features added * Support docutils.conf 'writers' and 'html4css1 writer' section in the HTML writer. The latex, manpage and texinfo writers also support their respective 'writers' sections. -* The new :confval:`html_extra_path` config value allows to specify directories +* The new `html_extra_path` config value allows to specify directories with files that should be copied directly to the HTML output directory. * Autodoc directives for module data and attributes now support an ``annotation`` option, so that the default display of the data/attribute @@ -569,10 +569,10 @@ Incompatible changes * Removed ``sphinx.util.compat.directive_dwim()`` and ``sphinx.roles.xfileref_role()`` which were deprecated since version 1.0. -* PR#122: the files given in :confval:`latex_additional_files` now override TeX +* PR#122: the files given in `latex_additional_files` now override TeX files included by Sphinx, such as ``sphinx.sty``. -* PR#124: the node generated by :rst:dir:`versionadded`, - :rst:dir:`versionchanged` and :rst:dir:`deprecated` directives now includes +* PR#124: the node generated by `versionadded`, + `versionchanged` and `deprecated` directives now includes all added markup (such as "New in version X") as child nodes, and no additional text must be generated by writers. * PR#99: the :rst:dir:`seealso` directive now generates admonition nodes instead @@ -626,7 +626,7 @@ Features added asterisks ("*"). - The default value for the ``paragraphindent`` has been changed from 2 to 0 meaning that paragraphs are no longer indented by default. - - #1110: A new configuration value :confval:`texinfo_no_detailmenu` has been + - #1110: A new configuration value `texinfo_no_detailmenu` has been added for controlling whether a ``@detailmenu`` is added in the "Top" node's menu. - Detailed menus are no longer created except for the "Top" node. @@ -635,16 +635,16 @@ Features added * LaTeX builder: - - PR#115: Add ``'transition'`` item in :confval:`latex_elements` for + - PR#115: Add ``'transition'`` item in `latex_elements` for customizing how transitions are displayed. Thanks to Jeff Klukas. - PR#114: The LaTeX writer now includes the "cmap" package by default. The - ``'cmappkg'`` item in :confval:`latex_elements` can be used to control this. + ``'cmappkg'`` item in `latex_elements` can be used to control this. Thanks to Dmitry Shachnev. - - The ``'fontpkg'`` item in :confval:`latex_elements` now defaults to ``''`` - when the :confval:`language` uses the Cyrillic script. Suggested by Dmitry + - The ``'fontpkg'`` item in `latex_elements` now defaults to ``''`` + when the `language` uses the Cyrillic script. Suggested by Dmitry Shachnev. - - The :confval:`latex_documents`, :confval:`texinfo_documents`, and - :confval:`man_pages` configuration values will be set to default values based + - The `latex_documents`, `texinfo_documents`, and + `man_pages` configuration values will be set to default values based on the :confval:`master_doc` if not explicitly set in :file:`conf.py`. Previously, if these values were not set, no output would be generated by their respective builders. @@ -662,13 +662,13 @@ Features added - Added the Docutils-native XML and pseudo-XML builders. See :class:`XMLBuilder` and :class:`PseudoXMLBuilder`. - PR#45: The linkcheck builder now checks ``#anchor``\ s for existence. - - PR#123, #1106: Add :confval:`epub_use_index` configuration value. 
If - provided, it will be used instead of :confval:`html_use_index` for epub + - PR#123, #1106: Add `epub_use_index` configuration value. If + provided, it will be used instead of `html_use_index` for epub builder. - - PR#126: Add :confval:`epub_tocscope` configuration value. The setting + - PR#126: Add `epub_tocscope` configuration value. The setting controls the generation of the epub toc. The user can now also include hidden toc entries. - - PR#112: Add :confval:`epub_show_urls` configuration value. + - PR#112: Add `epub_show_urls` configuration value. * Extensions: @@ -736,7 +736,7 @@ Bugs fixed * #1127: Fix traceback when autodoc tries to tokenize a non-Python file. * #1126: Fix double-hyphen to en-dash conversion in wrong places such as command-line option names in LaTeX. -* #1123: Allow whitespaces in filenames given to :rst:dir:`literalinclude`. +* #1123: Allow whitespaces in filenames given to `literalinclude`. * #1120: Added improvements about i18n for themes "basic", "haiku" and "scrolls" that Sphinx built-in. Thanks to Leonardo J. Caballero G. * #1118: Updated Spanish translation. Thanks to Leonardo J. Caballero G. @@ -744,7 +744,7 @@ Bugs fixed * #1112: Avoid duplicate download files when referenced from documents in different ways (absolute/relative). * #1111: Fix failure to find uppercase words in search when - :confval:`html_search_language` is 'ja'. Thanks to Tomo Saito. + `html_search_language` is 'ja'. Thanks to Tomo Saito. * #1108: The text writer now correctly numbers enumerated lists with non-default start values (based on patch by Ewan Edwards). * #1102: Support multi-context "with" statements in autodoc. @@ -809,7 +809,7 @@ Release 1.1.3 (Mar 10, 2012) * #860: Do not crash when encountering invalid doctest examples, just emit a warning. -* #864: Fix crash with some settings of :confval:`modindex_common_prefix`. +* #864: Fix crash with some settings of `modindex_common_prefix`. * #862: Fix handling of ``-D`` and ``-A`` options on Python 3. @@ -873,7 +873,7 @@ Release 1.1 (Oct 9, 2011) Incompatible changes -------------------- -* The :rst:dir:`py:module` directive doesn't output its ``platform`` option +* The `py:module` directive doesn't output its ``platform`` option value anymore. (It was the only thing that the directive did output, and therefore quite inconsistent.) @@ -909,7 +909,7 @@ Features added :rst:dir:`toctree`\'s ``numbered`` option. - #586: Implemented improved :rst:dir:`glossary` markup which allows multiple terms per definition. - - #478: Added :rst:dir:`py:decorator` directive to describe decorators. + - #478: Added `py:decorator` directive to describe decorators. - C++ domain now supports array definitions. - C++ domain now supports doc fields (``:param x:`` inside directives). - Section headings in :rst:dir:`only` directives are now correctly @@ -920,7 +920,7 @@ Features added * HTML builder: - Added ``pyramid`` theme. - - #559: :confval:`html_add_permalinks` is now a string giving the + - #559: `html_add_permalinks` is now a string giving the text to display in permalinks. - #259: HTML table rows now have even/odd CSS classes to enable "Zebra styling". @@ -928,26 +928,26 @@ Features added * Other builders: - - #516: Added new value of the :confval:`latex_show_urls` option to + - #516: Added new value of the `latex_show_urls` option to show the URLs in footnotes. - - #209: Added :confval:`text_newlines` and :confval:`text_sectionchars` + - #209: Added `text_newlines` and `text_sectionchars` config values. 
- - Added :confval:`man_show_urls` config value. + - Added `man_show_urls` config value. - #472: linkcheck builder: Check links in parallel, use HTTP HEAD requests and allow configuring the timeout. New config values: - :confval:`linkcheck_timeout` and :confval:`linkcheck_workers`. - - #521: Added :confval:`linkcheck_ignore` config value. + `linkcheck_timeout` and `linkcheck_workers`. + - #521: Added `linkcheck_ignore` config value. - #28: Support row/colspans in tables in the LaTeX builder. * Configuration and extensibility: - - #537: Added :confval:`nitpick_ignore`. + - #537: Added `nitpick_ignore`. - #306: Added :event:`env-get-outdated` event. - :meth:`.Application.add_stylesheet` now accepts full URIs. * Autodoc: - - #564: Add :confval:`autodoc_docstring_signature`. When enabled (the + - #564: Add `autodoc_docstring_signature`. When enabled (the default), autodoc retrieves the signature from the first line of the docstring, if it is found there. - #176: Provide ``private-members`` option for autodoc directives. @@ -965,12 +965,12 @@ Features added - Added ``inline`` option to graphviz directives, and fixed the default (block-style) in LaTeX output. - #590: Added ``caption`` option to graphviz directives. - - #553: Added :rst:dir:`testcleanup` blocks in the doctest extension. - - #594: :confval:`trim_doctest_flags` now also removes ```` + - #553: Added `testcleanup` blocks in the doctest extension. + - #594: `trim_doctest_flags` now also removes ```` indicators. - #367: Added automatic exclusion of hidden members in inheritance diagrams, and an option to selectively enable it. - - Added :confval:`pngmath_add_tooltips`. + - Added `pngmath_add_tooltips`. - The math extension displaymath directives now support ``name`` in addition to ``label`` for giving the equation label, for compatibility with Docutils. @@ -1043,7 +1043,7 @@ Release 1.0.8 (Sep 23, 2011) * #669: Respect the ``noindex`` flag option in py:module directives. * #675: Fix IndexErrors when including nonexisting lines with - :rst:dir:`literalinclude`. + `literalinclude`. * #676: Respect custom function/method parameter separator strings. @@ -1126,7 +1126,7 @@ Release 1.0.6 (Jan 04, 2011) * #570: Try decoding ``-D`` and ``-A`` command-line arguments with the locale's preferred encoding. -* #528: Observe :confval:`locale_dirs` when looking for the JS +* #528: Observe `locale_dirs` when looking for the JS translations file. * #574: Add special code for better support of Japanese documents @@ -1299,51 +1299,51 @@ Features added - Added a "nitpicky" mode that emits warnings for all missing references. It is activated by the :option:`-n` command-line switch - or the :confval:`nitpicky` config value. + or the `nitpicky` config value. - Added ``latexpdf`` target in quickstart Makefile. * Markup: - - The :rst:role:`menuselection` and :rst:role:`guilabel` roles now + - The `menuselection` and `guilabel` roles now support ampersand accelerators. - New more compact doc field syntax is now recognized: ``:param type name: description``. - - Added ``tab-width`` option to :rst:dir:`literalinclude` directive. + - Added ``tab-width`` option to `literalinclude` directive. - Added ``titlesonly`` option to :rst:dir:`toctree` directive. - Added the ``prepend`` and ``append`` options to the - :rst:dir:`literalinclude` directive. + `literalinclude` directive. - #284: All docinfo metadata is now put into the document metadata, not just the author. - - The :rst:role:`ref` role can now also reference tables by caption. 
- - The :rst:dir:`include` directive now supports absolute paths, which + - The `ref` role can now also reference tables by caption. + - The :dudir:`include` directive now supports absolute paths, which are interpreted as relative to the source directory. - In the Python domain, references like ``:func:`.name``` now look for matching names with any prefix if no direct match is found. * Configuration: - - Added :confval:`rst_prolog` config value. - - Added :confval:`html_secnumber_suffix` config value to control + - Added `rst_prolog` config value. + - Added `html_secnumber_suffix` config value to control section numbering format. - - Added :confval:`html_compact_lists` config value to control + - Added `html_compact_lists` config value to control docutils' compact lists feature. - - The :confval:`html_sidebars` config value can now contain patterns + - The `html_sidebars` config value can now contain patterns as keys, and the values can be lists that explicitly select which sidebar templates should be rendered. That means that the builtin sidebar contents can be included only selectively. - - :confval:`html_static_path` can now contain single file entries. - - The new universal config value :confval:`exclude_patterns` makes the - old :confval:`unused_docs`, :confval:`exclude_trees` and - :confval:`exclude_dirnames` obsolete. - - Added :confval:`html_output_encoding` config value. - - Added the :confval:`latex_docclass` config value and made the + - `html_static_path` can now contain single file entries. + - The new universal config value `exclude_patterns` makes the + old ``unused_docs``, ``exclude_trees`` and + ``exclude_dirnames`` obsolete. + - Added `html_output_encoding` config value. + - Added the `latex_docclass` config value and made the "twoside" documentclass option overridable by "oneside". - - Added the :confval:`trim_doctest_flags` config value, which is true + - Added the `trim_doctest_flags` config value, which is true by default. - - Added :confval:`html_show_copyright` config value. - - Added :confval:`latex_show_pagerefs` and :confval:`latex_show_urls` + - Added `html_show_copyright` config value. + - Added `latex_show_pagerefs` and `latex_show_urls` config values. - - The behavior of :confval:`html_file_suffix` changed slightly: the + - The behavior of `html_file_suffix` changed slightly: the empty string now means "no suffix" instead of "default suffix", use ``None`` for "default suffix". @@ -1385,7 +1385,7 @@ Features added * Extension API: - Added :event:`html-collect-pages`. - - Added :confval:`needs_sphinx` config value and + - Added `needs_sphinx` config value and :meth:`~sphinx.application.Sphinx.require_sphinx` application API method. - #200: Added :meth:`~sphinx.application.Sphinx.add_stylesheet` @@ -1397,7 +1397,7 @@ Features added - Added the :mod:`~sphinx.ext.extlinks` extension. - Added support for source ordering of members in autodoc, with ``autodoc_member_order = 'bysource'``. - - Added :confval:`autodoc_default_flags` config value, which can be + - Added `autodoc_default_flags` config value, which can be used to select default flags for all autodoc directives. - Added a way for intersphinx to refer to named labels in other projects, and to specify the project you want to link to. @@ -1407,7 +1407,7 @@ Features added extension, thanks to Pauli Virtanen. - #309: The :mod:`~sphinx.ext.graphviz` extension can now output SVG instead of PNG images, controlled by the - :confval:`graphviz_output_format` config value. + `graphviz_output_format` config value. 
- Added ``alt`` option to :rst:dir:`graphviz` extension directives. - Added ``exclude`` argument to :func:`.autodoc.between`. diff --git a/doc/changes.rst b/doc/changes.rst index d5927a725..e42636872 100644 --- a/doc/changes.rst +++ b/doc/changes.rst @@ -1,5 +1,7 @@ :tocdepth: 2 +.. default-role:: any + .. _changes: Changes in Sphinx diff --git a/doc/markup/code.rst b/doc/markup/code.rst index f69bb161b..b948dc386 100644 --- a/doc/markup/code.rst +++ b/doc/markup/code.rst @@ -36,21 +36,29 @@ installed) and handled in a smart way: highlighted as Python). * The highlighting language can be changed using the ``highlight`` directive, - used as follows:: + used as follows: - .. highlight:: c + .. rst:directive:: .. highlight:: language - This language is used until the next ``highlight`` directive is encountered. + Example:: + + .. highlight:: c + + This language is used until the next ``highlight`` directive is encountered. * For documents that have to show snippets in different languages, there's also a :rst:dir:`code-block` directive that is given the highlighting language - directly:: + directly: - .. code-block:: ruby + .. rst:directive:: .. code-block:: language - Some Ruby code. + Use it like this:: - The directive's alias name :rst:dir:`sourcecode` works as well. + .. code-block:: ruby + + Some Ruby code. + + The directive's alias name :rst:dir:`sourcecode` works as well. * The valid values for the highlighting language are: From e1169d0937f705c333b6f70d17f73be0c8bbcb6d Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 13:25:16 +0200 Subject: [PATCH 029/293] Fix duplication of bugfixes between 1.2.x and 1.3 in changelog. --- CHANGES | 100 -------------------------------------------------------- 1 file changed, 100 deletions(-) diff --git a/CHANGES b/CHANGES index b959a251f..29efa94e5 100644 --- a/CHANGES +++ b/CHANGES @@ -82,104 +82,6 @@ Bugs fixed AssertionError for correct type of argument. Thanks to rikoman. * #1174: Fix smart quotes being applied inside roles like :rst:role:`program` or `makevar`. -* #1335: Fix autosummary template overloading with exclamation prefix like - ``{% extends "!autosummary/class.rst" %}`` cause infinite recursive function - call. This was caused by PR#181. -* #1337: Fix autodoc with ``autoclass_content="both"`` uses useless - ``object.__init__`` docstring when class does not have ``__init__``. - This was caused by a change for #1138. -* #1340: Can't search alphabetical words on the HTML quick search generated - with language='ja'. -* #1319: Do not crash if the `html_logo` file does not exist. -* #603: Do not use the HTML-ized title for building the search index (that - resulted in "literal" being found on every page with a literal in the - title). -* #751: Allow production lists longer than a page in LaTeX by using longtable. -* #764: Always look for stopwords lowercased in JS search. -* #814: autodoc: Guard against strange type objects that don't have - ``__bases__``. -* #932: autodoc: Do not crash if ``__doc__`` is not a string. -* #933: Do not crash if an :rst:role:`option` value is malformed (contains - spaces but no option name). -* #908: On Python 3, handle error messages from LaTeX correctly in the pngmath - extension. -* #943: In autosummary, recognize "first sentences" to pull from the docstring - if they contain uppercase letters. -* #923: Take the entire LaTeX document into account when caching - pngmath-generated images. This rebuilds them correctly when - `pngmath_latex_preamble` changes. 
-* #901: Emit a warning when using docutils' new "math" markup without a Sphinx - math extension active. -* #845: In code blocks, when the selected lexer fails, display line numbers - nevertheless if configured. -* #929: Support parsed-literal blocks in LaTeX output correctly. -* #949: Update the tabulary.sty packed with Sphinx. -* #1050: Add anonymous labels into ``objects.inv`` to be referenced via - :mod:`~sphinx.ext.intersphinx`. -* #1095: Fix print-media stylesheet being included always in the "scrolls" - theme. -* #1085: Fix current classname not getting set if class description has - ``:noindex:`` set. -* #1181: Report option errors in autodoc directives more gracefully. -* #1155: Fix autodocumenting C-defined methods as attributes in Python 3. -* #1233: Allow finding both Python classes and exceptions with the "class" and - "exc" roles in intersphinx. -* #1198: Allow "image" for the "figwidth" option of the :dudir:`figure` - directive as documented by docutils. -* #1152: Fix pycode parsing errors of Python 3 code by including two grammar - versions for Python 2 and 3, and loading the appropriate version for the - running Python version. -* #1017: Be helpful and tell the user when the argument to :rst:dir:`option` - does not match the required format. -* #1345: Fix two bugs with `nitpick_ignore`; now you don't have to - remove the store environment for changes to have effect. -* #1072: In the JS search, fix issues searching for upper-cased words by - lowercasing words before stemming. -* #1299: Make behavior of the :rst:dir:`math` directive more consistent and - avoid producing empty environments in LaTeX output. -* #1308: Strip HTML tags from the content of "raw" nodes before feeding it - to the search indexer. -* #1249: Fix duplicate LaTeX page numbering for manual documents. -* #1292: In the linkchecker, retry HEAD requests when denied by HTTP 405. - Also make the redirect code apparent and tweak the output a bit to be - more obvious. -* #1285: Avoid name clashes between C domain objects and section titles. -* #848: Always take the newest code in incremental rebuilds with the - :mod:`sphinx.ext.viewcode` extension. -* #979, #1266: Fix exclude handling in ``sphinx-apidoc``. -* #1302: Fix regression in :mod:`sphinx.ext.inheritance_diagram` when - documenting classes that can't be pickled. -* #1316: Remove hard-coded ``font-face`` resources from epub theme. -* #1329: Fix traceback with empty translation msgstr in .po files. -* #1300: Fix references not working in translated documents in some instances. -* #1283: Fix a bug in the detection of changed files that would try to access - doctrees of deleted documents. -* #1330: Fix `exclude_patterns` behavior with subdirectories in the - `html_static_path`. -* #1323: Fix emitting empty ``
          `` tags in the HTML writer, which is not - valid HTML. -* #1147: Don't emit a sidebar search box in the "singlehtml" builder. -* PR#211: When checking for existence of the `html_logo` file, check - the full relative path and not the basename. -* #1357: Option names documented by :rst:dir:`option` are now again allowed to - not start with a dash or slash, and referencing them will work correctly. -* #1358: Fix handling of image paths outside of the source directory when using - the "wildcard" style reference. -* #1374: Fix for autosummary generating overly-long summaries if first line - doesn't end with a period. -* #1391: Actually prevent using "pngmath" and "mathjax" extensions at the same - time in sphinx-quickstart. -* #1386: Fix bug preventing more than one theme being added by the entry point - mechanism. -* #1370: Ignore "toctree" nodes in text writer, instead of raising. -* #1364: Fix 'make gettext' fails when the '.. todolist::' directive is present. -* #1367: Fix a change of PR#96 that break sphinx.util.docfields.Field.make_field - interface/behavior for *item* argument usage. -* #1363: Fix i18n: missing python domain's cross-references with currentmodule - directive or currentclass directive. -* #1419: Generated i18n sphinx.js files are missing message catalog entries - from '.js_t' and '.html'. The issue was introduced in Sphinx 1.1. -* #636: Keep straight single quotes in literal blocks in the LaTeX build. * PR#235: comment db schema of websupport lacked a length of the node_id field. Thanks to solos. * #1466,PR#241: Fix failure of the cpp domain parser to parse C+11 @@ -236,8 +138,6 @@ Documentation ------------- * Add clarification about the syntax of tags. (:file:`doc/markup/misc.rst`) -* #1325: Added a "Intersphinx" tutorial section. (:file:`doc/tutorial.rst`) -* Extended the :ref:`documentation about building extensions `. Release 1.2.3 (released Sep 1, 2014) From a973daea418d361627040474c1924eb58d12280b Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 13:31:55 +0200 Subject: [PATCH 030/293] make :any: role work with intersphinx! --- doc/conf.py | 2 +- doc/markup/inline.rst | 4 ++++ sphinx/ext/intersphinx.py | 24 +++++++++++++++--------- 3 files changed, 20 insertions(+), 10 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index 3ae948217..4a6f8f580 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -83,7 +83,7 @@ texinfo_documents = [ # We're not using intersphinx right now, but if we did, this would be part of # the mapping: -intersphinx_mapping = {'python': ('http://docs.python.org/dev', None)} +intersphinx_mapping = {'python': ('http://docs.python.org/2/', None)} # Sphinx document translation with sphinx gettext feature uses these settings: locale_dirs = ['locale/'] diff --git a/doc/markup/inline.rst b/doc/markup/inline.rst index 7d83e3178..b5bb8d0c5 100644 --- a/doc/markup/inline.rst +++ b/doc/markup/inline.rst @@ -86,6 +86,10 @@ Cross-referencing anything Python module (usually ``:py:mod:`signal``` or ``:mod:`signal```) and a section (usually ``:ref:`about-signals```). + The :rst:role:`any` role also works together with the + :mod:`~sphinx.ext.intersphinx` extension: when no local cross-reference is + found, all object types of intersphinx inventories are also searched. 
+ Cross-referencing objects ------------------------- diff --git a/sphinx/ext/intersphinx.py b/sphinx/ext/intersphinx.py index 43507a383..b429ab9db 100644 --- a/sphinx/ext/intersphinx.py +++ b/sphinx/ext/intersphinx.py @@ -222,15 +222,21 @@ def load_mappings(app): def missing_reference(app, env, node, contnode): """Attempt to resolve a missing reference via intersphinx references.""" - domain = node.get('refdomain') - if not domain: - # only objects in domains are in the inventory - return target = node['reftarget'] - objtypes = env.domains[domain].objtypes_for_role(node['reftype']) - if not objtypes: - return - objtypes = ['%s:%s' % (domain, objtype) for objtype in objtypes] + if node['reftype'] == 'any': + # we search anything! + objtypes = ['%s:%s' % (domain.name, objtype) + for domain in env.domains.values() + for objtype in domain.object_types] + else: + domain = node.get('refdomain') + if not domain: + # only objects in domains are in the inventory + return + objtypes = env.domains[domain].objtypes_for_role(node['reftype']) + if not objtypes: + return + objtypes = ['%s:%s' % (domain, objtype) for objtype in objtypes] to_try = [(env.intersphinx_inventory, target)] in_set = None if ':' in target: @@ -248,7 +254,7 @@ def missing_reference(app, env, node, contnode): # get correct path in case of subdirectories uri = path.join(relative_path(node['refdoc'], env.srcdir), uri) newnode = nodes.reference('', '', internal=False, refuri=uri, - reftitle=_('(in %s v%s)') % (proj, version)) + reftitle=_('(in %s v%s)') % (proj, version)) if node.get('refexplicit'): # use whatever title was given newnode.append(contnode) From 6c3789a35303aba74fec1a96406a6f31899f0ac9 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 14:17:36 +0200 Subject: [PATCH 031/293] Add tests for "any" role and default-role setting. Fix some minor bugs with "any". 
--- sphinx/domains/std.py | 38 ++++++++++++++++++++++---------------- sphinx/environment.py | 6 +++--- tests/root/markup.txt | 19 +++++++++++++++++++ tests/test_build_html.py | 3 +++ 4 files changed, 47 insertions(+), 19 deletions(-) diff --git a/sphinx/domains/std.py b/sphinx/domains/std.py index 4910a52f8..02cfecb52 100644 --- a/sphinx/domains/std.py +++ b/sphinx/domains/std.py @@ -204,29 +204,29 @@ class Program(Directive): return [] +def _split_option(text, refnode, env): + try: + program, target = re.split(' (?=-|--|/)', text, 1) + except ValueError: + env.warn_node('Malformed :option: %r, does not contain option ' + 'marker - or -- or /' % text, refnode) + return None, text + else: + program = ws_re.sub('-', program) + return program, target + class OptionXRefRole(XRefRole): innernodeclass = addnodes.literal_emphasis - def _split(self, text, refnode, env): - try: - program, target = re.split(' (?=-|--|/)', text, 1) - except ValueError: - env.warn_node('Malformed :option: %r, does not contain option ' - 'marker - or -- or /' % text, refnode) - return None, text - else: - program = ws_re.sub('-', program) - return program, target - def process_link(self, env, refnode, has_explicit_title, title, target): program = env.temp_data.get('std:program') if not has_explicit_title: if ' ' in title and not (title.startswith('/') or title.startswith('-')): - program, target = self._split(title, refnode, env) + program, target = _split_option(title, refnode, env) target = target.strip() elif ' ' in target: - program, target = self._split(target, refnode, env) + program, target = _split_option(target, refnode, env) refnode['refprogram'] = program return title, target @@ -608,7 +608,13 @@ class StandardDomain(Domain): return make_refnode(builder, fromdocname, docname, labelid, contnode) elif typ == 'option': - progname = node.get('refprogram', '') + if 'refprogram' in node: + progname = node['refprogram'] + elif ' -' in target or ' /' in target: + # maybe an "any" directive, split it ourselves + progname, target = _split_option(target, node, env) + else: + return None docname, labelid = self.data['progoptions'].get((progname, target), ('', '')) if not docname: @@ -632,8 +638,8 @@ class StandardDomain(Domain): node, contnode): results = [] for role in ('ref', 'option'): # do not try "keyword" - res = self.resolve_xref(env, fromdocname, builder, target, - role, node, contnode) + res = self.resolve_xref(env, fromdocname, builder, role, target, + node, contnode) if res: results.append(('std:ref', res)) # all others diff --git a/sphinx/environment.py b/sphinx/environment.py index 6a3f1c68d..b8090c59d 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -1357,7 +1357,7 @@ class BuildEnvironment: elif typ == 'doc': newnode = self._resolve_doc_reference(builder, node, contnode) elif typ == 'citation': - newnode = self._resolve_citation(builder, node, contnode) + newnode = self._resolve_citation(builder, refdoc, node, contnode) # no new node found? 
try the missing-reference event if newnode is None: newnode = builder.app.emit_firstresult( @@ -1418,11 +1418,11 @@ class BuildEnvironment: newnode.append(innernode) return newnode - def _resolve_citation(self, builder, node, contnode): + def _resolve_citation(self, builder, fromdocname, node, contnode): docname, labelid = self.citations.get(node['reftarget'], ('', '')) if docname: try: - newnode = make_refnode(builder, node['refdoc'], + newnode = make_refnode(builder, fromdocname, docname, labelid, contnode) return newnode except NoUri: diff --git a/tests/root/markup.txt b/tests/root/markup.txt index f6f955e24..8a55e6901 100644 --- a/tests/root/markup.txt +++ b/tests/root/markup.txt @@ -356,6 +356,25 @@ Only directive Always present, because set through conf.py/command line. +Any role +-------- + +.. default-role:: any + +Test referencing to `headings ` and `objects `. +Also `modules ` and `classes

          ' + '--with-option
          $', + r'\\code{-{-}with-option}$')
          # verify smarty-pants quotes
          yield verify, '"John"', '
          “John”
          ', "``John''" From b2c9d4b10736978ca60d3ee3d1258bbeb2651d77 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 20 Sep 2014 20:54:54 +0200 Subject: [PATCH 045/293] Be quiet when building docs for tox. --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 2c3ddfdf3..1d26ac35a 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ setenv = BUILD_TEST_PATH = {envdir}/tests commands= {envpython} tests/run.py {posargs} - sphinx-build -W -b html -d {envtmpdir}/doctrees doc {envtmpdir}/html + sphinx-build -q -W -b html -d {envtmpdir}/doctrees doc {envtmpdir}/html [testenv:py26] deps= From 561181be7ac0d6ad6a330b323089f3e13d4656c1 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 20 Sep 2014 21:06:44 +0200 Subject: [PATCH 046/293] show drone.io status on README --- README.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/README.rst b/README.rst index 9b22008b9..452f59db2 100644 --- a/README.rst +++ b/README.rst @@ -35,6 +35,11 @@ If you want to use a different interpreter, e.g. ``python3``, use:: PYTHON=python3 make test +Continuous testing runs on drone.io: + +.. image:: https://drone.io/bitbucket.org/birkenfeld/sphinx/status.png + :target: https://drone.io/bitbucket.org/birkenfeld/sphinx/ + Contributing ============ From 97d2edf3804910eb80a4e172927690e768b80e65 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sat, 20 Sep 2014 21:09:33 +0200 Subject: [PATCH 047/293] Very short introduction in README --- README.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 452f59db2..ae92a2ce3 100644 --- a/README.rst +++ b/README.rst @@ -2,6 +2,9 @@ README for Sphinx ================= +This is the Sphinx documentation generator, see http://sphinx-doc.org/. + + Installing ========== @@ -17,7 +20,7 @@ Reading the docs After installing:: cd doc - sphinx-build . _build/html + make html Then, direct your browser to ``_build/html/index.html``. From db1cf80a69778d882f5c39b8966dc9fd5af256ac Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 16:37:37 +0200 Subject: [PATCH 048/293] Make pygments unconditional, it is required by setup.py anyway. 
--- sphinx/highlighting.py | 61 +++++++++++++----------------------------- 1 file changed, 19 insertions(+), 42 deletions(-) diff --git a/sphinx/highlighting.py b/sphinx/highlighting.py index 599a76a90..c2d2e89a6 100644 --- a/sphinx/highlighting.py +++ b/sphinx/highlighting.py @@ -24,46 +24,32 @@ from sphinx.util.pycompat import htmlescape from sphinx.util.texescape import tex_hl_escape_map_new from sphinx.ext import doctest -try: - import pygments - from pygments import highlight - from pygments.lexers import PythonLexer, PythonConsoleLexer, CLexer, \ - TextLexer, RstLexer - from pygments.lexers import get_lexer_by_name, guess_lexer - from pygments.formatters import HtmlFormatter, LatexFormatter - from pygments.filters import ErrorToken - from pygments.styles import get_style_by_name - from pygments.util import ClassNotFound - from sphinx.pygments_styles import SphinxStyle, NoneStyle -except ImportError: - pygments = None - lexers = None - HtmlFormatter = LatexFormatter = None -else: +from pygments import highlight +from pygments.lexers import PythonLexer, PythonConsoleLexer, CLexer, \ + TextLexer, RstLexer +from pygments.lexers import get_lexer_by_name, guess_lexer +from pygments.formatters import HtmlFormatter, LatexFormatter +from pygments.filters import ErrorToken +from pygments.styles import get_style_by_name +from pygments.util import ClassNotFound +from sphinx.pygments_styles import SphinxStyle, NoneStyle - lexers = dict( - none = TextLexer(), - python = PythonLexer(), - pycon = PythonConsoleLexer(), - pycon3 = PythonConsoleLexer(python3=True), - rest = RstLexer(), - c = CLexer(), - ) - for _lexer in lexers.values(): - _lexer.add_filter('raiseonerror') +lexers = dict( + none = TextLexer(), + python = PythonLexer(), + pycon = PythonConsoleLexer(), + pycon3 = PythonConsoleLexer(python3=True), + rest = RstLexer(), + c = CLexer(), +) +for _lexer in lexers.values(): + _lexer.add_filter('raiseonerror') escape_hl_chars = {ord(u'\\'): u'\\PYGZbs{}', ord(u'{'): u'\\PYGZob{}', ord(u'}'): u'\\PYGZcb{}'} -# used if Pygments is not available -_LATEX_STYLES = r''' -\newcommand\PYGZbs{\char`\\} -\newcommand\PYGZob{\char`\{} -\newcommand\PYGZcb{\char`\}} -''' - # used if Pygments is available # use textcomp quote to get a true single quote _LATEX_ADD_STYLES = r''' @@ -80,8 +66,6 @@ class PygmentsBridge(object): def __init__(self, dest='html', stylename='sphinx', trim_doctest_flags=False): self.dest = dest - if not pygments: - return if stylename is None or stylename == 'sphinx': style = SphinxStyle elif stylename == 'none': @@ -153,8 +137,6 @@ class PygmentsBridge(object): def highlight_block(self, source, lang, warn=None, force=False, **kwargs): if not isinstance(source, text_type): source = source.decode() - if not pygments: - return self.unhighlighted(source) # find out which lexer to use if lang in ('py', 'python'): @@ -213,11 +195,6 @@ class PygmentsBridge(object): return hlsource.translate(tex_hl_escape_map_new) def get_stylesheet(self): - if not pygments: - if self.dest == 'latex': - return _LATEX_STYLES - # no HTML styles needed - return '' formatter = self.get_formatter() if self.dest == 'html': return formatter.get_style_defs('.highlight') From 0511ced21ad95cef81223fae1291b7f02bb08e30 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 16:38:19 +0200 Subject: [PATCH 049/293] Override the current-directory local "docutils.conf" location with the Sphinx srcdir. 
--- sphinx/environment.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/sphinx/environment.py b/sphinx/environment.py index a2399422d..635845b80 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -33,6 +33,7 @@ from docutils.parsers.rst import roles, directives from docutils.parsers.rst.languages import en as english from docutils.parsers.rst.directives.html import MetaBody from docutils.writers import UnfilteredWriter +from docutils.frontend import OptionParser from sphinx import addnodes from sphinx.util import url_re, get_matching_docs, docname_join, split_into, \ @@ -582,6 +583,12 @@ class BuildEnvironment: self.patch_lookup_functions() + docutilsconf = path.join(self.srcdir, 'docutils.conf') + # read docutils.conf from source dir, not from current dir + OptionParser.standard_config_files[1] = docutilsconf + if path.isfile(docutilsconf): + self.note_dependency(docutilsconf) + if self.config.default_role: role_fn, messages = roles.role(self.config.default_role, english, 0, dummy_reporter) From c5dfd5c7328fe642d0ca2bb51be58253326af17f Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 16:52:41 +0200 Subject: [PATCH 050/293] Fix a few missing Unicode/bytes filename problems. --- sphinx/builders/changes.py | 3 +++ sphinx/directives/code.py | 3 +-- sphinx/environment.py | 6 ++++-- sphinx/util/__init__.py | 11 +++++++---- sphinx/util/osutil.py | 6 ++++++ 5 files changed, 21 insertions(+), 8 deletions(-) diff --git a/sphinx/builders/changes.py b/sphinx/builders/changes.py index aa947c96b..069d0ce6a 100644 --- a/sphinx/builders/changes.py +++ b/sphinx/builders/changes.py @@ -130,6 +130,9 @@ class ChangesBuilder(Builder): self.env.config.source_encoding) try: lines = f.readlines() + except UnicodeDecodeError: + self.warn('could not read %r for changelog creation' % docname) + continue finally: f.close() targetfn = path.join(self.outdir, 'rst', os_path(docname)) + '.html' diff --git a/sphinx/directives/code.py b/sphinx/directives/code.py index 6ea525b0f..543383dac 100644 --- a/sphinx/directives/code.py +++ b/sphinx/directives/code.py @@ -47,7 +47,6 @@ class Highlight(Directive): linenothreshold=linenothreshold)] - def dedent_lines(lines, dedent): if not dedent: return lines @@ -93,7 +92,7 @@ class CodeBlock(Directive): return [document.reporter.warning(str(err), line=self.lineno)] else: hl_lines = None - + if 'dedent' in self.options: lines = code.split('\n') lines = dedent_lines(lines, self.options['dedent']) diff --git a/sphinx/environment.py b/sphinx/environment.py index 635845b80..648e22565 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -39,7 +39,7 @@ from sphinx import addnodes from sphinx.util import url_re, get_matching_docs, docname_join, split_into, \ FilenameUniqDict from sphinx.util.nodes import clean_astext, make_refnode, WarningStream -from sphinx.util.osutil import SEP, find_catalog_files +from sphinx.util.osutil import SEP, find_catalog_files, getcwd, fs_encoding from sphinx.util.matching import compile_matchers from sphinx.util.websupport import is_commentable from sphinx.errors import SphinxError, ExtensionError @@ -774,7 +774,7 @@ class BuildEnvironment: def process_dependencies(self, docname, doctree): """Process docutils-generated dependency info.""" - cwd = os.getcwd() + cwd = getcwd() frompath = path.join(path.normpath(self.srcdir), 'dummy') deps = doctree.settings.record_dependencies if not deps: @@ -782,6 +782,8 @@ class BuildEnvironment: for dep in deps.list: # the dependency path is relative to the working 
dir, so get # one relative to the srcdir + if isinstance(dep, bytes): + dep = dep.decode(fs_encoding) relpath = relative_path(frompath, path.normpath(path.join(cwd, dep))) self.dependencies.setdefault(docname, set()).add(relpath) diff --git a/sphinx/util/__init__.py b/sphinx/util/__init__.py index 3a4334e7d..56cd38889 100644 --- a/sphinx/util/__init__.py +++ b/sphinx/util/__init__.py @@ -31,13 +31,14 @@ import jinja2 import sphinx from sphinx.errors import PycodeError from sphinx.util.console import strip_colors +from sphinx.util.osutil import fs_encoding # import other utilities; partly for backwards compatibility, so don't # prune unused ones indiscriminately from sphinx.util.osutil import SEP, os_path, relative_uri, ensuredir, walk, \ - mtimes_of_files, movefile, copyfile, copytimes, make_filename, ustrftime + mtimes_of_files, movefile, copyfile, copytimes, make_filename, ustrftime from sphinx.util.nodes import nested_parse_with_titles, split_explicit_title, \ - explicit_title_re, caption_ref_re + explicit_title_re, caption_ref_re from sphinx.util.matching import patfilter # Generally useful regular expressions. @@ -200,10 +201,12 @@ def save_traceback(app): last_msgs)).encode('utf-8')) if app is not None: for extname, extmod in iteritems(app._extensions): + modfile = getattr(extmod, '__file__', 'unknown') + if isinstance(modfile, bytes): + modfile = modfile.decode(fs_encoding, 'replace') os.write(fd, ('# %s (%s) from %s\n' % ( extname, app._extension_versions[extname], - getattr(extmod, '__file__', 'unknown')) - ).encode('utf-8')) + modfile)).encode('utf-8')) os.write(fd, exc.encode('utf-8')) os.close(fd) return path diff --git a/sphinx/util/osutil.py b/sphinx/util/osutil.py index 9b5f58b7e..58ee31b75 100644 --- a/sphinx/util/osutil.py +++ b/sphinx/util/osutil.py @@ -194,3 +194,9 @@ def abspath(pathdir): if isinstance(pathdir, bytes): pathdir = pathdir.decode(fs_encoding) return pathdir + + +def getcwd(): + if hasattr(os, 'getcwdu'): + return os.getcwdu() + return os.getcwd() From d47a7587f9813f2366e2077be051116291bf930e Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 17:17:02 +0200 Subject: [PATCH 051/293] Complete test suite overhaul. 
* rename a few test modules to make the names more consistent * do not copy/use Sphinx from build/ (unnecessary without 2to3) * use a temporary dir for *all* test projects, the source tree will stay pristine that way (default is tests/build) * speed up tests by ~3x by splitting up test projects and avoiding rebuilds --- .hgignore | 3 +- Makefile | 4 +- tests/path.py | 3 + tests/root/conf.py | 11 +- tests/root/contents.txt | 15 +- tests/root/undecodable.txt | 3 + tests/roots/test-autosummary/conf.py | 4 + tests/roots/test-autosummary/contents.rst | 13 +- .../test-autosummary/sphinx.rst} | 0 tests/roots/test-build-text/conf.py | 2 + tests/roots/test-build-text/contents.txt | 8 + tests/roots/test-build-text/lineblock.txt | 6 + tests/roots/test-build-text/maxwidth.txt | 6 + .../test-build-text/nonascii_maxwidth.txt | 5 + .../roots/test-build-text/nonascii_table.txt | 7 + .../roots/test-build-text/nonascii_title.txt | 2 + tests/roots/test-build-text/table.txt | 7 + tests/roots/test-circular/conf.py | 0 tests/roots/test-circular/contents.rst | 4 + tests/roots/test-circular/sub.rst | 3 + tests/roots/test-directive-code/dedent.rst | 37 +- .../roots/test-directive-code/dedent_code.rst | 53 +++ tests/roots/test-doctest/conf.py | 5 + .../{root => roots/test-doctest}/doctest.txt | 4 +- tests/roots/test-numbered-circular/conf.py | 0 .../roots/test-numbered-circular/contents.rst | 5 + tests/roots/test-numbered-circular/sub.rst | 3 + .../autosummary_templating.txt | 8 +- .../test-versioning}/added.txt | 0 tests/roots/test-versioning/conf.py | 3 + .../test-versioning}/deleted.txt | 0 .../test-versioning}/deleted_end.txt | 0 .../test-versioning}/index.txt | 0 .../test-versioning}/insert.txt | 0 .../test-versioning}/insert_beginning.txt | 0 .../test-versioning}/insert_similar.txt | 0 .../test-versioning}/modified.txt | 0 .../test-versioning}/original.txt | 0 tests/run.py | 62 ++- tests/test_api_translator.py | 135 ++----- tests/test_application.py | 81 ++-- tests/test_autodoc.py | 2 +- tests/test_build.py | 136 +++---- tests/test_build_gettext.py | 48 +-- tests/test_build_html.py | 142 +++---- tests/test_build_latex.py | 24 +- tests/test_build_texinfo.py | 24 +- tests/test_build_text.py | 120 ++---- .../{test_build_base.py => test_catalogs.py} | 155 ++++---- tests/test_config.py | 6 +- tests/test_directive_code.py | 287 ++++++-------- tests/test_directive_only.py | 10 +- tests/test_docutilsconf.py | 83 +--- .../{test_py_domain.py => test_domain_py.py} | 2 +- ...{test_rst_domain.py => test_domain_rst.py} | 0 tests/test_domain_std.py | 160 ++++---- tests/{test_env.py => test_environment.py} | 26 +- ...autosummary.py => test_ext_autosummary.py} | 63 +-- ...{test_coverage.py => test_ext_coverage.py} | 2 +- .../{test_doctest.py => test_ext_doctest.py} | 12 +- ...intersphinx.py => test_ext_intersphinx.py} | 22 +- ...{test_linkcode.py => test_ext_linkcode.py} | 7 +- ...{test_napoleon.py => test_ext_napoleon.py} | 0 ...ring.py => test_ext_napoleon_docstring.py} | 0 ...tors.py => test_ext_napoleon_iterators.py} | 0 tests/test_ext_viewcode.py | 75 ++-- tests/test_footnote.py | 37 -- tests/test_highlighting.py | 14 +- tests/test_i18n.py | 2 +- tests/test_intl.py | 371 +++++++++--------- tests/test_markup.py | 7 +- tests/test_metadata.py | 22 +- tests/test_quickstart.py | 26 +- tests/test_searchadapters.py | 23 +- tests/test_setup_command.py | 9 +- tests/test_templating.py | 23 +- tests/test_theming.py | 15 +- tests/test_util_i18n.py | 326 +++++++-------- tests/test_util_nodes.py | 242 ++++++------ 
tests/test_versioning.py | 36 +- tests/test_websupport.py | 27 +- tests/util.py | 143 ++++--- tox.ini | 2 +- 83 files changed, 1476 insertions(+), 1757 deletions(-) create mode 100644 tests/root/undecodable.txt rename tests/{root/autosummary.txt => roots/test-autosummary/sphinx.rst} (100%) create mode 100644 tests/roots/test-build-text/conf.py create mode 100644 tests/roots/test-build-text/contents.txt create mode 100644 tests/roots/test-build-text/lineblock.txt create mode 100644 tests/roots/test-build-text/maxwidth.txt create mode 100644 tests/roots/test-build-text/nonascii_maxwidth.txt create mode 100644 tests/roots/test-build-text/nonascii_table.txt create mode 100644 tests/roots/test-build-text/nonascii_title.txt create mode 100644 tests/roots/test-build-text/table.txt create mode 100644 tests/roots/test-circular/conf.py create mode 100644 tests/roots/test-circular/contents.rst create mode 100644 tests/roots/test-circular/sub.rst create mode 100644 tests/roots/test-directive-code/dedent_code.rst create mode 100644 tests/roots/test-doctest/conf.py rename tests/{root => roots/test-doctest}/doctest.txt (96%) create mode 100644 tests/roots/test-numbered-circular/conf.py create mode 100644 tests/roots/test-numbered-circular/contents.rst create mode 100644 tests/roots/test-numbered-circular/sub.rst rename tests/{root/versioning => roots/test-versioning}/added.txt (100%) create mode 100644 tests/roots/test-versioning/conf.py rename tests/{root/versioning => roots/test-versioning}/deleted.txt (100%) rename tests/{root/versioning => roots/test-versioning}/deleted_end.txt (100%) rename tests/{root/versioning => roots/test-versioning}/index.txt (100%) rename tests/{root/versioning => roots/test-versioning}/insert.txt (100%) rename tests/{root/versioning => roots/test-versioning}/insert_beginning.txt (100%) rename tests/{root/versioning => roots/test-versioning}/insert_similar.txt (100%) rename tests/{root/versioning => roots/test-versioning}/modified.txt (100%) rename tests/{root/versioning => roots/test-versioning}/original.txt (100%) rename tests/{test_build_base.py => test_catalogs.py} (77%) rename tests/{test_py_domain.py => test_domain_py.py} (98%) rename tests/{test_rst_domain.py => test_domain_rst.py} (100%) rename tests/{test_env.py => test_environment.py} (87%) rename tests/{test_autosummary.py => test_ext_autosummary.py} (66%) rename tests/{test_coverage.py => test_ext_coverage.py} (97%) rename tests/{test_doctest.py => test_ext_doctest.py} (76%) rename tests/{test_intersphinx.py => test_ext_intersphinx.py} (89%) rename tests/{test_linkcode.py => test_ext_linkcode.py} (78%) rename tests/{test_napoleon.py => test_ext_napoleon.py} (100%) rename tests/{test_napoleon_docstring.py => test_ext_napoleon_docstring.py} (100%) rename tests/{test_napoleon_iterators.py => test_ext_napoleon_iterators.py} (100%) delete mode 100644 tests/test_footnote.py diff --git a/.hgignore b/.hgignore index 1154332f7..16d29fcf8 100644 --- a/.hgignore +++ b/.hgignore @@ -7,6 +7,7 @@ ^build/ ^dist/ ^tests/.coverage +^tests/build/ ^sphinx/pycode/Grammar.*pickle ^Sphinx.egg-info/ ^doc/_build/ @@ -18,5 +19,3 @@ ~$ ^utils/.*3\.py$ ^distribute- -^tests/root/_build/* -^tests/root/generated/* diff --git a/Makefile b/Makefile index 128b2c809..0e4a9adef 100644 --- a/Makefile +++ b/Makefile @@ -48,10 +48,10 @@ reindent: @$(PYTHON) utils/reindent.py -r -n . 
endif -test: build +test: @cd tests; $(PYTHON) run.py -d -m '^[tT]est' $(TEST) -covertest: build +covertest: @cd tests; $(PYTHON) run.py -d -m '^[tT]est' --with-coverage \ --cover-package=sphinx $(TEST) diff --git a/tests/path.py b/tests/path.py index 3e2c8f89e..0d935fe4a 100755 --- a/tests/path.py +++ b/tests/path.py @@ -195,6 +195,9 @@ class path(text_type): """ return self.__class__(os.path.join(self, *map(self.__class__, args))) + def listdir(self): + return os.listdir(self) + __div__ = __truediv__ = joinpath def __repr__(self): diff --git a/tests/root/conf.py b/tests/root/conf.py index f0d40148b..552e1aaa9 100644 --- a/tests/root/conf.py +++ b/tests/root/conf.py @@ -3,11 +3,9 @@ import sys, os sys.path.append(os.path.abspath('.')) -sys.path.append(os.path.abspath('..')) extensions = ['sphinx.ext.autodoc', 'sphinx.ext.jsmath', 'sphinx.ext.todo', - 'sphinx.ext.coverage', 'sphinx.ext.autosummary', - 'sphinx.ext.doctest', 'sphinx.ext.extlinks', + 'sphinx.ext.coverage', 'sphinx.ext.doctest', 'sphinx.ext.extlinks', 'sphinx.ext.viewcode', 'ext'] jsmath_path = 'dummy.js' @@ -18,7 +16,7 @@ master_doc = 'contents' source_suffix = '.txt' project = 'Sphinx ' -copyright = '2010, Georg Brandl & Team' +copyright = '2010-2014, Georg Brandl & Team' # If this is changed, remember to update the versionchanges! version = '0.6' release = '0.6alpha1' @@ -34,7 +32,8 @@ html_theme = 'testtheme' html_theme_path = ['.'] html_theme_options = {'testopt': 'testoverride'} html_sidebars = {'**': 'customsb.html', - 'contents': ['contentssb.html', 'localtoc.html'] } + 'contents': ['contentssb.html', 'localtoc.html', + 'globaltoc.html']} html_style = 'default.css' html_static_path = ['_static', 'templated.css_t'] html_extra_path = ['robots.txt'] @@ -65,8 +64,6 @@ value_from_conf_py = 84 coverage_c_path = ['special/*.h'] coverage_c_regexes = {'function': r'^PyAPI_FUNC\(.*\)\s+([^_][\w_]+)'} -autosummary_generate = ['autosummary'] - extlinks = {'issue': ('http://bugs.python.org/issue%s', 'issue '), 'pyurl': ('http://python.org/%s', None)} diff --git a/tests/root/contents.txt b/tests/root/contents.txt index c6b75c630..d786b9148 100644 --- a/tests/root/contents.txt +++ b/tests/root/contents.txt @@ -21,15 +21,14 @@ Contents: bom math autodoc - autosummary metadata extensions - doctest extensions - versioning/index footnote lists + http://sphinx-doc.org/ + Latest reference Python Indices and tables @@ -44,3 +43,13 @@ References .. [Ref1] Reference target. .. [Ref_1] Reference target 2. + +Test for issue #1157 +==================== + +This used to crash: + +.. toctree:: + +.. toctree:: + :hidden: diff --git a/tests/root/undecodable.txt b/tests/root/undecodable.txt new file mode 100644 index 000000000..a4cf5c370 --- /dev/null +++ b/tests/root/undecodable.txt @@ -0,0 +1,3 @@ +:orphan: + +here: diff --git a/tests/roots/test-autosummary/conf.py b/tests/roots/test-autosummary/conf.py index 542696e98..d9a447480 100644 --- a/tests/roots/test-autosummary/conf.py +++ b/tests/roots/test-autosummary/conf.py @@ -1,3 +1,7 @@ +import sys, os + +sys.path.insert(0, os.path.abspath('.')) + extensions = ['sphinx.ext.autosummary'] # The suffix of source filenames. diff --git a/tests/roots/test-autosummary/contents.rst b/tests/roots/test-autosummary/contents.rst index 32390a32e..cd4b7c5e5 100644 --- a/tests/roots/test-autosummary/contents.rst +++ b/tests/roots/test-autosummary/contents.rst @@ -1,6 +1,7 @@ - -.. autosummary:: - :nosignatures: - :toctree: - - dummy_module + +.. 
autosummary:: + :nosignatures: + :toctree: + + dummy_module + sphinx diff --git a/tests/root/autosummary.txt b/tests/roots/test-autosummary/sphinx.rst similarity index 100% rename from tests/root/autosummary.txt rename to tests/roots/test-autosummary/sphinx.rst diff --git a/tests/roots/test-build-text/conf.py b/tests/roots/test-build-text/conf.py new file mode 100644 index 000000000..1ba342a65 --- /dev/null +++ b/tests/roots/test-build-text/conf.py @@ -0,0 +1,2 @@ +master_doc = 'contents' +source_suffix = '.txt' diff --git a/tests/roots/test-build-text/contents.txt b/tests/roots/test-build-text/contents.txt new file mode 100644 index 000000000..420d14280 --- /dev/null +++ b/tests/roots/test-build-text/contents.txt @@ -0,0 +1,8 @@ +.. toctree:: + + maxwidth + lineblock + nonascii_title + nonascii_table + nonascii_maxwidth + table diff --git a/tests/roots/test-build-text/lineblock.txt b/tests/roots/test-build-text/lineblock.txt new file mode 100644 index 000000000..b9cd0ed79 --- /dev/null +++ b/tests/roots/test-build-text/lineblock.txt @@ -0,0 +1,6 @@ +* one + + | line-block 1 + | line-block 2 + +followed paragraph. diff --git a/tests/roots/test-build-text/maxwidth.txt b/tests/roots/test-build-text/maxwidth.txt new file mode 100644 index 000000000..c36f8a02e --- /dev/null +++ b/tests/roots/test-build-text/maxwidth.txt @@ -0,0 +1,6 @@ +.. seealso:: ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham + +* ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham +* ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham + +spam egg diff --git a/tests/roots/test-build-text/nonascii_maxwidth.txt b/tests/roots/test-build-text/nonascii_maxwidth.txt new file mode 100644 index 000000000..e9f0fd9b8 --- /dev/null +++ b/tests/roots/test-build-text/nonascii_maxwidth.txt @@ -0,0 +1,5 @@ +abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc + +日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 + +abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 diff --git a/tests/roots/test-build-text/nonascii_table.txt b/tests/roots/test-build-text/nonascii_table.txt new file mode 100644 index 000000000..709e0f2fc --- /dev/null +++ b/tests/roots/test-build-text/nonascii_table.txt @@ -0,0 +1,7 @@ +.. list-table:: + + - - spam + - egg + + - - 日本語 + - 日本語 diff --git a/tests/roots/test-build-text/nonascii_title.txt b/tests/roots/test-build-text/nonascii_title.txt new file mode 100644 index 000000000..6d3b1f610 --- /dev/null +++ b/tests/roots/test-build-text/nonascii_title.txt @@ -0,0 +1,2 @@ +日本語 +====== diff --git a/tests/roots/test-build-text/table.txt b/tests/roots/test-build-text/table.txt new file mode 100644 index 000000000..84328940f --- /dev/null +++ b/tests/roots/test-build-text/table.txt @@ -0,0 +1,7 @@ + +-----+-----+ + | XXX | XXX | + +-----+-----+ + | | XXX | + +-----+-----+ + | XXX | | + +-----+-----+ diff --git a/tests/roots/test-circular/conf.py b/tests/roots/test-circular/conf.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/roots/test-circular/contents.rst b/tests/roots/test-circular/contents.rst new file mode 100644 index 000000000..294e674dd --- /dev/null +++ b/tests/roots/test-circular/contents.rst @@ -0,0 +1,4 @@ +.. 
toctree:: + + sub + diff --git a/tests/roots/test-circular/sub.rst b/tests/roots/test-circular/sub.rst new file mode 100644 index 000000000..070c39743 --- /dev/null +++ b/tests/roots/test-circular/sub.rst @@ -0,0 +1,3 @@ +.. toctree:: + + contents diff --git a/tests/roots/test-directive-code/dedent.rst b/tests/roots/test-directive-code/dedent.rst index d29e2cfa4..9ec1c0eeb 100644 --- a/tests/roots/test-directive-code/dedent.rst +++ b/tests/roots/test-directive-code/dedent.rst @@ -1,22 +1,35 @@ Dedent ====== -Code blocks ------------ - -.. code-block:: ruby - :linenos: - :dedent: 4 - - def ruby? - false - end - - Literal Include --------------- +.. literalinclude:: literal.inc + :language: python + :lines: 10-11 + :dedent: 0 + +.. literalinclude:: literal.inc + :language: python + :lines: 10-11 + :dedent: 1 + +.. literalinclude:: literal.inc + :language: python + :lines: 10-11 + :dedent: 2 + +.. literalinclude:: literal.inc + :language: python + :lines: 10-11 + :dedent: 3 + .. literalinclude:: literal.inc :language: python :lines: 10-11 :dedent: 4 + +.. literalinclude:: literal.inc + :language: python + :lines: 10-11 + :dedent: 1000 diff --git a/tests/roots/test-directive-code/dedent_code.rst b/tests/roots/test-directive-code/dedent_code.rst new file mode 100644 index 000000000..3e8dacd69 --- /dev/null +++ b/tests/roots/test-directive-code/dedent_code.rst @@ -0,0 +1,53 @@ +Dedent +====== + +Code blocks +----------- + +.. code-block:: ruby + :linenos: + :dedent: 0 + + def ruby? + false + end + +.. code-block:: ruby + :linenos: + :dedent: 1 + + def ruby? + false + end + +.. code-block:: ruby + :linenos: + :dedent: 2 + + def ruby? + false + end + +.. code-block:: ruby + :linenos: + :dedent: 3 + + def ruby? + false + end + +.. code-block:: ruby + :linenos: + :dedent: 4 + + def ruby? + false + end + +.. code-block:: ruby + :linenos: + :dedent: 1000 + + def ruby? + false + end diff --git a/tests/roots/test-doctest/conf.py b/tests/roots/test-doctest/conf.py new file mode 100644 index 000000000..f6a12edb9 --- /dev/null +++ b/tests/roots/test-doctest/conf.py @@ -0,0 +1,5 @@ +extensions = ['sphinx.ext.doctest'] + +project = 'test project for doctest' +master_doc = 'doctest.txt' +source_suffix = '.txt' diff --git a/tests/root/doctest.txt b/tests/roots/test-doctest/doctest.txt similarity index 96% rename from tests/root/doctest.txt rename to tests/roots/test-doctest/doctest.txt index d029cd880..ce4d88bd8 100644 --- a/tests/root/doctest.txt +++ b/tests/roots/test-doctest/doctest.txt @@ -125,5 +125,5 @@ Special directives .. testcleanup:: * - import test_doctest - test_doctest.cleanup_call() + import test_ext_doctest + test_ext_doctest.cleanup_call() diff --git a/tests/roots/test-numbered-circular/conf.py b/tests/roots/test-numbered-circular/conf.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/roots/test-numbered-circular/contents.rst b/tests/roots/test-numbered-circular/contents.rst new file mode 100644 index 000000000..c3129cd48 --- /dev/null +++ b/tests/roots/test-numbered-circular/contents.rst @@ -0,0 +1,5 @@ +.. toctree:: + :numbered: + + sub + diff --git a/tests/roots/test-numbered-circular/sub.rst b/tests/roots/test-numbered-circular/sub.rst new file mode 100644 index 000000000..070c39743 --- /dev/null +++ b/tests/roots/test-numbered-circular/sub.rst @@ -0,0 +1,3 @@ +.. 
toctree:: + + contents diff --git a/tests/roots/test-templating/autosummary_templating.txt b/tests/roots/test-templating/autosummary_templating.txt index 05643a02d..6b396a3f6 100644 --- a/tests/roots/test-templating/autosummary_templating.txt +++ b/tests/roots/test-templating/autosummary_templating.txt @@ -4,10 +4,4 @@ Autosummary templating test .. autosummary:: :toctree: generated - sphinx.application.Sphinx - -.. currentmodule:: sphinx.application - -.. autoclass:: TemplateBridge - - .. automethod:: render + sphinx.application.TemplateBridge diff --git a/tests/root/versioning/added.txt b/tests/roots/test-versioning/added.txt similarity index 100% rename from tests/root/versioning/added.txt rename to tests/roots/test-versioning/added.txt diff --git a/tests/roots/test-versioning/conf.py b/tests/roots/test-versioning/conf.py new file mode 100644 index 000000000..edcf92951 --- /dev/null +++ b/tests/roots/test-versioning/conf.py @@ -0,0 +1,3 @@ +project = 'versioning test root' +master_doc = 'index' +source_suffix = '.txt' diff --git a/tests/root/versioning/deleted.txt b/tests/roots/test-versioning/deleted.txt similarity index 100% rename from tests/root/versioning/deleted.txt rename to tests/roots/test-versioning/deleted.txt diff --git a/tests/root/versioning/deleted_end.txt b/tests/roots/test-versioning/deleted_end.txt similarity index 100% rename from tests/root/versioning/deleted_end.txt rename to tests/roots/test-versioning/deleted_end.txt diff --git a/tests/root/versioning/index.txt b/tests/roots/test-versioning/index.txt similarity index 100% rename from tests/root/versioning/index.txt rename to tests/roots/test-versioning/index.txt diff --git a/tests/root/versioning/insert.txt b/tests/roots/test-versioning/insert.txt similarity index 100% rename from tests/root/versioning/insert.txt rename to tests/roots/test-versioning/insert.txt diff --git a/tests/root/versioning/insert_beginning.txt b/tests/roots/test-versioning/insert_beginning.txt similarity index 100% rename from tests/root/versioning/insert_beginning.txt rename to tests/roots/test-versioning/insert_beginning.txt diff --git a/tests/root/versioning/insert_similar.txt b/tests/roots/test-versioning/insert_similar.txt similarity index 100% rename from tests/root/versioning/insert_similar.txt rename to tests/roots/test-versioning/insert_similar.txt diff --git a/tests/root/versioning/modified.txt b/tests/roots/test-versioning/modified.txt similarity index 100% rename from tests/root/versioning/modified.txt rename to tests/roots/test-versioning/modified.txt diff --git a/tests/root/versioning/original.txt b/tests/roots/test-versioning/original.txt similarity index 100% rename from tests/root/versioning/original.txt rename to tests/roots/test-versioning/original.txt diff --git a/tests/run.py b/tests/run.py index b903165d6..2672594fd 100755 --- a/tests/run.py +++ b/tests/run.py @@ -11,47 +11,37 @@ """ from __future__ import print_function +import os import sys -from os import path, chdir, listdir, environ -import shutil +import traceback +from path import path -testroot = path.dirname(__file__) or '.' -if 'BUILD_TEST_PATH' in environ: - # for tox testing - newroot = environ['BUILD_TEST_PATH'] - # tox installs the sphinx package, no need for sys.path.insert -else: - newroot = path.join(testroot, path.pardir, 'build') - newroot = path.join(newroot, listdir(newroot)[0], 'tests') +testroot = os.path.dirname(__file__) or '.' 
+sys.path.insert(0, os.path.abspath(os.path.join(testroot, os.path.pardir))) -shutil.rmtree(newroot, ignore_errors=True) -# just copying test directory to parallel testing -print('Copying sources to build/lib/tests...') -shutil.copytree(testroot, newroot) +# check dependencies before testing +print('Checking dependencies...') +for modname in ('nose', 'mock', 'six', 'docutils', 'jinja2', 'pygments', + 'snowballstemmer', 'babel'): + try: + __import__(modname) + except ImportError as err: + traceback.print_exc() + print('The %r package is needed to run the Sphinx test suite.' % modname) + sys.exit(1) -# always test the sphinx package from build/lib/ -sys.path.insert(0, path.abspath(path.join(newroot, path.pardir))) -# switch to the copy/converted dir so nose tests the right tests -chdir(newroot) - -try: - import nose -except ImportError: - print('The nose package is needed to run the Sphinx test suite.') - sys.exit(1) - -try: - import docutils -except ImportError: - print('Sphinx requires the docutils package to be installed.') - sys.exit(1) - -try: - import jinja2 -except ImportError: - print('Sphinx requires the jinja2 package to be installed.') - sys.exit(1) +# find a temp dir for testing and clean it up now +os.environ['SPHINX_TEST_TEMPDIR'] = \ + os.path.abspath(os.path.join(testroot, 'build')) \ + if 'SPHINX_TEST_TEMPDIR' not in os.environ \ + else os.path.abspath(os.environ['SPHINX_TEST_TEMPDIR']) +tempdir = path(os.environ['SPHINX_TEST_TEMPDIR']) +print('Temporary files will be placed in %s.' % tempdir) +if tempdir.exists(): + tempdir.rmtree() +tempdir.makedirs() print('Running Sphinx test suite...') +import nose nose.main() diff --git a/tests/test_api_translator.py b/tests/test_api_translator.py index 9fa1b3eaf..e0ba5e0f7 100644 --- a/tests/test_api_translator.py +++ b/tests/test_api_translator.py @@ -8,82 +8,57 @@ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" + import sys -from nose.tools import with_setup - -from util import with_app, test_roots +from util import with_app, rootdir def setup_module(): - sys.path.insert(0, test_roots / 'test-api-set-translator') + sys.path.insert(0, rootdir / 'roots' / 'test-api-set-translator') def teardown_module(): - sys.path.remove(test_roots / 'test-api-set-translator') + sys.path.remove(rootdir / 'roots' / 'test-api-set-translator') -def teardown_websupport(): - (test_roots / 'test-api-set-translator' / 'generated').rmtree(True) - (test_roots / 'test-api-set-translator' / 'websupport').rmtree(True) - - -@with_app( - buildername='html', - srcdir=(test_roots / 'test-api-set-translator'), - confdir=(test_roots / 'test-api-set-translator' / 'nonext'), -) -def test_html_translator(app): +@with_app('html') +def test_html_translator(app, status, warning): # no set_translator(), no html_translator_class translator_class = app.builder.translator_class assert translator_class assert translator_class.__name__ == 'SmartyPantsHTMLTranslator' -@with_app( - buildername='html', - srcdir=(test_roots / 'test-api-set-translator'), - confdir=(test_roots / 'test-api-set-translator' / 'nonext'), - confoverrides={ - 'html_translator_class': 'translator.ExtHTMLTranslator'}, -) -def test_html_with_html_translator_class(app): +@with_app('html', confoverrides={ + 'html_translator_class': 'translator.ExtHTMLTranslator'}) +def test_html_with_html_translator_class(app, status, warning): # no set_translator(), but html_translator_class translator_class = app.builder.translator_class assert translator_class assert translator_class.__name__ == 'ExtHTMLTranslator' -@with_app( - buildername='html', - srcdir=(test_roots / 'test-api-set-translator'), - confdir=(test_roots / 'test-api-set-translator' / 'nonext'), - confoverrides={'html_use_smartypants': False}, -) -def test_html_with_smartypants(app): +@with_app('html', + confoverrides={'html_use_smartypants': False}) +def test_html_with_smartypants(app, status, warning): # no set_translator(), html_use_smartypants=False translator_class = app.builder.translator_class assert translator_class assert translator_class.__name__ == 'HTMLTranslator' -@with_app( - buildername='html', - srcdir=(test_roots / 'test-api-set-translator'), -) -def test_html_with_set_translator_for_html_(app): +@with_app('html', testroot='api-set-translator') +def test_html_with_set_translator_for_html_(app, status, warning): # use set_translator(), no html_translator_class translator_class = app.builder.translator_class assert translator_class assert translator_class.__name__ == 'ConfHTMLTranslator' -@with_app( - buildername='html', - srcdir=(test_roots / 'test-api-set-translator'), - confoverrides={'html_translator_class': 'ext.ExtHTMLTranslator'}, -) -def test_html_with_set_translator_for_html_and_html_translator_class(app): +@with_app('html', testroot='api-set-translator', + confoverrides={'html_translator_class': 'ext.ExtHTMLTranslator'}) +def test_html_with_set_translator_for_html_and_html_translator_class(app, status, warning): # use set_translator() and html_translator_class. # set_translator() is given priority over html_translator_clas. 
translator_class = app.builder.translator_class @@ -96,108 +71,70 @@ def test_html_with_set_translator_for_html_and_html_translator_class(app): # buildername='dirhtml', # srcdir=(test_roots / 'test-api-set-translator'), # ) -# def test_dirhtml_set_translator_for_dirhtml(app): +# def test_dirhtml_set_translator_for_dirhtml(app, status, warning): # translator_class = app.builder.translator_class # assert translator_class # assert translator_class.__name__ == 'ConfDirHTMLTranslator' -@with_app( - buildername='singlehtml', - srcdir=(test_roots / 'test-api-set-translator'), -) -def test_singlehtml_set_translator_for_singlehtml(app): +@with_app('singlehtml', testroot='api-set-translator') +def test_singlehtml_set_translator_for_singlehtml(app, status, warning): translator_class = app.builder.translator_class assert translator_class assert translator_class.__name__ == 'ConfSingleHTMLTranslator' -@with_app( - buildername='pickle', - srcdir=(test_roots / 'test-api-set-translator'), -) -def test_pickle_set_translator_for_pickle(app): +@with_app('pickle', testroot='api-set-translator') +def test_pickle_set_translator_for_pickle(app, status, warning): translator_class = app.builder.translator_class assert translator_class assert translator_class.__name__ == 'ConfPickleTranslator' -@with_app( - buildername='json', - srcdir=(test_roots / 'test-api-set-translator'), -) -def test_json_set_translator_for_json(app): +@with_app('json', testroot='api-set-translator') +def test_json_set_translator_for_json(app, status, warning): translator_class = app.builder.translator_class assert translator_class assert translator_class.__name__ == 'ConfJsonTranslator' -@with_app( - buildername='latex', - srcdir=(test_roots / 'test-api-set-translator'), -) -def test_html_with_set_translator_for_latex(app): +@with_app('latex', testroot='api-set-translator') +def test_html_with_set_translator_for_latex(app, status, warning): translator_class = app.builder.translator_class assert translator_class assert translator_class.__name__ == 'ConfLaTeXTranslator' -@with_app( - buildername='man', - srcdir=(test_roots / 'test-api-set-translator'), -) -def test_html_with_set_translator_for_man(app): +@with_app('man', testroot='api-set-translator') +def test_html_with_set_translator_for_man(app, status, warning): translator_class = app.builder.translator_class assert translator_class assert translator_class.__name__ == 'ConfManualPageTranslator' -@with_app( - buildername='texinfo', - srcdir=(test_roots / 'test-api-set-translator'), -) -def test_html_with_set_translator_for_texinfo(app): +@with_app('texinfo', testroot='api-set-translator') +def test_html_with_set_translator_for_texinfo(app, status, warning): translator_class = app.builder.translator_class assert translator_class assert translator_class.__name__ == 'ConfTexinfoTranslator' -@with_app( - buildername='text', - srcdir=(test_roots / 'test-api-set-translator'), -) -def test_html_with_set_translator_for_text(app): +@with_app('text', testroot='api-set-translator') +def test_html_with_set_translator_for_text(app, status, warning): translator_class = app.builder.translator_class assert translator_class assert translator_class.__name__ == 'ConfTextTranslator' -@with_setup(teardown=teardown_websupport) -@with_app( - buildername='websupport', - srcdir=(test_roots / 'test-api-set-translator'), -) -def test_html_with_set_translator_for_websupport(app): - translator_class = app.builder.translator_class - assert translator_class - assert translator_class.__name__ == 'ConfWebSupportTranslator' 
- - -@with_app( - buildername='xml', - srcdir=(test_roots / 'test-api-set-translator'), -) -def test_html_with_set_translator_for_xml(app): +@with_app('xml', testroot='api-set-translator') +def test_html_with_set_translator_for_xml(app, status, warning): translator_class = app.builder.translator_class assert translator_class assert translator_class.__name__ == 'ConfXMLTranslator' -@with_app( - buildername='pseudoxml', - srcdir=(test_roots / 'test-api-set-translator'), -) -def test_html_with_set_translator_for_pseudoxml(app): +@with_app('pseudoxml', testroot='api-set-translator') +def test_html_with_set_translator_for_pseudoxml(app, status, warning): translator_class = app.builder.translator_class assert translator_class assert translator_class.__name__ == 'ConfPseudoXMLTranslator' diff --git a/tests/test_application.py b/tests/test_application.py index 49c27452a..1f188de1f 100644 --- a/tests/test_application.py +++ b/tests/test_application.py @@ -9,22 +9,21 @@ :license: BSD, see LICENSE for details. """ -from six import StringIO from docutils import nodes from sphinx.application import ExtensionError from sphinx.domains import Domain -from util import with_app, raises_msg, TestApp +from util import with_app, raises_msg @with_app() -def test_events(app): - def empty(): pass +def test_events(app, status, warning): + def empty(): + pass raises_msg(ExtensionError, "Unknown event name: invalid", app.connect, "invalid", empty) - app.add_event("my_event") raises_msg(ExtensionError, "Event 'my_event' already present", app.add_event, "my_event") @@ -43,57 +42,49 @@ def test_events(app): @with_app() -def test_emit_with_nonascii_name_node(app): +def test_emit_with_nonascii_name_node(app, status, warning): node = nodes.section(names=[u'\u65e5\u672c\u8a9e']) app.emit('my_event', node) -def test_output(): - status, warnings = StringIO(), StringIO() - app = TestApp(status=status, warning=warnings) - try: - status.truncate(0) # __init__ writes to status - status.seek(0) - app.info("Nothing here...") - assert status.getvalue() == "Nothing here...\n" - status.truncate(0) - status.seek(0) - app.info("Nothing here...", True) - assert status.getvalue() == "Nothing here..." +@with_app() +def test_output(app, status, warning): + status.truncate(0) # __init__ writes to status + status.seek(0) + app.info("Nothing here...") + assert status.getvalue() == "Nothing here...\n" + status.truncate(0) + status.seek(0) + app.info("Nothing here...", True) + assert status.getvalue() == "Nothing here..." 
- old_count = app._warncount - app.warn("Bad news!") - assert warnings.getvalue() == "WARNING: Bad news!\n" - assert app._warncount == old_count + 1 - finally: - app.cleanup() + old_count = app._warncount + app.warn("Bad news!") + assert warning.getvalue() == "WARNING: Bad news!\n" + assert app._warncount == old_count + 1 -def test_extensions(): - status, warnings = StringIO(), StringIO() - app = TestApp(status=status, warning=warnings) - try: - app.setup_extension('shutil') - assert warnings.getvalue().startswith("WARNING: extension 'shutil'") - finally: - app.cleanup() +@with_app() +def test_extensions(app, status, warning): + app.setup_extension('shutil') + assert warning.getvalue().startswith("WARNING: extension 'shutil'") -def test_domain_override(): + +@with_app() +def test_domain_override(app, status, warning): class A(Domain): name = 'foo' + class B(A): name = 'foo' + class C(Domain): name = 'foo' - status, warnings = StringIO(), StringIO() - app = TestApp(status=status, warning=warnings) - try: - # No domain know named foo. - raises_msg(ExtensionError, 'domain foo not yet registered', - app.override_domain, A) - assert app.add_domain(A) is None - assert app.override_domain(B) is None - raises_msg(ExtensionError, 'new domain not a subclass of registered ' - 'foo domain', app.override_domain, C) - finally: - app.cleanup() + + # No domain know named foo. + raises_msg(ExtensionError, 'domain foo not yet registered', + app.override_domain, A) + assert app.add_domain(A) is None + assert app.override_domain(B) is None + raises_msg(ExtensionError, 'new domain not a subclass of registered ' + 'foo domain', app.override_domain, C) diff --git a/tests/test_autodoc.py b/tests/test_autodoc.py index 192820bf3..f4ae0c089 100644 --- a/tests/test_autodoc.py +++ b/tests/test_autodoc.py @@ -18,7 +18,7 @@ from six import StringIO from docutils.statemachine import ViewList from sphinx.ext.autodoc import AutoDirective, add_documenter, \ - ModuleLevelDocumenter, FunctionDocumenter, cut_lines, between, ALL + ModuleLevelDocumenter, FunctionDocumenter, cut_lines, between, ALL app = None diff --git a/tests/test_build.py b/tests/test_build.py index 56fdf826b..fe38cfaf8 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -3,114 +3,86 @@ test_build ~~~~~~~~~~ - Test all builders that have no special checks. + Test all builders. :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" -from util import with_app, test_root, path, SkipTest, TestApp +from six import BytesIO + from textwrap import dedent +from util import with_app, rootdir, tempdir, SkipTest, TestApp + try: from docutils.writers.manpage import Writer as ManWriter except ImportError: ManWriter = None -def teardown_module(): - (test_root / '_build').rmtree(True) +class MockOpener(object): + def open(self, req, **kwargs): + class result(BytesIO): + headers = None + url = req.url + return result() + +import sphinx.builders.linkcheck +sphinx.builders.linkcheck.opener = MockOpener() -def test_build(): - for buildername in ('pickle', 'json', 'linkcheck', 'text', 'htmlhelp', - 'qthelp', 'epub', 'changes', 'singlehtml', 'xml', - 'pseudoxml'): - app = TestApp(buildername=buildername) - yield lambda app: app.builder.build_all(), app - app.cleanup() - - -@with_app(buildername='man') -def test_man(app): - if ManWriter is None: +def verify_build(buildername, srcdir): + if buildername == 'man' and ManWriter is None: raise SkipTest('man writer is not available') - app.builder.build_all() - assert (app.outdir / 'SphinxTests.1').exists() - - -def _test_nonascii_path(app): - srcdir = path(app.srcdir) - mb_name = u'\u65e5\u672c\u8a9e' + app = TestApp(buildername=buildername, srcdir=srcdir) try: - (srcdir / mb_name).makedirs() - except UnicodeEncodeError: - from path import FILESYSTEMENCODING - raise SkipTest( - 'nonascii filename not supported on this filesystem encoding: ' - '%s', FILESYSTEMENCODING) - - (srcdir / mb_name / (mb_name + '.txt')).write_text(dedent(""" - multi byte file name page - ========================== - """)) - - master_doc = srcdir / 'contents.txt' - master_doc.write_bytes((master_doc.text() + dedent(""" - .. toctree:: - - %(mb_name)s/%(mb_name)s - """ % {'mb_name': mb_name}) - ).encode('utf-8')) - app.builder.build_all() - - -def test_nonascii_path(): - (test_root / '_build').rmtree(True) #keep this to build first gettext - - builder_names = ['gettext', 'html', 'dirhtml', 'singlehtml', 'latex', - 'texinfo', 'pickle', 'json', 'linkcheck', 'text', - 'htmlhelp', 'qthelp', 'epub', 'changes', 'xml', - 'pseudoxml'] - if ManWriter is not None: - builder_names.append('man') - - for buildername in builder_names: - app = TestApp(buildername=buildername, _copy_to_temp=True) - yield _test_nonascii_path, app + app.builder.build_all() + finally: app.cleanup() -@with_app(buildername='text', srcdir='(empty)') -def test_circular_toctree(app): - contents = (".. toctree::\n" - "\n" - " sub\n") - (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8') +def test_build_all(): + # If supported, build in a non-ASCII source dir + test_name = u'\u65e5\u672c\u8a9e' + try: + srcdir = tempdir / test_name + (rootdir / 'root').copytree(tempdir / test_name) + except UnicodeEncodeError: + srcdir = tempdir / 'all' + else: + # add a doc with a non-ASCII file name to the source dir + (srcdir / (test_name + '.txt')).write_text(dedent(""" + nonascii file name page + ======================= + """)) - contents = (".. toctree::\n" - "\n" - " contents\n") - (app.srcdir / 'sub.rst').write_text(contents, encoding='utf-8') + master_doc = srcdir / 'contents.txt' + master_doc.write_bytes((master_doc.text() + dedent(""" + .. 
toctree:: + + %(test_name)s/%(test_name)s + """ % {'test_name': test_name}) + ).encode('utf-8')) + + # note: no 'html' - if it's ok with dirhtml it's ok with html + for buildername in ['dirhtml', 'singlehtml', 'latex', 'texinfo', + 'pickle', 'json', 'text', 'htmlhelp', 'qthelp', 'epub', + 'changes', 'xml', 'pseudoxml', 'man', 'linkcheck']: + yield verify_build, buildername, srcdir + + +@with_app(buildername='text', testroot='circular') +def test_circular_toctree(app, status, warning): app.builder.build_all() - warnings = "".join(app._warning.content) + warnings = warning.getvalue() assert 'circular toctree references detected, ignoring: sub <- contents <- sub' in warnings assert 'circular toctree references detected, ignoring: contents <- sub <- contents' in warnings -@with_app(buildername='text', srcdir='(empty)') -def test_numbered_circular_toctree(app): - contents = (".. toctree::\n" - " :numbered:\n" - "\n" - " sub\n") - (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8') - - contents = (".. toctree::\n" - "\n" - " contents\n") - (app.srcdir / 'sub.rst').write_text(contents, encoding='utf-8') +@with_app(buildername='text', testroot='numbered-circular') +def test_numbered_circular_toctree(app, status, warning): app.builder.build_all() - warnings = "\n".join(app._warning.content) + warnings = warning.getvalue() assert 'circular toctree references detected, ignoring: sub <- contents <- sub' in warnings assert 'circular toctree references detected, ignoring: contents <- sub <- contents' in warnings diff --git a/tests/test_build_gettext.py b/tests/test_build_gettext.py index e7eda179d..fe1611583 100644 --- a/tests/test_build_gettext.py +++ b/tests/test_build_gettext.py @@ -15,22 +15,17 @@ import os import re from subprocess import Popen, PIPE -from util import test_root, test_roots, with_app, SkipTest +from util import with_app, SkipTest -def teardown_module(): - (test_root / '_build').rmtree(True) - (test_roots / 'test-intl' / '_build').rmtree(True), - - -@with_app(buildername='gettext') -def test_all(app): +@with_app('gettext') +def test_all(app, status, warning): # Generic build; should fail only when the builder is horribly broken. app.builder.build_all() -@with_app(buildername='gettext') -def test_build(app): +@with_app('gettext') +def test_build(app, status, warning): # Do messages end up in the correct location? 
app.builder.build(['extapi', 'subdir/includes']) # top-level documents end up in a message catalog @@ -39,16 +34,16 @@ def test_build(app): assert (app.outdir / 'subdir.pot').isfile() -@with_app(buildername='gettext') -def test_seealso(app): +@with_app('gettext') +def test_seealso(app, status, warning): # regression test for issue #960 app.builder.build(['markup']) catalog = (app.outdir / 'markup.pot').text(encoding='utf-8') assert 'msgid "something, something else, something more"' in catalog -@with_app(buildername='gettext') -def test_gettext(app): +@with_app('gettext') +def test_gettext(app, status, warning): app.builder.build(['markup']) (app.outdir / 'en' / 'LC_MESSAGES').makedirs() @@ -58,7 +53,7 @@ def test_gettext(app): try: p = Popen(['msginit', '--no-translator', '-i', 'markup.pot', '--locale', 'en_US'], - stdout=PIPE, stderr=PIPE) + stdout=PIPE, stderr=PIPE) except OSError: raise SkipTest # most likely msginit was not found else: @@ -67,12 +62,12 @@ def test_gettext(app): print(stdout) print(stderr) assert False, 'msginit exited with return code %s' % \ - p.returncode + p.returncode assert (app.outdir / 'en_US.po').isfile(), 'msginit failed' try: p = Popen(['msgfmt', 'en_US.po', '-o', - os.path.join('en', 'LC_MESSAGES', 'test_root.mo')], - stdout=PIPE, stderr=PIPE) + os.path.join('en', 'LC_MESSAGES', 'test_root.mo')], + stdout=PIPE, stderr=PIPE) except OSError: raise SkipTest # most likely msgfmt was not found else: @@ -81,9 +76,9 @@ def test_gettext(app): print(stdout) print(stderr) assert False, 'msgfmt exited with return code %s' % \ - p.returncode + p.returncode assert (app.outdir / 'en' / 'LC_MESSAGES' / 'test_root.mo').isfile(), \ - 'msgfmt failed' + 'msgfmt failed' finally: os.chdir(cwd) @@ -91,15 +86,14 @@ def test_gettext(app): assert _("Testing various markup") == u"Testing various markup" -@with_app(buildername='gettext', - srcdir=(test_roots / 'test-intl'), - doctreedir=(test_roots / 'test-intl' / '_build' / 'doctree'), +@with_app('gettext', testroot='intl', confoverrides={'gettext_compact': False}) -def test_gettext_index_entries(app): +def test_gettext_index_entries(app, status, warning): # regression test for #976 app.builder.build(['index_entries']) _msgid_getter = re.compile(r'msgid "(.*)"').search + def msgid_getter(msgid): m = _msgid_getter(msgid) if m: @@ -139,10 +133,8 @@ def test_gettext_index_entries(app): assert msgids == [] -@with_app(buildername='gettext', - srcdir=(test_roots / 'test-intl'), - doctreedir=(test_roots / 'test-intl' / '_build' / 'doctree')) -def test_gettext_template(app): +@with_app(buildername='gettext', testroot='intl') +def test_gettext_template(app, status, warning): app.builder.build_all() assert (app.outdir / 'sphinx.pot').isfile() diff --git a/tests/test_build_html.py b/tests/test_build_html.py index 6b5c03be5..9cf21c63c 100644 --- a/tests/test_build_html.py +++ b/tests/test_build_html.py @@ -15,22 +15,11 @@ import re from six import PY3, iteritems, StringIO from six.moves import html_entities -try: - import pygments -except ImportError: - pygments = None - from sphinx import __version__ -from util import test_root, test_roots, remove_unicode_literals, gen_with_app, with_app +from util import remove_unicode_literals, gen_with_app from etree13 import ElementTree as ET -def teardown_module(): - (test_root / '_build').rmtree(True) - - -html_warnfile = StringIO() - ENV_WARNINGS = """\ %(root)s/autodoc_fodder.py:docstring of autodoc_fodder\\.MarkupError:2: \ WARNING: Explicit markup ends without a blank line; unexpected \ @@ -44,6 
+33,8 @@ reading included file u'.*?wrongenc.inc' seems to be wrong, try giving an \ %(root)s/includes.txt:4: WARNING: download file not readable: .*?nonexisting.png %(root)s/markup.txt:\\d+: WARNING: Malformed :option: u'Python c option', does \ not contain option marker - or -- or / or \\+ +%(root)s/undecodable.txt:3: WARNING: undecodable source characters, replacing \ +with "\\?": b?'here: >>>\\\\xbb<<<' """ HTML_WARNINGS = ENV_WARNINGS + """\ @@ -61,6 +52,7 @@ if PY3: def tail_check(check): rex = re.compile(check) + def checker(nodes): for node in nodes: if node.tail and rex.search(node.tail): @@ -84,6 +76,8 @@ HTML_XPATH = { (".//a[@href='../_downloads/img.png']", ''), (".//img[@src='../_images/img.png']", ''), (".//p", 'This is an include file.'), + (".//pre/span", 'line 1'), + (".//pre/span", 'line 2'), ], 'includes.html': [ (".//pre", u'Max Strauß'), @@ -91,6 +85,23 @@ HTML_XPATH = { (".//a[@href='_downloads/img1.png']", ''), (".//pre", u'"quotes"'), (".//pre", u"'included'"), + (".//pre/span[@class='s']", u'üöä'), + (".//div[@class='inc-pyobj1 highlight-text']//pre", + r'^class Foo:\n pass\n\s*$'), + (".//div[@class='inc-pyobj2 highlight-text']//pre", + r'^ def baz\(\):\n pass\n\s*$'), + (".//div[@class='inc-lines highlight-text']//pre", + r'^class Foo:\n pass\nclass Bar:\n$'), + (".//div[@class='inc-startend highlight-text']//pre", + u'^foo = "Including Unicode characters: üöä"\\n$'), + (".//div[@class='inc-preappend highlight-text']//pre", + r'(?m)^START CODE$'), + (".//div[@class='inc-pyobj-dedent highlight-python']//span", + r'def'), + (".//div[@class='inc-tab3 highlight-text']//pre", + r'-| |-'), + (".//div[@class='inc-tab8 highlight-python']//pre/span", + r'-| |-'), ], 'autodoc.html': [ (".//dt[@id='test_autodoc.Class']", ''), @@ -215,12 +226,10 @@ HTML_XPATH = { (".//h4", 'Custom sidebar'), # docfields (".//td[@class='field-body']/strong", '^moo$'), - (".//td[@class='field-body']/strong", - tail_check(r'\(Moo\) .* Moo')), + (".//td[@class='field-body']/strong", tail_check(r'\(Moo\) .* Moo')), (".//td[@class='field-body']/ul/li/strong", '^hour$'), (".//td[@class='field-body']/ul/li/em", '^DuplicateType$'), - (".//td[@class='field-body']/ul/li/em", - tail_check(r'.* Some parameter')), + (".//td[@class='field-body']/ul/li/em", tail_check(r'.* Some parameter')), ], 'contents.html': [ (".//meta[@name='hc'][@content='hcval']", ''), @@ -241,6 +250,11 @@ HTML_XPATH = { (".//h4", 'Contents sidebar'), # custom JavaScript (".//script[@src='file://moo.js']", ''), + # URL in contents + (".//a[@class='reference external'][@href='http://sphinx-doc.org/']", + 'http://sphinx-doc.org/'), + (".//a[@class='reference external'][@href='http://sphinx-doc.org/latest/']", + 'Latest reference'), ], 'bom.html': [ (".//title", " File with UTF-8 BOM"), @@ -260,33 +274,19 @@ HTML_XPATH = { (".//a/strong", "Other"), (".//a", "entry"), (".//dt/a", "double"), - ] + ], + 'footnote.html': [ + (".//a[@class='footnote-reference'][@href='#id5'][@id='id1']", r"\[1\]"), + (".//a[@class='footnote-reference'][@href='#id6'][@id='id2']", r"\[2\]"), + (".//a[@class='footnote-reference'][@href='#foo'][@id='id3']", r"\[3\]"), + (".//a[@class='reference internal'][@href='#bar'][@id='id4']", r"\[bar\]"), + (".//a[@class='fn-backref'][@href='#id1']", r"\[1\]"), + (".//a[@class='fn-backref'][@href='#id2']", r"\[2\]"), + (".//a[@class='fn-backref'][@href='#id3']", r"\[3\]"), + (".//a[@class='fn-backref'][@href='#id4']", r"\[bar\]"), + ], } -if pygments: - HTML_XPATH['includes.html'].extend([ - (".//pre/span[@class='s']", 
u'üöä'), - (".//div[@class='inc-pyobj1 highlight-text']//pre", - r'^class Foo:\n pass\n\s*$'), - (".//div[@class='inc-pyobj2 highlight-text']//pre", - r'^ def baz\(\):\n pass\n\s*$'), - (".//div[@class='inc-lines highlight-text']//pre", - r'^class Foo:\n pass\nclass Bar:\n$'), - (".//div[@class='inc-startend highlight-text']//pre", - u'^foo = "Including Unicode characters: üöä"\\n$'), - (".//div[@class='inc-preappend highlight-text']//pre", - r'(?m)^START CODE$'), - (".//div[@class='inc-pyobj-dedent highlight-python']//span", - r'def'), - (".//div[@class='inc-tab3 highlight-text']//pre", - r'-| |-'), - (".//div[@class='inc-tab8 highlight-python']//pre/span", - r'-| |-'), - ]) - HTML_XPATH['subdir/includes.html'].extend([ - (".//pre/span", 'line 1'), - (".//pre/span", 'line 2'), - ]) class NslessParser(ET.XMLParser): """XMLParser that throws away namespaces in tag names.""" @@ -320,7 +320,8 @@ def check_xpath(etree, fname, path, check, be_found=True): else: assert False, ('%r not found in any node matching ' 'path %s in %s: %r' % (check, path, fname, - [node.text for node in nodes])) + [node.text for node in nodes])) + def check_static_entries(outdir): staticdir = outdir / '_static' @@ -335,21 +336,23 @@ def check_static_entries(outdir): # a file from _static, but matches exclude_patterns assert not (staticdir / 'excluded.css').exists() + def check_extra_entries(outdir): assert (outdir / 'robots.txt').isfile() -@gen_with_app(buildername='html', warning=html_warnfile, cleanenv=True, + +@gen_with_app(buildername='html', freshenv=True, confoverrides={'html_context.hckey_co': 'hcval_co'}, tags=['testtag']) -def test_html(app): +def test_html_output(app, status, warning): app.builder.build_all() - html_warnings = html_warnfile.getvalue().replace(os.sep, '/') + html_warnings = warning.getvalue().replace(os.sep, '/') html_warnings_exp = HTML_WARNINGS % { - 'root': re.escape(app.srcdir.replace(os.sep, '/'))} + 'root': re.escape(app.srcdir.replace(os.sep, '/'))} assert re.match(html_warnings_exp + '$', html_warnings), \ - 'Warnings don\'t match:\n' + \ - '--- Expected (regex):\n' + html_warnings_exp + \ - '--- Got:\n' + html_warnings + 'Warnings don\'t match:\n' + \ + '--- Expected (regex):\n' + html_warnings_exp + \ + '--- Got:\n' + html_warnings for fname, paths in iteritems(HTML_XPATH): parser = NslessParser() @@ -365,23 +368,9 @@ def test_html(app): check_static_entries(app.builder.outdir) check_extra_entries(app.builder.outdir) -@with_app(buildername='html', srcdir='(empty)', - confoverrides={'html_sidebars': {'*': ['globaltoc.html']}}, - ) -def test_html_with_globaltoc_and_hidden_toctree(app): - # issue #1157: combination of 'globaltoc.html' and hidden toctree cause - # exception. - (app.srcdir / 'contents.rst').write_text( - '\n.. toctree::' - '\n' - '\n.. toctree::' - '\n :hidden:' - '\n') - app.builder.build_all() - -@gen_with_app(buildername='html', srcdir=(test_roots / 'test-tocdepth')) -def test_tocdepth(app): +@gen_with_app(buildername='html', testroot='tocdepth') +def test_tocdepth(app, status, warning): # issue #1251 app.builder.build_all() @@ -391,14 +380,14 @@ def test_tocdepth(app): (".//li[@class='toctree-l3']/a", '1.2.1. Foo B1', True), (".//li[@class='toctree-l3']/a", '2.1.1. Bar A1', False), (".//li[@class='toctree-l3']/a", '2.2.1. Bar B1', False), - ], + ], 'foo.html': [ (".//h1", '1. Foo', True), (".//h2", '1.1. Foo A', True), (".//h3", '1.1.1. Foo A1', True), (".//h2", '1.2. Foo B', True), (".//h3", '1.2.1. Foo B1', True), - ], + ], 'bar.html': [ (".//h1", '2. 
Bar', True), (".//h2", '2.1. Bar A', True), @@ -423,8 +412,8 @@ def test_tocdepth(app): yield check_xpath, etree, fname, xpath, check, be_found -@gen_with_app(buildername='singlehtml', srcdir=(test_roots / 'test-tocdepth')) -def test_tocdepth_singlehtml(app): +@gen_with_app(buildername='singlehtml', testroot='tocdepth') +def test_tocdepth_singlehtml(app, status, warning): app.builder.build_all() expects = { @@ -466,18 +455,3 @@ def test_tocdepth_singlehtml(app): for xpath, check, be_found in paths: yield check_xpath, etree, fname, xpath, check, be_found - - -@with_app(buildername='html', srcdir='(empty)') -def test_url_in_toctree(app): - contents = (".. toctree::\n" - "\n" - " http://sphinx-doc.org/\n" - " Latest reference \n") - - (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8') - app.builder.build_all() - - result = (app.outdir / 'contents.html').text(encoding='utf-8') - assert 'http://sphinx-doc.org/' in result - assert 'Latest reference' in result diff --git a/tests/test_build_latex.py b/tests/test_build_latex.py index 41ae03df3..8edea98ff 100644 --- a/tests/test_build_latex.py +++ b/tests/test_build_latex.py @@ -14,20 +14,14 @@ import os import re from subprocess import Popen, PIPE -from six import PY3, StringIO +from six import PY3 from sphinx.writers.latex import LaTeXTranslator -from util import test_root, SkipTest, remove_unicode_literals, with_app +from util import SkipTest, remove_unicode_literals, with_app from test_build_html import ENV_WARNINGS -def teardown_module(): - (test_root / '_build').rmtree(True) - - -latex_warnfile = StringIO() - LATEX_WARNINGS = ENV_WARNINGS + """\ None:None: WARNING: citation not found: missing None:None: WARNING: no matching candidate for image URI u'foo.\\*' @@ -39,17 +33,17 @@ if PY3: LATEX_WARNINGS = remove_unicode_literals(LATEX_WARNINGS) -@with_app(buildername='latex', warning=latex_warnfile, cleanenv=True) -def test_latex(app): +@with_app(buildername='latex', freshenv=True) +def test_latex(app, status, warning): LaTeXTranslator.ignore_missing_images = True app.builder.build_all() - latex_warnings = latex_warnfile.getvalue().replace(os.sep, '/') + latex_warnings = warning.getvalue().replace(os.sep, '/') latex_warnings_exp = LATEX_WARNINGS % { - 'root': re.escape(app.srcdir.replace(os.sep, '/'))} + 'root': re.escape(app.srcdir.replace(os.sep, '/'))} assert re.match(latex_warnings_exp + '$', latex_warnings), \ - 'Warnings don\'t match:\n' + \ - '--- Expected (regex):\n' + latex_warnings_exp + \ - '--- Got:\n' + latex_warnings + 'Warnings don\'t match:\n' + \ + '--- Expected (regex):\n' + latex_warnings_exp + \ + '--- Got:\n' + latex_warnings # file from latex_additional_files assert (app.outdir / 'svgimg.svg').isfile() diff --git a/tests/test_build_texinfo.py b/tests/test_build_texinfo.py index fbe8a1731..7f50857f9 100644 --- a/tests/test_build_texinfo.py +++ b/tests/test_build_texinfo.py @@ -14,20 +14,14 @@ import os import re from subprocess import Popen, PIPE -from six import PY3, StringIO +from six import PY3 from sphinx.writers.texinfo import TexinfoTranslator -from util import test_root, SkipTest, remove_unicode_literals, with_app +from util import SkipTest, remove_unicode_literals, with_app from test_build_html import ENV_WARNINGS -def teardown_module(): - (test_root / '_build').rmtree(True) - - -texinfo_warnfile = StringIO() - TEXINFO_WARNINGS = ENV_WARNINGS + """\ None:None: WARNING: citation not found: missing None:None: WARNING: no matching candidate for image URI u'foo.\\*' @@ -38,17 +32,17 @@ if PY3: 
TEXINFO_WARNINGS = remove_unicode_literals(TEXINFO_WARNINGS) -@with_app(buildername='texinfo', warning=texinfo_warnfile, cleanenv=True) -def test_texinfo(app): +@with_app('texinfo', freshenv=True) +def test_texinfo(app, status, warning): TexinfoTranslator.ignore_missing_images = True app.builder.build_all() - texinfo_warnings = texinfo_warnfile.getvalue().replace(os.sep, '/') + texinfo_warnings = warning.getvalue().replace(os.sep, '/') texinfo_warnings_exp = TEXINFO_WARNINGS % { - 'root': re.escape(app.srcdir.replace(os.sep, '/'))} + 'root': re.escape(app.srcdir.replace(os.sep, '/'))} assert re.match(texinfo_warnings_exp + '$', texinfo_warnings), \ - 'Warnings don\'t match:\n' + \ - '--- Expected (regex):\n' + texinfo_warnings_exp + \ - '--- Got:\n' + texinfo_warnings + 'Warnings don\'t match:\n' + \ + '--- Expected (regex):\n' + texinfo_warnings_exp + \ + '--- Got:\n' + texinfo_warnings # now, try to run makeinfo over it cwd = os.getcwd() os.chdir(app.outdir) diff --git a/tests/test_build_text.py b/tests/test_build_text.py index e6e4d5be2..d486bed2f 100644 --- a/tests/test_build_text.py +++ b/tests/test_build_text.py @@ -18,29 +18,16 @@ from util import with_app def with_text_app(*args, **kw): default_kw = { 'buildername': 'text', - 'srcdir': '(empty)', - 'confoverrides': { - 'project': 'text', - 'master_doc': 'contents', - }, + 'testroot': 'build-text', } default_kw.update(kw) return with_app(*args, **default_kw) @with_text_app() -def test_maxwitdh_with_prefix(app): - long_string = u' '.join([u"ham"] * 30) - contents = ( - u".. seealso:: %(long_string)s\n\n" - u"* %(long_string)s\n" - u"* %(long_string)s\n" - u"\nspam egg\n" - ) % locals() - - (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8') - app.builder.build_all() - result = (app.outdir / 'contents.txt').text(encoding='utf-8') +def test_maxwitdh_with_prefix(app, status, warning): + app.builder.build_update() + result = (app.outdir / 'maxwidth.txt').text(encoding='utf-8') lines = result.splitlines() line_widths = [column_width(line) for line in lines] @@ -58,105 +45,52 @@ def test_maxwitdh_with_prefix(app): @with_text_app() -def test_lineblock(app): +def test_lineblock(app, status, warning): # regression test for #1109: need empty line after line block - contents = ( - u"* one\n" - u"\n" - u" | line-block 1\n" - u" | line-block 2\n" - u"\n" - u"followed paragraph.\n" - ) - - (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8') - app.builder.build_all() - result = (app.outdir / 'contents.txt').text(encoding='utf-8') - + app.builder.build_update() + result = (app.outdir / 'lineblock.txt').text(encoding='utf-8') expect = ( - u"* one\n" - u"\n" - u" line-block 1\n" - u" line-block 2\n" - u"\n" - u"followed paragraph.\n" - ) - + u"* one\n" + u"\n" + u" line-block 1\n" + u" line-block 2\n" + u"\n" + u"followed paragraph.\n" + ) assert result == expect @with_text_app() -def test_nonascii_title_line(app): - title = u'\u65e5\u672c\u8a9e' - underline = u'=' * column_width(title) - content = u'\n'.join((title, underline, u'')) - - (app.srcdir / 'contents.rst').write_text(content, encoding='utf-8') - app.builder.build_all() - result = (app.outdir / 'contents.txt').text(encoding='utf-8') - - expect_underline = underline.replace('=', '*') +def test_nonascii_title_line(app, status, warning): + app.builder.build_update() + result = (app.outdir / 'nonascii_title.txt').text(encoding='utf-8') + expect_underline = '******' result_underline = result.splitlines()[2].strip() assert expect_underline == result_underline 
@with_text_app() -def test_nonascii_table(app): - text = u'\u65e5\u672c\u8a9e' - contents = (u"\n.. list-table::" - "\n" - "\n - - spam" - "\n - egg" - "\n" - "\n - - %(text)s" - "\n - %(text)s" - "\n" % locals()) - - (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8') - app.builder.build_all() - result = (app.outdir / 'contents.txt').text(encoding='utf-8') - +def test_nonascii_table(app, status, warning): + app.builder.build_update() + result = (app.outdir / 'nonascii_table.txt').text(encoding='utf-8') lines = [line.strip() for line in result.splitlines() if line.strip()] line_widths = [column_width(line) for line in lines] assert len(set(line_widths)) == 1 # same widths @with_text_app() -def test_nonascii_maxwidth(app): - sb_text = u'abc' #length=3 - mb_text = u'\u65e5\u672c\u8a9e' #length=3 - - sb_line = ' '.join([sb_text] * int(MAXWIDTH / 3)) - mb_line = ' '.join([mb_text] * int(MAXWIDTH / 3)) - mix_line = ' '.join([sb_text, mb_text] * int(MAXWIDTH / 6)) - - contents = u'\n\n'.join((sb_line, mb_line, mix_line)) - - (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8') - app.builder.build_all() - result = (app.outdir / 'contents.txt').text(encoding='utf-8') - +def test_nonascii_maxwidth(app, status, warning): + app.builder.build_update() + result = (app.outdir / 'nonascii_maxwidth.txt').text(encoding='utf-8') lines = [line.strip() for line in result.splitlines() if line.strip()] line_widths = [column_width(line) for line in lines] assert max(line_widths) < MAXWIDTH @with_text_app() -def test_table_with_empty_cell(app): - contents = (u""" - +-----+-----+ - | XXX | XXX | - +-----+-----+ - | | XXX | - +-----+-----+ - | XXX | | - +-----+-----+ - """) - - (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8') - app.builder.build_all() - result = (app.outdir / 'contents.txt').text(encoding='utf-8') - +def test_table_with_empty_cell(app, status, warning): + app.builder.build_update() + result = (app.outdir / 'table.txt').text(encoding='utf-8') lines = [line.strip() for line in result.splitlines() if line.strip()] assert lines[0] == "+-------+-------+" assert lines[1] == "| XXX | XXX |" diff --git a/tests/test_build_base.py b/tests/test_catalogs.py similarity index 77% rename from tests/test_build_base.py rename to tests/test_catalogs.py index ee2706261..c4f5c08f7 100644 --- a/tests/test_build_base.py +++ b/tests/test_catalogs.py @@ -1,77 +1,78 @@ -# -*- coding: utf-8 -*- -""" - test_build_base - ~~~~~~~~~~~~~~~ - - Test the base build process. - - :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" -import shutil - -from nose.tools import with_setup - -from util import test_roots, with_app, find_files - -root = test_roots / 'test-intl' -build_dir = root / '_build' -locale_dir = build_dir / 'locale' - - -def setup_test(): - # Delete remnants left over after failed build - locale_dir.rmtree(True) - # copy all catalogs into locale layout directory - for po in find_files(root, '.po'): - copy_po = (locale_dir / 'en' / 'LC_MESSAGES' / po) - if not copy_po.parent.exists(): - copy_po.parent.makedirs() - shutil.copy(root / po, copy_po) - - -def teardown_test(): - build_dir.rmtree(True), - - -@with_setup(setup_test, teardown_test) -@with_app(buildername='html', srcdir=root, - confoverrides={'language': 'en', 'locale_dirs': [locale_dir]}) -def test_compile_all_catalogs(app): - app.builder.compile_all_catalogs() - - catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES' - expect = set([ - x.replace('.po', '.mo') - for x in find_files(catalog_dir, '.po') - ]) - actual = set(find_files(catalog_dir, '.mo')) - assert actual # not empty - assert actual == expect - - -@with_setup(setup_test, teardown_test) -@with_app(buildername='html', srcdir=root, - confoverrides={'language': 'en', 'locale_dirs': [locale_dir]}) -def test_compile_specific_catalogs(app): - app.builder.compile_specific_catalogs(['admonitions']) - - catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES' - actual = set(find_files(catalog_dir, '.mo')) - assert actual == set(['admonitions.mo']) - - -@with_setup(setup_test, teardown_test) -@with_app(buildername='html', srcdir=root, - confoverrides={'language': 'en', 'locale_dirs': [locale_dir]}) -def test_compile_update_catalogs(app): - app.builder.compile_update_catalogs() - - catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES' - expect = set([ - x.replace('.po', '.mo') - for x in find_files(catalog_dir, '.po') - ]) - actual = set(find_files(catalog_dir, '.mo')) - assert actual # not empty - assert actual == expect +# -*- coding: utf-8 -*- +""" + test_build_base + ~~~~~~~~~~~~~~~ + + Test the base build process. + + :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" +import shutil + +from nose.tools import with_setup + +from util import with_app, find_files, rootdir, tempdir + +root = tempdir / 'test-intl' +build_dir = root / '_build' +locale_dir = build_dir / 'locale' + + +def setup_test(): + # delete remnants left over after failed build + root.rmtree(True) + (rootdir / 'roots' / 'test-intl').copytree(root) + # copy all catalogs into locale layout directory + for po in find_files(root, '.po'): + copy_po = (locale_dir / 'en' / 'LC_MESSAGES' / po) + if not copy_po.parent.exists(): + copy_po.parent.makedirs() + shutil.copy(root / po, copy_po) + + +def teardown_test(): + build_dir.rmtree(True) + + +@with_setup(setup_test, teardown_test) +@with_app(buildername='html', testroot='intl', + confoverrides={'language': 'en', 'locale_dirs': [locale_dir]}) +def test_compile_all_catalogs(app, status, warning): + app.builder.compile_all_catalogs() + + catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES' + expect = set([ + x.replace('.po', '.mo') + for x in find_files(catalog_dir, '.po') + ]) + actual = set(find_files(catalog_dir, '.mo')) + assert actual # not empty + assert actual == expect + + +@with_setup(setup_test, teardown_test) +@with_app(buildername='html', testroot='intl', + confoverrides={'language': 'en', 'locale_dirs': [locale_dir]}) +def test_compile_specific_catalogs(app, status, warning): + app.builder.compile_specific_catalogs(['admonitions']) + + catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES' + actual = set(find_files(catalog_dir, '.mo')) + assert actual == set(['admonitions.mo']) + + +@with_setup(setup_test, teardown_test) +@with_app(buildername='html', testroot='intl', + confoverrides={'language': 'en', 'locale_dirs': [locale_dir]}) +def test_compile_update_catalogs(app, status, warning): + app.builder.compile_update_catalogs() + + catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES' + expect = set([ + x.replace('.po', '.mo') + for x in find_files(catalog_dir, '.po') + ]) + actual = set(find_files(catalog_dir, '.mo')) + assert actual # not empty + assert actual == expect diff --git a/tests/test_config.py b/tests/test_config.py index 36a8d9571..0dcf3fa3e 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -20,7 +20,7 @@ from sphinx.errors import ExtensionError, ConfigError, VersionRequirementError @with_app(confoverrides={'master_doc': 'master', 'nonexisting_value': 'True', 'latex_elements.docclass': 'scrartcl', 'modindex_common_prefix': 'path1,path2'}) -def test_core_config(app): +def test_core_config(app, status, warning): cfg = app.config # simple values @@ -36,7 +36,7 @@ def test_core_config(app): # simple default values assert 'locale_dirs' not in cfg.__dict__ assert cfg.locale_dirs == [] - assert cfg.trim_footnote_reference_space == False + assert cfg.trim_footnote_reference_space is False # complex default values assert 'html_title' not in cfg.__dict__ @@ -68,7 +68,7 @@ def test_core_config(app): @with_app() -def test_extension_values(app): +def test_extension_values(app, status, warning): cfg = app.config # default value diff --git a/tests/test_directive_code.py b/tests/test_directive_code.py index 4dbdff881..295e201b1 100644 --- a/tests/test_directive_code.py +++ b/tests/test_directive_code.py @@ -1,173 +1,114 @@ -# -*- coding: utf-8 -*- -""" - test_directive_code - ~~~~~~~~~~~~~~~~~~~ - - Test the code-block directive. - - :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -import re -from xml.etree import ElementTree - -from util import with_app, test_roots - - -def teardown_module(): - (test_roots / 'test-directive-code' / '_build').rmtree(True) - - -@with_app(buildername='xml', - srcdir=(test_roots / 'test-directive-code'), - _copy_to_temp=True) -def test_code_block(app): - app.builder.build('index') - et = ElementTree.parse(app.outdir / 'index.xml') - secs = et.findall('./section/section') - code_block = secs[0].findall('literal_block') - assert len(code_block) > 0 - actual = code_block[0].text - expect = ( - " def ruby?\n" + - " false\n" + - " end" - ) - assert actual == expect - - -@with_app(buildername='xml', - srcdir=(test_roots / 'test-directive-code'), - _copy_to_temp=True) -def test_code_block_dedent(app): - outdir = app.outdir - - def get_dedent_actual(dedent): - dedent_text = (app.srcdir / 'dedent.rst').text(encoding='utf-8') - dedent_text = re.sub( - r':dedent: \d', ':dedent: %d' % dedent, dedent_text) - (app.srcdir / 'dedent.rst').write_text(dedent_text, encoding='utf-8') - - # use another output dir to force rebuild - app.outdir = outdir / str(dedent) - app._init_env(freshenv=True) - app._init_builder(app.builder.name) - app.builder.build(['dedent'], method='specific') - - et = ElementTree.parse(app.outdir / 'dedent.xml') - secs = et.findall('./section/section') - code_block = secs[0].findall('literal_block') - - assert len(code_block) > 0 - actual = code_block[0].text - return actual - - for i in range(5): # 0-4 - actual = get_dedent_actual(i) - indent = " " * (4 - i) - expect = ( - indent + "def ruby?\n" + - indent + " false\n" + - indent + "end" - ) - assert (i, actual) == (i, expect) - - actual = get_dedent_actual(1000) - assert actual == '\n\n' - - -@with_app(buildername='html', - srcdir=(test_roots / 'test-directive-code'), - _copy_to_temp=True) -def test_code_block_caption_html(app): - app.builder.build('index') - html = (app.outdir / 'caption.html').text() - caption = '
caption-test.rb
          ' - assert caption in html - - -@with_app(buildername='latex', - srcdir=(test_roots / 'test-directive-code'), - _copy_to_temp=True) -def test_code_block_caption_latex(app): - app.builder.build('index') - latex = (app.outdir / 'Python.tex').text() - caption = ('{\\colorbox[rgb]{0.9,0.9,0.9}{\\makebox[\\textwidth][l]' - '{\\small\\texttt{caption-test.rb}}}}') - assert caption in latex - - -@with_app(buildername='xml', - srcdir=(test_roots / 'test-directive-code'), - _copy_to_temp=True) -def test_literal_include(app): - app.builder.build('index') - et = ElementTree.parse(app.outdir / 'index.xml') - secs = et.findall('./section/section') - literal_include = secs[1].findall('literal_block') - literal_src = (app.srcdir / 'literal.inc').text(encoding='utf-8') - assert len(literal_include) > 0 - actual = literal_include[0].text - assert actual == literal_src - - -@with_app(buildername='xml', - srcdir=(test_roots / 'test-directive-code'), - _copy_to_temp=True) -def test_literal_include_dedent(app): - outdir = app.outdir - literal_src = (app.srcdir / 'literal.inc').text(encoding='utf-8') - literal_lines = [l[4:] for l in literal_src.split('\n')[9:11]] - - def get_dedent_actual(dedent): - dedent_text = (app.srcdir / 'dedent.rst').text(encoding='utf-8') - dedent_text = re.sub( - r':dedent: \d', ':dedent: %d' % dedent, dedent_text) - (app.srcdir / 'dedent.rst').write_text(dedent_text, encoding='utf-8') - - # use another output dir to force rebuild - app.outdir = outdir / str(dedent) - app._init_env(freshenv=True) - app._init_builder(app.builder.name) - app.builder.build(['dedent']) - - et = ElementTree.parse(app.outdir / 'dedent.xml') - secs = et.findall('./section/section') - literal_include = secs[1].findall('literal_block') - - assert len(literal_include) > 0 - actual = literal_include[0].text - return actual - - - for i in range(5): # 0-4 - actual = get_dedent_actual(i) - indent = " " * (4 - i) - expect = '\n'.join(indent + l for l in literal_lines) + '\n' - assert (i, actual) == (i, expect) - - - actual = get_dedent_actual(1000) - assert actual == '\n\n' - - -@with_app(buildername='html', - srcdir=(test_roots / 'test-directive-code'), - _copy_to_temp=True) -def test_literalinclude_caption_html(app): - app.builder.build('index') - html = (app.outdir / 'caption.html').text() - caption = '
caption-test.py
          ' - assert caption in html - - -@with_app(buildername='latex', - srcdir=(test_roots / 'test-directive-code'), - _copy_to_temp=True) -def test_literalinclude_caption_latex(app): - app.builder.build('index') - latex = (app.outdir / 'Python.tex').text() - caption = ('{\\colorbox[rgb]{0.9,0.9,0.9}{\\makebox[\\textwidth][l]' - '{\\small\\texttt{caption-test.py}}}}') - assert caption in latex +# -*- coding: utf-8 -*- +""" + test_directive_code + ~~~~~~~~~~~~~~~~~~~ + + Test the code-block directive. + + :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re +from xml.etree import ElementTree + +from util import with_app + + +@with_app('xml', testroot='directive-code') +def test_code_block(app, status, warning): + app.builder.build('index') + et = ElementTree.parse(app.outdir / 'index.xml') + secs = et.findall('./section/section') + code_block = secs[0].findall('literal_block') + assert len(code_block) > 0 + actual = code_block[0].text + expect = ( + " def ruby?\n" + + " false\n" + + " end" + ) + assert actual == expect + + +@with_app('xml', testroot='directive-code') +def test_code_block_dedent(app, status, warning): + app.builder.build(['dedent_code']) + et = ElementTree.parse(app.outdir / 'dedent_code.xml') + blocks = et.findall('./section/section/literal_block') + + for i in range(5): # 0-4 + actual = blocks[i].text + indent = " " * (4 - i) + expect = ( + indent + "def ruby?\n" + + indent + " false\n" + + indent + "end" + ) + assert (i, actual) == (i, expect) + + assert blocks[5].text == '\n\n' # dedent: 1000 + + +@with_app('html', testroot='directive-code') +def test_code_block_caption_html(app, status, warning): + app.builder.build(['caption']) + html = (app.outdir / 'caption.html').text() + caption = '
caption-test.rb
          ' + assert caption in html + + +@with_app('latex', testroot='directive-code') +def test_code_block_caption_latex(app, status, warning): + app.builder.build_all() + latex = (app.outdir / 'Python.tex').text() + caption = ('{\\colorbox[rgb]{0.9,0.9,0.9}{\\makebox[\\textwidth][l]' + '{\\small\\texttt{caption-test.rb}}}}') + assert caption in latex + + +@with_app('xml', testroot='directive-code') +def test_literal_include(app, status, warning): + app.builder.build(['index']) + et = ElementTree.parse(app.outdir / 'index.xml') + secs = et.findall('./section/section') + literal_include = secs[1].findall('literal_block') + literal_src = (app.srcdir / 'literal.inc').text(encoding='utf-8') + assert len(literal_include) > 0 + actual = literal_include[0].text + assert actual == literal_src + + +@with_app('xml', testroot='directive-code') +def test_literal_include_dedent(app, status, warning): + literal_src = (app.srcdir / 'literal.inc').text(encoding='utf-8') + literal_lines = [l[4:] for l in literal_src.split('\n')[9:11]] + + app.builder.build(['dedent']) + et = ElementTree.parse(app.outdir / 'dedent.xml') + blocks = et.findall('./section/section/literal_block') + + for i in range(5): # 0-4 + actual = blocks[i].text + indent = ' ' * (4 - i) + expect = '\n'.join(indent + l for l in literal_lines) + '\n' + assert (i, actual) == (i, expect) + + assert blocks[5].text == '\n\n' # dedent: 1000 + + +@with_app('html', testroot='directive-code') +def test_literalinclude_caption_html(app, status, warning): + app.builder.build('index') + html = (app.outdir / 'caption.html').text() + caption = '
caption-test.py
          ' + assert caption in html + + +@with_app('latex', testroot='directive-code') +def test_literalinclude_caption_latex(app, status, warning): + app.builder.build('index') + latex = (app.outdir / 'Python.tex').text() + caption = ('{\\colorbox[rgb]{0.9,0.9,0.9}{\\makebox[\\textwidth][l]' + '{\\small\\texttt{caption-test.py}}}}') + assert caption in latex diff --git a/tests/test_directive_only.py b/tests/test_directive_only.py index 7fb1f5bb9..0cf446636 100644 --- a/tests/test_directive_only.py +++ b/tests/test_directive_only.py @@ -13,15 +13,11 @@ import re from docutils import nodes -from util import with_app, test_roots +from util import with_app -def teardown_module(): - (test_roots / 'test-directive-only' / '_build').rmtree(True) - - -@with_app(buildername='text', srcdir=(test_roots / 'test-directive-only')) -def test_sectioning(app): +@with_app('text', testroot='directive-only') +def test_sectioning(app, status, warning): def getsects(section): if not isinstance(section, nodes.section): diff --git a/tests/test_docutilsconf.py b/tests/test_docutilsconf.py index c0ee4a17f..90fa5db32 100644 --- a/tests/test_docutilsconf.py +++ b/tests/test_docutilsconf.py @@ -9,50 +9,17 @@ :license: BSD, see LICENSE for details. """ -import os import re -from functools import wraps -from six import StringIO - -from util import test_roots, TestApp, path, SkipTest - - -html_warnfile = StringIO() -root = test_roots / 'test-docutilsconf' - - -# need cleanenv to rebuild everytime. -# docutils.conf change did not effect to rebuild. -def with_conf_app(docutilsconf='', *args, **kwargs): - default_kw = { - 'srcdir': root, - 'cleanenv': True, - } - default_kw.update(kwargs) - def generator(func): - @wraps(func) - def deco(*args2, **kwargs2): - app = TestApp(*args, **default_kw) - (app.srcdir / 'docutils.conf').write_text(docutilsconf) - try: - cwd = os.getcwd() - os.chdir(app.srcdir) - func(app, *args2, **kwargs2) - finally: - os.chdir(cwd) - # don't execute cleanup if test failed - app.cleanup() - return deco - return generator +from util import with_app, path, SkipTest def regex_count(expr, result): return len(re.findall(expr, result)) -@with_conf_app(buildername='html') -def test_html_with_default_docutilsconf(app): +@with_app('html', testroot='docutilsconf', freshenv=True, docutilsconf='') +def test_html_with_default_docutilsconf(app, status, warning): app.builder.build(['contents']) result = (app.outdir / 'contents.html').text(encoding='utf-8') @@ -62,13 +29,13 @@ def test_html_with_default_docutilsconf(app): assert regex_count(r'', result) == 1 -@with_conf_app(buildername='html', docutilsconf=( +@with_app('html', testroot='docutilsconf', freshenv=True, docutilsconf=( '\n[html4css1 writer]' '\noption-limit:1' '\nfield-name-limit:1' '\n') ) -def test_html_with_docutilsconf(app): +def test_html_with_docutilsconf(app, status, warning): app.builder.build(['contents']) result = (app.outdir / 'contents.html').text(encoding='utf-8') @@ -78,41 +45,32 @@ def test_html_with_docutilsconf(app): assert regex_count(r'', result) == 2 -@with_conf_app(buildername='html', warning=html_warnfile) -def test_html(app): +@with_app('html', testroot='docutilsconf') +def test_html(app, status, warning): app.builder.build(['contents']) - assert html_warnfile.getvalue() == '' + assert warning.getvalue() == '' -@with_conf_app(buildername='latex', warning=html_warnfile) -def test_latex(app): +@with_app('latex', testroot='docutilsconf') +def test_latex(app, status, warning): app.builder.build(['contents']) - assert 
html_warnfile.getvalue() == '' + assert warning.getvalue() == '' -@with_conf_app(buildername='man', warning=html_warnfile) -def test_man(app): +@with_app('man', testroot='docutilsconf') +def test_man(app, status, warning): app.builder.build(['contents']) - assert html_warnfile.getvalue() == '' + assert warning.getvalue() == '' -@with_conf_app(buildername='texinfo', warning=html_warnfile) -def test_texinfo(app): +@with_app('texinfo', testroot='docutilsconf') +def test_texinfo(app, status, warning): app.builder.build(['contents']) -@with_conf_app(buildername='html', srcdir='(empty)', - docutilsconf='[general]\nsource_link=true\n') -def test_docutils_source_link(app): - srcdir = path(app.srcdir) - (srcdir / 'conf.py').write_text('') - (srcdir / 'contents.rst').write_text('') - app.builder.build_all() - - -@with_conf_app(buildername='html', srcdir='(empty)', - docutilsconf='[general]\nsource_link=true\n') -def test_docutils_source_link_with_nonascii_file(app): +@with_app('html', testroot='docutilsconf', + docutilsconf='[general]\nsource_link=true\n') +def test_docutils_source_link_with_nonascii_file(app, status, warning): srcdir = path(app.srcdir) mb_name = u'\u65e5\u672c\u8a9e' try: @@ -123,7 +81,4 @@ def test_docutils_source_link_with_nonascii_file(app): 'nonascii filename not supported on this filesystem encoding: ' '%s', FILESYSTEMENCODING) - (srcdir / 'conf.py').write_text('') - (srcdir / 'contents.rst').write_text('') - app.builder.build_all() diff --git a/tests/test_py_domain.py b/tests/test_domain_py.py similarity index 98% rename from tests/test_py_domain.py rename to tests/test_domain_py.py index 87f6eb98b..1d0fcc5ff 100644 --- a/tests/test_py_domain.py +++ b/tests/test_domain_py.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- """ - test_py_domain + test_domain_py ~~~~~~~~~~~~~~ Tests the Python Domain diff --git a/tests/test_rst_domain.py b/tests/test_domain_rst.py similarity index 100% rename from tests/test_rst_domain.py rename to tests/test_domain_rst.py diff --git a/tests/test_domain_std.py b/tests/test_domain_std.py index 81dbe6a4e..a1e5bdc1b 100644 --- a/tests/test_domain_std.py +++ b/tests/test_domain_std.py @@ -1,80 +1,80 @@ -# -*- coding: utf-8 -*- -""" - test_domain_std - ~~~~~~~~~~~~~~~ - - Tests the std domain - - :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -from docutils import nodes - -from sphinx.domains.std import StandardDomain -from util import mock - - -def test_process_doc_handle_figure_caption(): - env = mock.Mock(domaindata={}) - figure_node = nodes.figure( - '', - nodes.caption('caption text', 'caption text'), - ) - document = mock.Mock( - nametypes={'testname': True}, - nameids={'testname': 'testid'}, - ids={'testid': figure_node}, - ) - - domain = StandardDomain(env) - if 'testname' in domain.data['labels']: - del domain.data['labels']['testname'] - domain.process_doc(env, 'testdoc', document) - assert 'testname' in domain.data['labels'] - assert domain.data['labels']['testname'] == ( - 'testdoc', 'testid', 'caption text') - - -def test_process_doc_handle_image_parent_figure_caption(): - env = mock.Mock(domaindata={}) - img_node = nodes.image('', alt='image alt') - figure_node = nodes.figure( - '', - nodes.caption('caption text', 'caption text'), - img_node, - ) - document = mock.Mock( - nametypes={'testname': True}, - nameids={'testname': 'testid'}, - ids={'testid': img_node}, - ) - - domain = StandardDomain(env) - if 'testname' in domain.data['labels']: - del domain.data['labels']['testname'] - domain.process_doc(env, 'testdoc', document) - assert 'testname' in domain.data['labels'] - assert domain.data['labels']['testname'] == ( - 'testdoc', 'testid', 'caption text') - - -def test_process_doc_handle_table_title(): - env = mock.Mock(domaindata={}) - table_node = nodes.table( - '', - nodes.title('title text', 'title text'), - ) - document = mock.Mock( - nametypes={'testname': True}, - nameids={'testname': 'testid'}, - ids={'testid': table_node}, - ) - - domain = StandardDomain(env) - if 'testname' in domain.data['labels']: - del domain.data['labels']['testname'] - domain.process_doc(env, 'testdoc', document) - assert 'testname' in domain.data['labels'] - assert domain.data['labels']['testname'] == ( - 'testdoc', 'testid', 'title text') +# -*- coding: utf-8 -*- +""" + test_domain_std + ~~~~~~~~~~~~~~~ + + Tests the std domain + + :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +from docutils import nodes + +from sphinx.domains.std import StandardDomain +from util import mock + + +def test_process_doc_handle_figure_caption(): + env = mock.Mock(domaindata={}) + figure_node = nodes.figure( + '', + nodes.caption('caption text', 'caption text'), + ) + document = mock.Mock( + nametypes={'testname': True}, + nameids={'testname': 'testid'}, + ids={'testid': figure_node}, + ) + + domain = StandardDomain(env) + if 'testname' in domain.data['labels']: + del domain.data['labels']['testname'] + domain.process_doc(env, 'testdoc', document) + assert 'testname' in domain.data['labels'] + assert domain.data['labels']['testname'] == ( + 'testdoc', 'testid', 'caption text') + + +def test_process_doc_handle_image_parent_figure_caption(): + env = mock.Mock(domaindata={}) + img_node = nodes.image('', alt='image alt') + figure_node = nodes.figure( + '', + nodes.caption('caption text', 'caption text'), + img_node, + ) + document = mock.Mock( + nametypes={'testname': True}, + nameids={'testname': 'testid'}, + ids={'testid': img_node}, + ) + + domain = StandardDomain(env) + if 'testname' in domain.data['labels']: + del domain.data['labels']['testname'] + domain.process_doc(env, 'testdoc', document) + assert 'testname' in domain.data['labels'] + assert domain.data['labels']['testname'] == ( + 'testdoc', 'testid', 'caption text') + + +def test_process_doc_handle_table_title(): + env = mock.Mock(domaindata={}) + table_node = nodes.table( + '', + nodes.title('title text', 'title text'), + ) + document = mock.Mock( + nametypes={'testname': True}, + nameids={'testname': 'testid'}, + ids={'testid': table_node}, + ) + + domain = StandardDomain(env) + if 'testname' in domain.data['labels']: + del domain.data['labels']['testname'] + domain.process_doc(env, 'testdoc', document) + assert 'testname' in domain.data['labels'] + assert domain.data['labels']['testname'] == ( + 'testdoc', 'testid', 'title text') diff --git a/tests/test_env.py b/tests/test_environment.py similarity index 87% rename from tests/test_env.py rename to tests/test_environment.py index 3dc7431b9..4776bbd66 100644 --- a/tests/test_env.py +++ b/tests/test_environment.py @@ -8,9 +8,12 @@ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" + +import sys + from six import PY3 -from util import TestApp, remove_unicode_literals, path, with_app +from util import TestApp, remove_unicode_literals, path from sphinx.builders.html import StandaloneHTMLBuilder from sphinx.builders.latex import LaTeXBuilder @@ -18,21 +21,25 @@ from sphinx.builders.latex import LaTeXBuilder app = env = None warnings = [] + def setup_module(): global app, env - app = TestApp(freshenv=True, _copy_to_temp=True) + app = TestApp(srcdir='env-test') env = app.env env.set_warnfunc(lambda *args: warnings.append(args)) + def teardown_module(): app.cleanup() + def warning_emitted(file, text): for warning in warnings: if len(warning) == 2 and file in warning[1] and text in warning[0]: return True return False + # Tests are run in the order they appear in the file, therefore we can # afford to not run update() in the setup but in its own test @@ -46,6 +53,7 @@ def test_first_update(): # test if exclude_patterns works ok assert 'subdir/excluded' not in env.found_docs + def test_images(): assert warning_emitted('images', 'image file not readable: foo.png') assert warning_emitted('images', 'nonlocal image URI found: ' @@ -75,6 +83,7 @@ def test_images(): assert set(latexbuilder.images.values()) == \ set(['img.pdf', 'img.png', 'img1.png', 'simg.png', 'svgimg.pdf']) + def test_second_update(): # delete, add and "edit" (change saved mtime) some files and update again env.all_docs['contents'] = 0 @@ -96,19 +105,6 @@ def test_second_update(): assert 'autodoc' not in env.found_docs -@with_app(srcdir='(empty)') -def test_undecodable_source_reading_emit_warnings(app): - # issue #1524 - warnings[:] = [] - app.env.set_warnfunc(lambda *args: warnings.append(args)) - (app.srcdir / 'contents.rst').write_bytes(b'1\xbb2') - _, _, it = app.env.update(app.config, app.srcdir, app.doctreedir, app) - list(it) # the generator does all the work - assert warning_emitted( - 'contents', 'undecodable source characters, replacing with "?":' - ) - - def test_object_inventory(): refs = env.domaindata['py']['objects'] diff --git a/tests/test_autosummary.py b/tests/test_ext_autosummary.py similarity index 66% rename from tests/test_autosummary.py rename to tests/test_ext_autosummary.py index 8803f88db..363c11e99 100644 --- a/tests/test_autosummary.py +++ b/tests/test_ext_autosummary.py @@ -8,49 +8,24 @@ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. """ -import sys -from functools import wraps from six import iteritems, StringIO from sphinx.ext.autosummary import mangle_signature -from util import test_roots, TestApp +from util import with_app html_warnfile = StringIO() -def with_autosummary_app(*args, **kw): - default_kw = { - 'srcdir': (test_roots / 'test-autosummary'), - 'confoverrides': { - 'extensions': ['sphinx.ext.autosummary'], - 'autosummary_generate': True, - 'source_suffix': '.rst' - } +default_kw = { + 'testroot': 'autosummary', + 'confoverrides': { + 'extensions': ['sphinx.ext.autosummary'], + 'autosummary_generate': True, + 'source_suffix': '.rst' } - default_kw.update(kw) - def generator(func): - @wraps(func) - def deco(*args2, **kwargs2): - # Now, modify the python path... 
- srcdir = default_kw['srcdir'] - sys.path.insert(0, srcdir) - try: - app = TestApp(*args, **default_kw) - func(app, *args2, **kwargs2) - finally: - if srcdir in sys.path: - sys.path.remove(srcdir) - # remove the auto-generated dummy_module.rst - dummy_rst = srcdir / 'dummy_module.rst' - if dummy_rst.isfile(): - dummy_rst.unlink() - - # don't execute cleanup if test failed - app.cleanup() - return deco - return generator +} def test_mangle_signature(): @@ -79,10 +54,8 @@ def test_mangle_signature(): assert res == outp, (u"'%s' -> '%s' != '%s'" % (inp, res, outp)) -@with_autosummary_app(buildername='html', warning=html_warnfile) -def test_get_items_summary(app): - app.builddir.rmtree(True) - +@with_app(buildername='html', **default_kw) +def test_get_items_summary(app, status, warning): # monkey-patch Autosummary.get_items so we can easily get access to it's # results.. import sphinx.ext.autosummary @@ -96,13 +69,17 @@ def test_get_items_summary(app): autosummary_items[name] = result return results + def handler(app, what, name, obj, options, lines): + assert isinstance(lines, list) + app.connect('autodoc-process-docstring', handler) + sphinx.ext.autosummary.Autosummary.get_items = new_get_items try: app.builder.build_all() finally: sphinx.ext.autosummary.Autosummary.get_items = orig_get_items - html_warnings = html_warnfile.getvalue() + html_warnings = warning.getvalue() assert html_warnings == '' expected_values = { @@ -118,13 +95,3 @@ def test_get_items_summary(app): for key, expected in iteritems(expected_values): assert autosummary_items[key][2] == expected, 'Summary for %s was %r -'\ ' expected %r' % (key, autosummary_items[key], expected) - - -@with_autosummary_app(buildername='html') -def test_process_doc_event(app): - app.builddir.rmtree(True) - - def handler(app, what, name, obj, options, lines): - assert isinstance(lines, list) - app.connect('autodoc-process-docstring', handler) - app.builder.build_all() diff --git a/tests/test_coverage.py b/tests/test_ext_coverage.py similarity index 97% rename from tests/test_coverage.py rename to tests/test_ext_coverage.py index bfa76a98b..ec1916d94 100644 --- a/tests/test_coverage.py +++ b/tests/test_ext_coverage.py @@ -15,7 +15,7 @@ from util import with_app @with_app(buildername='coverage') -def test_build(app): +def test_build(app, status, warning): app.builder.build_all() py_undoc = (app.outdir / 'python.txt').text() diff --git a/tests/test_doctest.py b/tests/test_ext_doctest.py similarity index 76% rename from tests/test_doctest.py rename to tests/test_ext_doctest.py index 9fb8a2ead..67e08d344 100644 --- a/tests/test_doctest.py +++ b/tests/test_ext_doctest.py @@ -12,26 +12,24 @@ from __future__ import print_function import sys -from six import StringIO - from util import with_app -status = StringIO() cleanup_called = 0 -@with_app(buildername='doctest', status=status) -def test_build(app): + +@with_app(buildername='doctest', testroot='doctest') +def test_build(app, status, warning): global cleanup_called cleanup_called = 0 app.builder.build_all() if app.statuscode != 0: - print(status.getvalue(), file=sys.stderr) - assert False, 'failures in doctests' + assert False, 'failures in doctests:' + status.getvalue() # in doctest.txt, there are two named groups and the default group, # so the cleanup function must be called three times assert cleanup_called == 3, 'testcleanup did not get executed enough times' + def cleanup_call(): global cleanup_called cleanup_called += 1 diff --git a/tests/test_intersphinx.py b/tests/test_ext_intersphinx.py 
similarity index 89% rename from tests/test_intersphinx.py rename to tests/test_ext_intersphinx.py index dd71c6fbd..fb5406686 100644 --- a/tests/test_intersphinx.py +++ b/tests/test_ext_intersphinx.py @@ -17,7 +17,7 @@ from docutils import nodes from sphinx import addnodes from sphinx.ext.intersphinx import read_inventory_v1, read_inventory_v2, \ - load_mappings, missing_reference + load_mappings, missing_reference from util import with_app, with_tempdir @@ -49,9 +49,9 @@ def test_read_inventory_v1(): f.readline() invdata = read_inventory_v1(f, '/util', posixpath.join) assert invdata['py:module']['module'] == \ - ('foo', '1.0', '/util/foo.html#module-module', '-') + ('foo', '1.0', '/util/foo.html#module-module', '-') assert invdata['py:class']['module.cls'] == \ - ('foo', '1.0', '/util/foo.html#module.cls', '-') + ('foo', '1.0', '/util/foo.html#module.cls', '-') def test_read_inventory_v2(): @@ -68,19 +68,19 @@ def test_read_inventory_v2(): assert len(invdata1['py:module']) == 2 assert invdata1['py:module']['module1'] == \ - ('foo', '2.0', '/util/foo.html#module-module1', 'Long Module desc') + ('foo', '2.0', '/util/foo.html#module-module1', 'Long Module desc') assert invdata1['py:module']['module2'] == \ - ('foo', '2.0', '/util/foo.html#module-module2', '-') + ('foo', '2.0', '/util/foo.html#module-module2', '-') assert invdata1['py:function']['module1.func'][2] == \ - '/util/sub/foo.html#module1.func' + '/util/sub/foo.html#module1.func' assert invdata1['c:function']['CFunc'][2] == '/util/cfunc.html#CFunc' assert invdata1['std:term']['a term'][2] == \ - '/util/glossary.html#term-a-term' + '/util/glossary.html#term-a-term' @with_app() @with_tempdir -def test_missing_reference(tempdir, app): +def test_missing_reference(tempdir, app, status, warning): inv_file = tempdir / 'inventory' inv_file.write_bytes(inventory_v2) app.config.intersphinx_mapping = { @@ -94,7 +94,7 @@ def test_missing_reference(tempdir, app): inv = app.env.intersphinx_inventory assert inv['py:module']['module2'] == \ - ('foo', '2.0', 'http://docs.python.org/foo.html#module-module2', '-') + ('foo', '2.0', 'http://docs.python.org/foo.html#module-module2', '-') # create fake nodes and check referencing @@ -156,7 +156,7 @@ def test_missing_reference(tempdir, app): @with_app() @with_tempdir -def test_load_mappings_warnings(tempdir, app): +def test_load_mappings_warnings(tempdir, app, status, warning): """ load_mappings issues a warning if new-style mapping identifiers are not alphanumeric @@ -174,4 +174,4 @@ def test_load_mappings_warnings(tempdir, app): app.config.intersphinx_cache_limit = 0 # load the inventory and check if it's done correctly load_mappings(app) - assert len(app._warning.content) == 2 + assert warning.getvalue().count('\n') == 2 diff --git a/tests/test_linkcode.py b/tests/test_ext_linkcode.py similarity index 78% rename from tests/test_linkcode.py rename to tests/test_ext_linkcode.py index 458477781..34b2acf80 100644 --- a/tests/test_linkcode.py +++ b/tests/test_ext_linkcode.py @@ -9,13 +9,12 @@ :license: BSD, see LICENSE for details. 
""" -import os from util import with_app -@with_app(buildername='html', tags=['test_linkcode'], _copy_to_temp=True) -def test_html(app): - app.builder.build_all() +@with_app('html', tags=['test_linkcode']) +def test_html(app, status, warning): + app.builder.build(['objects']) stuff = (app.outdir / 'objects.html').text(encoding='utf-8') diff --git a/tests/test_napoleon.py b/tests/test_ext_napoleon.py similarity index 100% rename from tests/test_napoleon.py rename to tests/test_ext_napoleon.py diff --git a/tests/test_napoleon_docstring.py b/tests/test_ext_napoleon_docstring.py similarity index 100% rename from tests/test_napoleon_docstring.py rename to tests/test_ext_napoleon_docstring.py diff --git a/tests/test_napoleon_iterators.py b/tests/test_ext_napoleon_iterators.py similarity index 100% rename from tests/test_napoleon_iterators.py rename to tests/test_ext_napoleon_iterators.py diff --git a/tests/test_ext_viewcode.py b/tests/test_ext_viewcode.py index 60ab79411..561fcb6a3 100644 --- a/tests/test_ext_viewcode.py +++ b/tests/test_ext_viewcode.py @@ -1,43 +1,32 @@ -# -*- coding: utf-8 -*- -""" - test_ext_viewcode - ~~~~~~~~~~~~~~~~~ - - Test sphinx.ext.viewcode extension. - - :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import re - -from six import StringIO - -from util import test_roots, with_app - - -warnfile = StringIO() -root = test_roots / 'test-ext-viewcode' -doctreedir = root / '_build' / 'doctree' - - -def teardown_module(): - (root / '_build').rmtree(True) - - -@with_app(srcdir=root, warning=warnfile) -def test_simple(app): - app.builder.build_all() - - warnings = re.sub(r'\\+', '/', warnfile.getvalue()) - assert re.findall( - r"index.rst:\d+: WARNING: Object named 'func1' not found in include " + - r"file .*/spam/__init__.py'", - warnings - ) - - result = (app.outdir / 'index.html').text(encoding='utf-8') - assert result.count('href="_modules/spam/mod1.html#func1"') == 2 - assert result.count('href="_modules/spam/mod2.html#func2"') == 2 - assert result.count('href="_modules/spam/mod1.html#Class1"') == 2 - assert result.count('href="_modules/spam/mod2.html#Class2"') == 2 +# -*- coding: utf-8 -*- +""" + test_ext_viewcode + ~~~~~~~~~~~~~~~~~ + + Test sphinx.ext.viewcode extension. + + :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from util import with_app + + +@with_app(testroot='ext-viewcode') +def test_simple(app, status, warning): + app.builder.build_all() + + warnings = re.sub(r'\\+', '/', warning.getvalue()) + assert re.findall( + r"index.rst:\d+: WARNING: Object named 'func1' not found in include " + + r"file .*/spam/__init__.py'", + warnings + ) + + result = (app.outdir / 'index.html').text(encoding='utf-8') + assert result.count('href="_modules/spam/mod1.html#func1"') == 2 + assert result.count('href="_modules/spam/mod2.html#func2"') == 2 + assert result.count('href="_modules/spam/mod1.html#Class1"') == 2 + assert result.count('href="_modules/spam/mod2.html#Class2"') == 2 diff --git a/tests/test_footnote.py b/tests/test_footnote.py deleted file mode 100644 index 964bb3e7f..000000000 --- a/tests/test_footnote.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -""" - test_footnote - ~~~~~~~~~~~~~ - - Test for footnote and citation. - - :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -import re - -from util import test_root, with_app - - -def teardown_module(): - (test_root / '_build').rmtree(True) - - -@with_app(buildername='html') -def test_html(app): - app.builder.build(['footnote']) - result = (app.outdir / 'footnote.html').text(encoding='utf-8') - expects = [ - '[1]', - '[2]', - '[3]', - '[bar]', - '[1]', - '[2]', - '[3]', - '[bar]', - ] - for expect in expects: - matches = re.findall(re.escape(expect), result) - assert len(matches) == 1 diff --git a/tests/test_highlighting.py b/tests/test_highlighting.py index b4e5149f5..5044ab0ea 100644 --- a/tests/test_highlighting.py +++ b/tests/test_highlighting.py @@ -15,12 +15,7 @@ from pygments.formatters.html import HtmlFormatter from sphinx.highlighting import PygmentsBridge -from util import with_app, SkipTest - -try: - import pygments -except ImportError: - raise SkipTest('pygments not available') +from util import with_app class MyLexer(RegexLexer): @@ -46,13 +41,14 @@ class ComplainOnUnhighlighted(PygmentsBridge): @with_app() -def test_add_lexer(app): +def test_add_lexer(app, status, warning): app.add_lexer('test', MyLexer()) bridge = PygmentsBridge('html') ret = bridge.highlight_block('ab', 'test') assert 'ab' in ret + def test_detect_interactive(): bridge = ComplainOnUnhighlighted('html') blocks = [ @@ -60,11 +56,12 @@ def test_detect_interactive(): >>> testing() True """, - ] + ] for block in blocks: ret = bridge.highlight_block(block.lstrip(), 'python') assert ret.startswith("
          ") + def test_set_formatter(): PygmentsBridge.html_formatter = MyFormatter try: @@ -74,6 +71,7 @@ def test_set_formatter(): finally: PygmentsBridge.html_formatter = HtmlFormatter + def test_trim_doctest_flags(): PygmentsBridge.html_formatter = MyFormatter try: diff --git a/tests/test_i18n.py b/tests/test_i18n.py index 06f6b28bd..8144663cb 100644 --- a/tests/test_i18n.py +++ b/tests/test_i18n.py @@ -13,5 +13,5 @@ from util import with_app @with_app(confoverrides={'language': 'de'}) -def test_i18n(app): +def test_i18n(app, status, warning): app.builder.build_all() diff --git a/tests/test_intl.py b/tests/test_intl.py index bb54e5df9..4f0f3cc2b 100644 --- a/tests/test_intl.py +++ b/tests/test_intl.py @@ -16,20 +16,17 @@ import re from subprocess import Popen, PIPE from xml.etree import ElementTree -from six import StringIO, string_types +from six import string_types -from util import test_roots, path, with_app, SkipTest +from util import tempdir, rootdir, path, with_app, SkipTest -warnfile = StringIO() -root = test_roots / 'test-intl' -doctreedir = root / '_build' / 'doctree' +root = tempdir / 'test-intl' def with_intl_app(*args, **kw): default_kw = { - 'srcdir': root, - 'doctreedir': doctreedir, + 'testroot': 'intl', 'confoverrides': { 'language': 'xx', 'locale_dirs': ['.'], 'gettext_compact': False, @@ -40,21 +37,21 @@ def with_intl_app(*args, **kw): def setup_module(): + if not root.exists(): + (rootdir / 'roots' / 'test-intl').copytree(root) # Delete remnants left over after failed build - (root / 'xx').rmtree(True) - (root / 'xx' / 'LC_MESSAGES').makedirs() # Compile all required catalogs into binary format (*.mo). for dirpath, dirs, files in os.walk(root): dirpath = path(dirpath) for f in [f for f in files if f.endswith('.po')]: po = dirpath / f mo = root / 'xx' / 'LC_MESSAGES' / ( - os.path.relpath(po[:-3], root) + '.mo') + os.path.relpath(po[:-3], root) + '.mo') if not mo.parent.exists(): mo.parent.makedirs() try: p = Popen(['msgfmt', po, '-o', mo], - stdout=PIPE, stderr=PIPE) + stdout=PIPE, stderr=PIPE) except OSError: raise SkipTest # most likely msgfmt was not found else: @@ -67,11 +64,6 @@ def setup_module(): assert mo.isfile(), 'msgfmt failed' -def teardown_module(): - (root / '_build').rmtree(True) - (root / 'xx').rmtree(True) - - def elem_gettexts(elem): def itertext(self): # this function copied from Python-2.7 'ElementTree.itertext'. 
@@ -106,25 +98,26 @@ def assert_elem(elem, texts=None, refs=None, names=None): @with_intl_app(buildername='text') -def test_simple(app): +def test_simple(app, status, warning): app.builder.build(['bom']) result = (app.outdir / 'bom.txt').text(encoding='utf-8') expect = (u"\nDatei mit UTF-8" - u"\n***************\n" # underline matches new translation + u"\n***************\n" # underline matches new translation u"\nThis file has umlauts: äöü.\n") assert result == expect @with_intl_app(buildername='text') -def test_subdir(app): +def test_subdir(app, status, warning): app.builder.build(['subdir/contents']) result = (app.outdir / 'subdir' / 'contents.txt').text(encoding='utf-8') assert result.startswith(u"\nsubdir contents\n***************\n") -@with_intl_app(buildername='text', warning=warnfile) -def test_i18n_warnings_in_translation(app): - app.builddir.rmtree(True) +@with_intl_app(buildername='text') +def test_i18n_warnings_in_translation(app, status, warning): + app.outdir.rmtree(True) # for warnings acceleration + app.doctreedir.rmtree(True) app.builder.build(['warnings']) result = (app.outdir / 'warnings.txt').text(encoding='utf-8') expect = (u"\nI18N WITH REST WARNINGS" @@ -133,62 +126,62 @@ def test_i18n_warnings_in_translation(app): assert result == expect - warnings = warnfile.getvalue().replace(os.sep, '/') + warnings = warning.getvalue().replace(os.sep, '/') warning_expr = u'.*/warnings.txt:4: ' \ - u'WARNING: Inline literal start-string without end-string.\n' + u'WARNING: Inline literal start-string without end-string.\n' assert re.search(warning_expr, warnings) -@with_intl_app(buildername='html', cleanenv=True) -def test_i18n_footnote_break_refid(app): - """test for #955 cant-build-html-with-footnotes-when-using""" +@with_intl_app(buildername='html', freshenv=True) +def test_i18n_footnote_break_refid(app, status, warning): + # test for #955 cant-build-html-with-footnotes-when-using app.builder.build(['footnote']) - result = (app.outdir / 'footnote.html').text(encoding='utf-8') + (app.outdir / 'footnote.html').text(encoding='utf-8') # expect no error by build -@with_intl_app(buildername='xml', warning=warnfile) -def test_i18n_footnote_regression(app): +@with_intl_app(buildername='xml') +def test_i18n_footnote_regression(app, status, warning): # regression test for fix #955, #1176 - app.builddir.rmtree(True) + #app.builddir.rmtree(True) app.builder.build(['footnote']) et = ElementTree.parse(app.outdir / 'footnote.xml') secs = et.findall('section') para0 = secs[0].findall('paragraph') assert_elem( - para0[0], - texts=['I18N WITH FOOTNOTE', 'INCLUDE THIS CONTENTS', - '2', '[ref]', '1', '100', '.'], - refs=['i18n-with-footnote', 'ref']) + para0[0], + texts=['I18N WITH FOOTNOTE', 'INCLUDE THIS CONTENTS', + '2', '[ref]', '1', '100', '.'], + refs=['i18n-with-footnote', 'ref']) footnote0 = secs[0].findall('footnote') assert_elem( - footnote0[0], - texts=['1','THIS IS A AUTO NUMBERED FOOTNOTE.'], - names=['1']) + footnote0[0], + texts=['1', 'THIS IS A AUTO NUMBERED FOOTNOTE.'], + names=['1']) assert_elem( - footnote0[1], - texts=['100','THIS IS A NUMBERED FOOTNOTE.'], - names=['100']) + footnote0[1], + texts=['100', 'THIS IS A NUMBERED FOOTNOTE.'], + names=['100']) assert_elem( - footnote0[2], - texts=['2','THIS IS A AUTO NUMBERED NAMED FOOTNOTE.'], - names=['named']) + footnote0[2], + texts=['2', 'THIS IS A AUTO NUMBERED NAMED FOOTNOTE.'], + names=['named']) citation0 = secs[0].findall('citation') assert_elem( - citation0[0], - texts=['ref','THIS IS A NAMED FOOTNOTE.'], - names=['ref']) + 
citation0[0], + texts=['ref', 'THIS IS A NAMED FOOTNOTE.'], + names=['ref']) - warnings = warnfile.getvalue().replace(os.sep, '/') + warnings = warning.getvalue().replace(os.sep, '/') warning_expr = u'.*/footnote.xml:\\d*: SEVERE: Duplicate ID: ".*".\n' assert not re.search(warning_expr, warnings) -@with_intl_app(buildername='xml', cleanenv=True) -def test_i18n_footnote_backlink(app): +@with_intl_app(buildername='xml', freshenv=True) +def test_i18n_footnote_backlink(app, status, warning): # i18n test for #1058 app.builder.build(['footnote']) et = ElementTree.parse(app.outdir / 'footnote.xml') @@ -206,8 +199,8 @@ def test_i18n_footnote_backlink(app): assert refid2id[ids] == backrefs -@with_intl_app(buildername='xml', warning=warnfile) -def test_i18n_refs_python_domain(app): +@with_intl_app(buildername='xml') +def test_i18n_refs_python_domain(app, status, warning): app.builder.build(['refs_python_domain']) et = ElementTree.parse(app.outdir / 'refs_python_domain.xml') secs = et.findall('section') @@ -220,9 +213,9 @@ def test_i18n_refs_python_domain(app): refs=['sensitive.sensitive_variables']) -@with_intl_app(buildername='text', warning=warnfile, cleanenv=True) -def test_i18n_warn_for_number_of_references_inconsistency(app): - app.builddir.rmtree(True) +@with_intl_app(buildername='text', freshenv=True) +def test_i18n_warn_for_number_of_references_inconsistency(app, status, warning): + #app.builddir.rmtree(True) app.builder.build(['refs_inconsistency']) result = (app.outdir / 'refs_inconsistency.txt').text(encoding='utf-8') expect = (u"\nI18N WITH REFS INCONSISTENCY" @@ -235,9 +228,9 @@ def test_i18n_warn_for_number_of_references_inconsistency(app): u"\n[100] THIS IS A NUMBERED FOOTNOTE.\n") assert result == expect - warnings = warnfile.getvalue().replace(os.sep, '/') + warnings = warning.getvalue().replace(os.sep, '/') warning_fmt = u'.*/refs_inconsistency.txt:\\d+: ' \ - u'WARNING: inconsistent %s in translated message\n' + u'WARNING: inconsistent %s in translated message\n' expected_warning_expr = ( warning_fmt % 'footnote references' + warning_fmt % 'references' + @@ -245,8 +238,8 @@ def test_i18n_warn_for_number_of_references_inconsistency(app): assert re.search(expected_warning_expr, warnings) -@with_intl_app(buildername='html', cleanenv=True) -def test_i18n_link_to_undefined_reference(app): +@with_intl_app(buildername='html', freshenv=True) +def test_i18n_link_to_undefined_reference(app, status, warning): app.builder.build(['refs_inconsistency']) result = (app.outdir / 'refs_inconsistency.html').text(encoding='utf-8') @@ -264,8 +257,8 @@ def test_i18n_link_to_undefined_reference(app): assert len(re.findall(expected_expr, result)) == 1 -@with_intl_app(buildername='xml', cleanenv=True) -def test_i18n_keep_external_links(app): +@with_intl_app(buildername='xml', freshenv=True) +def test_i18n_keep_external_links(app, status, warning): # regression test for #1044 app.builder.build(['external_links']) et = ElementTree.parse(app.outdir / 'external_links.xml') @@ -274,56 +267,56 @@ def test_i18n_keep_external_links(app): para0 = secs[0].findall('paragraph') # external link check assert_elem( - para0[0], - texts=['EXTERNAL LINK TO', 'Python', '.'], - refs=['http://python.org/index.html']) + para0[0], + texts=['EXTERNAL LINK TO', 'Python', '.'], + refs=['http://python.org/index.html']) # internal link check assert_elem( - para0[1], - texts=['EXTERNAL LINKS', 'IS INTERNAL LINK.'], - refs=['i18n-with-external-links']) + para0[1], + texts=['EXTERNAL LINKS', 'IS INTERNAL LINK.'], + 
refs=['i18n-with-external-links']) # inline link check assert_elem( - para0[2], - texts=['INLINE LINK BY', 'THE SPHINX SITE', '.'], - refs=['http://sphinx-doc.org']) + para0[2], + texts=['INLINE LINK BY', 'THE SPHINX SITE', '.'], + refs=['http://sphinx-doc.org']) # unnamed link check assert_elem( - para0[3], - texts=['UNNAMED', 'LINK', '.'], - refs=['http://google.com']) + para0[3], + texts=['UNNAMED', 'LINK', '.'], + refs=['http://google.com']) # link target swapped translation para1 = secs[1].findall('paragraph') assert_elem( - para1[0], - texts=['LINK TO', 'external2', 'AND', 'external1', '.'], - refs=['http://example.com/external2', - 'http://example.com/external1']) + para1[0], + texts=['LINK TO', 'external2', 'AND', 'external1', '.'], + refs=['http://example.com/external2', + 'http://example.com/external1']) assert_elem( - para1[1], - texts=['LINK TO', 'THE PYTHON SITE', 'AND', 'THE SPHINX SITE', - '.'], - refs=['http://python.org', 'http://sphinx-doc.org']) + para1[1], + texts=['LINK TO', 'THE PYTHON SITE', 'AND', 'THE SPHINX SITE', + '.'], + refs=['http://python.org', 'http://sphinx-doc.org']) # multiple references in the same line para2 = secs[2].findall('paragraph') assert_elem( - para2[0], - texts=['LINK TO', 'EXTERNAL LINKS', ',', 'Python', ',', - 'THE SPHINX SITE', ',', 'UNNAMED', 'AND', - 'THE PYTHON SITE', '.'], - refs=['i18n-with-external-links', 'http://python.org/index.html', - 'http://sphinx-doc.org', 'http://google.com', - 'http://python.org']) + para2[0], + texts=['LINK TO', 'EXTERNAL LINKS', ',', 'Python', ',', + 'THE SPHINX SITE', ',', 'UNNAMED', 'AND', + 'THE PYTHON SITE', '.'], + refs=['i18n-with-external-links', 'http://python.org/index.html', + 'http://sphinx-doc.org', 'http://google.com', + 'http://python.org']) -@with_intl_app(buildername='text', warning=warnfile, cleanenv=True) -def test_i18n_literalblock_warning(app): - app.builddir.rmtree(True) #for warnings acceleration +@with_intl_app(buildername='text', freshenv=True) +def test_i18n_literalblock_warning(app, status, warning): + #app.builddir.rmtree(True) # for warnings acceleration app.builder.build(['literalblock']) result = (app.outdir / 'literalblock.txt').text(encoding='utf-8') expect = (u"\nI18N WITH LITERAL BLOCK" @@ -335,14 +328,14 @@ def test_i18n_literalblock_warning(app): u"\n' in result -@with_app(buildername='html', srcdir=(test_roots / 'test-templating')) -def test_autosummary_class_template_overloading(app): - app.builder.build_all() +@with_app('html', testroot='templating') +def test_autosummary_class_template_overloading(app, status, warning): + app.builder.build_update() - result = (app.outdir / 'generated' / 'sphinx.application.Sphinx.html').text( - encoding='utf-8') + result = (app.outdir / 'generated' / 'sphinx.application.TemplateBridge.html').text( + encoding='utf-8') assert 'autosummary/class.rst method block overloading' in result - diff --git a/tests/test_theming.py b/tests/test_theming.py index 067c43199..d6f41da1e 100644 --- a/tests/test_theming.py +++ b/tests/test_theming.py @@ -19,14 +19,14 @@ from util import with_app, raises @with_app(confoverrides={'html_theme': 'ziptheme', 'html_theme_options.testopt': 'foo'}) -def test_theme_api(app): +def test_theme_api(app, status, warning): cfg = app.config # test Theme class API assert set(Theme.themes.keys()) == \ - set(['basic', 'default', 'scrolls', 'agogo', 'sphinxdoc', 'haiku', - 'traditional', 'testtheme', 'ziptheme', 'epub', 'nature', - 'pyramid', 'bizstyle']) + set(['basic', 'default', 'scrolls', 'agogo', 'sphinxdoc', 'haiku', 
+ 'traditional', 'testtheme', 'ziptheme', 'epub', 'nature', + 'pyramid', 'bizstyle']) assert Theme.themes['testtheme'][1] is None assert isinstance(Theme.themes['ziptheme'][1], zipfile.ZipFile) @@ -56,14 +56,15 @@ def test_theme_api(app): theme.cleanup() assert not os.path.exists(themedir) -@with_app(buildername='html') -def test_js_source(app): + +@with_app() +def test_js_source(app, status, warning): # Now sphinx provides non-minified JS files for jquery.js and underscore.js # to clarify the source of the minified files. see also #1434. # If you update the version of the JS file, please update the source of the # JS file and version number in this test. - app.builder.build_all() + app.builder.build(['contents']) v = '1.8.3' msg = 'jquery.js version does not match to {v}'.format(v=v) diff --git a/tests/test_util_i18n.py b/tests/test_util_i18n.py index afc9fb36d..d69c2acdc 100644 --- a/tests/test_util_i18n.py +++ b/tests/test_util_i18n.py @@ -1,163 +1,163 @@ -# -*- coding: utf-8 -*- -""" - test_util_i18n - ~~~~~~~~~~~~~~ - - Test i18n util. - - :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" -from __future__ import print_function - -import os -from os import path - -from babel.messages.mofile import read_mo -from sphinx.util import i18n - -from util import with_tempdir - - -def test_catalog_info_for_file_and_path(): - cat = i18n.CatalogInfo('path', 'domain') - assert cat.po_file == 'domain.po' - assert cat.mo_file == 'domain.mo' - assert cat.po_path == path.join('path', 'domain.po') - assert cat.mo_path == path.join('path', 'domain.mo') - - -def test_catalog_info_for_sub_domain_file_and_path(): - cat = i18n.CatalogInfo('path', 'sub/domain') - assert cat.po_file == 'sub/domain.po' - assert cat.mo_file == 'sub/domain.mo' - assert cat.po_path == path.join('path', 'sub/domain.po') - assert cat.mo_path == path.join('path', 'sub/domain.mo') - - -@with_tempdir -def test_catalog_outdated(dir): - (dir / 'test.po').write_text('#') - cat = i18n.CatalogInfo(dir, 'test') - assert cat.is_outdated() # if mo is not exist - - mo_file = (dir / 'test.mo') - mo_file.write_text('#') - assert not cat.is_outdated() # if mo is exist and newer than po - - os.utime(mo_file, (os.stat(mo_file).st_mtime - 10,) * 2) # to be outdate - assert cat.is_outdated() # if mo is exist and older than po - - -@with_tempdir -def test_catalog_write_mo(dir): - (dir / 'test.po').write_text('#') - cat = i18n.CatalogInfo(dir, 'test') - cat.write_mo('en') - assert path.exists(cat.mo_path) - assert read_mo(open(cat.mo_path, 'rb')) is not None - - -@with_tempdir -def test_get_catalogs_for_xx(dir): - (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs() - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.pot').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs() - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.po').write_text('#') - (dir / 'loc1' / 'en' / 'LC_MESSAGES').makedirs() - (dir / 'loc1' / 'en' / 'LC_MESSAGES' / 'test6.po').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_ALL').makedirs() - (dir / 'loc1' / 'xx' / 'LC_ALL' / 'test7.po').write_text('#') - - catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=False) - domains = set(c.domain for c in catalogs) - assert domains == set([ - 'test1', - 'test2', - path.normpath('sub/test4'), 
- path.normpath('sub/test5'), - ]) - - -@with_tempdir -def test_get_catalogs_for_en(dir): - (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs() - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'xx_dom.po').write_text('#') - (dir / 'loc1' / 'en' / 'LC_MESSAGES').makedirs() - (dir / 'loc1' / 'en' / 'LC_MESSAGES' / 'en_dom.po').write_text('#') - - catalogs = i18n.get_catalogs([dir / 'loc1'], 'en', force_all=False) - domains = set(c.domain for c in catalogs) - assert domains == set(['en_dom']) - - -@with_tempdir -def test_get_catalogs_with_non_existent_locale(dir): - catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx') - assert not catalogs - - catalogs = i18n.get_catalogs([dir / 'loc1'], None) - assert not catalogs - - -def test_get_catalogs_with_non_existent_locale_dirs(): - catalogs = i18n.get_catalogs(['dummy'], 'xx') - assert not catalogs - - -@with_tempdir -def test_get_catalogs_for_xx_without_outdated(dir): - (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs() - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.mo').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.mo').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.pot').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.mo').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs() - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.mo').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.po').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.mo').write_text('#') - - catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=False) - assert not catalogs - - catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=True) - domains = set(c.domain for c in catalogs) - assert domains == set([ - 'test1', - 'test2', - path.normpath('sub/test4'), - path.normpath('sub/test5'), - ]) - - -@with_tempdir -def test_get_catalogs_from_multiple_locale_dirs(dir): - (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs() - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#') - (dir / 'loc2' / 'xx' / 'LC_MESSAGES').makedirs() - (dir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#') - (dir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#') - - catalogs = i18n.get_catalogs([dir / 'loc1', dir / 'loc2'], 'xx') - domains = sorted(c.domain for c in catalogs) - assert domains == ['test1', 'test1', 'test2'] - - -@with_tempdir -def test_get_catalogs_with_compact(dir): - (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs() - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs() - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test3.po').write_text('#') - (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#') - - catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', gettext_compact=True) - domains = set(c.domain for c in catalogs) - assert domains == set(['test1', 'test2', 'sub']) +# -*- coding: utf-8 -*- +""" + test_util_i18n + ~~~~~~~~~~~~~~ + + Test i18n util. + + :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" +from __future__ import print_function + +import os +from os import path + +from babel.messages.mofile import read_mo +from sphinx.util import i18n + +from util import with_tempdir + + +def test_catalog_info_for_file_and_path(): + cat = i18n.CatalogInfo('path', 'domain') + assert cat.po_file == 'domain.po' + assert cat.mo_file == 'domain.mo' + assert cat.po_path == path.join('path', 'domain.po') + assert cat.mo_path == path.join('path', 'domain.mo') + + +def test_catalog_info_for_sub_domain_file_and_path(): + cat = i18n.CatalogInfo('path', 'sub/domain') + assert cat.po_file == 'sub/domain.po' + assert cat.mo_file == 'sub/domain.mo' + assert cat.po_path == path.join('path', 'sub/domain.po') + assert cat.mo_path == path.join('path', 'sub/domain.mo') + + +@with_tempdir +def test_catalog_outdated(dir): + (dir / 'test.po').write_text('#') + cat = i18n.CatalogInfo(dir, 'test') + assert cat.is_outdated() # if mo is not exist + + mo_file = (dir / 'test.mo') + mo_file.write_text('#') + assert not cat.is_outdated() # if mo is exist and newer than po + + os.utime(mo_file, (os.stat(mo_file).st_mtime - 10,) * 2) # to be outdate + assert cat.is_outdated() # if mo is exist and older than po + + +@with_tempdir +def test_catalog_write_mo(dir): + (dir / 'test.po').write_text('#') + cat = i18n.CatalogInfo(dir, 'test') + cat.write_mo('en') + assert path.exists(cat.mo_path) + assert read_mo(open(cat.mo_path, 'rb')) is not None + + +@with_tempdir +def test_get_catalogs_for_xx(dir): + (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs() + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.pot').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs() + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.po').write_text('#') + (dir / 'loc1' / 'en' / 'LC_MESSAGES').makedirs() + (dir / 'loc1' / 'en' / 'LC_MESSAGES' / 'test6.po').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_ALL').makedirs() + (dir / 'loc1' / 'xx' / 'LC_ALL' / 'test7.po').write_text('#') + + catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=False) + domains = set(c.domain for c in catalogs) + assert domains == set([ + 'test1', + 'test2', + path.normpath('sub/test4'), + path.normpath('sub/test5'), + ]) + + +@with_tempdir +def test_get_catalogs_for_en(dir): + (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs() + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'xx_dom.po').write_text('#') + (dir / 'loc1' / 'en' / 'LC_MESSAGES').makedirs() + (dir / 'loc1' / 'en' / 'LC_MESSAGES' / 'en_dom.po').write_text('#') + + catalogs = i18n.get_catalogs([dir / 'loc1'], 'en', force_all=False) + domains = set(c.domain for c in catalogs) + assert domains == set(['en_dom']) + + +@with_tempdir +def test_get_catalogs_with_non_existent_locale(dir): + catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx') + assert not catalogs + + catalogs = i18n.get_catalogs([dir / 'loc1'], None) + assert not catalogs + + +def test_get_catalogs_with_non_existent_locale_dirs(): + catalogs = i18n.get_catalogs(['dummy'], 'xx') + assert not catalogs + + +@with_tempdir +def test_get_catalogs_for_xx_without_outdated(dir): + (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs() + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.mo').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#') 
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.mo').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.pot').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.mo').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs() + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.mo').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.po').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.mo').write_text('#') + + catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=False) + assert not catalogs + + catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=True) + domains = set(c.domain for c in catalogs) + assert domains == set([ + 'test1', + 'test2', + path.normpath('sub/test4'), + path.normpath('sub/test5'), + ]) + + +@with_tempdir +def test_get_catalogs_from_multiple_locale_dirs(dir): + (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs() + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#') + (dir / 'loc2' / 'xx' / 'LC_MESSAGES').makedirs() + (dir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#') + (dir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#') + + catalogs = i18n.get_catalogs([dir / 'loc1', dir / 'loc2'], 'xx') + domains = sorted(c.domain for c in catalogs) + assert domains == ['test1', 'test1', 'test2'] + + +@with_tempdir +def test_get_catalogs_with_compact(dir): + (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs() + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs() + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test3.po').write_text('#') + (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#') + + catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', gettext_compact=True) + domains = set(c.domain for c in catalogs) + assert domains == set(['test1', 'test2', 'sub']) diff --git a/tests/test_util_nodes.py b/tests/test_util_nodes.py index 9ddc049dc..a385245d8 100644 --- a/tests/test_util_nodes.py +++ b/tests/test_util_nodes.py @@ -1,121 +1,121 @@ -# -*- coding: utf-8 -*- -""" - test_util_nodes - ~~~~~~~~~~~~~~~ - - Tests uti.nodes functions. - - :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" -from textwrap import dedent - -from docutils import nodes -from docutils.parsers import rst -from docutils.utils import new_document -from docutils import frontend - -from sphinx.util.nodes import extract_messages - - -def _get_doctree(text): - settings = frontend.OptionParser( - components=(rst.Parser,)).get_default_values() - document = new_document('dummy.txt', settings) - rst.Parser().parse(text, document) - return document - - -def assert_node_count(messages, node_type, expect_count): - count = 0 - node_list = [node for node, msg in messages] - for node in node_list: - if isinstance(node, node_type): - count += 1 - - assert count == expect_count, ( - "Count of %r in the %r is %d instead of %d" - % (node_type, node_list, count, expect_count)) - - -def test_extract_messages(): - text = dedent( - """ - .. admonition:: admonition title - - admonition body - """ - ) - yield ( - assert_node_count, - extract_messages(_get_doctree(text)), - nodes.title, 1, - ) - - text = dedent( - """ - .. 
figure:: foo.jpg - - this is title - """ - ) - yield ( - assert_node_count, - extract_messages(_get_doctree(text)), - nodes.caption, 1, - ) - - text = dedent( - """ - .. rubric:: spam - """ - ) - yield ( - assert_node_count, - extract_messages(_get_doctree(text)), - nodes.rubric, 1, - ) - - - text = dedent( - """ - | spam - | egg - """ - ) - yield ( - assert_node_count, - extract_messages(_get_doctree(text)), - nodes.line, 2, - ) - - - text = dedent( - """ - section - ======= - - +----------------+ - | | **Title 1** | - | | Message 1 | - +----------------+ - """ - ) - yield ( - assert_node_count, - extract_messages(_get_doctree(text)), - nodes.line, 2, - ) - - - text = dedent( - """ - * | **Title 1** - | Message 1 - """ - ) - yield ( - assert_node_count, - extract_messages(_get_doctree(text)), - nodes.line, 2, - ) +# -*- coding: utf-8 -*- +""" + test_util_nodes + ~~~~~~~~~~~~~~~ + + Tests uti.nodes functions. + + :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" +from textwrap import dedent + +from docutils import nodes +from docutils.parsers import rst +from docutils.utils import new_document +from docutils import frontend + +from sphinx.util.nodes import extract_messages + + +def _get_doctree(text): + settings = frontend.OptionParser( + components=(rst.Parser,)).get_default_values() + document = new_document('dummy.txt', settings) + rst.Parser().parse(text, document) + return document + + +def assert_node_count(messages, node_type, expect_count): + count = 0 + node_list = [node for node, msg in messages] + for node in node_list: + if isinstance(node, node_type): + count += 1 + + assert count == expect_count, ( + "Count of %r in the %r is %d instead of %d" + % (node_type, node_list, count, expect_count)) + + +def test_extract_messages(): + text = dedent( + """ + .. admonition:: admonition title + + admonition body + """ + ) + yield ( + assert_node_count, + extract_messages(_get_doctree(text)), + nodes.title, 1, + ) + + text = dedent( + """ + .. figure:: foo.jpg + + this is title + """ + ) + yield ( + assert_node_count, + extract_messages(_get_doctree(text)), + nodes.caption, 1, + ) + + text = dedent( + """ + .. 
rubric:: spam + """ + ) + yield ( + assert_node_count, + extract_messages(_get_doctree(text)), + nodes.rubric, 1, + ) + + + text = dedent( + """ + | spam + | egg + """ + ) + yield ( + assert_node_count, + extract_messages(_get_doctree(text)), + nodes.line, 2, + ) + + + text = dedent( + """ + section + ======= + + +----------------+ + | | **Title 1** | + | | Message 1 | + +----------------+ + """ + ) + yield ( + assert_node_count, + extract_messages(_get_doctree(text)), + nodes.line, 2, + ) + + + text = dedent( + """ + * | **Title 1** + | Message 1 + """ + ) + yield ( + assert_node_count, + extract_messages(_get_doctree(text)), + nodes.line, 2, + ) diff --git a/tests/test_versioning.py b/tests/test_versioning.py index d9cbf8ebe..bd8c697c9 100644 --- a/tests/test_versioning.py +++ b/tests/test_versioning.py @@ -16,39 +16,46 @@ from docutils.parsers.rst.directives.html import MetaBody from sphinx import addnodes from sphinx.versioning import add_uids, merge_doctrees, get_ratio -from util import test_root, TestApp +from util import TestApp app = original = original_uids = None + def setup_module(): global app, original, original_uids - app = TestApp() + app = TestApp(testroot='versioning') app.builder.env.app = app app.connect('doctree-resolved', on_doctree_resolved) app.build() - original = doctrees['versioning/original'] + original = doctrees['original'] original_uids = [n.uid for n in add_uids(original, is_paragraph)] + def teardown_module(): app.cleanup() - (test_root / '_build').rmtree(True) + doctrees = {} + def on_doctree_resolved(app, doctree, docname): doctrees[docname] = doctree + def is_paragraph(node): return node.__class__.__name__ == 'paragraph' + def test_get_ratio(): assert get_ratio('', 'a') assert get_ratio('a', '') + def test_add_uids(): assert len(original_uids) == 3 + def test_picklablility(): # we have to modify the doctree so we can pickle it copy = original.copy() @@ -62,44 +69,50 @@ def test_picklablility(): loaded = pickle.loads(pickle.dumps(copy, pickle.HIGHEST_PROTOCOL)) assert all(getattr(n, 'uid', False) for n in loaded.traverse(is_paragraph)) + def test_modified(): - modified = doctrees['versioning/modified'] + modified = doctrees['modified'] new_nodes = list(merge_doctrees(original, modified, is_paragraph)) uids = [n.uid for n in modified.traverse(is_paragraph)] assert not new_nodes assert original_uids == uids + def test_added(): - added = doctrees['versioning/added'] + added = doctrees['added'] new_nodes = list(merge_doctrees(original, added, is_paragraph)) uids = [n.uid for n in added.traverse(is_paragraph)] assert len(new_nodes) == 1 assert original_uids == uids[:-1] + def test_deleted(): - deleted = doctrees['versioning/deleted'] + deleted = doctrees['deleted'] new_nodes = list(merge_doctrees(original, deleted, is_paragraph)) uids = [n.uid for n in deleted.traverse(is_paragraph)] assert not new_nodes assert original_uids[::2] == uids + def test_deleted_end(): - deleted_end = doctrees['versioning/deleted_end'] + deleted_end = doctrees['deleted_end'] new_nodes = list(merge_doctrees(original, deleted_end, is_paragraph)) uids = [n.uid for n in deleted_end.traverse(is_paragraph)] assert not new_nodes assert original_uids[:-1] == uids + def test_insert(): - insert = doctrees['versioning/insert'] + insert = doctrees['insert'] new_nodes = list(merge_doctrees(original, insert, is_paragraph)) uids = [n.uid for n in insert.traverse(is_paragraph)] assert len(new_nodes) == 1 assert original_uids[0] == uids[0] assert original_uids[1:] == uids[2:] + def 
test_insert_beginning(): - insert_beginning = doctrees['versioning/insert_beginning'] + insert_beginning = doctrees['insert_beginning'] new_nodes = list(merge_doctrees(original, insert_beginning, is_paragraph)) uids = [n.uid for n in insert_beginning.traverse(is_paragraph)] assert len(new_nodes) == 1 @@ -107,8 +120,9 @@ def test_insert_beginning(): assert original_uids == uids[1:] assert original_uids[0] != uids[0] + def test_insert_similar(): - insert_similar = doctrees['versioning/insert_similar'] + insert_similar = doctrees['insert_similar'] new_nodes = list(merge_doctrees(original, insert_similar, is_paragraph)) uids = [n.uid for n in insert_similar.traverse(is_paragraph)] assert len(new_nodes) == 1 diff --git a/tests/test_websupport.py b/tests/test_websupport.py index d355422c9..9e88a60fe 100644 --- a/tests/test_websupport.py +++ b/tests/test_websupport.py @@ -9,34 +9,33 @@ :license: BSD, see LICENSE for details. """ -import os from functools import wraps from six import StringIO from sphinx.websupport import WebSupport from sphinx.websupport.errors import DocumentNotFoundError, \ - CommentNotAllowedError, UserNotAuthorizedError + CommentNotAllowedError, UserNotAuthorizedError from sphinx.websupport.storage import StorageBackend from sphinx.websupport.storage.differ import CombinedHtmlDiff try: from sphinx.websupport.storage.sqlalchemystorage import Session, \ - Comment, CommentVote + Comment, CommentVote from sphinx.websupport.storage.sqlalchemy_db import Node sqlalchemy_missing = False except ImportError: sqlalchemy_missing = True -from util import test_root, raises, skip_if +from util import rootdir, tempdir, raises, skip_if -default_settings = {'builddir': os.path.join(test_root, 'websupport'), +default_settings = {'builddir': tempdir / 'websupport', 'status': StringIO(), 'warning': StringIO()} + def teardown_module(): - (test_root / 'generated').rmtree(True) - (test_root / 'websupport').rmtree(True) + (tempdir / 'websupport').rmtree(True) def with_support(*args, **kwargs): @@ -59,12 +58,12 @@ class NullStorage(StorageBackend): @with_support(storage=NullStorage()) def test_no_srcdir(support): - """Make sure the correct exception is raised if srcdir is not given.""" + # make sure the correct exception is raised if srcdir is not given. raises(RuntimeError, support.build) @skip_if(sqlalchemy_missing, 'needs sqlalchemy') -@with_support(srcdir=test_root) +@with_support(srcdir=rootdir / 'root') def test_build(support): support.build() @@ -173,9 +172,9 @@ def test_proposals(support): source = data['source'] proposal = source[:5] + source[10:15] + 'asdf' + source[15:] - comment = support.add_comment('Proposal comment', - node_id=node.id, - proposal=proposal) + support.add_comment('Proposal comment', + node_id=node.id, + proposal=proposal) @skip_if(sqlalchemy_missing, 'needs sqlalchemy') @@ -234,6 +233,8 @@ def test_update_username(support): called = False + + def moderation_callback(comment): global called called = True @@ -251,7 +252,7 @@ def test_moderation(support): deleted = support.add_comment('Comment to delete', node_id=node.id, displayed=False) # Make sure the moderation_callback is called. - assert called == True + assert called # Make sure the user must be a moderator. 
raises(UserNotAuthorizedError, support.accept_comment, accepted['id']) raises(UserNotAuthorizedError, support.delete_comment, deleted['id']) diff --git a/tests/util.py b/tests/util.py index bbfb2d4b9..c957ad448 100644 --- a/tests/util.py +++ b/tests/util.py @@ -8,22 +8,22 @@ """ import os +import re import sys import tempfile -import shutil -import re from functools import wraps from six import StringIO +from nose import tools, SkipTest + from sphinx import application from sphinx.theming import Theme from sphinx.ext.autodoc import AutoDirective +from sphinx.pycode import ModuleAnalyzer from path import path -from nose import tools, SkipTest - try: # Python >=3.3 from unittest import mock @@ -32,7 +32,7 @@ except ImportError: __all__ = [ - 'test_root', 'test_roots', 'raises', 'raises_msg', + 'rootdir', 'tempdir', 'raises', 'raises_msg', 'skip_if', 'skip_unless', 'skip_unless_importable', 'Struct', 'ListOutput', 'TestApp', 'with_app', 'gen_with_app', 'path', 'with_tempdir', @@ -41,8 +41,8 @@ __all__ = [ ] -test_root = path(__file__).parent.joinpath('root').abspath() -test_roots = path(__file__).parent.joinpath('roots').abspath() +rootdir = path(os.path.dirname(__file__) or '.').abspath() +tempdir = path(os.environ['SPHINX_TEST_TEMPDIR']).abspath() def _excstr(exc): @@ -50,11 +50,9 @@ def _excstr(exc): return str(tuple(map(_excstr, exc))) return exc.__name__ + def raises(exc, func, *args, **kwds): - """ - Raise :exc:`AssertionError` if ``func(*args, **kwds)`` does not - raise *exc*. - """ + """Raise AssertionError if ``func(*args, **kwds)`` does not raise *exc*.""" try: func(*args, **kwds) except exc: @@ -63,10 +61,10 @@ def raises(exc, func, *args, **kwds): raise AssertionError('%s did not raise %s' % (func.__name__, _excstr(exc))) + def raises_msg(exc, msg, func, *args, **kwds): - """ - Raise :exc:`AssertionError` if ``func(*args, **kwds)`` does not - raise *exc*, and check if the message contains *msg*. + """Raise AssertionError if ``func(*args, **kwds)`` does not raise *exc*, + and check if the message contains *msg*. """ try: func(*args, **kwds) @@ -76,6 +74,7 @@ def raises_msg(exc, msg, func, *args, **kwds): raise AssertionError('%s did not raise %s' % (func.__name__, _excstr(exc))) + def skip_if(condition, msg=None): """Decorator to skip test if condition is true.""" def deco(test): @@ -87,10 +86,12 @@ def skip_if(condition, msg=None): return skipper return deco + def skip_unless(condition, msg=None): """Decorator to skip test if condition is false.""" return skip_if(not condition, msg) + def skip_unless_importable(module, msg=None): """Decorator to skip test if module is not importable.""" try: @@ -127,61 +128,47 @@ class TestApp(application.Sphinx): better default values for the initialization parameters. 
""" - def __init__(self, srcdir=None, confdir=None, outdir=None, doctreedir=None, - buildername='html', confoverrides=None, - status=None, warning=None, freshenv=None, - warningiserror=None, tags=None, - confname='conf.py', cleanenv=False, - _copy_to_temp=False, - ): - - application.CONFIG_FILENAME = confname - - self.cleanup_trees = [test_root / 'generated'] - - if srcdir is None: - srcdir = test_root - elif srcdir == '(empty)': - tempdir = path(tempfile.mkdtemp()) - self.cleanup_trees.append(tempdir) - temproot = tempdir / 'root' - temproot.makedirs() - (temproot / 'conf.py').write_text('') - srcdir = temproot + def __init__(self, buildername='html', testroot=None, srcdir=None, + freshenv=False, confoverrides=None, status=None, warning=None, + tags=None, docutilsconf=None): + if testroot is None: + defaultsrcdir = 'root' + testroot = rootdir / 'root' else: - srcdir = path(srcdir) + defaultsrcdir = 'test-' + testroot + testroot = rootdir / 'roots' / ('test-' + testroot) + if srcdir is None: + srcdir = tempdir / defaultsrcdir + else: + srcdir = tempdir / srcdir - if _copy_to_temp: - tempdir = path(tempfile.mkdtemp()) - self.cleanup_trees.append(tempdir) - temproot = tempdir / srcdir.basename() - srcdir.copytree(temproot) - srcdir = temproot + if not srcdir.exists(): + testroot.copytree(srcdir) - self.builddir = srcdir.joinpath('_build') - if confdir is None: - confdir = srcdir - if outdir is None: - outdir = srcdir.joinpath(self.builddir, buildername) - if not outdir.isdir(): - outdir.makedirs() - self.cleanup_trees.insert(0, outdir) - if doctreedir is None: - doctreedir = srcdir.joinpath(srcdir, self.builddir, 'doctrees') - if not doctreedir.isdir(): - doctreedir.makedirs() - if cleanenv: - self.cleanup_trees.insert(0, doctreedir) + if docutilsconf is not None: + (srcdir / 'docutils.conf').write_text(docutilsconf) + + builddir = srcdir / '_build' +# if confdir is None: + confdir = srcdir +# if outdir is None: + outdir = builddir.joinpath(buildername) + if not outdir.isdir(): + outdir.makedirs() +# if doctreedir is None: + doctreedir = builddir.joinpath('doctrees') + if not doctreedir.isdir(): + doctreedir.makedirs() if confoverrides is None: confoverrides = {} if status is None: status = StringIO() if warning is None: warning = ListOutput('stderr') - if freshenv is None: - freshenv = False - if warningiserror is None: - warningiserror = False +# if warningiserror is None: + warningiserror = False + + self._saved_path = sys.path[:] application.Sphinx.__init__(self, srcdir, confdir, outdir, doctreedir, buildername, confoverrides, status, warning, @@ -190,8 +177,9 @@ class TestApp(application.Sphinx): def cleanup(self, doctrees=False): Theme.themes.clear() AutoDirective._registry.clear() - for tree in self.cleanup_trees: - shutil.rmtree(tree, True) + ModuleAnalyzer.cache.clear() + sys.path[:] = self._saved_path + sys.modules.pop('autodoc_fodder', None) def __repr__(self): return '<%s buildername=%r>' % (self.__class__.__name__, self.builder.name) @@ -205,10 +193,14 @@ def with_app(*args, **kwargs): def generator(func): @wraps(func) def deco(*args2, **kwargs2): + status, warning = StringIO(), StringIO() + kwargs['status'] = status + kwargs['warning'] = warning app = TestApp(*args, **kwargs) - func(app, *args2, **kwargs2) - # don't execute cleanup if test failed - app.cleanup() + try: + func(app, status, warning, *args2, **kwargs2) + finally: + app.cleanup() return deco return generator @@ -221,20 +213,24 @@ def gen_with_app(*args, **kwargs): def generator(func): @wraps(func) def deco(*args2, 
**kwargs2): + status, warning = StringIO(), StringIO() + kwargs['status'] = status + kwargs['warning'] = warning app = TestApp(*args, **kwargs) - for item in func(app, *args2, **kwargs2): - yield item - # don't execute cleanup if test failed - app.cleanup() + try: + for item in func(app, status, warning, *args2, **kwargs2): + yield item + finally: + app.cleanup() return deco return generator def with_tempdir(func): def new_func(*args, **kwds): - tempdir = path(tempfile.mkdtemp()) - func(tempdir, *args, **kwds) - tempdir.rmtree() + new_tempdir = path(tempfile.mkdtemp(dir=tempdir)) + func(new_tempdir, *args, **kwds) + new_tempdir.rmtree() # not when test fails... new_func.__name__ = func.__name__ return new_func @@ -242,7 +238,10 @@ def with_tempdir(func): def sprint(*args): sys.stderr.write(' '.join(map(str, args)) + '\n') + _unicode_literals_re = re.compile(r'u(".*?")|u(\'.*?\')') + + def remove_unicode_literals(s): return _unicode_literals_re.sub(lambda x: x.group(1) or x.group(2), s) diff --git a/tox.ini b/tox.ini index 1d26ac35a..f308880e1 100644 --- a/tox.ini +++ b/tox.ini @@ -7,7 +7,7 @@ deps= sqlalchemy whoosh setenv = - BUILD_TEST_PATH = {envdir}/tests + SPHINX_TEST_TEMPDIR = {envdir}/testbuild commands= {envpython} tests/run.py {posargs} sphinx-build -q -W -b html -d {envtmpdir}/doctrees doc {envtmpdir}/html From 78af37370c1165293450071453a3cd8860df538f Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 17:32:52 +0200 Subject: [PATCH 052/293] mock is builtin in Py3. --- tests/run.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/run.py b/tests/run.py index 2672594fd..8643606d2 100755 --- a/tests/run.py +++ b/tests/run.py @@ -27,6 +27,8 @@ for modname in ('nose', 'mock', 'six', 'docutils', 'jinja2', 'pygments', try: __import__(modname) except ImportError as err: + if modname == 'mock' and sys.version_info[0] == 3: + continue traceback.print_exc() print('The %r package is needed to run the Sphinx test suite.' % modname) sys.exit(1) From a73ab32bc5293f20765eb2f306aef8e5d5bd39ec Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 17:34:16 +0200 Subject: [PATCH 053/293] Split viewcode and linkcode tests out of main test root. 
--- tests/root/conf.py | 39 ++--- tests/roots/test-ext-viewcode/conf.py | 32 +++- tests/roots/test-ext-viewcode/index.rst | 63 ++++---- tests/roots/test-ext-viewcode/objects.rst | 169 ++++++++++++++++++++++ tests/test_ext_doctest.py | 4 - tests/test_ext_linkcode.py | 24 --- tests/test_ext_viewcode.py | 14 +- 7 files changed, 250 insertions(+), 95 deletions(-) create mode 100644 tests/roots/test-ext-viewcode/objects.rst delete mode 100644 tests/test_ext_linkcode.py diff --git a/tests/root/conf.py b/tests/root/conf.py index 552e1aaa9..d12e8167f 100644 --- a/tests/root/conf.py +++ b/tests/root/conf.py @@ -5,8 +5,7 @@ import sys, os sys.path.append(os.path.abspath('.')) extensions = ['sphinx.ext.autodoc', 'sphinx.ext.jsmath', 'sphinx.ext.todo', - 'sphinx.ext.coverage', 'sphinx.ext.doctest', 'sphinx.ext.extlinks', - 'sphinx.ext.viewcode', 'ext'] + 'sphinx.ext.coverage', 'sphinx.ext.extlinks', 'ext'] jsmath_path = 'dummy.js' @@ -43,15 +42,15 @@ html_context = {'hckey': 'hcval', 'hckey_co': 'wrong_hcval_co'} htmlhelp_basename = 'SphinxTestsdoc' latex_documents = [ - ('contents', 'SphinxTests.tex', 'Sphinx Tests Documentation', - 'Georg Brandl \\and someone else', 'manual'), + ('contents', 'SphinxTests.tex', 'Sphinx Tests Documentation', + 'Georg Brandl \\and someone else', 'manual'), ] latex_additional_files = ['svgimg.svg'] texinfo_documents = [ - ('contents', 'SphinxTests', 'Sphinx Tests', - 'Georg Brandl \\and someone else', 'Sphinx Testing', 'Miscellaneous'), + ('contents', 'SphinxTests', 'Sphinx Tests', + 'Georg Brandl \\and someone else', 'Sphinx Testing', 'Miscellaneous'), ] man_pages = [ @@ -77,35 +76,13 @@ autodoc_mock_imports = [ # modify tags from conf.py tags.add('confpytag') -# -- linkcode - -if 'test_linkcode' in tags: - import glob - - extensions.remove('sphinx.ext.viewcode') - extensions.append('sphinx.ext.linkcode') - - exclude_patterns.extend(glob.glob('*.txt') + glob.glob('*/*.txt')) - exclude_patterns.remove('contents.txt') - exclude_patterns.remove('objects.txt') - - def linkcode_resolve(domain, info): - if domain == 'py': - fn = info['module'].replace('.', '/') - return "http://foobar/source/%s.py" % fn - elif domain == "js": - return "http://foobar/js/" + info['fullname'] - elif domain in ("c", "cpp"): - return "http://foobar/%s/%s" % (domain, "".join(info['names'])) - else: - raise AssertionError() - # -- extension API from docutils import nodes from sphinx import addnodes from sphinx.util.compat import Directive + def userdesc_parse(env, sig, signode): x, y = sig.split(':') signode += addnodes.desc_name(x, x) @@ -113,15 +90,19 @@ def userdesc_parse(env, sig, signode): signode[-1] += addnodes.desc_parameter(y, y) return x + def functional_directive(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine): return [nodes.strong(text='from function: %s' % options['opt'])] + class ClassDirective(Directive): option_spec = {'opt': lambda x: x} + def run(self): return [nodes.strong(text='from class: %s' % self.options['opt'])] + def setup(app): app.add_config_value('value_from_conf_py', 42, False) app.add_directive('funcdir', functional_directive, opt=lambda x: x) diff --git a/tests/roots/test-ext-viewcode/conf.py b/tests/roots/test-ext-viewcode/conf.py index 946cb786d..a99a72bbc 100644 --- a/tests/roots/test-ext-viewcode/conf.py +++ b/tests/roots/test-ext-viewcode/conf.py @@ -1,8 +1,24 @@ -# -*- coding: utf-8 -*- - -import sys -import os - -sys.path.insert(0, os.path.abspath('.')) -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] 
-master_doc = 'index' +# -*- coding: utf-8 -*- + +import sys +import os + +sys.path.insert(0, os.path.abspath('.')) +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] +master_doc = 'index' + + +if 'test_linkcode' in tags: + extensions.remove('sphinx.ext.viewcode') + extensions.append('sphinx.ext.linkcode') + + def linkcode_resolve(domain, info): + if domain == 'py': + fn = info['module'].replace('.', '/') + return "http://foobar/source/%s.py" % fn + elif domain == "js": + return "http://foobar/js/" + info['fullname'] + elif domain in ("c", "cpp"): + return "http://foobar/%s/%s" % (domain, "".join(info['names'])) + else: + raise AssertionError() diff --git a/tests/roots/test-ext-viewcode/index.rst b/tests/roots/test-ext-viewcode/index.rst index 72e943219..b5776cfa7 100644 --- a/tests/roots/test-ext-viewcode/index.rst +++ b/tests/roots/test-ext-viewcode/index.rst @@ -1,29 +1,34 @@ -viewcode -======== - -.. py:module:: spam - -.. autofunction:: func1 - -.. autofunction:: func2 - -.. autofunction:: spam.mod1.func1 - -.. autofunction:: spam.mod2.func2 - -.. autofunction:: Class1 - -.. autofunction:: Class2 - -.. autofunction:: spam.mod1.Class1 - -.. autofunction:: spam.mod2.Class2 - - -.. literalinclude:: spam/__init__.py - :language: python - :pyobject: func1 - -.. literalinclude:: spam/mod1.py - :language: python - :pyobject: func1 +viewcode +======== + +.. py:module:: spam + +.. autofunction:: func1 + +.. autofunction:: func2 + +.. autofunction:: spam.mod1.func1 + +.. autofunction:: spam.mod2.func2 + +.. autofunction:: Class1 + +.. autofunction:: Class2 + +.. autofunction:: spam.mod1.Class1 + +.. autofunction:: spam.mod2.Class2 + + +.. literalinclude:: spam/__init__.py + :language: python + :pyobject: func1 + +.. literalinclude:: spam/mod1.py + :language: python + :pyobject: func1 + + +.. toctree:: + + objects diff --git a/tests/roots/test-ext-viewcode/objects.rst b/tests/roots/test-ext-viewcode/objects.rst new file mode 100644 index 000000000..8d304bece --- /dev/null +++ b/tests/roots/test-ext-viewcode/objects.rst @@ -0,0 +1,169 @@ +Testing object descriptions +=========================== + +.. function:: func_without_module(a, b, *c[, d]) + + Does something. + +.. function:: func_without_body() + +.. function:: func_noindex + :noindex: + +.. function:: func_with_module + :module: foolib + +Referring to :func:`func with no index `. +Referring to :func:`nothing <>`. + +.. module:: mod + :synopsis: Module synopsis. + :platform: UNIX + +.. function:: func_in_module + +.. class:: Cls + + .. method:: meth1 + + .. staticmethod:: meths + + .. attribute:: attr + +.. explicit class given +.. method:: Cls.meth2 + +.. explicit module given +.. exception:: Error(arg1, arg2) + :module: errmod + +.. data:: var + + +.. currentmodule:: None + +.. function:: func_without_module2() -> annotation + +.. object:: long(parameter, \ + list) + another one + +.. class:: TimeInt + + Has only one parameter (triggers special behavior...) + + :param moo: |test| + :type moo: |test| + +.. |test| replace:: Moo + +.. class:: Time(hour, minute, isdst) + + :param year: The year. + :type year: TimeInt + :param TimeInt minute: The minute. + :param isdst: whether it's DST + :type isdst: * some complex + * expression + :returns: a new :class:`Time` instance + :rtype: :class:`Time` + :raises ValueError: if the values are out of range + :ivar int hour: like *hour* + :ivar minute: like *minute* + :vartype minute: int + :param hour: Some parameter + :type hour: DuplicateType + :param hour: Duplicate param. 
Should not lead to crashes. + :type hour: DuplicateType + :param .Cls extcls: A class from another module. + + +C items +======= + +.. c:function:: Sphinx_DoSomething() + +.. c:member:: SphinxStruct.member + +.. c:macro:: SPHINX_USE_PYTHON + +.. c:type:: SphinxType + +.. c:var:: sphinx_global + + +Javascript items +================ + +.. js:function:: foo() + +.. js:data:: bar + +.. documenting the method of any object +.. js:function:: bar.baz(href, callback[, errback]) + + :param string href: The location of the resource. + :param callback: Get's called with the data returned by the resource. + :throws InvalidHref: If the `href` is invalid. + :returns: `undefined` + +.. js:attribute:: bar.spam + +References +========== + +Referencing :class:`mod.Cls` or :Class:`mod.Cls` should be the same. + +With target: :c:func:`Sphinx_DoSomething()` (parentheses are handled), +:c:member:`SphinxStruct.member`, :c:macro:`SPHINX_USE_PYTHON`, +:c:type:`SphinxType *` (pointer is handled), :c:data:`sphinx_global`. + +Without target: :c:func:`CFunction`. :c:func:`!malloc`. + +:js:func:`foo()` +:js:func:`foo` + +:js:data:`bar` +:js:func:`bar.baz()` +:js:func:`bar.baz` +:js:func:`~bar.baz()` + +:js:attr:`bar.baz` + + +Others +====== + +.. envvar:: HOME + +.. program:: python + +.. cmdoption:: -c command + +.. program:: perl + +.. cmdoption:: -c + +.. option:: +p + +Link to :option:`perl +p`. + + +User markup +=========== + +.. userdesc:: myobj:parameter + + Description of userdesc. + + +Referencing :userdescrole:`myobj`. + + +CPP domain +========== + +.. cpp:class:: n::Array + + .. cpp:function:: T& operator[]( unsigned j ) + const T& operator[]( unsigned j ) const diff --git a/tests/test_ext_doctest.py b/tests/test_ext_doctest.py index 67e08d344..002afff6f 100644 --- a/tests/test_ext_doctest.py +++ b/tests/test_ext_doctest.py @@ -8,13 +8,9 @@ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. """ -from __future__ import print_function - -import sys from util import with_app - cleanup_called = 0 diff --git a/tests/test_ext_linkcode.py b/tests/test_ext_linkcode.py deleted file mode 100644 index 34b2acf80..000000000 --- a/tests/test_ext_linkcode.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -""" - test_linkcode - ~~~~~~~~~~~~~ - - Test the sphinx.ext.linkcode extension. - - :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -from util import with_app - - -@with_app('html', tags=['test_linkcode']) -def test_html(app, status, warning): - app.builder.build(['objects']) - - stuff = (app.outdir / 'objects.html').text(encoding='utf-8') - - assert 'http://foobar/source/foolib.py' in stuff - assert 'http://foobar/js/' in stuff - assert 'http://foobar/c/' in stuff - assert 'http://foobar/cpp/' in stuff diff --git a/tests/test_ext_viewcode.py b/tests/test_ext_viewcode.py index 561fcb6a3..fb24f765a 100644 --- a/tests/test_ext_viewcode.py +++ b/tests/test_ext_viewcode.py @@ -15,7 +15,7 @@ from util import with_app @with_app(testroot='ext-viewcode') -def test_simple(app, status, warning): +def test_viewcode(app, status, warning): app.builder.build_all() warnings = re.sub(r'\\+', '/', warning.getvalue()) @@ -30,3 +30,15 @@ def test_simple(app, status, warning): assert result.count('href="_modules/spam/mod2.html#func2"') == 2 assert result.count('href="_modules/spam/mod1.html#Class1"') == 2 assert result.count('href="_modules/spam/mod2.html#Class2"') == 2 + + +@with_app(testroot='ext-viewcode', tags=['test_linkcode']) +def test_linkcode(app, status, warning): + app.builder.build(['objects']) + + stuff = (app.outdir / 'objects.html').text(encoding='utf-8') + + assert 'http://foobar/source/foolib.py' in stuff + assert 'http://foobar/js/' in stuff + assert 'http://foobar/c/' in stuff + assert 'http://foobar/cpp/' in stuff From be72ed6dc7ed9089b089bad213b2026b8af4133f Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 18:26:50 +0200 Subject: [PATCH 054/293] Refactor/speed up test_intl by combining all tests with a certain builder into a generator. --- tests/path.py | 3 + tests/test_intl.py | 775 +++++++++++++++++++++------------------------ 2 files changed, 368 insertions(+), 410 deletions(-) diff --git a/tests/path.py b/tests/path.py index 0d935fe4a..573d3d3ca 100755 --- a/tests/path.py +++ b/tests/path.py @@ -123,6 +123,9 @@ class path(text_type): """ os.unlink(self) + def utime(self, arg): + os.utime(self, arg) + def write_text(self, text, **kwargs): """ Writes the given `text` to the file. 
diff --git a/tests/test_intl.py b/tests/test_intl.py index 4f0f3cc2b..bbcf93eba 100644 --- a/tests/test_intl.py +++ b/tests/test_intl.py @@ -16,15 +16,31 @@ import re from subprocess import Popen, PIPE from xml.etree import ElementTree +from nose.tools import assert_equal, assert_in, assert_not_in from six import string_types -from util import tempdir, rootdir, path, with_app, SkipTest +from util import tempdir, rootdir, path, gen_with_app, SkipTest root = tempdir / 'test-intl' -def with_intl_app(*args, **kw): +def re_search(regex, text, flags=0): + if not re.search(regex, text, flags): + assert False, '%r did not match %r' % (regex, text) + + +def not_re_search(regex, text, flags=0): + if re.search(regex, text, flags): + assert False, '%r did match %r' % (regex, text) + + +def startswith(thing, prefix): + if not thing.startswith(prefix): + assert False, '%r does not start with %r' % (thing, prefix) + + +def gen_with_intl_app(*args, **kw): default_kw = { 'testroot': 'intl', 'confoverrides': { @@ -33,7 +49,7 @@ def with_intl_app(*args, **kw): }, } default_kw.update(kw) - return with_app(*args, **default_kw) + return gen_with_app(*args, **default_kw) def setup_module(): @@ -97,126 +113,38 @@ def assert_elem(elem, texts=None, refs=None, names=None): assert _names == names -@with_intl_app(buildername='text') -def test_simple(app, status, warning): - app.builder.build(['bom']) - result = (app.outdir / 'bom.txt').text(encoding='utf-8') - expect = (u"\nDatei mit UTF-8" - u"\n***************\n" # underline matches new translation - u"\nThis file has umlauts: äöü.\n") - assert result == expect +@gen_with_intl_app('text', freshenv=True) +def test_text_builder(app, status, warning): + app.builder.build_all() - -@with_intl_app(buildername='text') -def test_subdir(app, status, warning): - app.builder.build(['subdir/contents']) - result = (app.outdir / 'subdir' / 'contents.txt').text(encoding='utf-8') - assert result.startswith(u"\nsubdir contents\n***************\n") - - -@with_intl_app(buildername='text') -def test_i18n_warnings_in_translation(app, status, warning): - app.outdir.rmtree(True) # for warnings acceleration - app.doctreedir.rmtree(True) - app.builder.build(['warnings']) - result = (app.outdir / 'warnings.txt').text(encoding='utf-8') - expect = (u"\nI18N WITH REST WARNINGS" - u"\n***********************\n" - u"\nLINE OF >>``<>``<reference') - assert len(re.findall(expected_expr, result)) == 2 - - expected_expr = ('reference') - assert len(re.findall(expected_expr, result)) == 0 - - expected_expr = ('I18N WITH ' - 'REFS INCONSISTENCY') - assert len(re.findall(expected_expr, result)) == 1 - - -@with_intl_app(buildername='xml', freshenv=True) -def test_i18n_keep_external_links(app, status, warning): - # regression test for #1044 - app.builder.build(['external_links']) - et = ElementTree.parse(app.outdir / 'external_links.xml') - secs = et.findall('section') - - para0 = secs[0].findall('paragraph') - # external link check - assert_elem( - para0[0], - texts=['EXTERNAL LINK TO', 'Python', '.'], - refs=['http://python.org/index.html']) - - # internal link check - assert_elem( - para0[1], - texts=['EXTERNAL LINKS', 'IS INTERNAL LINK.'], - refs=['i18n-with-external-links']) - - # inline link check - assert_elem( - para0[2], - texts=['INLINE LINK BY', 'THE SPHINX SITE', '.'], - refs=['http://sphinx-doc.org']) - - # unnamed link check - assert_elem( - para0[3], - texts=['UNNAMED', 'LINK', '.'], - refs=['http://google.com']) - - # link target swapped translation - para1 = secs[1].findall('paragraph') - 
assert_elem( - para1[0], - texts=['LINK TO', 'external2', 'AND', 'external1', '.'], - refs=['http://example.com/external2', - 'http://example.com/external1']) - assert_elem( - para1[1], - texts=['LINK TO', 'THE PYTHON SITE', 'AND', 'THE SPHINX SITE', - '.'], - refs=['http://python.org', 'http://sphinx-doc.org']) - - # multiple references in the same line - para2 = secs[2].findall('paragraph') - assert_elem( - para2[0], - texts=['LINK TO', 'EXTERNAL LINKS', ',', 'Python', ',', - 'THE SPHINX SITE', ',', 'UNNAMED', 'AND', - 'THE PYTHON SITE', '.'], - refs=['i18n-with-external-links', 'http://python.org/index.html', - 'http://sphinx-doc.org', 'http://google.com', - 'http://python.org']) - - -@with_intl_app(buildername='text', freshenv=True) -def test_i18n_literalblock_warning(app, status, warning): - #app.builddir.rmtree(True) # for warnings acceleration - app.builder.build(['literalblock']) result = (app.outdir / 'literalblock.txt').text(encoding='utf-8') expect = (u"\nI18N WITH LITERAL BLOCK" u"\n***********************\n" @@ -326,18 +175,15 @@ def test_i18n_literalblock_warning(app, status, warning): u"\n literal block\n" u"\nMISSING LITERAL BLOCK:\n" u"\nreference') + yield assert_equal, len(re.findall(expected_expr, result)), 2 + + expected_expr = ('reference') + yield assert_equal, len(re.findall(expected_expr, result)), 0 + + expected_expr = ('I18N WITH ' + 'REFS INCONSISTENCY') + yield assert_equal, len(re.findall(expected_expr, result)), 1 + + # --- index entries: regression test for #976 + result = (app.outdir / 'genindex.html').text(encoding='utf-8') def wrap(tag, keyword): @@ -579,12 +353,10 @@ def test_i18n_index_entries(app, status, warning): wrap('a', 'BUILTIN'), ] for expr in expected_exprs: - assert re.search(expr, result, re.M) + yield re_search, expr, result, re.M + # --- versionchanges -@with_intl_app(buildername='html', freshenv=True) -def test_versionchange(app, status, warning): - app.builder.build(['versionchange']) result = (app.outdir / 'versionchange.html').text(encoding='utf-8') def get_content(result, name): @@ -600,83 +372,266 @@ def test_versionchange(app, status, warning): u"""THIS IS THE FIRST PARAGRAPH OF DEPRECATED.

          \n""" u"""

          THIS IS THE SECOND PARAGRAPH OF DEPRECATED.

          \n""") matched_content = get_content(result, "deprecated") - assert expect1 == matched_content + yield assert_equal, expect1, matched_content expect2 = ( u"""

          New in version 1.0: """ u"""THIS IS THE FIRST PARAGRAPH OF VERSIONADDED.

          \n""") matched_content = get_content(result, "versionadded") - assert expect2 == matched_content + yield assert_equal, expect2, matched_content expect3 = ( u"""

          Changed in version 1.0: """ u"""THIS IS THE FIRST PARAGRAPH OF VERSIONCHANGED.

          \n""") matched_content = get_content(result, "versionchanged") - assert expect3 == matched_content + yield assert_equal, expect3, matched_content + # --- docfields -@with_intl_app(buildername='text', freshenv=True) -def test_i18n_docfields(app, status, warning): - app.builder.build(['docfields']) - result = (app.outdir / 'docfields.txt').text(encoding='utf-8') - expect = (u"\nI18N WITH DOCFIELDS" - u"\n*******************\n" - u"\nclass class Cls1\n" - u"\n Parameters:" - u"\n **param** -- DESCRIPTION OF PARAMETER param\n" - u"\nclass class Cls2\n" - u"\n Parameters:" - u"\n * **foo** -- DESCRIPTION OF PARAMETER foo\n" - u"\n * **bar** -- DESCRIPTION OF PARAMETER bar\n" - u"\nclass class Cls3(values)\n" - u"\n Raises ValueError:" - u"\n IF THE VALUES ARE OUT OF RANGE\n" - u"\nclass class Cls4(values)\n" - u"\n Raises:" - u"\n * **TypeError** -- IF THE VALUES ARE NOT VALID\n" - u"\n * **ValueError** -- IF THE VALUES ARE OUT OF RANGE\n" - u"\nclass class Cls5\n" - u"\n Returns:" - u'\n A NEW "Cls3" INSTANCE\n') - assert result == expect - - -@with_intl_app(buildername='text', freshenv=True) -def test_i18n_admonitions(app, status, warning): - # #1206: gettext did not translate admonition directive's title - # seealso: http://docutils.sourceforge.net/docs/ref/rst/directives.html#admonitions - app.builder.build(['admonitions']) - result = (app.outdir / 'admonitions.txt').text(encoding='utf-8') - directives = ( - "attention", "caution", "danger", "error", "hint", - "important", "note", "tip", "warning", "admonition",) - for d in directives: - assert d.upper() + " TITLE" in result - assert d.upper() + " BODY" in result - - -@with_intl_app(buildername='html', freshenv=True) -def test_i18n_docfields_html(app, status, warning): - app.builder.build(['docfields']) - (app.outdir / 'docfields.html').text(encoding='utf-8') # expect no error by build + (app.outdir / 'docfields.html').text(encoding='utf-8') + # --- gettext template -@with_intl_app(buildername='html') -def test_gettext_template(app, status, warning): - app.builder.build_all() result = (app.outdir / 'index.html').text(encoding='utf-8') - assert "WELCOME" in result - assert "SPHINX 2013.120" in result + yield assert_in, "WELCOME", result + yield assert_in, "SPHINX 2013.120", result + # --- rebuild by .mo mtime -@with_intl_app(buildername='html') -def test_rebuild_by_mo_mtime(app, status, warning): app.builder.build_update() _, count, _ = app.env.update(app.config, app.srcdir, app.doctreedir, app) - assert count == 0 + yield assert_equal, count, 0 - mo = (app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').bytes() - (app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').write_bytes(mo) + (app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').utime(None) _, count, _ = app.env.update(app.config, app.srcdir, app.doctreedir, app) - assert count == 1 + yield assert_equal, count, 1 + + +@gen_with_intl_app('xml', freshenv=True) +def test_xml_builder(app, status, warning): + app.builder.build_all() + + # --- footnotes: regression test for fix #955, #1176 + + et = ElementTree.parse(app.outdir / 'footnote.xml') + secs = et.findall('section') + + para0 = secs[0].findall('paragraph') + yield (assert_elem, + para0[0], + ['I18N WITH FOOTNOTE', 'INCLUDE THIS CONTENTS', + '2', '[ref]', '1', '100', '.'], + ['i18n-with-footnote', 'ref']) + + footnote0 = secs[0].findall('footnote') + yield (assert_elem, + footnote0[0], + ['1', 'THIS IS A AUTO NUMBERED FOOTNOTE.'], + None, + ['1']) + yield (assert_elem, + footnote0[1], + ['100', 'THIS IS A NUMBERED FOOTNOTE.'], + None, + 
['100']) + yield (assert_elem, + footnote0[2], + ['2', 'THIS IS A AUTO NUMBERED NAMED FOOTNOTE.'], + None, + ['named']) + + citation0 = secs[0].findall('citation') + yield (assert_elem, + citation0[0], + ['ref', 'THIS IS A NAMED FOOTNOTE.'], + None, + ['ref']) + + warnings = warning.getvalue().replace(os.sep, '/') + warning_expr = u'.*/footnote.xml:\\d*: SEVERE: Duplicate ID: ".*".\n' + yield not_re_search, warning_expr, warnings + + # --- footnote backlinks: i18n test for #1058 + + et = ElementTree.parse(app.outdir / 'footnote.xml') + secs = et.findall('section') + + para0 = secs[0].findall('paragraph') + refs0 = para0[0].findall('footnote_reference') + refid2id = dict([ + (r.attrib.get('refid'), r.attrib.get('ids')) for r in refs0]) + + footnote0 = secs[0].findall('footnote') + for footnote in footnote0: + ids = footnote.attrib.get('ids') + backrefs = footnote.attrib.get('backrefs') + yield assert_equal, refid2id[ids], backrefs + + # --- refs in the Python domain + + et = ElementTree.parse(app.outdir / 'refs_python_domain.xml') + secs = et.findall('section') + + # regression test for fix #1363 + para0 = secs[0].findall('paragraph') + yield (assert_elem, + para0[0], + ['SEE THIS DECORATOR:', 'sensitive_variables()', '.'], + ['sensitive.sensitive_variables']) + + # --- keep external links: regression test for #1044 + + et = ElementTree.parse(app.outdir / 'external_links.xml') + secs = et.findall('section') + + para0 = secs[0].findall('paragraph') + # external link check + yield (assert_elem, + para0[0], + ['EXTERNAL LINK TO', 'Python', '.'], + ['http://python.org/index.html']) + + # internal link check + yield (assert_elem, + para0[1], + ['EXTERNAL LINKS', 'IS INTERNAL LINK.'], + ['i18n-with-external-links']) + + # inline link check + yield (assert_elem, + para0[2], + ['INLINE LINK BY', 'THE SPHINX SITE', '.'], + ['http://sphinx-doc.org']) + + # unnamed link check + yield (assert_elem, + para0[3], + ['UNNAMED', 'LINK', '.'], + ['http://google.com']) + + # link target swapped translation + para1 = secs[1].findall('paragraph') + yield (assert_elem, + para1[0], + ['LINK TO', 'external2', 'AND', 'external1', '.'], + ['http://example.com/external2', + 'http://example.com/external1']) + yield (assert_elem, + para1[1], + ['LINK TO', 'THE PYTHON SITE', 'AND', 'THE SPHINX SITE', '.'], + ['http://python.org', 'http://sphinx-doc.org']) + + # multiple references in the same line + para2 = secs[2].findall('paragraph') + yield (assert_elem, + para2[0], + ['LINK TO', 'EXTERNAL LINKS', ',', 'Python', ',', + 'THE SPHINX SITE', ',', 'UNNAMED', 'AND', + 'THE PYTHON SITE', '.'], + ['i18n-with-external-links', 'http://python.org/index.html', + 'http://sphinx-doc.org', 'http://google.com', + 'http://python.org']) + + # --- role xref: regression test for #1090, #1193 + + et = ElementTree.parse(app.outdir / 'role_xref.xml') + sec1, sec2 = et.findall('section') + + para1, = sec1.findall('paragraph') + yield (assert_elem, + para1, + ['LINK TO', "I18N ROCK'N ROLE XREF", ',', 'CONTENTS', ',', + 'SOME NEW TERM', '.'], + ['i18n-role-xref', 'contents', + 'glossary_terms#term-some-term']) + + para2 = sec2.findall('paragraph') + yield (assert_elem, + para2[0], + ['LINK TO', 'SOME OTHER NEW TERM', 'AND', 'SOME NEW TERM', '.'], + ['glossary_terms#term-some-other-term', + 'glossary_terms#term-some-term']) + yield(assert_elem, + para2[1], + ['LINK TO', 'SAME TYPE LINKS', 'AND', + "I18N ROCK'N ROLE XREF", '.'], + ['same-type-links', 'i18n-role-xref']) + yield (assert_elem, + para2[2], + ['LINK TO', 'I18N WITH GLOSSARY TERMS', 
'AND', 'CONTENTS', '.'], + ['glossary_terms', 'contents']) + yield (assert_elem, + para2[3], + ['LINK TO', '--module', 'AND', '-m', '.'], + ['cmdoption--module', 'cmdoption-m']) + yield (assert_elem, + para2[4], + ['LINK TO', 'env2', 'AND', 'env1', '.'], + ['envvar-env2', 'envvar-env1']) + yield (assert_elem, + para2[5], + ['LINK TO', 'token2', 'AND', 'token1', '.'], + []) # TODO: how do I link token role to productionlist? + yield (assert_elem, + para2[6], + ['LINK TO', 'same-type-links', 'AND', "i18n-role-xref", '.'], + ['same-type-links', 'i18n-role-xref']) + + # warnings + warnings = warning.getvalue().replace(os.sep, '/') + yield assert_not_in, 'term not in glossary', warnings + yield assert_not_in, 'undefined label', warnings + yield assert_not_in, 'unknown document', warnings + + # --- label targets: regression test for #1193, #1265 + + et = ElementTree.parse(app.outdir / 'label_target.xml') + secs = et.findall('section') + + para0 = secs[0].findall('paragraph') + yield (assert_elem, + para0[0], + ['X SECTION AND LABEL', 'POINT TO', 'implicit-target', 'AND', + 'X SECTION AND LABEL', 'POINT TO', 'section-and-label', '.'], + ['implicit-target', 'section-and-label']) + + para1 = secs[1].findall('paragraph') + yield (assert_elem, + para1[0], + ['X EXPLICIT-TARGET', 'POINT TO', 'explicit-target', 'AND', + 'X EXPLICIT-TARGET', 'POINT TO DUPLICATED ID LIKE', 'id1', + '.'], + ['explicit-target', 'id1']) + + para2 = secs[2].findall('paragraph') + yield (assert_elem, + para2[0], + ['X IMPLICIT SECTION NAME', 'POINT TO', + 'implicit-section-name', '.'], + ['implicit-section-name']) + + sec2 = secs[2].findall('section') + + para2_0 = sec2[0].findall('paragraph') + yield (assert_elem, + para2_0[0], + ['`X DUPLICATED SUB SECTION`_', 'IS BROKEN LINK.'], + []) + + para3 = secs[3].findall('paragraph') + yield (assert_elem, + para3[0], + ['X', 'bridge label', + 'IS NOT TRANSLATABLE BUT LINKED TO TRANSLATED ' + + 'SECTION TITLE.'], + ['label-bridged-target-section']) + yield (assert_elem, + para3[1], + ['X', 'bridge label', 'POINT TO', + 'LABEL BRIDGED TARGET SECTION', 'AND', 'bridge label2', + 'POINT TO', 'SECTION AND LABEL', '. THE SECOND APPEARED', + 'bridge label2', 'POINT TO CORRECT TARGET.'], + ['label-bridged-target-section', + 'section-and-label', + 'section-and-label']) From 2661c1060dd55ae28b943d6c866ebfe0c27b3401 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 18:36:27 +0200 Subject: [PATCH 055/293] Refactor/speed up test_build_gettext using generators --- tests/test_build_gettext.py | 33 +++++++++++---------------------- 1 file changed, 11 insertions(+), 22 deletions(-) diff --git a/tests/test_build_gettext.py b/tests/test_build_gettext.py index fe1611583..9edacf538 100644 --- a/tests/test_build_gettext.py +++ b/tests/test_build_gettext.py @@ -10,41 +10,30 @@ """ from __future__ import print_function -import gettext import os import re +import gettext from subprocess import Popen, PIPE -from util import with_app, SkipTest +from nose.tools import assert_true, assert_in, assert_equal + +from util import with_app, gen_with_app, SkipTest -@with_app('gettext') +@gen_with_app('gettext') def test_all(app, status, warning): # Generic build; should fail only when the builder is horribly broken. app.builder.build_all() - -@with_app('gettext') -def test_build(app, status, warning): # Do messages end up in the correct location? 
- app.builder.build(['extapi', 'subdir/includes']) # top-level documents end up in a message catalog - assert (app.outdir / 'extapi.pot').isfile() + yield assert_true, (app.outdir / 'extapi.pot').isfile() # directory items are grouped into sections - assert (app.outdir / 'subdir.pot').isfile() + yield assert_true, (app.outdir / 'subdir.pot').isfile() - -@with_app('gettext') -def test_seealso(app, status, warning): # regression test for issue #960 - app.builder.build(['markup']) catalog = (app.outdir / 'markup.pot').text(encoding='utf-8') - assert 'msgid "something, something else, something more"' in catalog - - -@with_app('gettext') -def test_gettext(app, status, warning): - app.builder.build(['markup']) + yield assert_in, 'msgid "something, something else, something more"', catalog (app.outdir / 'en' / 'LC_MESSAGES').makedirs() cwd = os.getcwd() @@ -63,7 +52,7 @@ def test_gettext(app, status, warning): print(stderr) assert False, 'msginit exited with return code %s' % \ p.returncode - assert (app.outdir / 'en_US.po').isfile(), 'msginit failed' + yield assert_true, (app.outdir / 'en_US.po').isfile(), 'msginit failed' try: p = Popen(['msgfmt', 'en_US.po', '-o', os.path.join('en', 'LC_MESSAGES', 'test_root.mo')], @@ -77,13 +66,13 @@ def test_gettext(app, status, warning): print(stderr) assert False, 'msgfmt exited with return code %s' % \ p.returncode - assert (app.outdir / 'en' / 'LC_MESSAGES' / 'test_root.mo').isfile(), \ + yield assert_true, (app.outdir / 'en' / 'LC_MESSAGES' / 'test_root.mo').isfile(), \ 'msgfmt failed' finally: os.chdir(cwd) _ = gettext.translation('test_root', app.outdir, languages=['en']).gettext - assert _("Testing various markup") == u"Testing various markup" + yield assert_equal, _("Testing various markup"), u"Testing various markup" @with_app('gettext', testroot='intl', From b335e03b029582f38b6c6245072ae4b03284bd36 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 18:37:44 +0200 Subject: [PATCH 056/293] Speed up test_theming using a minimal project. --- tests/test_theming.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_theming.py b/tests/test_theming.py index d6f41da1e..404c31974 100644 --- a/tests/test_theming.py +++ b/tests/test_theming.py @@ -57,7 +57,7 @@ def test_theme_api(app, status, warning): assert not os.path.exists(themedir) -@with_app() +@with_app(testroot='tocdepth') # a minimal root def test_js_source(app, status, warning): # Now sphinx provides non-minified JS files for jquery.js and underscore.js # to clarify the source of the minified files. see also #1434. From b637f0a72812008e471b9a8985ebca8fb748ee64 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 18:41:08 +0200 Subject: [PATCH 057/293] No need to insist on fresh builds with test_build_* --- tests/test_build_html.py | 10 +++++----- tests/test_build_latex.py | 2 +- tests/test_build_texinfo.py | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/test_build_html.py b/tests/test_build_html.py index 9cf21c63c..214580de7 100644 --- a/tests/test_build_html.py +++ b/tests/test_build_html.py @@ -21,21 +21,21 @@ from etree13 import ElementTree as ET ENV_WARNINGS = """\ -%(root)s/autodoc_fodder.py:docstring of autodoc_fodder\\.MarkupError:2: \ +(%(root)s/autodoc_fodder.py:docstring of autodoc_fodder\\.MarkupError:2: \ WARNING: Explicit markup ends without a blank line; unexpected \ unindent\\.\\n? 
-%(root)s/images.txt:9: WARNING: image file not readable: foo.png +)?%(root)s/images.txt:9: WARNING: image file not readable: foo.png %(root)s/images.txt:23: WARNING: nonlocal image URI found: \ http://www.python.org/logo.png %(root)s/includes.txt:\\d*: WARNING: Encoding 'utf-8-sig' used for \ reading included file u'.*?wrongenc.inc' seems to be wrong, try giving an \ :encoding: option\\n? %(root)s/includes.txt:4: WARNING: download file not readable: .*?nonexisting.png -%(root)s/markup.txt:\\d+: WARNING: Malformed :option: u'Python c option', does \ +(%(root)s/markup.txt:\\d+: WARNING: Malformed :option: u'Python c option', does \ not contain option marker - or -- or / or \\+ %(root)s/undecodable.txt:3: WARNING: undecodable source characters, replacing \ with "\\?": b?'here: >>>\\\\xbb<<<' -""" +)?""" HTML_WARNINGS = ENV_WARNINGS + """\ %(root)s/images.txt:20: WARNING: no matching candidate for image URI u'foo.\\*' @@ -341,7 +341,7 @@ def check_extra_entries(outdir): assert (outdir / 'robots.txt').isfile() -@gen_with_app(buildername='html', freshenv=True, +@gen_with_app(buildername='html', confoverrides={'html_context.hckey_co': 'hcval_co'}, tags=['testtag']) def test_html_output(app, status, warning): diff --git a/tests/test_build_latex.py b/tests/test_build_latex.py index 8edea98ff..9e4c11d59 100644 --- a/tests/test_build_latex.py +++ b/tests/test_build_latex.py @@ -33,7 +33,7 @@ if PY3: LATEX_WARNINGS = remove_unicode_literals(LATEX_WARNINGS) -@with_app(buildername='latex', freshenv=True) +@with_app(buildername='latex') def test_latex(app, status, warning): LaTeXTranslator.ignore_missing_images = True app.builder.build_all() diff --git a/tests/test_build_texinfo.py b/tests/test_build_texinfo.py index 7f50857f9..bb10f8fa4 100644 --- a/tests/test_build_texinfo.py +++ b/tests/test_build_texinfo.py @@ -32,7 +32,7 @@ if PY3: TEXINFO_WARNINGS = remove_unicode_literals(TEXINFO_WARNINGS) -@with_app('texinfo', freshenv=True) +@with_app('texinfo') def test_texinfo(app, status, warning): TexinfoTranslator.ignore_missing_images = True app.builder.build_all() From 70cf797c103adff311dd27ff25da62f5ecffc699 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 18:46:15 +0200 Subject: [PATCH 058/293] fix incompatibility in doctree between gettext and the rest --- tests/test_build_gettext.py | 2 +- tests/test_environment.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_build_gettext.py b/tests/test_build_gettext.py index 9edacf538..23a5ef806 100644 --- a/tests/test_build_gettext.py +++ b/tests/test_build_gettext.py @@ -20,7 +20,7 @@ from nose.tools import assert_true, assert_in, assert_equal from util import with_app, gen_with_app, SkipTest -@gen_with_app('gettext') +@gen_with_app('gettext', srcdir='root-gettext') def test_all(app, status, warning): # Generic build; should fail only when the builder is horribly broken. app.builder.build_all() diff --git a/tests/test_environment.py b/tests/test_environment.py index 4776bbd66..115795453 100644 --- a/tests/test_environment.py +++ b/tests/test_environment.py @@ -24,7 +24,7 @@ warnings = [] def setup_module(): global app, env - app = TestApp(srcdir='env-test') + app = TestApp(srcdir='root-envtest') env = app.env env.set_warnfunc(lambda *args: warnings.append(args)) From fa91f19e5513e5a694472d657d6463ee4b21b231 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 18:48:21 +0200 Subject: [PATCH 059/293] Reimplement assert_in and assert_not_in, they are not in nose in Py2.6. 
--- tests/test_intl.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/tests/test_intl.py b/tests/test_intl.py index bbcf93eba..19d7189aa 100644 --- a/tests/test_intl.py +++ b/tests/test_intl.py @@ -16,7 +16,7 @@ import re from subprocess import Popen, PIPE from xml.etree import ElementTree -from nose.tools import assert_equal, assert_in, assert_not_in +from nose.tools import assert_equal from six import string_types from util import tempdir, rootdir, path, gen_with_app, SkipTest @@ -40,6 +40,16 @@ def startswith(thing, prefix): assert False, '%r does not start with %r' % (thing, prefix) +def assert_in(x, thing): + if x not in thing: + assert False, '%r is not in %r' % (x, thing) + + +def assert_not_in(x, thing): + if x in thing: + assert False, '%r is in %r' % (x, thing) + + def gen_with_intl_app(*args, **kw): default_kw = { 'testroot': 'intl', @@ -299,7 +309,6 @@ def test_text_builder(app, status, warning): yield assert_in, d.upper() + " BODY", result - @gen_with_intl_app('html', freshenv=True) def test_html_builder(app, status, warning): app.builder.build_all() From ac690b4b074bc0c5b28e3151b7e93265f12761bb Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 18:54:01 +0200 Subject: [PATCH 060/293] move new assertion helpers to util.py --- tests/test_build_gettext.py | 4 ++-- tests/test_intl.py | 45 +++++++++---------------------------- tests/util.py | 25 +++++++++++++++++++++ 3 files changed, 38 insertions(+), 36 deletions(-) diff --git a/tests/test_build_gettext.py b/tests/test_build_gettext.py index 23a5ef806..d71894432 100644 --- a/tests/test_build_gettext.py +++ b/tests/test_build_gettext.py @@ -15,9 +15,9 @@ import re import gettext from subprocess import Popen, PIPE -from nose.tools import assert_true, assert_in, assert_equal +from nose.tools import assert_true, assert_equal -from util import with_app, gen_with_app, SkipTest +from util import with_app, gen_with_app, SkipTest, assert_in @gen_with_app('gettext', srcdir='root-gettext') diff --git a/tests/test_intl.py b/tests/test_intl.py index 19d7189aa..67dd02dea 100644 --- a/tests/test_intl.py +++ b/tests/test_intl.py @@ -19,37 +19,14 @@ from xml.etree import ElementTree from nose.tools import assert_equal from six import string_types -from util import tempdir, rootdir, path, gen_with_app, SkipTest +from util import tempdir, rootdir, path, gen_with_app, SkipTest, \ + assert_re_search, assert_not_re_search, assert_in, assert_not_in, \ + assert_startswith root = tempdir / 'test-intl' -def re_search(regex, text, flags=0): - if not re.search(regex, text, flags): - assert False, '%r did not match %r' % (regex, text) - - -def not_re_search(regex, text, flags=0): - if re.search(regex, text, flags): - assert False, '%r did match %r' % (regex, text) - - -def startswith(thing, prefix): - if not thing.startswith(prefix): - assert False, '%r does not start with %r' % (thing, prefix) - - -def assert_in(x, thing): - if x not in thing: - assert False, '%r is not in %r' % (x, thing) - - -def assert_not_in(x, thing): - if x in thing: - assert False, '%r is in %r' % (x, thing) - - def gen_with_intl_app(*args, **kw): default_kw = { 'testroot': 'intl', @@ -132,7 +109,7 @@ def test_text_builder(app, status, warning): warnings = warning.getvalue().replace(os.sep, '/') warning_expr = u'.*/warnings.txt:4: ' \ u'WARNING: Inline literal start-string without end-string.\n' - yield re_search, warning_expr, warnings + yield assert_re_search, warning_expr, warnings result = (app.outdir / 
'warnings.txt').text(encoding='utf-8') expect = (u"\nI18N WITH REST WARNINGS" @@ -151,7 +128,7 @@ def test_text_builder(app, status, warning): # --- check translation in subdirs result = (app.outdir / 'subdir' / 'contents.txt').text(encoding='utf-8') - yield startswith, result, u"\nsubdir contents\n***************\n" + yield assert_startswith, result, u"\nsubdir contents\n***************\n" # --- check warnings for inconsistency in number of references @@ -173,7 +150,7 @@ def test_text_builder(app, status, warning): warning_fmt % 'footnote references' + warning_fmt % 'references' + warning_fmt % 'references') - yield re_search, expected_warning_expr, warnings + yield assert_re_search, expected_warning_expr, warnings # --- check warning for literal block @@ -185,12 +162,12 @@ def test_text_builder(app, status, warning): u"\n literal block\n" u"\nMISSING LITERAL BLOCK:\n" u"\n Date: Sun, 21 Sep 2014 18:59:56 +0200 Subject: [PATCH 061/293] add a bullet point about keeping tests fast --- doc/devguide.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/doc/devguide.rst b/doc/devguide.rst index 885d52b0e..9d85ec0b0 100644 --- a/doc/devguide.rst +++ b/doc/devguide.rst @@ -130,6 +130,11 @@ These are the basic steps needed to start developing on Sphinx. * For bug fixes, first add a test that fails without your changes and passes after they are applied. + * Tests that need a sphinx-build run should be integrated in one of the + existing test modules if possible. New tests that to ``@with_app`` and + then ``build_all`` for a few assertions are not good since *the test suite + should not take more than a minute to run*. + #. Please add a bullet point to :file:`CHANGES` if the fix or feature is not trivial (small doc updates, typo fixes). Then commit:: From c5cc2a8cf9edc00ff0ed695e8a0b5c98474c0abf Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 19:01:03 +0200 Subject: [PATCH 062/293] flush stdout after printing messages, might fix output ordering on drone.io --- tests/run.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/run.py b/tests/run.py index 8643606d2..e143ac15f 100755 --- a/tests/run.py +++ b/tests/run.py @@ -45,5 +45,7 @@ if tempdir.exists(): tempdir.makedirs() print('Running Sphinx test suite...') +sys.stdout.flush() + import nose nose.main() From fe077ea18ba2b8c5618b3ca89b9d97214ca08e2c Mon Sep 17 00:00:00 2001 From: Guillem Barba Date: Sun, 21 Sep 2014 19:50:46 +0200 Subject: [PATCH 063/293] Don't multiply docnames list size per each listener of env-read-docs The env-read-docs listener function doesn't return the modified docnames list but change the list received as parameter. 
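A handler is expected to reorder, extend or shrink that same list in place.
A minimal sketch of an extension relying on this contract (the helper name
and the setup() wiring are illustrative; only the 'env-read-docs' event and
its (app, env, docnames) signature come from this series):

    def prioritize_index_docs(app, env, docnames):
        # Mutate the list in place; any return value is ignored.
        # Here: read documents whose name ends with 'index' first.
        docnames.sort(key=lambda docname: not docname.endswith('index'))

    def setup(app):
        app.connect('env-read-docs', prioritize_index_docs)
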
Updated the tests --- sphinx/environment.py | 6 +----- tests/test_env_read_docs.py | 6 ++---- 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/sphinx/environment.py b/sphinx/environment.py index a4901a746..38218729f 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -471,11 +471,7 @@ class BuildEnvironment: # read all new and changed files docnames = sorted(added | changed) if app: - new_docnames = [] - for mod_docnames in app.emit('env-read-docs', self, docnames): - new_docnames.extend(mod_docnames) - if new_docnames: - docnames = new_docnames + app.emit('env-read-docs', self, docnames) for docname in docnames: yield docname self.read_doc(docname, app=app) diff --git a/tests/test_env_read_docs.py b/tests/test_env_read_docs.py index 0b57815bd..2e7033344 100644 --- a/tests/test_env_read_docs.py +++ b/tests/test_env_read_docs.py @@ -24,7 +24,7 @@ def setup_module(): def test_default_docnames_order(): """By default, docnames are read in alphanumeric order""" def on_env_read_docs(app, env, docnames): - return docnames + pass app = TestApp(srcdir='(temp)', freshenv=True) env = app.env @@ -38,7 +38,6 @@ def test_inverse_docnames_order(): """By default, docnames are read in alphanumeric order""" def on_env_read_docs(app, env, docnames): docnames.reverse() - return docnames app = TestApp(srcdir='(temp)', freshenv=True) env = app.env @@ -46,8 +45,7 @@ def test_inverse_docnames_order(): msg, num, it = env.update(app.config, app.srcdir, app.doctreedir, app) read_docnames = [docname for docname in it] - reversed_read_docnames = sorted(read_docnames)[:] - reversed_read_docnames.reverse() + reversed_read_docnames = sorted(read_docnames, reverse=True) assert len(read_docnames) > 1 and read_docnames == reversed_read_docnames def teardown_module(): From df53ece2f7b9a79133d7f52a64c5f2795fda95a4 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Sun, 21 Sep 2014 20:21:43 +0200 Subject: [PATCH 064/293] Minor PEP8 cleanup. --- sphinx/addnodes.py | 28 ++++++++++++++++++++++++++++ sphinx/apidoc.py | 8 ++++---- sphinx/application.py | 8 ++++---- sphinx/environment.py | 3 ++- sphinx/quickstart.py | 23 ++++++++++++++++------- sphinx/roles.py | 3 +++ sphinx/theming.py | 4 +++- sphinx/transforms.py | 26 ++++++++++++++------------ 8 files changed, 74 insertions(+), 29 deletions(-) diff --git a/sphinx/addnodes.py b/sphinx/addnodes.py index 55abdb019..9d8c46901 100644 --- a/sphinx/addnodes.py +++ b/sphinx/addnodes.py @@ -25,6 +25,7 @@ class desc(nodes.Admonition, nodes.Element): contains one or more ``desc_signature`` and a ``desc_content``. """ + class desc_signature(nodes.Part, nodes.Inline, nodes.TextElement): """Node for object signatures. 
@@ -39,33 +40,42 @@ class desc_addname(nodes.Part, nodes.Inline, nodes.TextElement): # compatibility alias desc_classname = desc_addname + class desc_type(nodes.Part, nodes.Inline, nodes.TextElement): """Node for return types or object type names.""" + class desc_returns(desc_type): """Node for a "returns" annotation (a la -> in Python).""" def astext(self): return ' -> ' + nodes.TextElement.astext(self) + class desc_name(nodes.Part, nodes.Inline, nodes.TextElement): """Node for the main object name.""" + class desc_parameterlist(nodes.Part, nodes.Inline, nodes.TextElement): """Node for a general parameter list.""" child_text_separator = ', ' + class desc_parameter(nodes.Part, nodes.Inline, nodes.TextElement): """Node for a single parameter.""" + class desc_optional(nodes.Part, nodes.Inline, nodes.TextElement): """Node for marking optional parts of the parameter list.""" child_text_separator = ', ' + def astext(self): return '[' + nodes.TextElement.astext(self) + ']' + class desc_annotation(nodes.Part, nodes.Inline, nodes.TextElement): """Node for signature annotations (not Python 3-style annotations).""" + class desc_content(nodes.General, nodes.Element): """Node for object description content. @@ -82,15 +92,18 @@ class versionmodified(nodes.Admonition, nodes.TextElement): directives. """ + class seealso(nodes.Admonition, nodes.Element): """Custom "see also" admonition.""" + class productionlist(nodes.Admonition, nodes.Element): """Node for grammar production lists. Contains ``production`` nodes. """ + class production(nodes.Part, nodes.Inline, nodes.TextElement): """Node for a single grammar production rule.""" @@ -107,26 +120,33 @@ class index(nodes.Invisible, nodes.Inline, nodes.TextElement): *entrytype* is one of "single", "pair", "double", "triple". """ + class centered(nodes.Part, nodes.TextElement): """Deprecated.""" + class acks(nodes.Element): """Special node for "acks" lists.""" + class hlist(nodes.Element): """Node for "horizontal lists", i.e. lists that should be compressed to take up less vertical space. """ + class hlistcol(nodes.Element): """Node for one column in a horizontal list.""" + class compact_paragraph(nodes.paragraph): """Node for a compact paragraph (which never makes a

          node).""" + class glossary(nodes.Element): """Node to insert a glossary.""" + class only(nodes.Element): """Node for "only" directives (conditional inclusion based on tags).""" @@ -136,14 +156,17 @@ class only(nodes.Element): class start_of_file(nodes.Element): """Node to mark start of a new file, used in the LaTeX builder only.""" + class highlightlang(nodes.Element): """Inserted to set the highlight language and line number options for subsequent code blocks. """ + class tabular_col_spec(nodes.Element): """Node for specifying tabular columns, used for LaTeX output.""" + class meta(nodes.Special, nodes.PreBibliographic, nodes.Element): """Node for meta directive -- same as docutils' standard meta node, but pickleable. @@ -160,22 +183,27 @@ class pending_xref(nodes.Inline, nodes.Element): BuildEnvironment.resolve_references. """ + class download_reference(nodes.reference): """Node for download references, similar to pending_xref.""" + class literal_emphasis(nodes.emphasis): """Node that behaves like `emphasis`, but further text processors are not applied (e.g. smartypants for HTML output). """ + class literal_strong(nodes.strong): """Node that behaves like `strong`, but further text processors are not applied (e.g. smartypants for HTML output). """ + class abbreviation(nodes.Inline, nodes.TextElement): """Node for abbreviations with explanations.""" + class termsep(nodes.Structural, nodes.Element): """Separates two terms within a node.""" diff --git a/sphinx/apidoc.py b/sphinx/apidoc.py index f716286c7..7b1a96d25 100644 --- a/sphinx/apidoc.py +++ b/sphinx/apidoc.py @@ -88,7 +88,7 @@ def create_module_file(package, module, opts): text = format_heading(1, '%s module' % module) else: text = '' - #text += format_heading(2, ':mod:`%s` Module' % module) + # text += format_heading(2, ':mod:`%s` Module' % module) text += format_directive(module, package) write_file(makename(package, module), text, opts) @@ -173,7 +173,7 @@ def shall_skip(module, opts): # skip if it has a "private" name and this is selected filename = path.basename(module) if filename != '__init__.py' and filename.startswith('_') and \ - not opts.includeprivate: + not opts.includeprivate: return True return False @@ -218,7 +218,7 @@ def recurse_tree(rootpath, excludes, opts): if is_pkg: # we are in a package with something to document if subs or len(py_files) > 1 or not \ - shall_skip(path.join(root, INITPY), opts): + shall_skip(path.join(root, INITPY), opts): subpackage = root[len(rootpath):].lstrip(path.sep).\ replace(path.sep, '.') create_package_file(root, root_package, subpackage, @@ -318,7 +318,7 @@ Note: By default this script will not overwrite already created files.""") (opts, args) = parser.parse_args(argv[1:]) if opts.show_version: - print('Sphinx (sphinx-apidoc) %s' % __version__) + print('Sphinx (sphinx-apidoc) %s' % __version__) return 0 if not args: diff --git a/sphinx/application.py b/sphinx/application.py index fe8704018..6d30ab51a 100644 --- a/sphinx/application.py +++ b/sphinx/application.py @@ -200,8 +200,8 @@ class Sphinx(object): else: try: self.info(bold('loading pickled environment... 
'), nonl=True) - self.env = BuildEnvironment.frompickle(self.config, - path.join(self.doctreedir, ENV_PICKLE_FILENAME)) + self.env = BuildEnvironment.frompickle( + self.config, path.join(self.doctreedir, ENV_PICKLE_FILENAME)) self.env.domains = {} for domain in self.domains.keys(): # this can raise if the data version doesn't fit @@ -291,7 +291,7 @@ class Sphinx(object): else: location = None warntext = location and '%s: %s%s\n' % (location, prefix, message) or \ - '%s%s\n' % (prefix, message) + '%s%s\n' % (prefix, message) if self.warningiserror: raise SphinxWarning(warntext) self._warncount += 1 @@ -461,7 +461,7 @@ class Sphinx(object): else: raise ExtensionError( 'Builder %r already exists (in module %s)' % ( - builder.name, self.builderclasses[builder.name].__module__)) + builder.name, self.builderclasses[builder.name].__module__)) self.builderclasses[builder.name] = builder def add_config_value(self, name, default, rebuild): diff --git a/sphinx/environment.py b/sphinx/environment.py index 648e22565..8e94ef4ea 100644 --- a/sphinx/environment.py +++ b/sphinx/environment.py @@ -503,7 +503,8 @@ class BuildEnvironment: """Custom decoding error handler that warns and replaces.""" linestart = error.object.rfind(b'\n', 0, error.start) lineend = error.object.find(b'\n', error.start) - if lineend == -1: lineend = len(error.object) + if lineend == -1: + lineend = len(error.object) lineno = error.object.count(b'\n', 0, error.start) + 1 self.warn(self.docname, 'undecodable source characters, ' 'replacing with "?": %r' % diff --git a/sphinx/quickstart.py b/sphinx/quickstart.py index fdfb81062..f81b38f05 100644 --- a/sphinx/quickstart.py +++ b/sphinx/quickstart.py @@ -10,13 +10,16 @@ """ from __future__ import print_function -import sys, os, time, re +import re +import os +import sys +import time from os import path from io import open TERM_ENCODING = getattr(sys.stdin, 'encoding', None) -#try to import readline, unix specific enhancement +# try to import readline, unix specific enhancement try: import readline if readline.__doc__ and 'libedit' in readline.__doc__: @@ -33,7 +36,7 @@ from docutils.utils import column_width from sphinx import __version__ from sphinx.util.osutil import make_filename from sphinx.util.console import purple, bold, red, turquoise, \ - nocolor, color_terminal + nocolor, color_terminal from sphinx.util import texescape # function to get input from terminal -- overridden by the test suite @@ -972,17 +975,20 @@ def mkdir_p(dir): class ValidationError(Exception): """Raised for validation errors.""" + def is_path(x): x = path.expanduser(x) if path.exists(x) and not path.isdir(x): raise ValidationError("Please enter a valid path name.") return x + def nonempty(x): if not x: raise ValidationError("Please enter some text.") return x + def choice(*l): def val(x): if x not in l: @@ -990,17 +996,20 @@ def choice(*l): return x return val + def boolean(x): if x.upper() not in ('Y', 'YES', 'N', 'NO'): raise ValidationError("Please enter either 'y' or 'n'.") return x.upper() in ('Y', 'YES') + def suffix(x): if not (x[0:1] == '.' and len(x) > 1): raise ValidationError("Please enter a file suffix, " "e.g. 
'.rst' or '.txt'.") return x + def ok(x): return x @@ -1097,7 +1106,7 @@ Enter the root path for documentation.''') do_prompt(d, 'path', 'Root path for the documentation', '.', is_path) while path.isfile(path.join(d['path'], 'conf.py')) or \ - path.isfile(path.join(d['path'], 'source', 'conf.py')): + path.isfile(path.join(d['path'], 'source', 'conf.py')): print() print(bold('Error: an existing conf.py has been found in the ' 'selected root path.')) @@ -1169,7 +1178,7 @@ document is a custom template, you can also set this to another filename.''') 'index') while path.isfile(path.join(d['path'], d['master']+d['suffix'])) or \ - path.isfile(path.join(d['path'], 'source', d['master']+d['suffix'])): + path.isfile(path.join(d['path'], 'source', d['master']+d['suffix'])): print() print(bold('Error: the master file %s has already been found in the ' 'selected root path.' % (d['master']+d['suffix']))) @@ -1256,10 +1265,10 @@ def generate(d, overwrite=True, silent=False): d['extensions'] = extensions d['copyright'] = time.strftime('%Y') + ', ' + d['author'] d['author_texescaped'] = text_type(d['author']).\ - translate(texescape.tex_escape_map) + translate(texescape.tex_escape_map) d['project_doc'] = d['project'] + ' Documentation' d['project_doc_texescaped'] = text_type(d['project'] + ' Documentation').\ - translate(texescape.tex_escape_map) + translate(texescape.tex_escape_map) # escape backslashes and single quotes in strings that are put into # a Python string literal diff --git a/sphinx/roles.py b/sphinx/roles.py index 729dcea05..451cfe60b 100644 --- a/sphinx/roles.py +++ b/sphinx/roles.py @@ -236,6 +236,7 @@ def indexmarkup_role(typ, rawtext, text, lineno, inliner, _amp_re = re.compile(r'(? Date: Sun, 21 Sep 2014 20:23:24 +0200 Subject: [PATCH 065/293] Fixup windows newlines. --- doc/authors.rst | 18 +- sphinx/themes/basic/static/jquery.js | 2 +- .../bizstyle/static/css3-mediaqueries_src.js | 2208 ++++++++--------- sphinx/util/i18n.py | 178 +- tests/roots/test-api-set-translator/conf.py | 160 +- tests/roots/test-api-set-translator/index.rst | 4 +- .../test-api-set-translator/nonext/conf.py | 18 +- .../test-api-set-translator/translator.py | 12 +- tests/roots/test-docutilsconf/contents.txt | 30 +- .../roots/test-ext-viewcode/spam/__init__.py | 14 +- tests/roots/test-ext-viewcode/spam/mod1.py | 30 +- tests/roots/test-ext-viewcode/spam/mod2.py | 30 +- tests/roots/test-intl/refs_python_domain.txt | 30 +- tests/roots/test-intl/subdir/contents.txt | 4 +- tests/roots/test-setup/doc/contents.txt | 10 +- 15 files changed, 1374 insertions(+), 1374 deletions(-) diff --git a/doc/authors.rst b/doc/authors.rst index 04c8b2b44..980b33e8c 100644 --- a/doc/authors.rst +++ b/doc/authors.rst @@ -1,9 +1,9 @@ -:tocdepth: 2 - -.. _authors: - -Sphinx authors -============== - -.. include:: ../AUTHORS - +:tocdepth: 2 + +.. _authors: + +Sphinx authors +============== + +.. include:: ../AUTHORS + diff --git a/sphinx/themes/basic/static/jquery.js b/sphinx/themes/basic/static/jquery.js index 83589daa7..388377952 100644 --- a/sphinx/themes/basic/static/jquery.js +++ b/sphinx/themes/basic/static/jquery.js @@ -1,2 +1,2 @@ -/*! jQuery v1.8.3 jquery.com | jquery.org/license */ +/*! 
jQuery v1.8.3 jquery.com | jquery.org/license */ (function(e,t){function _(e){var t=M[e]={};return v.each(e.split(y),function(e,n){t[n]=!0}),t}function H(e,n,r){if(r===t&&e.nodeType===1){var i="data-"+n.replace(P,"-$1").toLowerCase();r=e.getAttribute(i);if(typeof r=="string"){try{r=r==="true"?!0:r==="false"?!1:r==="null"?null:+r+""===r?+r:D.test(r)?v.parseJSON(r):r}catch(s){}v.data(e,n,r)}else r=t}return r}function B(e){var t;for(t in e){if(t==="data"&&v.isEmptyObject(e[t]))continue;if(t!=="toJSON")return!1}return!0}function et(){return!1}function tt(){return!0}function ut(e){return!e||!e.parentNode||e.parentNode.nodeType===11}function at(e,t){do e=e[t];while(e&&e.nodeType!==1);return e}function ft(e,t,n){t=t||0;if(v.isFunction(t))return v.grep(e,function(e,r){var i=!!t.call(e,r,e);return i===n});if(t.nodeType)return v.grep(e,function(e,r){return e===t===n});if(typeof t=="string"){var r=v.grep(e,function(e){return e.nodeType===1});if(it.test(t))return v.filter(t,r,!n);t=v.filter(t,r)}return v.grep(e,function(e,r){return v.inArray(e,t)>=0===n})}function lt(e){var t=ct.split("|"),n=e.createDocumentFragment();if(n.createElement)while(t.length)n.createElement(t.pop());return n}function Lt(e,t){return e.getElementsByTagName(t)[0]||e.appendChild(e.ownerDocument.createElement(t))}function At(e,t){if(t.nodeType!==1||!v.hasData(e))return;var n,r,i,s=v._data(e),o=v._data(t,s),u=s.events;if(u){delete o.handle,o.events={};for(n in u)for(r=0,i=u[n].length;r").appendTo(i.body),n=t.css("display");t.remove();if(n==="none"||n===""){Pt=i.body.appendChild(Pt||v.extend(i.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!Ht||!Pt.createElement)Ht=(Pt.contentWindow||Pt.contentDocument).document,Ht.write(""),Ht.close();t=Ht.body.appendChild(Ht.createElement(e)),n=Dt(t,"display"),i.body.removeChild(Pt)}return Wt[e]=n,n}function fn(e,t,n,r){var i;if(v.isArray(t))v.each(t,function(t,i){n||sn.test(e)?r(e,i):fn(e+"["+(typeof i=="object"?t:"")+"]",i,n,r)});else if(!n&&v.type(t)==="object")for(i in t)fn(e+"["+i+"]",t[i],n,r);else r(e,t)}function Cn(e){return function(t,n){typeof t!="string"&&(n=t,t="*");var r,i,s,o=t.toLowerCase().split(y),u=0,a=o.length;if(v.isFunction(n))for(;u)[^>]*$|#([\w\-]*)$)/,E=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,S=/^[\],:{}\s]*$/,x=/(?:^|:|,)(?:\s*\[)+/g,T=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,N=/"[^"\\\r\n]*"|true|false|null|-?(?:\d\d*\.|)\d+(?:[eE][\-+]?\d+|)/g,C=/^-ms-/,k=/-([\da-z])/gi,L=function(e,t){return(t+"").toUpperCase()},A=function(){i.addEventListener?(i.removeEventListener("DOMContentLoaded",A,!1),v.ready()):i.readyState==="complete"&&(i.detachEvent("onreadystatechange",A),v.ready())},O={};v.fn=v.prototype={constructor:v,init:function(e,n,r){var s,o,u,a;if(!e)return this;if(e.nodeType)return this.context=this[0]=e,this.length=1,this;if(typeof e=="string"){e.charAt(0)==="<"&&e.charAt(e.length-1)===">"&&e.length>=3?s=[null,e,null]:s=w.exec(e);if(s&&(s[1]||!n)){if(s[1])return n=n instanceof v?n[0]:n,a=n&&n.nodeType?n.ownerDocument||n:i,e=v.parseHTML(s[1],a,!0),E.test(s[1])&&v.isPlainObject(n)&&this.attr.call(e,n,!0),v.merge(this,e);o=i.getElementById(s[2]);if(o&&o.parentNode){if(o.id!==s[2])return r.find(e);this.length=1,this[0]=o}return this.context=i,this.selector=e,this}return!n||n.jquery?(n||r).find(e):this.constructor(n).find(e)}return v.isFunction(e)?r.ready(e):(e.selector!==t&&(this.selector=e.selector,this.context=e.context),v.makeArray(e,this))},selector:"",jquery:"1.8.3",length:0,size:function(){return this.length},toArray:function(){return 
l.call(this)},get:function(e){return e==null?this.toArray():e<0?this[this.length+e]:this[e]},pushStack:function(e,t,n){var r=v.merge(this.constructor(),e);return r.prevObject=this,r.context=this.context,t==="find"?r.selector=this.selector+(this.selector?" ":"")+n:t&&(r.selector=this.selector+"."+t+"("+n+")"),r},each:function(e,t){return v.each(this,e,t)},ready:function(e){return v.ready.promise().done(e),this},eq:function(e){return e=+e,e===-1?this.slice(e):this.slice(e,e+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(l.apply(this,arguments),"slice",l.call(arguments).join(","))},map:function(e){return this.pushStack(v.map(this,function(t,n){return e.call(t,n,t)}))},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:[].sort,splice:[].splice},v.fn.init.prototype=v.fn,v.extend=v.fn.extend=function(){var e,n,r,i,s,o,u=arguments[0]||{},a=1,f=arguments.length,l=!1;typeof u=="boolean"&&(l=u,u=arguments[1]||{},a=2),typeof u!="object"&&!v.isFunction(u)&&(u={}),f===a&&(u=this,--a);for(;a0)return;r.resolveWith(i,[v]),v.fn.trigger&&v(i).trigger("ready").off("ready")},isFunction:function(e){return v.type(e)==="function"},isArray:Array.isArray||function(e){return v.type(e)==="array"},isWindow:function(e){return e!=null&&e==e.window},isNumeric:function(e){return!isNaN(parseFloat(e))&&isFinite(e)},type:function(e){return e==null?String(e):O[h.call(e)]||"object"},isPlainObject:function(e){if(!e||v.type(e)!=="object"||e.nodeType||v.isWindow(e))return!1;try{if(e.constructor&&!p.call(e,"constructor")&&!p.call(e.constructor.prototype,"isPrototypeOf"))return!1}catch(n){return!1}var r;for(r in e);return r===t||p.call(e,r)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},error:function(e){throw new Error(e)},parseHTML:function(e,t,n){var r;return!e||typeof e!="string"?null:(typeof t=="boolean"&&(n=t,t=0),t=t||i,(r=E.exec(e))?[t.createElement(r[1])]:(r=v.buildFragment([e],t,n?null:[]),v.merge([],(r.cacheable?v.clone(r.fragment):r.fragment).childNodes)))},parseJSON:function(t){if(!t||typeof t!="string")return null;t=v.trim(t);if(e.JSON&&e.JSON.parse)return e.JSON.parse(t);if(S.test(t.replace(T,"@").replace(N,"]").replace(x,"")))return(new Function("return "+t))();v.error("Invalid JSON: "+t)},parseXML:function(n){var r,i;if(!n||typeof n!="string")return null;try{e.DOMParser?(i=new DOMParser,r=i.parseFromString(n,"text/xml")):(r=new ActiveXObject("Microsoft.XMLDOM"),r.async="false",r.loadXML(n))}catch(s){r=t}return(!r||!r.documentElement||r.getElementsByTagName("parsererror").length)&&v.error("Invalid XML: "+n),r},noop:function(){},globalEval:function(t){t&&g.test(t)&&(e.execScript||function(t){e.eval.call(e,t)})(t)},camelCase:function(e){return e.replace(C,"ms-").replace(k,L)},nodeName:function(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()},each:function(e,n,r){var i,s=0,o=e.length,u=o===t||v.isFunction(e);if(r){if(u){for(i in e)if(n.apply(e[i],r)===!1)break}else for(;s0&&e[0]&&e[a-1]||a===0||v.isArray(e));if(f)for(;u-1)a.splice(n,1),i&&(n<=o&&o--,n<=u&&u--)}),this},has:function(e){return v.inArray(e,a)>-1},empty:function(){return a=[],this},disable:function(){return a=f=n=t,this},disabled:function(){return!a},lock:function(){return f=t,n||c.disable(),this},locked:function(){return!f},fireWith:function(e,t){return t=t||[],t=[e,t.slice?t.slice():t],a&&(!r||f)&&(i?f.push(t):l(t)),this},fire:function(){return c.fireWith(this,arguments),this},fired:function(){return!!r}};return 
c},v.extend({Deferred:function(e){var t=[["resolve","done",v.Callbacks("once memory"),"resolved"],["reject","fail",v.Callbacks("once memory"),"rejected"],["notify","progress",v.Callbacks("memory")]],n="pending",r={state:function(){return n},always:function(){return i.done(arguments).fail(arguments),this},then:function(){var e=arguments;return v.Deferred(function(n){v.each(t,function(t,r){var s=r[0],o=e[t];i[r[1]](v.isFunction(o)?function(){var e=o.apply(this,arguments);e&&v.isFunction(e.promise)?e.promise().done(n.resolve).fail(n.reject).progress(n.notify):n[s+"With"](this===i?n:this,[e])}:n[s])}),e=null}).promise()},promise:function(e){return e!=null?v.extend(e,r):r}},i={};return r.pipe=r.then,v.each(t,function(e,s){var o=s[2],u=s[3];r[s[1]]=o.add,u&&o.add(function(){n=u},t[e^1][2].disable,t[2][2].lock),i[s[0]]=o.fire,i[s[0]+"With"]=o.fireWith}),r.promise(i),e&&e.call(i,i),i},when:function(e){var t=0,n=l.call(arguments),r=n.length,i=r!==1||e&&v.isFunction(e.promise)?r:0,s=i===1?e:v.Deferred(),o=function(e,t,n){return function(r){t[e]=this,n[e]=arguments.length>1?l.call(arguments):r,n===u?s.notifyWith(t,n):--i||s.resolveWith(t,n)}},u,a,f;if(r>1){u=new Array(r),a=new Array(r),f=new Array(r);for(;t
          a",n=p.getElementsByTagName("*"),r=p.getElementsByTagName("a")[0];if(!n||!r||!n.length)return{};s=i.createElement("select"),o=s.appendChild(i.createElement("option")),u=p.getElementsByTagName("input")[0],r.style.cssText="top:1px;float:left;opacity:.5",t={leadingWhitespace:p.firstChild.nodeType===3,tbody:!p.getElementsByTagName("tbody").length,htmlSerialize:!!p.getElementsByTagName("link").length,style:/top/.test(r.getAttribute("style")),hrefNormalized:r.getAttribute("href")==="/a",opacity:/^0.5/.test(r.style.opacity),cssFloat:!!r.style.cssFloat,checkOn:u.value==="on",optSelected:o.selected,getSetAttribute:p.className!=="t",enctype:!!i.createElement("form").enctype,html5Clone:i.createElement("nav").cloneNode(!0).outerHTML!=="<:nav>",boxModel:i.compatMode==="CSS1Compat",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0,boxSizingReliable:!0,pixelPosition:!1},u.checked=!0,t.noCloneChecked=u.cloneNode(!0).checked,s.disabled=!0,t.optDisabled=!o.disabled;try{delete p.test}catch(d){t.deleteExpando=!1}!p.addEventListener&&p.attachEvent&&p.fireEvent&&(p.attachEvent("onclick",h=function(){t.noCloneEvent=!1}),p.cloneNode(!0).fireEvent("onclick"),p.detachEvent("onclick",h)),u=i.createElement("input"),u.value="t",u.setAttribute("type","radio"),t.radioValue=u.value==="t",u.setAttribute("checked","checked"),u.setAttribute("name","t"),p.appendChild(u),a=i.createDocumentFragment(),a.appendChild(p.lastChild),t.checkClone=a.cloneNode(!0).cloneNode(!0).lastChild.checked,t.appendChecked=u.checked,a.removeChild(u),a.appendChild(p);if(p.attachEvent)for(l in{submit:!0,change:!0,focusin:!0})f="on"+l,c=f in p,c||(p.setAttribute(f,"return;"),c=typeof p[f]=="function"),t[l+"Bubbles"]=c;return v(function(){var n,r,s,o,u="padding:0;margin:0;border:0;display:block;overflow:hidden;",a=i.getElementsByTagName("body")[0];if(!a)return;n=i.createElement("div"),n.style.cssText="visibility:hidden;border:0;width:0;height:0;position:static;top:0;margin-top:1px",a.insertBefore(n,a.firstChild),r=i.createElement("div"),n.appendChild(r),r.innerHTML="
          t
          ",s=r.getElementsByTagName("td"),s[0].style.cssText="padding:0;margin:0;border:0;display:none",c=s[0].offsetHeight===0,s[0].style.display="",s[1].style.display="none",t.reliableHiddenOffsets=c&&s[0].offsetHeight===0,r.innerHTML="",r.style.cssText="box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;",t.boxSizing=r.offsetWidth===4,t.doesNotIncludeMarginInBodyOffset=a.offsetTop!==1,e.getComputedStyle&&(t.pixelPosition=(e.getComputedStyle(r,null)||{}).top!=="1%",t.boxSizingReliable=(e.getComputedStyle(r,null)||{width:"4px"}).width==="4px",o=i.createElement("div"),o.style.cssText=r.style.cssText=u,o.style.marginRight=o.style.width="0",r.style.width="1px",r.appendChild(o),t.reliableMarginRight=!parseFloat((e.getComputedStyle(o,null)||{}).marginRight)),typeof r.style.zoom!="undefined"&&(r.innerHTML="",r.style.cssText=u+"width:1px;padding:1px;display:inline;zoom:1",t.inlineBlockNeedsLayout=r.offsetWidth===3,r.style.display="block",r.style.overflow="visible",r.innerHTML="

          ",r.firstChild.style.width="5px",t.shrinkWrapBlocks=r.offsetWidth!==3,n.style.zoom=1),a.removeChild(n),n=r=s=o=null}),a.removeChild(p),n=r=s=o=u=a=p=null,t}();var D=/(?:\{[\s\S]*\}|\[[\s\S]*\])$/,P=/([A-Z])/g;v.extend({cache:{},deletedIds:[],uuid:0,expando:"jQuery"+(v.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(e){return e=e.nodeType?v.cache[e[v.expando]]:e[v.expando],!!e&&!B(e)},data:function(e,n,r,i){if(!v.acceptData(e))return;var s,o,u=v.expando,a=typeof n=="string",f=e.nodeType,l=f?v.cache:e,c=f?e[u]:e[u]&&u;if((!c||!l[c]||!i&&!l[c].data)&&a&&r===t)return;c||(f?e[u]=c=v.deletedIds.pop()||v.guid++:c=u),l[c]||(l[c]={},f||(l[c].toJSON=v.noop));if(typeof n=="object"||typeof n=="function")i?l[c]=v.extend(l[c],n):l[c].data=v.extend(l[c].data,n);return s=l[c],i||(s.data||(s.data={}),s=s.data),r!==t&&(s[v.camelCase(n)]=r),a?(o=s[n],o==null&&(o=s[v.camelCase(n)])):o=s,o},removeData:function(e,t,n){if(!v.acceptData(e))return;var r,i,s,o=e.nodeType,u=o?v.cache:e,a=o?e[v.expando]:v.expando;if(!u[a])return;if(t){r=n?u[a]:u[a].data;if(r){v.isArray(t)||(t in r?t=[t]:(t=v.camelCase(t),t in r?t=[t]:t=t.split(" ")));for(i=0,s=t.length;i1,null,!1))},removeData:function(e){return this.each(function(){v.removeData(this,e)})}}),v.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=v._data(e,t),n&&(!r||v.isArray(n)?r=v._data(e,t,v.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=v.queue(e,t),r=n.length,i=n.shift(),s=v._queueHooks(e,t),o=function(){v.dequeue(e,t)};i==="inprogress"&&(i=n.shift(),r--),i&&(t==="fx"&&n.unshift("inprogress"),delete s.stop,i.call(e,o,s)),!r&&s&&s.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return v._data(e,n)||v._data(e,n,{empty:v.Callbacks("once memory").add(function(){v.removeData(e,t+"queue",!0),v.removeData(e,n,!0)})})}}),v.fn.extend({queue:function(e,n){var r=2;return typeof e!="string"&&(n=e,e="fx",r--),arguments.length1)},removeAttr:function(e){return this.each(function(){v.removeAttr(this,e)})},prop:function(e,t){return v.access(this,v.prop,e,t,arguments.length>1)},removeProp:function(e){return e=v.propFix[e]||e,this.each(function(){try{this[e]=t,delete this[e]}catch(n){}})},addClass:function(e){var t,n,r,i,s,o,u;if(v.isFunction(e))return this.each(function(t){v(this).addClass(e.call(this,t,this.className))});if(e&&typeof e=="string"){t=e.split(y);for(n=0,r=this.length;n=0)r=r.replace(" "+n[s]+" "," ");i.className=e?v.trim(r):""}}}return this},toggleClass:function(e,t){var n=typeof e,r=typeof t=="boolean";return v.isFunction(e)?this.each(function(n){v(this).toggleClass(e.call(this,n,this.className,t),t)}):this.each(function(){if(n==="string"){var i,s=0,o=v(this),u=t,a=e.split(y);while(i=a[s++])u=r?u:!o.hasClass(i),o[u?"addClass":"removeClass"](i)}else if(n==="undefined"||n==="boolean")this.className&&v._data(this,"__className__",this.className),this.className=this.className||e===!1?"":v._data(this,"__className__")||""})},hasClass:function(e){var t=" "+e+" ",n=0,r=this.length;for(;n=0)return!0;return!1},val:function(e){var n,r,i,s=this[0];if(!arguments.length){if(s)return n=v.valHooks[s.type]||v.valHooks[s.nodeName.toLowerCase()],n&&"get"in n&&(r=n.get(s,"value"))!==t?r:(r=s.value,typeof r=="string"?r.replace(R,""):r==null?"":r);return}return i=v.isFunction(e),this.each(function(r){var s,o=v(this);if(this.nodeType!==1)return;i?s=e.call(this,r,o.val()):s=e,s==null?s="":typeof 
s=="number"?s+="":v.isArray(s)&&(s=v.map(s,function(e){return e==null?"":e+""})),n=v.valHooks[this.type]||v.valHooks[this.nodeName.toLowerCase()];if(!n||!("set"in n)||n.set(this,s,"value")===t)this.value=s})}}),v.extend({valHooks:{option:{get:function(e){var t=e.attributes.value;return!t||t.specified?e.value:e.text}},select:{get:function(e){var t,n,r=e.options,i=e.selectedIndex,s=e.type==="select-one"||i<0,o=s?null:[],u=s?i+1:r.length,a=i<0?u:s?i:0;for(;a=0}),n.length||(e.selectedIndex=-1),n}}},attrFn:{},attr:function(e,n,r,i){var s,o,u,a=e.nodeType;if(!e||a===3||a===8||a===2)return;if(i&&v.isFunction(v.fn[n]))return v(e)[n](r);if(typeof e.getAttribute=="undefined")return v.prop(e,n,r);u=a!==1||!v.isXMLDoc(e),u&&(n=n.toLowerCase(),o=v.attrHooks[n]||(X.test(n)?F:j));if(r!==t){if(r===null){v.removeAttr(e,n);return}return o&&"set"in o&&u&&(s=o.set(e,r,n))!==t?s:(e.setAttribute(n,r+""),r)}return o&&"get"in o&&u&&(s=o.get(e,n))!==null?s:(s=e.getAttribute(n),s===null?t:s)},removeAttr:function(e,t){var n,r,i,s,o=0;if(t&&e.nodeType===1){r=t.split(y);for(;o=0}})});var $=/^(?:textarea|input|select)$/i,J=/^([^\.]*|)(?:\.(.+)|)$/,K=/(?:^|\s)hover(\.\S+|)\b/,Q=/^key/,G=/^(?:mouse|contextmenu)|click/,Y=/^(?:focusinfocus|focusoutblur)$/,Z=function(e){return v.event.special.hover?e:e.replace(K,"mouseenter$1 mouseleave$1")};v.event={add:function(e,n,r,i,s){var o,u,a,f,l,c,h,p,d,m,g;if(e.nodeType===3||e.nodeType===8||!n||!r||!(o=v._data(e)))return;r.handler&&(d=r,r=d.handler,s=d.selector),r.guid||(r.guid=v.guid++),a=o.events,a||(o.events=a={}),u=o.handle,u||(o.handle=u=function(e){return typeof v=="undefined"||!!e&&v.event.triggered===e.type?t:v.event.dispatch.apply(u.elem,arguments)},u.elem=e),n=v.trim(Z(n)).split(" ");for(f=0;f=0&&(y=y.slice(0,-1),a=!0),y.indexOf(".")>=0&&(b=y.split("."),y=b.shift(),b.sort());if((!s||v.event.customEvent[y])&&!v.event.global[y])return;n=typeof n=="object"?n[v.expando]?n:new v.Event(y,n):new v.Event(y),n.type=y,n.isTrigger=!0,n.exclusive=a,n.namespace=b.join("."),n.namespace_re=n.namespace?new RegExp("(^|\\.)"+b.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,h=y.indexOf(":")<0?"on"+y:"";if(!s){u=v.cache;for(f in u)u[f].events&&u[f].events[y]&&v.event.trigger(n,r,u[f].handle.elem,!0);return}n.result=t,n.target||(n.target=s),r=r!=null?v.makeArray(r):[],r.unshift(n),p=v.event.special[y]||{};if(p.trigger&&p.trigger.apply(s,r)===!1)return;m=[[s,p.bindType||y]];if(!o&&!p.noBubble&&!v.isWindow(s)){g=p.delegateType||y,l=Y.test(g+y)?s:s.parentNode;for(c=s;l;l=l.parentNode)m.push([l,g]),c=l;c===(s.ownerDocument||i)&&m.push([c.defaultView||c.parentWindow||e,g])}for(f=0;f=0:v.find(h,this,null,[s]).length),u[h]&&f.push(c);f.length&&w.push({elem:s,matches:f})}d.length>m&&w.push({elem:this,matches:d.slice(m)});for(r=0;r0?this.on(t,null,e,n):this.trigger(t)},Q.test(t)&&(v.event.fixHooks[t]=v.event.keyHooks),G.test(t)&&(v.event.fixHooks[t]=v.event.mouseHooks)}),function(e,t){function nt(e,t,n,r){n=n||[],t=t||g;var i,s,a,f,l=t.nodeType;if(!e||typeof e!="string")return n;if(l!==1&&l!==9)return[];a=o(t);if(!a&&!r)if(i=R.exec(e))if(f=i[1]){if(l===9){s=t.getElementById(f);if(!s||!s.parentNode)return n;if(s.id===f)return n.push(s),n}else if(t.ownerDocument&&(s=t.ownerDocument.getElementById(f))&&u(t,s)&&s.id===f)return n.push(s),n}else{if(i[2])return S.apply(n,x.call(t.getElementsByTagName(e),0)),n;if((f=i[3])&&Z&&t.getElementsByClassName)return S.apply(n,x.call(t.getElementsByClassName(f),0)),n}return vt(e.replace(j,"$1"),t,n,r,a)}function rt(e){return function(t){var n=t.nodeName.toLowerCase();return 
n==="input"&&t.type===e}}function it(e){return function(t){var n=t.nodeName.toLowerCase();return(n==="input"||n==="button")&&t.type===e}}function st(e){return N(function(t){return t=+t,N(function(n,r){var i,s=e([],n.length,t),o=s.length;while(o--)n[i=s[o]]&&(n[i]=!(r[i]=n[i]))})})}function ot(e,t,n){if(e===t)return n;var r=e.nextSibling;while(r){if(r===t)return-1;r=r.nextSibling}return 1}function ut(e,t){var n,r,s,o,u,a,f,l=L[d][e+" "];if(l)return t?0:l.slice(0);u=e,a=[],f=i.preFilter;while(u){if(!n||(r=F.exec(u)))r&&(u=u.slice(r[0].length)||u),a.push(s=[]);n=!1;if(r=I.exec(u))s.push(n=new m(r.shift())),u=u.slice(n.length),n.type=r[0].replace(j," ");for(o in i.filter)(r=J[o].exec(u))&&(!f[o]||(r=f[o](r)))&&(s.push(n=new m(r.shift())),u=u.slice(n.length),n.type=o,n.matches=r);if(!n)break}return t?u.length:u?nt.error(e):L(e,a).slice(0)}function at(e,t,r){var i=t.dir,s=r&&t.dir==="parentNode",o=w++;return t.first?function(t,n,r){while(t=t[i])if(s||t.nodeType===1)return e(t,n,r)}:function(t,r,u){if(!u){var a,f=b+" "+o+" ",l=f+n;while(t=t[i])if(s||t.nodeType===1){if((a=t[d])===l)return t.sizset;if(typeof a=="string"&&a.indexOf(f)===0){if(t.sizset)return t}else{t[d]=l;if(e(t,r,u))return t.sizset=!0,t;t.sizset=!1}}}else while(t=t[i])if(s||t.nodeType===1)if(e(t,r,u))return t}}function ft(e){return e.length>1?function(t,n,r){var i=e.length;while(i--)if(!e[i](t,n,r))return!1;return!0}:e[0]}function lt(e,t,n,r,i){var s,o=[],u=0,a=e.length,f=t!=null;for(;u-1&&(s[f]=!(o[f]=c))}}else g=lt(g===o?g.splice(d,g.length):g),i?i(null,o,g,a):S.apply(o,g)})}function ht(e){var t,n,r,s=e.length,o=i.relative[e[0].type],u=o||i.relative[" "],a=o?1:0,f=at(function(e){return e===t},u,!0),l=at(function(e){return T.call(t,e)>-1},u,!0),h=[function(e,n,r){return!o&&(r||n!==c)||((t=n).nodeType?f(e,n,r):l(e,n,r))}];for(;a1&&ft(h),a>1&&e.slice(0,a-1).join("").replace(j,"$1"),n,a0,s=e.length>0,o=function(u,a,f,l,h){var p,d,v,m=[],y=0,w="0",x=u&&[],T=h!=null,N=c,C=u||s&&i.find.TAG("*",h&&a.parentNode||a),k=b+=N==null?1:Math.E;T&&(c=a!==g&&a,n=o.el);for(;(p=C[w])!=null;w++){if(s&&p){for(d=0;v=e[d];d++)if(v(p,a,f)){l.push(p);break}T&&(b=k,n=++o.el)}r&&((p=!v&&p)&&y--,u&&x.push(p))}y+=w;if(r&&w!==y){for(d=0;v=t[d];d++)v(x,m,a,f);if(u){if(y>0)while(w--)!x[w]&&!m[w]&&(m[w]=E.call(l));m=lt(m)}S.apply(l,m),T&&!u&&m.length>0&&y+t.length>1&&nt.uniqueSort(l)}return T&&(b=k,c=N),x};return o.el=0,r?N(o):o}function dt(e,t,n){var r=0,i=t.length;for(;r2&&(f=u[0]).type==="ID"&&t.nodeType===9&&!s&&i.relative[u[1].type]){t=i.find.ID(f.matches[0].replace($,""),t,s)[0];if(!t)return n;e=e.slice(u.shift().length)}for(o=J.POS.test(e)?-1:u.length-1;o>=0;o--){f=u[o];if(i.relative[l=f.type])break;if(c=i.find[l])if(r=c(f.matches[0].replace($,""),z.test(u[0].type)&&t.parentNode||t,s)){u.splice(o,1),e=r.length&&u.join("");if(!e)return S.apply(n,x.call(r,0)),n;break}}}return a(e,h)(r,t,s,n,z.test(e)),n}function mt(){}var n,r,i,s,o,u,a,f,l,c,h=!0,p="undefined",d=("sizcache"+Math.random()).replace(".",""),m=String,g=e.document,y=g.documentElement,b=0,w=0,E=[].pop,S=[].push,x=[].slice,T=[].indexOf||function(e){var t=0,n=this.length;for(;ti.cacheLength&&delete e[t.shift()],e[n+" 
"]=r},e)},k=C(),L=C(),A=C(),O="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[-\\w]|[^\\x00-\\xa0])+",_=M.replace("w","w#"),D="([*^$|!~]?=)",P="\\["+O+"*("+M+")"+O+"*(?:"+D+O+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+_+")|)|)"+O+"*\\]",H=":("+M+")(?:\\((?:(['\"])((?:\\\\.|[^\\\\])*?)\\2|([^()[\\]]*|(?:(?:"+P+")|[^:]|\\\\.)*|.*))\\)|)",B=":(even|odd|eq|gt|lt|nth|first|last)(?:\\("+O+"*((?:-\\d)?\\d*)"+O+"*\\)|)(?=[^-]|$)",j=new RegExp("^"+O+"+|((?:^|[^\\\\])(?:\\\\.)*)"+O+"+$","g"),F=new RegExp("^"+O+"*,"+O+"*"),I=new RegExp("^"+O+"*([\\x20\\t\\r\\n\\f>+~])"+O+"*"),q=new RegExp(H),R=/^(?:#([\w\-]+)|(\w+)|\.([\w\-]+))$/,U=/^:not/,z=/[\x20\t\r\n\f]*[+~]/,W=/:not\($/,X=/h\d/i,V=/input|select|textarea|button/i,$=/\\(?!\\)/g,J={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),NAME:new RegExp("^\\[name=['\"]?("+M+")['\"]?\\]"),TAG:new RegExp("^("+M.replace("w","w*")+")"),ATTR:new RegExp("^"+P),PSEUDO:new RegExp("^"+H),POS:new RegExp(B,"i"),CHILD:new RegExp("^:(only|nth|first|last)-child(?:\\("+O+"*(even|odd|(([+-]|)(\\d*)n|)"+O+"*(?:([+-]|)"+O+"*(\\d+)|))"+O+"*\\)|)","i"),needsContext:new RegExp("^"+O+"*[>+~]|"+B,"i")},K=function(e){var t=g.createElement("div");try{return e(t)}catch(n){return!1}finally{t=null}},Q=K(function(e){return e.appendChild(g.createComment("")),!e.getElementsByTagName("*").length}),G=K(function(e){return e.innerHTML="",e.firstChild&&typeof e.firstChild.getAttribute!==p&&e.firstChild.getAttribute("href")==="#"}),Y=K(function(e){e.innerHTML="";var t=typeof e.lastChild.getAttribute("multiple");return t!=="boolean"&&t!=="string"}),Z=K(function(e){return e.innerHTML="",!e.getElementsByClassName||!e.getElementsByClassName("e").length?!1:(e.lastChild.className="e",e.getElementsByClassName("e").length===2)}),et=K(function(e){e.id=d+0,e.innerHTML="
          ",y.insertBefore(e,y.firstChild);var t=g.getElementsByName&&g.getElementsByName(d).length===2+g.getElementsByName(d+0).length;return r=!g.getElementById(d),y.removeChild(e),t});try{x.call(y.childNodes,0)[0].nodeType}catch(tt){x=function(e){var t,n=[];for(;t=this[e];e++)n.push(t);return n}}nt.matches=function(e,t){return nt(e,null,null,t)},nt.matchesSelector=function(e,t){return nt(t,null,null,[e]).length>0},s=nt.getText=function(e){var t,n="",r=0,i=e.nodeType;if(i){if(i===1||i===9||i===11){if(typeof e.textContent=="string")return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=s(e)}else if(i===3||i===4)return e.nodeValue}else for(;t=e[r];r++)n+=s(t);return n},o=nt.isXML=function(e){var t=e&&(e.ownerDocument||e).documentElement;return t?t.nodeName!=="HTML":!1},u=nt.contains=y.contains?function(e,t){var n=e.nodeType===9?e.documentElement:e,r=t&&t.parentNode;return e===r||!!(r&&r.nodeType===1&&n.contains&&n.contains(r))}:y.compareDocumentPosition?function(e,t){return t&&!!(e.compareDocumentPosition(t)&16)}:function(e,t){while(t=t.parentNode)if(t===e)return!0;return!1},nt.attr=function(e,t){var n,r=o(e);return r||(t=t.toLowerCase()),(n=i.attrHandle[t])?n(e):r||Y?e.getAttribute(t):(n=e.getAttributeNode(t),n?typeof e[t]=="boolean"?e[t]?t:null:n.specified?n.value:null:null)},i=nt.selectors={cacheLength:50,createPseudo:N,match:J,attrHandle:G?{}:{href:function(e){return e.getAttribute("href",2)},type:function(e){return e.getAttribute("type")}},find:{ID:r?function(e,t,n){if(typeof t.getElementById!==p&&!n){var r=t.getElementById(e);return r&&r.parentNode?[r]:[]}}:function(e,n,r){if(typeof n.getElementById!==p&&!r){var i=n.getElementById(e);return i?i.id===e||typeof i.getAttributeNode!==p&&i.getAttributeNode("id").value===e?[i]:t:[]}},TAG:Q?function(e,t){if(typeof t.getElementsByTagName!==p)return t.getElementsByTagName(e)}:function(e,t){var n=t.getElementsByTagName(e);if(e==="*"){var r,i=[],s=0;for(;r=n[s];s++)r.nodeType===1&&i.push(r);return i}return n},NAME:et&&function(e,t){if(typeof t.getElementsByName!==p)return t.getElementsByName(name)},CLASS:Z&&function(e,t,n){if(typeof t.getElementsByClassName!==p&&!n)return t.getElementsByClassName(e)}},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace($,""),e[3]=(e[4]||e[5]||"").replace($,""),e[2]==="~="&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),e[1]==="nth"?(e[2]||nt.error(e[0]),e[3]=+(e[3]?e[4]+(e[5]||1):2*(e[2]==="even"||e[2]==="odd")),e[4]=+(e[6]+e[7]||e[2]==="odd")):e[2]&&nt.error(e[0]),e},PSEUDO:function(e){var t,n;if(J.CHILD.test(e[0]))return null;if(e[3])e[2]=e[3];else if(t=e[4])q.test(t)&&(n=ut(t,!0))&&(n=t.indexOf(")",t.length-n)-t.length)&&(t=t.slice(0,n),e[0]=e[0].slice(0,n)),e[2]=t;return e.slice(0,3)}},filter:{ID:r?function(e){return e=e.replace($,""),function(t){return t.getAttribute("id")===e}}:function(e){return e=e.replace($,""),function(t){var n=typeof t.getAttributeNode!==p&&t.getAttributeNode("id");return n&&n.value===e}},TAG:function(e){return e==="*"?function(){return!0}:(e=e.replace($,"").toLowerCase(),function(t){return t.nodeName&&t.nodeName.toLowerCase()===e})},CLASS:function(e){var t=k[d][e+" "];return t||(t=new RegExp("(^|"+O+")"+e+"("+O+"|$)"))&&k(e,function(e){return t.test(e.className||typeof e.getAttribute!==p&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(r,i){var s=nt.attr(r,e);return 
s==null?t==="!=":t?(s+="",t==="="?s===n:t==="!="?s!==n:t==="^="?n&&s.indexOf(n)===0:t==="*="?n&&s.indexOf(n)>-1:t==="$="?n&&s.substr(s.length-n.length)===n:t==="~="?(" "+s+" ").indexOf(n)>-1:t==="|="?s===n||s.substr(0,n.length+1)===n+"-":!1):!0}},CHILD:function(e,t,n,r){return e==="nth"?function(e){var t,i,s=e.parentNode;if(n===1&&r===0)return!0;if(s){i=0;for(t=s.firstChild;t;t=t.nextSibling)if(t.nodeType===1){i++;if(e===t)break}}return i-=r,i===n||i%n===0&&i/n>=0}:function(t){var n=t;switch(e){case"only":case"first":while(n=n.previousSibling)if(n.nodeType===1)return!1;if(e==="first")return!0;n=t;case"last":while(n=n.nextSibling)if(n.nodeType===1)return!1;return!0}}},PSEUDO:function(e,t){var n,r=i.pseudos[e]||i.setFilters[e.toLowerCase()]||nt.error("unsupported pseudo: "+e);return r[d]?r(t):r.length>1?(n=[e,e,"",t],i.setFilters.hasOwnProperty(e.toLowerCase())?N(function(e,n){var i,s=r(e,t),o=s.length;while(o--)i=T.call(e,s[o]),e[i]=!(n[i]=s[o])}):function(e){return r(e,0,n)}):r}},pseudos:{not:N(function(e){var t=[],n=[],r=a(e.replace(j,"$1"));return r[d]?N(function(e,t,n,i){var s,o=r(e,null,i,[]),u=e.length;while(u--)if(s=o[u])e[u]=!(t[u]=s)}):function(e,i,s){return t[0]=e,r(t,null,s,n),!n.pop()}}),has:N(function(e){return function(t){return nt(e,t).length>0}}),contains:N(function(e){return function(t){return(t.textContent||t.innerText||s(t)).indexOf(e)>-1}}),enabled:function(e){return e.disabled===!1},disabled:function(e){return e.disabled===!0},checked:function(e){var t=e.nodeName.toLowerCase();return t==="input"&&!!e.checked||t==="option"&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,e.selected===!0},parent:function(e){return!i.pseudos.empty(e)},empty:function(e){var t;e=e.firstChild;while(e){if(e.nodeName>"@"||(t=e.nodeType)===3||t===4)return!1;e=e.nextSibling}return!0},header:function(e){return X.test(e.nodeName)},text:function(e){var t,n;return e.nodeName.toLowerCase()==="input"&&(t=e.type)==="text"&&((n=e.getAttribute("type"))==null||n.toLowerCase()===t)},radio:rt("radio"),checkbox:rt("checkbox"),file:rt("file"),password:rt("password"),image:rt("image"),submit:it("submit"),reset:it("reset"),button:function(e){var t=e.nodeName.toLowerCase();return t==="input"&&e.type==="button"||t==="button"},input:function(e){return V.test(e.nodeName)},focus:function(e){var t=e.ownerDocument;return e===t.activeElement&&(!t.hasFocus||t.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},active:function(e){return e===e.ownerDocument.activeElement},first:st(function(){return[0]}),last:st(function(e,t){return[t-1]}),eq:st(function(e,t,n){return[n<0?n+t:n]}),even:st(function(e,t){for(var n=0;n=0;)e.push(r);return e}),gt:st(function(e,t,n){for(var r=n<0?n+t:n;++r",e.querySelectorAll("[selected]").length||i.push("\\["+O+"*(?:checked|disabled|ismap|multiple|readonly|selected|value)"),e.querySelectorAll(":checked").length||i.push(":checked")}),K(function(e){e.innerHTML="

          ",e.querySelectorAll("[test^='']").length&&i.push("[*^$]="+O+"*(?:\"\"|'')"),e.innerHTML="",e.querySelectorAll(":enabled").length||i.push(":enabled",":disabled")}),i=new RegExp(i.join("|")),vt=function(e,r,s,o,u){if(!o&&!u&&!i.test(e)){var a,f,l=!0,c=d,h=r,p=r.nodeType===9&&e;if(r.nodeType===1&&r.nodeName.toLowerCase()!=="object"){a=ut(e),(l=r.getAttribute("id"))?c=l.replace(n,"\\$&"):r.setAttribute("id",c),c="[id='"+c+"'] ",f=a.length;while(f--)a[f]=c+a[f].join("");h=z.test(e)&&r.parentNode||r,p=a.join(",")}if(p)try{return S.apply(s,x.call(h.querySelectorAll(p),0)),s}catch(v){}finally{l||r.removeAttribute("id")}}return t(e,r,s,o,u)},u&&(K(function(t){e=u.call(t,"div");try{u.call(t,"[test!='']:sizzle"),s.push("!=",H)}catch(n){}}),s=new RegExp(s.join("|")),nt.matchesSelector=function(t,n){n=n.replace(r,"='$1']");if(!o(t)&&!s.test(n)&&!i.test(n))try{var a=u.call(t,n);if(a||e||t.document&&t.document.nodeType!==11)return a}catch(f){}return nt(n,null,null,[t]).length>0})}(),i.pseudos.nth=i.pseudos.eq,i.filters=mt.prototype=i.pseudos,i.setFilters=new mt,nt.attr=v.attr,v.find=nt,v.expr=nt.selectors,v.expr[":"]=v.expr.pseudos,v.unique=nt.uniqueSort,v.text=nt.getText,v.isXMLDoc=nt.isXML,v.contains=nt.contains}(e);var nt=/Until$/,rt=/^(?:parents|prev(?:Until|All))/,it=/^.[^:#\[\.,]*$/,st=v.expr.match.needsContext,ot={children:!0,contents:!0,next:!0,prev:!0};v.fn.extend({find:function(e){var t,n,r,i,s,o,u=this;if(typeof e!="string")return v(e).filter(function(){for(t=0,n=u.length;t0)for(i=r;i=0:v.filter(e,this).length>0:this.filter(e).length>0)},closest:function(e,t){var n,r=0,i=this.length,s=[],o=st.test(e)||typeof e!="string"?v(e,t||this.context):0;for(;r-1:v.find.matchesSelector(n,e)){s.push(n);break}n=n.parentNode}}return s=s.length>1?v.unique(s):s,this.pushStack(s,"closest",e)},index:function(e){return e?typeof e=="string"?v.inArray(this[0],v(e)):v.inArray(e.jquery?e[0]:e,this):this[0]&&this[0].parentNode?this.prevAll().length:-1},add:function(e,t){var n=typeof e=="string"?v(e,t):v.makeArray(e&&e.nodeType?[e]:e),r=v.merge(this.get(),n);return this.pushStack(ut(n[0])||ut(r[0])?r:v.unique(r))},addBack:function(e){return this.add(e==null?this.prevObject:this.prevObject.filter(e))}}),v.fn.andSelf=v.fn.addBack,v.each({parent:function(e){var t=e.parentNode;return t&&t.nodeType!==11?t:null},parents:function(e){return v.dir(e,"parentNode")},parentsUntil:function(e,t,n){return v.dir(e,"parentNode",n)},next:function(e){return at(e,"nextSibling")},prev:function(e){return at(e,"previousSibling")},nextAll:function(e){return v.dir(e,"nextSibling")},prevAll:function(e){return v.dir(e,"previousSibling")},nextUntil:function(e,t,n){return v.dir(e,"nextSibling",n)},prevUntil:function(e,t,n){return v.dir(e,"previousSibling",n)},siblings:function(e){return v.sibling((e.parentNode||{}).firstChild,e)},children:function(e){return v.sibling(e.firstChild)},contents:function(e){return v.nodeName(e,"iframe")?e.contentDocument||e.contentWindow.document:v.merge([],e.childNodes)}},function(e,t){v.fn[e]=function(n,r){var i=v.map(this,t,n);return nt.test(e)||(r=n),r&&typeof r=="string"&&(i=v.filter(r,i)),i=this.length>1&&!ot[e]?v.unique(i):i,this.length>1&&rt.test(e)&&(i=i.reverse()),this.pushStack(i,e,l.call(arguments).join(","))}}),v.extend({filter:function(e,t,n){return n&&(e=":not("+e+")"),t.length===1?v.find.matchesSelector(t[0],e)?[t[0]]:[]:v.find.matches(e,t)},dir:function(e,n,r){var i=[],s=e[n];while(s&&s.nodeType!==9&&(r===t||s.nodeType!==1||!v(s).is(r)))s.nodeType===1&&i.push(s),s=s[n];return 
i},sibling:function(e,t){var n=[];for(;e;e=e.nextSibling)e.nodeType===1&&e!==t&&n.push(e);return n}});var ct="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",ht=/ jQuery\d+="(?:null|\d+)"/g,pt=/^\s+/,dt=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,vt=/<([\w:]+)/,mt=/]","i"),Et=/^(?:checkbox|radio)$/,St=/checked\s*(?:[^=]|=\s*.checked.)/i,xt=/\/(java|ecma)script/i,Tt=/^\s*\s*$/g,Nt={option:[1,""],legend:[1,"
          ","
          "],thead:[1,"","
          "],tr:[2,"","
          "],td:[3,"","
          "],col:[2,"","
          "],area:[1,"",""],_default:[0,"",""]},Ct=lt(i),kt=Ct.appendChild(i.createElement("div"));Nt.optgroup=Nt.option,Nt.tbody=Nt.tfoot=Nt.colgroup=Nt.caption=Nt.thead,Nt.th=Nt.td,v.support.htmlSerialize||(Nt._default=[1,"X
          ","
          "]),v.fn.extend({text:function(e){return v.access(this,function(e){return e===t?v.text(this):this.empty().append((this[0]&&this[0].ownerDocument||i).createTextNode(e))},null,e,arguments.length)},wrapAll:function(e){if(v.isFunction(e))return this.each(function(t){v(this).wrapAll(e.call(this,t))});if(this[0]){var t=v(e,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstChild&&e.firstChild.nodeType===1)e=e.firstChild;return e}).append(this)}return this},wrapInner:function(e){return v.isFunction(e)?this.each(function(t){v(this).wrapInner(e.call(this,t))}):this.each(function(){var t=v(this),n=t.contents();n.length?n.wrapAll(e):t.append(e)})},wrap:function(e){var t=v.isFunction(e);return this.each(function(n){v(this).wrapAll(t?e.call(this,n):e)})},unwrap:function(){return this.parent().each(function(){v.nodeName(this,"body")||v(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(e){(this.nodeType===1||this.nodeType===11)&&this.appendChild(e)})},prepend:function(){return this.domManip(arguments,!0,function(e){(this.nodeType===1||this.nodeType===11)&&this.insertBefore(e,this.firstChild)})},before:function(){if(!ut(this[0]))return this.domManip(arguments,!1,function(e){this.parentNode.insertBefore(e,this)});if(arguments.length){var e=v.clean(arguments);return this.pushStack(v.merge(e,this),"before",this.selector)}},after:function(){if(!ut(this[0]))return this.domManip(arguments,!1,function(e){this.parentNode.insertBefore(e,this.nextSibling)});if(arguments.length){var e=v.clean(arguments);return this.pushStack(v.merge(this,e),"after",this.selector)}},remove:function(e,t){var n,r=0;for(;(n=this[r])!=null;r++)if(!e||v.filter(e,[n]).length)!t&&n.nodeType===1&&(v.cleanData(n.getElementsByTagName("*")),v.cleanData([n])),n.parentNode&&n.parentNode.removeChild(n);return this},empty:function(){var e,t=0;for(;(e=this[t])!=null;t++){e.nodeType===1&&v.cleanData(e.getElementsByTagName("*"));while(e.firstChild)e.removeChild(e.firstChild)}return this},clone:function(e,t){return e=e==null?!1:e,t=t==null?e:t,this.map(function(){return v.clone(this,e,t)})},html:function(e){return v.access(this,function(e){var n=this[0]||{},r=0,i=this.length;if(e===t)return n.nodeType===1?n.innerHTML.replace(ht,""):t;if(typeof e=="string"&&!yt.test(e)&&(v.support.htmlSerialize||!wt.test(e))&&(v.support.leadingWhitespace||!pt.test(e))&&!Nt[(vt.exec(e)||["",""])[1].toLowerCase()]){e=e.replace(dt,"<$1>");try{for(;r1&&typeof f=="string"&&St.test(f))return this.each(function(){v(this).domManip(e,n,r)});if(v.isFunction(f))return this.each(function(i){var s=v(this);e[0]=f.call(this,i,n?s.html():t),s.domManip(e,n,r)});if(this[0]){i=v.buildFragment(e,this,l),o=i.fragment,s=o.firstChild,o.childNodes.length===1&&(o=s);if(s){n=n&&v.nodeName(s,"tr");for(u=i.cacheable||c-1;a0?this.clone(!0):this).get(),v(o[i])[t](r),s=s.concat(r);return this.pushStack(s,e,o.selector)}}),v.extend({clone:function(e,t,n){var r,i,s,o;v.support.html5Clone||v.isXMLDoc(e)||!wt.test("<"+e.nodeName+">")?o=e.cloneNode(!0):(kt.innerHTML=e.outerHTML,kt.removeChild(o=kt.firstChild));if((!v.support.noCloneEvent||!v.support.noCloneChecked)&&(e.nodeType===1||e.nodeType===11)&&!v.isXMLDoc(e)){Ot(e,o),r=Mt(e),i=Mt(o);for(s=0;r[s];++s)i[s]&&Ot(r[s],i[s])}if(t){At(e,o);if(n){r=Mt(e),i=Mt(o);for(s=0;r[s];++s)At(r[s],i[s])}}return r=i=null,o},clean:function(e,t,n,r){var s,o,u,a,f,l,c,h,p,d,m,g,y=t===i&&Ct,b=[];if(!t||typeof 
t.createDocumentFragment=="undefined")t=i;for(s=0;(u=e[s])!=null;s++){typeof u=="number"&&(u+="");if(!u)continue;if(typeof u=="string")if(!gt.test(u))u=t.createTextNode(u);else{y=y||lt(t),c=t.createElement("div"),y.appendChild(c),u=u.replace(dt,"<$1>"),a=(vt.exec(u)||["",""])[1].toLowerCase(),f=Nt[a]||Nt._default,l=f[0],c.innerHTML=f[1]+u+f[2];while(l--)c=c.lastChild;if(!v.support.tbody){h=mt.test(u),p=a==="table"&&!h?c.firstChild&&c.firstChild.childNodes:f[1]===""&&!h?c.childNodes:[];for(o=p.length-1;o>=0;--o)v.nodeName(p[o],"tbody")&&!p[o].childNodes.length&&p[o].parentNode.removeChild(p[o])}!v.support.leadingWhitespace&&pt.test(u)&&c.insertBefore(t.createTextNode(pt.exec(u)[0]),c.firstChild),u=c.childNodes,c.parentNode.removeChild(c)}u.nodeType?b.push(u):v.merge(b,u)}c&&(u=c=y=null);if(!v.support.appendChecked)for(s=0;(u=b[s])!=null;s++)v.nodeName(u,"input")?_t(u):typeof u.getElementsByTagName!="undefined"&&v.grep(u.getElementsByTagName("input"),_t);if(n){m=function(e){if(!e.type||xt.test(e.type))return r?r.push(e.parentNode?e.parentNode.removeChild(e):e):n.appendChild(e)};for(s=0;(u=b[s])!=null;s++)if(!v.nodeName(u,"script")||!m(u))n.appendChild(u),typeof u.getElementsByTagName!="undefined"&&(g=v.grep(v.merge([],u.getElementsByTagName("script")),m),b.splice.apply(b,[s+1,0].concat(g)),s+=g.length)}return b},cleanData:function(e,t){var n,r,i,s,o=0,u=v.expando,a=v.cache,f=v.support.deleteExpando,l=v.event.special;for(;(i=e[o])!=null;o++)if(t||v.acceptData(i)){r=i[u],n=r&&a[r];if(n){if(n.events)for(s in n.events)l[s]?v.event.remove(i,s):v.removeEvent(i,s,n.handle);a[r]&&(delete a[r],f?delete i[u]:i.removeAttribute?i.removeAttribute(u):i[u]=null,v.deletedIds.push(r))}}}}),function(){var e,t;v.uaMatch=function(e){e=e.toLowerCase();var t=/(chrome)[ \/]([\w.]+)/.exec(e)||/(webkit)[ \/]([\w.]+)/.exec(e)||/(opera)(?:.*version|)[ \/]([\w.]+)/.exec(e)||/(msie) ([\w.]+)/.exec(e)||e.indexOf("compatible")<0&&/(mozilla)(?:.*? 
rv:([\w.]+)|)/.exec(e)||[];return{browser:t[1]||"",version:t[2]||"0"}},e=v.uaMatch(o.userAgent),t={},e.browser&&(t[e.browser]=!0,t.version=e.version),t.chrome?t.webkit=!0:t.webkit&&(t.safari=!0),v.browser=t,v.sub=function(){function e(t,n){return new e.fn.init(t,n)}v.extend(!0,e,this),e.superclass=this,e.fn=e.prototype=this(),e.fn.constructor=e,e.sub=this.sub,e.fn.init=function(r,i){return i&&i instanceof v&&!(i instanceof e)&&(i=e(i)),v.fn.init.call(this,r,i,t)},e.fn.init.prototype=e.fn;var t=e(i);return e}}();var Dt,Pt,Ht,Bt=/alpha\([^)]*\)/i,jt=/opacity=([^)]*)/,Ft=/^(top|right|bottom|left)$/,It=/^(none|table(?!-c[ea]).+)/,qt=/^margin/,Rt=new RegExp("^("+m+")(.*)$","i"),Ut=new RegExp("^("+m+")(?!px)[a-z%]+$","i"),zt=new RegExp("^([-+])=("+m+")","i"),Wt={BODY:"block"},Xt={position:"absolute",visibility:"hidden",display:"block"},Vt={letterSpacing:0,fontWeight:400},$t=["Top","Right","Bottom","Left"],Jt=["Webkit","O","Moz","ms"],Kt=v.fn.toggle;v.fn.extend({css:function(e,n){return v.access(this,function(e,n,r){return r!==t?v.style(e,n,r):v.css(e,n)},e,n,arguments.length>1)},show:function(){return Yt(this,!0)},hide:function(){return Yt(this)},toggle:function(e,t){var n=typeof e=="boolean";return v.isFunction(e)&&v.isFunction(t)?Kt.apply(this,arguments):this.each(function(){(n?e:Gt(this))?v(this).show():v(this).hide()})}}),v.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=Dt(e,"opacity");return n===""?"1":n}}}},cssNumber:{fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":v.support.cssFloat?"cssFloat":"styleFloat"},style:function(e,n,r,i){if(!e||e.nodeType===3||e.nodeType===8||!e.style)return;var s,o,u,a=v.camelCase(n),f=e.style;n=v.cssProps[a]||(v.cssProps[a]=Qt(f,a)),u=v.cssHooks[n]||v.cssHooks[a];if(r===t)return u&&"get"in u&&(s=u.get(e,!1,i))!==t?s:f[n];o=typeof r,o==="string"&&(s=zt.exec(r))&&(r=(s[1]+1)*s[2]+parseFloat(v.css(e,n)),o="number");if(r==null||o==="number"&&isNaN(r))return;o==="number"&&!v.cssNumber[a]&&(r+="px");if(!u||!("set"in u)||(r=u.set(e,r,i))!==t)try{f[n]=r}catch(l){}},css:function(e,n,r,i){var s,o,u,a=v.camelCase(n);return n=v.cssProps[a]||(v.cssProps[a]=Qt(e.style,a)),u=v.cssHooks[n]||v.cssHooks[a],u&&"get"in u&&(s=u.get(e,!0,i)),s===t&&(s=Dt(e,n)),s==="normal"&&n in Vt&&(s=Vt[n]),r||i!==t?(o=parseFloat(s),r||v.isNumeric(o)?o||0:s):s},swap:function(e,t,n){var r,i,s={};for(i in t)s[i]=e.style[i],e.style[i]=t[i];r=n.call(e);for(i in t)e.style[i]=s[i];return r}}),e.getComputedStyle?Dt=function(t,n){var r,i,s,o,u=e.getComputedStyle(t,null),a=t.style;return u&&(r=u.getPropertyValue(n)||u[n],r===""&&!v.contains(t.ownerDocument,t)&&(r=v.style(t,n)),Ut.test(r)&&qt.test(n)&&(i=a.width,s=a.minWidth,o=a.maxWidth,a.minWidth=a.maxWidth=a.width=r,r=u.width,a.width=i,a.minWidth=s,a.maxWidth=o)),r}:i.documentElement.currentStyle&&(Dt=function(e,t){var n,r,i=e.currentStyle&&e.currentStyle[t],s=e.style;return i==null&&s&&s[t]&&(i=s[t]),Ut.test(i)&&!Ft.test(t)&&(n=s.left,r=e.runtimeStyle&&e.runtimeStyle.left,r&&(e.runtimeStyle.left=e.currentStyle.left),s.left=t==="fontSize"?"1em":i,i=s.pixelLeft+"px",s.left=n,r&&(e.runtimeStyle.left=r)),i===""?"auto":i}),v.each(["height","width"],function(e,t){v.cssHooks[t]={get:function(e,n,r){if(n)return e.offsetWidth===0&&It.test(Dt(e,"display"))?v.swap(e,Xt,function(){return tn(e,t,r)}):tn(e,t,r)},set:function(e,n,r){return Zt(e,n,r?en(e,t,r,v.support.boxSizing&&v.css(e,"boxSizing")==="border-box"):0)}}}),v.support.opacity||(v.cssHooks.opacity={get:function(e,t){return 
jt.test((t&&e.currentStyle?e.currentStyle.filter:e.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":t?"1":""},set:function(e,t){var n=e.style,r=e.currentStyle,i=v.isNumeric(t)?"alpha(opacity="+t*100+")":"",s=r&&r.filter||n.filter||"";n.zoom=1;if(t>=1&&v.trim(s.replace(Bt,""))===""&&n.removeAttribute){n.removeAttribute("filter");if(r&&!r.filter)return}n.filter=Bt.test(s)?s.replace(Bt,i):s+" "+i}}),v(function(){v.support.reliableMarginRight||(v.cssHooks.marginRight={get:function(e,t){return v.swap(e,{display:"inline-block"},function(){if(t)return Dt(e,"marginRight")})}}),!v.support.pixelPosition&&v.fn.position&&v.each(["top","left"],function(e,t){v.cssHooks[t]={get:function(e,n){if(n){var r=Dt(e,t);return Ut.test(r)?v(e).position()[t]+"px":r}}}})}),v.expr&&v.expr.filters&&(v.expr.filters.hidden=function(e){return e.offsetWidth===0&&e.offsetHeight===0||!v.support.reliableHiddenOffsets&&(e.style&&e.style.display||Dt(e,"display"))==="none"},v.expr.filters.visible=function(e){return!v.expr.filters.hidden(e)}),v.each({margin:"",padding:"",border:"Width"},function(e,t){v.cssHooks[e+t]={expand:function(n){var r,i=typeof n=="string"?n.split(" "):[n],s={};for(r=0;r<4;r++)s[e+$t[r]+t]=i[r]||i[r-2]||i[0];return s}},qt.test(e)||(v.cssHooks[e+t].set=Zt)});var rn=/%20/g,sn=/\[\]$/,on=/\r?\n/g,un=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,an=/^(?:select|textarea)/i;v.fn.extend({serialize:function(){return v.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?v.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||an.test(this.nodeName)||un.test(this.type))}).map(function(e,t){var n=v(this).val();return n==null?null:v.isArray(n)?v.map(n,function(e,n){return{name:t.name,value:e.replace(on,"\r\n")}}):{name:t.name,value:n.replace(on,"\r\n")}}).get()}}),v.param=function(e,n){var r,i=[],s=function(e,t){t=v.isFunction(t)?t():t==null?"":t,i[i.length]=encodeURIComponent(e)+"="+encodeURIComponent(t)};n===t&&(n=v.ajaxSettings&&v.ajaxSettings.traditional);if(v.isArray(e)||e.jquery&&!v.isPlainObject(e))v.each(e,function(){s(this.name,this.value)});else for(r in e)fn(r,e[r],n,s);return i.join("&").replace(rn,"+")};var ln,cn,hn=/#.*$/,pn=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,dn=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,vn=/^(?:GET|HEAD)$/,mn=/^\/\//,gn=/\?/,yn=/)<[^<]*)*<\/script>/gi,bn=/([?&])_=[^&]*/,wn=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/,En=v.fn.load,Sn={},xn={},Tn=["*/"]+["*"];try{cn=s.href}catch(Nn){cn=i.createElement("a"),cn.href="",cn=cn.href}ln=wn.exec(cn.toLowerCase())||[],v.fn.load=function(e,n,r){if(typeof e!="string"&&En)return En.apply(this,arguments);if(!this.length)return this;var i,s,o,u=this,a=e.indexOf(" ");return a>=0&&(i=e.slice(a,e.length),e=e.slice(0,a)),v.isFunction(n)?(r=n,n=t):n&&typeof n=="object"&&(s="POST"),v.ajax({url:e,type:s,dataType:"html",data:n,complete:function(e,t){r&&u.each(r,o||[e.responseText,t,e])}}).done(function(e){o=arguments,u.html(i?v("
          ").append(e.replace(yn,"")).find(i):e)}),this},v.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(e,t){v.fn[t]=function(e){return this.on(t,e)}}),v.each(["get","post"],function(e,n){v[n]=function(e,r,i,s){return v.isFunction(r)&&(s=s||i,i=r,r=t),v.ajax({type:n,url:e,data:r,success:i,dataType:s})}}),v.extend({getScript:function(e,n){return v.get(e,t,n,"script")},getJSON:function(e,t,n){return v.get(e,t,n,"json")},ajaxSetup:function(e,t){return t?Ln(e,v.ajaxSettings):(t=e,e=v.ajaxSettings),Ln(e,t),e},ajaxSettings:{url:cn,isLocal:dn.test(ln[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded; charset=UTF-8",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":Tn},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":e.String,"text html":!0,"text json":v.parseJSON,"text xml":v.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:Cn(Sn),ajaxTransport:Cn(xn),ajax:function(e,n){function T(e,n,s,a){var l,y,b,w,S,T=n;if(E===2)return;E=2,u&&clearTimeout(u),o=t,i=a||"",x.readyState=e>0?4:0,s&&(w=An(c,x,s));if(e>=200&&e<300||e===304)c.ifModified&&(S=x.getResponseHeader("Last-Modified"),S&&(v.lastModified[r]=S),S=x.getResponseHeader("Etag"),S&&(v.etag[r]=S)),e===304?(T="notmodified",l=!0):(l=On(c,w),T=l.state,y=l.data,b=l.error,l=!b);else{b=T;if(!T||e)T="error",e<0&&(e=0)}x.status=e,x.statusText=(n||T)+"",l?d.resolveWith(h,[y,T,x]):d.rejectWith(h,[x,T,b]),x.statusCode(g),g=t,f&&p.trigger("ajax"+(l?"Success":"Error"),[x,c,l?y:b]),m.fireWith(h,[x,T]),f&&(p.trigger("ajaxComplete",[x,c]),--v.active||v.event.trigger("ajaxStop"))}typeof e=="object"&&(n=e,e=t),n=n||{};var r,i,s,o,u,a,f,l,c=v.ajaxSetup({},n),h=c.context||c,p=h!==c&&(h.nodeType||h instanceof v)?v(h):v.event,d=v.Deferred(),m=v.Callbacks("once memory"),g=c.statusCode||{},b={},w={},E=0,S="canceled",x={readyState:0,setRequestHeader:function(e,t){if(!E){var n=e.toLowerCase();e=w[n]=w[n]||e,b[e]=t}return this},getAllResponseHeaders:function(){return E===2?i:null},getResponseHeader:function(e){var n;if(E===2){if(!s){s={};while(n=pn.exec(i))s[n[1].toLowerCase()]=n[2]}n=s[e.toLowerCase()]}return n===t?null:n},overrideMimeType:function(e){return E||(c.mimeType=e),this},abort:function(e){return e=e||S,o&&o.abort(e),T(0,e),this}};d.promise(x),x.success=x.done,x.error=x.fail,x.complete=m.add,x.statusCode=function(e){if(e){var t;if(E<2)for(t in e)g[t]=[g[t],e[t]];else t=e[x.status],x.always(t)}return this},c.url=((e||c.url)+"").replace(hn,"").replace(mn,ln[1]+"//"),c.dataTypes=v.trim(c.dataType||"*").toLowerCase().split(y),c.crossDomain==null&&(a=wn.exec(c.url.toLowerCase()),c.crossDomain=!(!a||a[1]===ln[1]&&a[2]===ln[2]&&(a[3]||(a[1]==="http:"?80:443))==(ln[3]||(ln[1]==="http:"?80:443)))),c.data&&c.processData&&typeof c.data!="string"&&(c.data=v.param(c.data,c.traditional)),kn(Sn,c,n,x);if(E===2)return x;f=c.global,c.type=c.type.toUpperCase(),c.hasContent=!vn.test(c.type),f&&v.active++===0&&v.event.trigger("ajaxStart");if(!c.hasContent){c.data&&(c.url+=(gn.test(c.url)?"&":"?")+c.data,delete c.data),r=c.url;if(c.cache===!1){var 
N=v.now(),C=c.url.replace(bn,"$1_="+N);c.url=C+(C===c.url?(gn.test(c.url)?"&":"?")+"_="+N:"")}}(c.data&&c.hasContent&&c.contentType!==!1||n.contentType)&&x.setRequestHeader("Content-Type",c.contentType),c.ifModified&&(r=r||c.url,v.lastModified[r]&&x.setRequestHeader("If-Modified-Since",v.lastModified[r]),v.etag[r]&&x.setRequestHeader("If-None-Match",v.etag[r])),x.setRequestHeader("Accept",c.dataTypes[0]&&c.accepts[c.dataTypes[0]]?c.accepts[c.dataTypes[0]]+(c.dataTypes[0]!=="*"?", "+Tn+"; q=0.01":""):c.accepts["*"]);for(l in c.headers)x.setRequestHeader(l,c.headers[l]);if(!c.beforeSend||c.beforeSend.call(h,x,c)!==!1&&E!==2){S="abort";for(l in{success:1,error:1,complete:1})x[l](c[l]);o=kn(xn,c,n,x);if(!o)T(-1,"No Transport");else{x.readyState=1,f&&p.trigger("ajaxSend",[x,c]),c.async&&c.timeout>0&&(u=setTimeout(function(){x.abort("timeout")},c.timeout));try{E=1,o.send(b,T)}catch(k){if(!(E<2))throw k;T(-1,k)}}return x}return x.abort()},active:0,lastModified:{},etag:{}});var Mn=[],_n=/\?/,Dn=/(=)\?(?=&|$)|\?\?/,Pn=v.now();v.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Mn.pop()||v.expando+"_"+Pn++;return this[e]=!0,e}}),v.ajaxPrefilter("json jsonp",function(n,r,i){var s,o,u,a=n.data,f=n.url,l=n.jsonp!==!1,c=l&&Dn.test(f),h=l&&!c&&typeof a=="string"&&!(n.contentType||"").indexOf("application/x-www-form-urlencoded")&&Dn.test(a);if(n.dataTypes[0]==="jsonp"||c||h)return s=n.jsonpCallback=v.isFunction(n.jsonpCallback)?n.jsonpCallback():n.jsonpCallback,o=e[s],c?n.url=f.replace(Dn,"$1"+s):h?n.data=a.replace(Dn,"$1"+s):l&&(n.url+=(_n.test(f)?"&":"?")+n.jsonp+"="+s),n.converters["script json"]=function(){return u||v.error(s+" was not called"),u[0]},n.dataTypes[0]="json",e[s]=function(){u=arguments},i.always(function(){e[s]=o,n[s]&&(n.jsonpCallback=r.jsonpCallback,Mn.push(s)),u&&v.isFunction(o)&&o(u[0]),u=o=t}),"script"}),v.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(e){return v.globalEval(e),e}}}),v.ajaxPrefilter("script",function(e){e.cache===t&&(e.cache=!1),e.crossDomain&&(e.type="GET",e.global=!1)}),v.ajaxTransport("script",function(e){if(e.crossDomain){var n,r=i.head||i.getElementsByTagName("head")[0]||i.documentElement;return{send:function(s,o){n=i.createElement("script"),n.async="async",e.scriptCharset&&(n.charset=e.scriptCharset),n.src=e.url,n.onload=n.onreadystatechange=function(e,i){if(i||!n.readyState||/loaded|complete/.test(n.readyState))n.onload=n.onreadystatechange=null,r&&n.parentNode&&r.removeChild(n),n=t,i||o(200,"success")},r.insertBefore(n,r.firstChild)},abort:function(){n&&n.onload(0,1)}}}});var Hn,Bn=e.ActiveXObject?function(){for(var e in Hn)Hn[e](0,1)}:!1,jn=0;v.ajaxSettings.xhr=e.ActiveXObject?function(){return!this.isLocal&&Fn()||In()}:Fn,function(e){v.extend(v.support,{ajax:!!e,cors:!!e&&"withCredentials"in e})}(v.ajaxSettings.xhr()),v.support.ajax&&v.ajaxTransport(function(n){if(!n.crossDomain||v.support.cors){var r;return{send:function(i,s){var o,u,a=n.xhr();n.username?a.open(n.type,n.url,n.async,n.username,n.password):a.open(n.type,n.url,n.async);if(n.xhrFields)for(u in n.xhrFields)a[u]=n.xhrFields[u];n.mimeType&&a.overrideMimeType&&a.overrideMimeType(n.mimeType),!n.crossDomain&&!i["X-Requested-With"]&&(i["X-Requested-With"]="XMLHttpRequest");try{for(u in i)a.setRequestHeader(u,i[u])}catch(f){}a.send(n.hasContent&&n.data||null),r=function(e,i){var 
u,f,l,c,h;try{if(r&&(i||a.readyState===4)){r=t,o&&(a.onreadystatechange=v.noop,Bn&&delete Hn[o]);if(i)a.readyState!==4&&a.abort();else{u=a.status,l=a.getAllResponseHeaders(),c={},h=a.responseXML,h&&h.documentElement&&(c.xml=h);try{c.text=a.responseText}catch(p){}try{f=a.statusText}catch(p){f=""}!u&&n.isLocal&&!n.crossDomain?u=c.text?200:404:u===1223&&(u=204)}}}catch(d){i||s(-1,d)}c&&s(u,f,c,l)},n.async?a.readyState===4?setTimeout(r,0):(o=++jn,Bn&&(Hn||(Hn={},v(e).unload(Bn)),Hn[o]=r),a.onreadystatechange=r):r()},abort:function(){r&&r(0,1)}}}});var qn,Rn,Un=/^(?:toggle|show|hide)$/,zn=new RegExp("^(?:([-+])=|)("+m+")([a-z%]*)$","i"),Wn=/queueHooks$/,Xn=[Gn],Vn={"*":[function(e,t){var n,r,i=this.createTween(e,t),s=zn.exec(t),o=i.cur(),u=+o||0,a=1,f=20;if(s){n=+s[2],r=s[3]||(v.cssNumber[e]?"":"px");if(r!=="px"&&u){u=v.css(i.elem,e,!0)||n||1;do a=a||".5",u/=a,v.style(i.elem,e,u+r);while(a!==(a=i.cur()/o)&&a!==1&&--f)}i.unit=r,i.start=u,i.end=s[1]?u+(s[1]+1)*n:n}return i}]};v.Animation=v.extend(Kn,{tweener:function(e,t){v.isFunction(e)?(t=e,e=["*"]):e=e.split(" ");var n,r=0,i=e.length;for(;r-1,f={},l={},c,h;a?(l=i.position(),c=l.top,h=l.left):(c=parseFloat(o)||0,h=parseFloat(u)||0),v.isFunction(t)&&(t=t.call(e,n,s)),t.top!=null&&(f.top=t.top-s.top+c),t.left!=null&&(f.left=t.left-s.left+h),"using"in t?t.using.call(e,f):i.css(f)}},v.fn.extend({position:function(){if(!this[0])return;var e=this[0],t=this.offsetParent(),n=this.offset(),r=er.test(t[0].nodeName)?{top:0,left:0}:t.offset();return n.top-=parseFloat(v.css(e,"marginTop"))||0,n.left-=parseFloat(v.css(e,"marginLeft"))||0,r.top+=parseFloat(v.css(t[0],"borderTopWidth"))||0,r.left+=parseFloat(v.css(t[0],"borderLeftWidth"))||0,{top:n.top-r.top,left:n.left-r.left}},offsetParent:function(){return this.map(function(){var e=this.offsetParent||i.body;while(e&&!er.test(e.nodeName)&&v.css(e,"position")==="static")e=e.offsetParent;return e||i.body})}}),v.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(e,n){var r=/Y/.test(n);v.fn[e]=function(i){return v.access(this,function(e,i,s){var o=tr(e);if(s===t)return o?n in o?o[n]:o.document.documentElement[i]:e[i];o?o.scrollTo(r?v(o).scrollLeft():s,r?s:v(o).scrollTop()):e[i]=s},e,i,arguments.length,null)}}),v.each({Height:"height",Width:"width"},function(e,n){v.each({padding:"inner"+e,content:n,"":"outer"+e},function(r,i){v.fn[i]=function(i,s){var o=arguments.length&&(r||typeof i!="boolean"),u=r||(i===!0||s===!0?"margin":"border");return v.access(this,function(n,r,i){var s;return v.isWindow(n)?n.document.documentElement["client"+e]:n.nodeType===9?(s=n.documentElement,Math.max(n.body["scroll"+e],s["scroll"+e],n.body["offset"+e],s["offset"+e],s["client"+e])):i===t?v.css(n,r,i,u):v.style(n,r,i,u)},n,o?i:t,o,null)}})}),e.jQuery=e.$=v,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return v})})(window); \ No newline at end of file diff --git a/sphinx/themes/bizstyle/static/css3-mediaqueries_src.js b/sphinx/themes/bizstyle/static/css3-mediaqueries_src.js index e5a3bb0bb..65b44825d 100644 --- a/sphinx/themes/bizstyle/static/css3-mediaqueries_src.js +++ b/sphinx/themes/bizstyle/static/css3-mediaqueries_src.js @@ -1,1104 +1,1104 @@ -/* -css3-mediaqueries.js - CSS Helper and CSS3 Media Queries Enabler - -author: Wouter van der Graaf -version: 1.0 (20110330) -license: MIT -website: http://code.google.com/p/css3-mediaqueries-js/ - -W3C spec: http://www.w3.org/TR/css3-mediaqueries/ - -Note: use of embedded