diff --git a/AUTHORS b/AUTHORS index 96d08788f..f4ce16164 100644 --- a/AUTHORS +++ b/AUTHORS @@ -29,6 +29,7 @@ Other contributors, listed alphabetically, are: * Kevin Dunn -- MathJax extension * Josip Dzolonga -- coverage builder * Buck Evan -- dummy builder +* Matthew Fernandez -- todo extension fix * Hernan Grecco -- search improvements * Horst Gutmann -- internationalization support * Martin Hans -- autodoc improvements diff --git a/CHANGES b/CHANGES index 6e203777f..6725d1d9e 100644 --- a/CHANGES +++ b/CHANGES @@ -11,17 +11,23 @@ Incompatible changes package when ``--implicit-namespaces`` option given, not subdirectories of given directory. * #3929: apidoc: Move sphinx.apidoc to sphinx.ext.apidoc +* #4226: apidoc: Generate new style makefile (make-mode) +* #4274: sphinx-build returns 2 as an exit code on argument error Deprecated ---------- +* Using a string value for :confval:`html_sidebars` is deprecated; only list + values will be accepted in 2.0. + Features added -------------- * C++, handle ``decltype(auto)``. * #2406: C++, add proper parsing of expressions, including linking of identifiers. * C++, add a ``cpp:expr`` role for inserting inline C++ expressions or types. -* #4094: C++, allow empty template argument lists. +* C++, support explicit member instantiations with shorthand ``template`` prefix. +* C++, make function parameters linkable, like template params. * #3638: Allow changing the label of a reference to an equation using ``math_eqref_format`` @@ -37,6 +43,10 @@ Features added * #4052: viewcode: Sort before highlighting module code * #1448: qthelp: Add new config value; :confval:`qthelp_namespace` * #4140: html themes: Make body tag inheritable +* #4168: Improve zh search with jieba +* HTML themes can set up default sidebars through ``theme.conf`` +* #3160: html: Use ``<kbd>`` to represent ``:kbd:`` role +* #4212: autosummary: catch all exceptions when importing modules * #3991, #4080: Add :confval:`math_numfig` for equation numbering by section @@ -68,19 +78,24 @@ Features removed * ``sphinx.util.nodes.process_only_nodes()`` * LaTeX environment ``notice``, use ``sphinxadmonition`` instead * LaTeX ``\sphinxstylethead``, use ``\sphinxstyletheadfamily`` +* C++, support of function concepts. Thanks to mickk-on-cpp. + Bugs fixed ---------- * #3882: Update the order of files for HTMLHelp and QTHelp * #3962: sphinx-apidoc does not recognize implicit namespace packages correctly +* #4094: C++, allow empty template argument lists. +* C++, also hyperlink types in the name of declarations with qualified names. +* C++, do not add index entries for declarations inside concepts.
Testing -------- * Add support for docutils 0.14 -Release 1.6.5 (in development) +Release 1.6.6 (in development) ============================== Dependencies @@ -95,6 +110,36 @@ Deprecated Features added -------------- +* #4181: autodoc: Sort dictionary keys when possible +* ``VerbatimHighlightColor`` is a new + :ref:`LaTeX 'sphinxsetup' ` key (refs: #4285) +* Easier customizability of LaTeX macros involved in rendering of code-blocks + +Bugs fixed +---------- + +* #4206: latex: reST label between paragraphs loses paragraph break +* #4231: html: Apply fixFirefoxAnchorBug only under Firefox +* #4221: napoleon depends on autodoc, but users need to load it manually +* #2298: automodule fails to document a class attribute +* #4099: C++: properly link class reference to class from inside constructor +* #4267: PDF build broken by Unicode U+2116 NUMERO SIGN character +* #4249: PDF output: Pygments error highlighting increases line spacing in + code blocks +* #1238: Support ``:emphasize-lines:`` in PDF output +* #4279: Sphinx crashes with pickling error when run with multiple processes and + remote image +* #1421: Respect the quiet flag in sphinx-quickstart + +Testing +-------- + +Release 1.6.5 (released Oct 23, 2017) +===================================== + +Features added +-------------- + * #4107: Make searchtools.js compatible with pre-Sphinx1.5 templates * #4112: Don't override the smart_quotes setting if it was already set * #4125: Display reference texts of original and translated passages on @@ -112,9 +157,16 @@ Bugs fixed * #4108: Search word highlighting breaks SVG images * #3692: Unable to build HTML if writing .buildinfo failed * #4152: HTML writer crashes if a field list is placed on top of the document - -Testing --------- +* #4063: Sphinx crashes when labeling directive ``.. todolist::`` +* #4134: [doc] :file:`docutils.conf` is not documented explicitly +* #4169: Chinese language doesn't trigger Chinese search automatically +* #1020: ext.todo todolist not linking to the page in pdflatex +* #3965: New quickstart generates wrong SPHINXBUILD in Makefile +* #3739: ``:module:`` option is ignored at content of pyobjects +* #4149: Documentation: Help choosing :confval:`latex_engine` +* #4090: [doc] :confval:`latex_additional_files` with extra LaTeX macros should + not use ``.tex`` extension +* Failed to convert reST parser error to warning (refs: #4132) Release 1.6.4 (released Sep 26, 2017) ===================================== diff --git a/Makefile b/Makefile index e5a5fb305..5b3d5aad4 100644 --- a/Makefile +++ b/Makefile @@ -65,7 +65,7 @@ pylint: .PHONY: reindent reindent: - @$(PYTHON) utils/reindent.py -r -n . + @echo "This target no longer does anything and will be removed imminently" .PHONY: test test: diff --git a/doc/Makefile b/doc/Makefile index d0e4e297b..c54236be0 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -3,7 +3,7 @@ # You can set these variables from the command line. SPHINXOPTS = -SPHINXBUILD = python ../sphinx-build.py +SPHINXBUILD = python ../sphinx/cmd/build.py SPHINXPROJ = sphinx SOURCEDIR = . BUILDDIR = _build diff --git a/doc/config.rst b/doc/config.rst index 9499c6276..415a2298a 100644 --- a/doc/config.rst +++ b/doc/config.rst @@ -9,8 +9,17 @@ The build configuration file :synopsis: Build configuration file. The :term:`configuration directory` must contain a file named :file:`conf.py`. -This file (containing Python code) is called the "build configuration file" and -contains all configuration needed to customize Sphinx input and output behavior. 
+This file (containing Python code) is called the "build configuration file" +and contains (almost) all configuration needed to customize Sphinx input +and output behavior. + + An optional file `docutils.conf`_ can be added to the configuration + directory to adjust `Docutils`_ configuration if not otherwise overridden or + set by Sphinx. + + .. _`docutils`: http://docutils.sourceforge.net/ + + .. _`docutils.conf`: http://docutils.sourceforge.net/docs/user/config.html The configuration file is executed as Python code at build time (using :func:`execfile`, and with the current directory set to its containing @@ -766,6 +775,18 @@ that use Sphinx's HTMLWriter class. The empty string is equivalent to ``'%b %d, %Y'`` (or a locale-dependent equivalent). +.. confval:: html_use_smartypants + + If true, `SmartyPants `_ + will be used to convert quotes and dashes to typographically correct + entities. Default: ``True``. + + .. deprecated:: 1.6 + To disable or customize smart quotes, use the Docutils configuration file + (``docutils.conf``) instead and set its `smart_quotes option`_ there. + + .. _`smart_quotes option`: http://docutils.sourceforge.net/docs/user/config.html#smart-quotes + .. confval:: html_add_permalinks Sphinx will add "permalinks" for each heading and description environment as @@ -796,14 +817,19 @@ that use Sphinx's HTMLWriter class. to include. If all or some of the default sidebars are to be included, they must be put into this list as well. - The default sidebars (for documents that don't match any pattern) are: - ``['localtoc.html', 'relations.html', 'sourcelink.html', + The default sidebars (for documents that don't match any pattern) are + defined by the theme itself. The built-in themes use these templates by + default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 'searchbox.html']``. * If a value is a single string, it specifies a custom sidebar to be added between the ``'sourcelink.html'`` and ``'searchbox.html'`` entries. This is for compatibility with Sphinx versions before 1.0. + .. deprecated:: 1.7 + + Support for a single string value of ``html_sidebars`` will be removed in 2.0. + Builtin sidebar templates that can be rendered are: * **localtoc.html** -- a fine-grained table of contents of the current @@ -1522,6 +1548,25 @@ These options influence LaTeX output. See further :doc:`latex`. * ``'lualatex'`` -- LuaLaTeX * ``'platex'`` -- pLaTeX (default if :confval:`language` is ``'ja'``) + PDFLaTeX's support for Unicode characters covers those from the document + language (the LaTeX ``babel`` and ``inputenc`` packages map them to glyph + slots in the document font, using encodings that each allow only 256 + characters; by default Sphinx uses the ``times`` package, except for + Cyrillic languages), but stray characters from other scripts or special + symbols may require adding extra LaTeX packages or macros to the LaTeX + preamble. + + If your project uses such extra Unicode characters, switching the engine to + XeLaTeX or LuaLaTeX often provides a quick fix. These engines only work with + UTF-8 encoded sources and can (in fact, should) use OpenType fonts, either + from the system or the TeX install tree. With these engines, recent LaTeX + releases default to the Latin Modern OpenType font, which has good coverage + of Latin and Cyrillic scripts (it is provided by any standard LaTeX + installation), and Sphinx does not modify this default.
Refer to the documentation of the + LaTeX ``polyglossia`` package to see how to instruct LaTeX to use some + other OpenType font if Unicode coverage proves insufficient (or use + directly ``\setmainfont`` et. al. as in :ref:`this example `.) + .. confval:: latex_documents This value determines how to group the document tree into LaTeX source files. diff --git a/doc/domains.rst b/doc/domains.rst index 1697c6605..5bed02cf4 100644 --- a/doc/domains.rst +++ b/doc/domains.rst @@ -720,13 +720,13 @@ a visibility statement (``public``, ``private`` or ``protected``). .. rst:directive:: .. cpp:concept:: template-parameter-list name - .. cpp:concept:: template-parameter-list name() .. warning:: The support for concepts is experimental. It is based on the - Concepts Technical Specification, and the features may change as the TS evolves. + current draft standard and the Concepts Technical Specification. + The features may change as they evolve. - Describe a variable concept or a function concept. Both must have exactly 1 - template parameter list. The name may be a nested name. Examples:: + Describe a concept. It must have exactly 1 template parameter list. The name may be a + nested name. Example:: .. cpp:concept:: template std::Iterator @@ -744,12 +744,7 @@ a visibility statement (``public``, ``private`` or ``protected``). - :cpp:expr:`*r`, when :cpp:expr:`r` is dereferenceable. - :cpp:expr:`++r`, with return type :cpp:expr:`It&`, when :cpp:expr:`r` is incrementable. - .. cpp:concept:: template std::Container() - - Holder of elements, to which it can provide access via - :cpp:concept:`Iterator` s. - - They will render as follows: + This will render as follows: .. cpp:concept:: template std::Iterator @@ -767,11 +762,6 @@ a visibility statement (``public``, ``private`` or ``protected``). - :cpp:expr:`*r`, when :cpp:expr:`r` is dereferenceable. - :cpp:expr:`++r`, with return type :cpp:expr:`It&`, when :cpp:expr:`r` is incrementable. - .. cpp:concept:: template std::Container() - - Holder of elements, to which it can provide access via - :cpp:concept:`Iterator` s. - Options ....... @@ -785,8 +775,9 @@ Some directives support options: Constrained Templates ~~~~~~~~~~~~~~~~~~~~~ -.. warning:: The support for constrained templates is experimental. It is based on the - Concepts Technical Specification, and the features may change as the TS evolves. +.. warning:: The support for concepts is experimental. It is based on the + current draft standard and the Concepts Technical Specification. + The features may change as they evolve. .. note:: Sphinx does not currently support ``requires`` clauses. diff --git a/doc/ext/math.rst b/doc/ext/math.rst index 0fb3a17cf..5fdf842ee 100644 --- a/doc/ext/math.rst +++ b/doc/ext/math.rst @@ -93,7 +93,7 @@ or use Python raw strings (``r"raw"``). Normally, equations are not numbered. If you want your equation to get a number, use the ``label`` option. When given, it selects an internal label for the equation, by which it can be cross-referenced, and causes an equation - number to be issued. See :rst:role:`eqref` for an example. The numbering + number to be issued. See :rst:role:`eq` for an example. The numbering style depends on the output format. 
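For instance, a minimal sketch of a labeled equation that is then cross-referenced with the ``eq`` role might look like this (the label name ``euler`` is purely illustrative)::

       .. math:: e^{i\pi} + 1 = 0
          :label: euler

       As shown in :eq:`euler`, the fundamental constants are related.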
There is also an option ``nowrap`` that prevents any wrapping of the given diff --git a/doc/ext/napoleon.rst b/doc/ext/napoleon.rst index ea3e4042f..f7e9081f7 100644 --- a/doc/ext/napoleon.rst +++ b/doc/ext/napoleon.rst @@ -68,8 +68,8 @@ Getting Started # conf.py - # Add autodoc and napoleon to the extensions list - extensions = ['sphinx.ext.autodoc', 'sphinx.ext.napoleon'] + # Add napoleon to the extensions list + extensions = ['sphinx.ext.napoleon'] 2. Use `sphinx-apidoc` to build your API documentation:: @@ -246,13 +246,12 @@ Configuration Listed below are all the settings used by napoleon and their default values. These settings can be changed in the Sphinx `conf.py` file. Make -sure that both "sphinx.ext.autodoc" and "sphinx.ext.napoleon" are -enabled in `conf.py`:: +sure that "sphinx.ext.napoleon" is enabled in `conf.py`:: # conf.py # Add any Sphinx extension module names here, as strings - extensions = ['sphinx.ext.autodoc', 'sphinx.ext.napoleon'] + extensions = ['sphinx.ext.napoleon'] # Napoleon settings napoleon_google_docstring = True diff --git a/doc/latex.rst b/doc/latex.rst index ff79cc574..87117c164 100644 --- a/doc/latex.rst +++ b/doc/latex.rst @@ -29,6 +29,7 @@ The *latex* target does not benefit from pre-prepared themes like the cautionBgColor={named}{LightCyan}} \relax +.. _latex-basic: Basic customization ------------------- @@ -61,17 +62,17 @@ It is achieved via usage of the .. highlight:: latex If the size of the ``'preamble'`` contents becomes inconvenient, one may move -all needed macros into some file :file:`mystyle.tex` of the project source +all needed macros into some file :file:`mystyle.tex.txt` of the project source repertory, and get LaTeX to import it at run time:: - 'preamble': r'\input{mystyle.tex}', + 'preamble': r'\input{mystyle.tex.txt}', # or, if the \ProvidesPackage LaTeX macro is used in a file mystyle.sty 'preamble': r'\usepackage{mystyle}', It is needed to set appropriately :confval:`latex_additional_files`, for example:: - latex_additional_files = ["mystyle.tex"] + latex_additional_files = ["mystyle.sty"] .. _latexsphinxsetup: @@ -266,6 +267,16 @@ The available styling options ``VerbatimBorderColor`` default ``{rgb}{0,0,0}``. The frame color, defaults to black. +``VerbatimHighlightColor`` + default ``{rgb}{0.878,1,1}``. The color for highlighted lines. + + .. versionadded:: 1.6.6 + +.. note:: + + Starting with this colour key, and for all others coming next, the actual + names declared to "color" or "xcolor" are prefixed with "sphinx". + ``verbatimsep`` default ``\fboxsep``. The separation between code lines and the frame. @@ -287,11 +298,6 @@ The available styling options default ``{rgb}{0,0,0}`` (black). The colour for the two horizontal rules used by Sphinx in LaTeX for styling a :dudir:`note` type admonition. -.. note:: - - The actual colour names declared to "color" or "xcolor" are prefixed with - "sphinx". - ``noteborder``, ``hintborder``, ``importantborder``, ``tipborder`` default ``0.5pt``. The width of the two horizontal rules. @@ -440,6 +446,11 @@ Environments .. versionadded:: 1.5 options ``verbatimwithframe``, ``verbatimwrapslines``, ``verbatimsep``, ``verbatimborder``. + .. versionadded:: 1.6.6 + support for ``:emphasize-lines:`` option + .. versionadded:: 1.6.6 + easier customizability of the formatting via exposed to user LaTeX macros + such as ``\sphinxVerbatimHighlightLine``. 
- the bibliography uses ``sphinxthebibliography`` and the Python Module index as well as the general index both use ``sphinxtheindex``; these environments are wrappers of the ``thebibliography`` and respectively ``theindex`` diff --git a/doc/man/sphinx-build.rst b/doc/man/sphinx-build.rst index 4866282d4..46f213989 100644 --- a/doc/man/sphinx-build.rst +++ b/doc/man/sphinx-build.rst @@ -99,17 +99,16 @@ Options :ref:`builders `, the following build pipelines are available: **latexpdf** - Build LaTeX files and run them through :program:`pdflatex`. - - **latexpdfja** - Build LaTeX files and run them through :program:`platex/dvipdfmx`. - We recommend using ``latexpdf`` instead. + Build LaTeX files and run them through :program:`pdflatex`, or through the + program selected by the :confval:`latex_engine` setting. + If :confval:`language` is set to ``'ja'``, the :program:`platex/dvipdfmx` + LaTeX-to-PDF pipeline is used automatically. **info** Build Texinfo files and run them through :program:`makeinfo`. .. important:: - Sphinx only recognizes the ``-M`` option if it is placed first. + Sphinx only recognizes the ``-M`` option if it is placed first. .. versionadded:: 1.2.1 diff --git a/doc/markup/code.rst b/doc/markup/code.rst index 759008739..3b14bd6e2 100644 --- a/doc/markup/code.rst +++ b/doc/markup/code.rst @@ -121,6 +121,8 @@ emphasize particular lines:: .. versionchanged:: 1.3 ``lineno-start`` has been added. +.. versionchanged:: 1.6.6 + LaTeX supports the ``emphasize-lines`` option. Includes ^^^^^^^^ @@ -188,8 +190,8 @@ Includes ``lines``, the first allowed line having by convention the line number ``1``. When lines have been selected in any of the ways described above, the - line numbers in ``emphasize-lines`` also refer to the selection, with the - first selected line having number ``1``. + line numbers in ``emphasize-lines`` refer to those selected lines, counted + consecutively starting at ``1``. When specifying particular parts of a file to display, it can be useful to display the original line numbers. This can be done using the diff --git a/doc/theming.rst b/doc/theming.rst index 01f72fde9..34bca9607 100644 --- a/doc/theming.rst +++ b/doc/theming.rst @@ -276,6 +276,7 @@ Python :mod:`ConfigParser` module) and has the following structure: inherit = base theme stylesheet = main CSS name pygments_style = stylename + sidebars = localtoc.html, relations.html, sourcelink.html, searchbox.html [options] variable = default value @@ -295,10 +296,16 @@ Python :mod:`ConfigParser` module) and has the following structure: highlighting. This can be overridden by the user in the :confval:`pygments_style` config value. +* The **sidebars** setting gives the comma-separated list of sidebar templates + for constructing sidebars. This can be overridden by the user in the + :confval:`html_sidebars` config value. + * The **options** section contains pairs of variable names and default values. These options can be overridden by the user in :confval:`html_theme_options` and are accessible from all templates as ``theme_<name>``. +.. versionadded:: 1.7 + sidebar settings ..
_distribute-your-theme: diff --git a/setup.cfg b/setup.cfg index 38a697edc..cb6887fc3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,7 +28,7 @@ license_file = LICENSE [flake8] max-line-length = 95 -ignore = E116,E241,E251 +ignore = E116,E241,E251,E741 exclude = .git,.tox,.venv,tests/*,build/*,doc/_build/*,sphinx/search/*,sphinx/pycode/pgen2/*,doc/ext/example*.py [mypy] diff --git a/setup.py b/setup.py index a4dd6b078..10a513166 100644 --- a/setup.py +++ b/setup.py @@ -68,13 +68,13 @@ extras_require = { 'whoosh>=2.0', ], 'test': [ + 'mock', 'pytest', 'pytest-cov', 'html5lib', ], 'test:python_version<"3"': [ 'enum34', - 'mock', ], 'test:python_version>="3"': [ 'mypy', diff --git a/sphinx-apidoc.py b/sphinx-apidoc.py deleted file mode 100755 index eb86e0b12..000000000 --- a/sphinx-apidoc.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -""" - Sphinx - Python documentation toolchain - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import sys - -if __name__ == '__main__': - from sphinx.ext.apidoc import main - sys.exit(main(sys.argv[1:])) diff --git a/sphinx-autogen.py b/sphinx-autogen.py deleted file mode 100755 index c9a78d158..000000000 --- a/sphinx-autogen.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -""" - Sphinx - Python documentation toolchain - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import sys - -if __name__ == '__main__': - from sphinx.ext.autosummary.generate import main - sys.exit(main(sys.argv[1:])) diff --git a/sphinx-build.py b/sphinx-build.py deleted file mode 100755 index e8116fefc..000000000 --- a/sphinx-build.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -""" - Sphinx - Python documentation toolchain - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import sys - -if __name__ == '__main__': - from sphinx import main - sys.exit(main(sys.argv[1:])) diff --git a/sphinx-quickstart.py b/sphinx-quickstart.py deleted file mode 100755 index 3caa6590f..000000000 --- a/sphinx-quickstart.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -""" - Sphinx - Python documentation toolchain - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - :copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -import sys - -if __name__ == '__main__': - from sphinx.cmd.quickstart import main - sys.exit(main(sys.argv[1:])) diff --git a/sphinx/application.py b/sphinx/application.py index 209c73202..05d302c81 100644 --- a/sphinx/application.py +++ b/sphinx/application.py @@ -83,6 +83,7 @@ builtin_extensions = ( 'sphinx.directives.code', 'sphinx.directives.other', 'sphinx.directives.patches', + 'sphinx.io', 'sphinx.parsers', 'sphinx.roles', 'sphinx.transforms.post_transforms', diff --git a/sphinx/builders/html.py b/sphinx/builders/html.py index 6313eef61..1c51f07fa 100644 --- a/sphinx/builders/html.py +++ b/sphinx/builders/html.py @@ -274,7 +274,7 @@ class StandaloneHTMLBuilder(Builder): # type: () -> Iterator[unicode] cfgdict = dict((confval.name, confval.value) for confval in self.config.filter('html')) self.config_hash = get_stable_hash(cfgdict) - self.tags_hash = get_stable_hash(sorted(self.tags)) # type: ignore + self.tags_hash = get_stable_hash(sorted(self.tags)) old_config_hash = old_tags_hash = '' try: with open(path.join(self.outdir, '.buildinfo')) as fp: @@ -864,9 +864,21 @@ class StandaloneHTMLBuilder(Builder): def has_wildcard(pattern): # type: (unicode) -> bool return any(char in pattern for char in '*?[') - sidebars = None + sidebars = self.theme.get_config('theme', 'sidebars', None) matched = None customsidebar = None + + # default sidebars settings for selected theme + theme_default_sidebars = self.theme.get_config('theme', 'sidebars', None) + if theme_default_sidebars: + sidebars = [name.strip() for name in theme_default_sidebars.split(',')] + elif self.theme.name == 'alabaster': + # provide default settings for alabaster (for compatibility) + # Note: this will be removed before Sphinx-2.0 + sidebars = ['about.html', 'navigation.html', 'relation.html', + 'searchbox.html', 'donate.html'] + + # user sidebar settings for pattern, patsidebars in iteritems(self.config.html_sidebars): if patmatch(pagename, pattern): if matched: @@ -881,6 +893,7 @@ class StandaloneHTMLBuilder(Builder): continue matched = pattern sidebars = patsidebars + if sidebars is None: # keep defaults pass @@ -888,6 +901,11 @@ class StandaloneHTMLBuilder(Builder): # 0.x compatible mode: insert custom sidebar before searchbox customsidebar = sidebars sidebars = None + warnings.warn('Now html_sidebars only allows list of sidebar ' + 'templates as a value. 
Support for a string value ' + 'will be removed at Sphinx-2.0.', + RemovedInSphinx20Warning) + ctx['sidebars'] = sidebars ctx['customsidebar'] = customsidebar diff --git a/sphinx/builders/qthelp.py b/sphinx/builders/qthelp.py index 12c28b1a3..9bcfe9811 100644 --- a/sphinx/builders/qthelp.py +++ b/sphinx/builders/qthelp.py @@ -269,7 +269,7 @@ class QtHelpBuilder(StandaloneHTMLBuilder): link = node['refuri'] title = htmlescape(node.astext()).replace('"', '"') item = section_template % {'title': title, 'ref': link} - item = u' ' * 4 * indentlevel + item # type: ignore + item = u' ' * 4 * indentlevel + item parts.append(item.encode('ascii', 'xmlcharrefreplace')) elif isinstance(node, nodes.bullet_list): for subnode in node: diff --git a/sphinx/cmd/quickstart.py b/sphinx/cmd/quickstart.py index ac0859c31..80e9e3dd4 100644 --- a/sphinx/cmd/quickstart.py +++ b/sphinx/cmd/quickstart.py @@ -11,13 +11,14 @@ from __future__ import print_function from __future__ import absolute_import -import re +import argparse import os +import re import sys -import optparse import time -from os import path +from collections import OrderedDict from io import open +from os import path # try to import readline, unix specific enhancement try: @@ -44,11 +45,26 @@ from sphinx.util import texescape if False: # For type annotation - from typing import Any, Callable, Dict, List, Pattern # NOQA + from typing import Any, Callable, Dict, List, Pattern, Union # NOQA TERM_ENCODING = getattr(sys.stdin, 'encoding', None) -DEFAULT_VALUE = { +EXTENSIONS = OrderedDict([ + ('autodoc', 'automatically insert docstrings from modules'), + ('doctest', 'automatically test code snippets in doctest blocks'), + ('intersphinx', 'link between Sphinx documentation of different projects'), + ('todo', 'write "todo" entries that can be shown or hidden on build'), + ('coverage', 'checks for documentation coverage'), + ('imgmath', 'include math, rendered as PNG or SVG images'), + ('mathjax', 'include math, rendered in the browser by MathJax'), + ('ifconfig', 'conditional inclusion of content based on config values'), + ('viewcode', + 'include links to the source code of documented Python objects'), + ('githubpages', + 'create .nojekyll file to publish the document on GitHub pages'), +]) + +DEFAULTS = { 'path': '.', 'sep': False, 'dot': '_', @@ -56,16 +72,10 @@ DEFAULT_VALUE = { 'suffix': '.rst', 'master': 'index', 'epub': False, - 'ext_autodoc': False, - 'ext_doctest': False, - 'ext_todo': False, 'makefile': True, 'batchfile': True, } -EXTENSIONS = ('autodoc', 'doctest', 'intersphinx', 'todo', 'coverage', - 'imgmath', 'mathjax', 'ifconfig', 'viewcode', 'githubpages') - PROMPT_PREFIX = '> ' @@ -138,29 +148,29 @@ def ok(x): def term_decode(text): - # type: (unicode) -> unicode + # type: (Union[bytes,unicode]) -> unicode if isinstance(text, text_type): return text - # for Python 2.x, try to get a Unicode string out of it - if text.decode('ascii', 'replace').encode('ascii', 'replace') == text: - return text - + # Use the known encoding, if possible if TERM_ENCODING: - text = text.decode(TERM_ENCODING) - else: - print(turquoise('* Note: non-ASCII characters entered ' - 'and terminal encoding unknown -- assuming ' - 'UTF-8 or Latin-1.')) - try: - text = text.decode('utf-8') - except UnicodeDecodeError: - text = text.decode('latin1') - return text + return text.decode(TERM_ENCODING) + + # If ascii is safe, use it with no warning + if text.decode('ascii', 'replace').encode('ascii', 'replace') == text: + return text.decode('ascii') + + print(turquoise('* Note: 
non-ASCII characters entered ' + 'and terminal encoding unknown -- assuming ' + 'UTF-8 or Latin-1.')) + try: + return text.decode('utf-8') + except UnicodeDecodeError: + return text.decode('latin1') -def do_prompt(d, key, text, default=None, validator=nonempty): - # type: (Dict, unicode, unicode, unicode, Callable[[unicode], Any]) -> None +def do_prompt(text, default=None, validator=nonempty): + # type: (unicode, unicode, Callable[[unicode], Any]) -> Union[unicode, bool] while True: if default is not None: prompt = PROMPT_PREFIX + '%s [%s]: ' % (text, default) # type: unicode @@ -191,7 +201,7 @@ def do_prompt(d, key, text, default=None, validator=nonempty): print(red('* ' + str(err))) continue break - d[key] = x + return x def convert_python_source(source, rex=re.compile(r"[uU]('.*?')")): @@ -235,7 +245,7 @@ def ask_user(d): * suffix: source file suffix * master: master document name * epub: use epub (bool) - * ext_*: extensions to use (bools) + * extensions: extensions to use (list) * makefile: make Makefile * batchfile: make command file """ @@ -251,7 +261,7 @@ Selected root path: %s''' % d['path'])) else: print(''' Enter the root path for documentation.''') - do_prompt(d, 'path', 'Root path for the documentation', '.', is_path) + d['path'] = do_prompt('Root path for the documentation', '.', is_path) while path.isfile(path.join(d['path'], 'conf.py')) or \ path.isfile(path.join(d['path'], 'source', 'conf.py')): @@ -260,8 +270,8 @@ Enter the root path for documentation.''') 'selected root path.')) print('sphinx-quickstart will not overwrite existing Sphinx projects.') print() - do_prompt(d, 'path', 'Please enter a new root path (or just Enter ' - 'to exit)', '', is_path) + d['path'] = do_prompt('Please enter a new root path (or just Enter ' + 'to exit)', '', is_path) if not d['path']: sys.exit(1) @@ -270,22 +280,22 @@ Enter the root path for documentation.''') You have two options for placing the build directory for Sphinx output. Either, you use a directory "_build" within the root path, or you separate "source" and "build" directories within the root path.''') - do_prompt(d, 'sep', 'Separate source and build directories (y/n)', 'n', - boolean) + d['sep'] = do_prompt('Separate source and build directories (y/n)', + 'n', boolean) if 'dot' not in d: print(''' Inside the root directory, two more directories will be created; "_templates" for custom HTML templates and "_static" for custom stylesheets and other static files. You can enter another prefix (such as ".") to replace the underscore.''') - do_prompt(d, 'dot', 'Name prefix for templates and static dir', '_', ok) + d['dot'] = do_prompt('Name prefix for templates and static dir', '_', ok) if 'project' not in d: print(''' The project name will occur in several places in the built documentation.''') - do_prompt(d, 'project', 'Project name') + d['project'] = do_prompt('Project name') if 'author' not in d: - do_prompt(d, 'author', 'Author name(s)') + d['author'] = do_prompt('Author name(s)') if 'version' not in d: print(''' @@ -294,9 +304,9 @@ software. Each version can have multiple releases. For example, for Python the version is something like 2.5 or 3.0, while the release is something like 2.5.1 or 3.0a1. 
If you don't need this dual structure, just set both to the same value.''') - do_prompt(d, 'version', 'Project version', '', allow_empty) + d['version'] = do_prompt('Project version', '', allow_empty) if 'release' not in d: - do_prompt(d, 'release', 'Project release', d['version'], allow_empty) + d['release'] = do_prompt('Project release', d['version'], allow_empty) if 'language' not in d: print(''' @@ -306,7 +316,7 @@ translate text that it generates into that language. For a list of supported codes, see http://sphinx-doc.org/config.html#confval-language.''') - do_prompt(d, 'language', 'Project language', 'en') + d['language'] = do_prompt('Project language', 'en') if d['language'] == 'en': d['language'] = None @@ -314,7 +324,7 @@ http://sphinx-doc.org/config.html#confval-language.''') print(''' The file name suffix for source files. Commonly, this is either ".txt" or ".rst". Only files with this suffix are considered documents.''') - do_prompt(d, 'suffix', 'Source file suffix', '.rst', suffix) + d['suffix'] = do_prompt('Source file suffix', '.rst', suffix) if 'master' not in d: print(''' @@ -322,8 +332,8 @@ One document is special in that it is considered the top node of the "contents tree", that is, it is the root of the hierarchical structure of the documents. Normally, this is "index", but if your "index" document is a custom template, you can also set this to another filename.''') - do_prompt(d, 'master', 'Name of your master document (without suffix)', - 'index') + d['master'] = do_prompt('Name of your master document (without suffix)', + 'index') while path.isfile(path.join(d['path'], d['master'] + d['suffix'])) or \ path.isfile(path.join(d['path'], 'source', d['master'] + d['suffix'])): @@ -332,65 +342,40 @@ document is a custom template, you can also set this to another filename.''') 'selected root path.' 
% (d['master'] + d['suffix']))) print('sphinx-quickstart will not overwrite the existing file.') print() - do_prompt(d, 'master', 'Please enter a new file name, or rename the ' - 'existing file and press Enter', d['master']) + d['master'] = do_prompt('Please enter a new file name, or rename the ' + 'existing file and press Enter', d['master']) if 'epub' not in d: print(''' Sphinx can also add configuration for epub output:''') - do_prompt(d, 'epub', 'Do you want to use the epub builder (y/n)', - 'n', boolean) + d['epub'] = do_prompt('Do you want to use the epub builder (y/n)', + 'n', boolean) - if 'ext_autodoc' not in d: - print(''' -Please indicate if you want to use one of the following Sphinx extensions:''') - do_prompt(d, 'ext_autodoc', 'autodoc: automatically insert docstrings ' - 'from modules (y/n)', 'n', boolean) - if 'ext_doctest' not in d: - do_prompt(d, 'ext_doctest', 'doctest: automatically test code snippets ' - 'in doctest blocks (y/n)', 'n', boolean) - if 'ext_intersphinx' not in d: - do_prompt(d, 'ext_intersphinx', 'intersphinx: link between Sphinx ' - 'documentation of different projects (y/n)', 'n', boolean) - if 'ext_todo' not in d: - do_prompt(d, 'ext_todo', 'todo: write "todo" entries ' - 'that can be shown or hidden on build (y/n)', 'n', boolean) - if 'ext_coverage' not in d: - do_prompt(d, 'ext_coverage', 'coverage: checks for documentation ' - 'coverage (y/n)', 'n', boolean) - if 'ext_imgmath' not in d: - do_prompt(d, 'ext_imgmath', 'imgmath: include math, rendered ' - 'as PNG or SVG images (y/n)', 'n', boolean) - if 'ext_mathjax' not in d: - do_prompt(d, 'ext_mathjax', 'mathjax: include math, rendered in the ' - 'browser by MathJax (y/n)', 'n', boolean) - if d['ext_imgmath'] and d['ext_mathjax']: - print('''Note: imgmath and mathjax cannot be enabled at the same time. -imgmath has been deselected.''') - d['ext_imgmath'] = False - if 'ext_ifconfig' not in d: - do_prompt(d, 'ext_ifconfig', 'ifconfig: conditional inclusion of ' - 'content based on config values (y/n)', 'n', boolean) - if 'ext_viewcode' not in d: - do_prompt(d, 'ext_viewcode', 'viewcode: include links to the source ' - 'code of documented Python objects (y/n)', 'n', boolean) - if 'ext_githubpages' not in d: - do_prompt(d, 'ext_githubpages', 'githubpages: create .nojekyll file ' - 'to publish the document on GitHub pages (y/n)', 'n', boolean) + if 'extensions' not in d: + print('Indicate which of the following Sphinx extensions should be ' + 'enabled:') + d['extensions'] = [] + for name, description in EXTENSIONS.items(): + if do_prompt('%s: %s (y/n)' % (name, description), 'n', boolean): + d['extensions'].append('sphinx.ext.%s' % name) - if 'no_makefile' in d: - d['makefile'] = False - elif 'makefile' not in d: + # Handle conflicting options + if set(['sphinx.ext.imgmath', 'sphinx.ext.mathjax']).issubset( + d['extensions']): + print('Note: imgmath and mathjax cannot be enabled at the same ' + 'time. imgmath has been deselected.') + d['extensions'].remove('sphinx.ext.imgmath') + + if 'makefile' not in d: print(''' A Makefile and a Windows command file can be generated for you so that you only have to run e.g. `make html' instead of invoking sphinx-build directly.''') - do_prompt(d, 'makefile', 'Create Makefile? (y/n)', 'y', boolean) - if 'no_batchfile' in d: - d['batchfile'] = False - elif 'batchfile' not in d: - do_prompt(d, 'batchfile', 'Create Windows command file? (y/n)', - 'y', boolean) + d['makefile'] = do_prompt('Create Makefile? 
(y/n)', 'y', boolean) + + if 'batchfile' not in d: + d['batchfile'] = do_prompt('Create Windows command file? (y/n)', + 'y', boolean) print() @@ -400,7 +385,6 @@ def generate(d, overwrite=True, silent=False, templatedir=None): template = QuickstartRenderer(templatedir=templatedir) texescape.init() - indent = ' ' * 4 if 'mastertoctree' not in d: d['mastertoctree'] = '' @@ -414,10 +398,6 @@ def generate(d, overwrite=True, silent=False, templatedir=None): d['now'] = time.asctime() d['project_underline'] = column_width(d['project']) * '=' d.setdefault('extensions', []) - for name in EXTENSIONS: - if d.get('ext_' + name): - d['extensions'].append('sphinx.ext.' + name) - d['extensions'] = (',\n' + indent).join(repr(name) for name in d['extensions']) d['copyright'] = time.strftime('%Y') + ', ' + d['author'] d['author_texescaped'] = text_type(d['author']).\ translate(texescape.tex_escape_map) @@ -455,11 +435,13 @@ def generate(d, overwrite=True, silent=False, templatedir=None): def write_file(fpath, content, newline=None): # type: (unicode, unicode, unicode) -> None if overwrite or not path.isfile(fpath): - print('Creating file %s.' % fpath) + if 'quiet' not in d: + print('Creating file %s.' % fpath) with open(fpath, 'wt', encoding='utf-8', newline=newline) as f: f.write(content) else: - print('File %s already exists, skipping.' % fpath) + if 'quiet' not in d: + print('File %s already exists, skipping.' % fpath) conf_path = os.path.join(templatedir, 'conf.py_t') if templatedir else None if not conf_path or not path.isfile(conf_path): @@ -509,23 +491,6 @@ where "builder" is one of the supported builders, e.g. html, latex or linkcheck. ''') -def usage(argv, msg=None): - # type: (List[unicode], unicode) -> None - if msg: - print(msg, file=sys.stderr) - print(file=sys.stderr) - - -USAGE = """\ -Sphinx v%s -Usage: %%prog [options] [projectdir] -""" % __display_version__ - -EPILOG = """\ -For more information, visit . -""" - - def valid_dir(d): # type: (Dict) -> bool dir = d['path'] @@ -556,18 +521,86 @@ def valid_dir(d): return True -class MyFormatter(optparse.IndentedHelpFormatter): - def format_usage(self, usage): # type: ignore - # type: (str) -> str - return usage +def get_parser(): + # type: () -> argparse.ArgumentParser + parser = argparse.ArgumentParser( + usage='%(prog)s [OPTIONS] ', + epilog="For more information, visit .", + description=""" +Generate required files for a Sphinx project. - def format_help(self, formatter): - result = [] - if self.description: - result.append(self.format_description(formatter)) - if self.option_list: - result.append(self.format_option_help(formatter)) - return "\n".join(result) +sphinx-quickstart is an interactive tool that asks some questions about your +project and then generates a complete documentation directory and sample +Makefile to be used with sphinx-build. 
+""") + + parser.add_argument('-q', '--quiet', action='store_true', dest='quiet', + default=False, + help='quiet mode') + parser.add_argument('--version', action='version', dest='show_version', + version='%%(prog)s %s' % __display_version__) + + parser.add_argument('path', metavar='PROJECT_DIR', default='.', + help='output path') + + group = parser.add_argument_group('Structure options') + group.add_argument('--sep', action='store_true', + help='if specified, separate source and build dirs') + group.add_argument('--dot', metavar='DOT', + help='replacement for dot in _templates etc.') + + group = parser.add_argument_group('Project basic options') + group.add_argument('-p', '--project', metavar='PROJECT', dest='project', + help='project name') + group.add_argument('-a', '--author', metavar='AUTHOR', dest='author', + help='author names') + group.add_argument('-v', metavar='VERSION', dest='version', default='', + help='version of project') + group.add_argument('-r', '--release', metavar='RELEASE', dest='release', + help='release of project') + group.add_argument('-l', '--language', metavar='LANGUAGE', dest='language', + help='document language') + group.add_argument('--suffix', metavar='SUFFIX', + help='source file suffix') + group.add_argument('--master', metavar='MASTER', + help='master document name') + group.add_argument('--epub', action='store_true', default=False, + help='use epub') + + group = parser.add_argument_group('Extension options') + for ext in EXTENSIONS: + group.add_argument('--ext-%s' % ext, action='append_const', + const='sphinx.ext.%s' % ext, dest='extensions', + help='enable %s extension' % ext) + group.add_argument('--extensions', metavar='EXTENSIONS', dest='extensions', + action='append', help='enable arbitrary extensions') + + group = parser.add_argument_group('Makefile and Batchfile creation') + group.add_argument('--makefile', action='store_true', dest='makefile', + help='create makefile') + group.add_argument('--no-makefile', action='store_false', dest='makefile', + help='do not create makefile') + group.add_argument('--batchfile', action='store_true', dest='batchfile', + help='create batchfile') + group.add_argument('--no-batchfile', action='store_false', + dest='batchfile', + help='do not create batchfile') + group.add_argument('-m', '--use-make-mode', action='store_true', + dest='make_mode', default=True, + help='use make-mode for Makefile/make.bat') + group.add_argument('-M', '--no-use-make-mode', action='store_false', + dest='make_mode', + help='do not use make-mode for Makefile/make.bat') + + group = parser.add_argument_group('Project templating') + group.add_argument('-t', '--templatedir', metavar='TEMPLATEDIR', + dest='templatedir', + help='template directory for template files') + group.add_argument('-d', metavar='NAME=VALUE', action='append', + dest='variables', + help='define a template variable') + + return parser def main(argv=sys.argv[1:]): @@ -575,81 +608,14 @@ def main(argv=sys.argv[1:]): if not color_terminal(): nocolor() - parser = optparse.OptionParser(USAGE, epilog=EPILOG, - version='Sphinx v%s' % __display_version__, - formatter=MyFormatter()) - parser.add_option('-q', '--quiet', action='store_true', dest='quiet', - default=False, - help='quiet mode') - - group = parser.add_option_group('Structure options') - group.add_option('--sep', action='store_true', dest='sep', - help='if specified, separate source and build dirs') - group.add_option('--dot', metavar='DOT', dest='dot', - help='replacement for dot in _templates etc.') - - group = 
parser.add_option_group('Project basic options') - group.add_option('-p', '--project', metavar='PROJECT', dest='project', - help='project name') - group.add_option('-a', '--author', metavar='AUTHOR', dest='author', - help='author names') - group.add_option('-v', metavar='VERSION', dest='version', - help='version of project') - group.add_option('-r', '--release', metavar='RELEASE', dest='release', - help='release of project') - group.add_option('-l', '--language', metavar='LANGUAGE', dest='language', - help='document language') - group.add_option('--suffix', metavar='SUFFIX', dest='suffix', - help='source file suffix') - group.add_option('--master', metavar='MASTER', dest='master', - help='master document name') - group.add_option('--epub', action='store_true', dest='epub', - default=False, - help='use epub') - - group = parser.add_option_group('Extension options') - for ext in EXTENSIONS: - group.add_option('--ext-' + ext, action='store_true', - dest='ext_' + ext, default=False, - help='enable %s extension' % ext) - group.add_option('--extensions', metavar='EXTENSIONS', dest='extensions', - action='append', help='enable extensions') - - group = parser.add_option_group('Makefile and Batchfile creation') - group.add_option('--makefile', action='store_true', dest='makefile', - default=False, - help='create makefile') - group.add_option('--no-makefile', action='store_true', dest='no_makefile', - default=False, - help='not create makefile') - group.add_option('--batchfile', action='store_true', dest='batchfile', - default=False, - help='create batchfile') - group.add_option('--no-batchfile', action='store_true', dest='no_batchfile', - default=False, - help='not create batchfile') - group.add_option('-M', '--no-use-make-mode', action='store_false', dest='make_mode', - help='not use make-mode for Makefile/make.bat') - group.add_option('-m', '--use-make-mode', action='store_true', dest='make_mode', - default=True, - help='use make-mode for Makefile/make.bat') - - group = parser.add_option_group('Project templating') - group.add_option('-t', '--templatedir', metavar='TEMPLATEDIR', dest='templatedir', - help='template directory for template files') - group.add_option('-d', metavar='NAME=VALUE', action='append', dest='variables', - help='define a template variable') - # parse options + parser = get_parser() try: - opts, args = parser.parse_args(argv) + args = parser.parse_args(argv) except SystemExit as err: return err.code - if len(args) > 0: - opts.ensure_value('path', args[0]) - - d = vars(opts) + d = vars(args) # delete None or False value d = dict((k, v) for k, v in d.items() if not (v is None or v is False)) @@ -664,14 +630,9 @@ def main(argv=sys.argv[1:]): # quiet mode with all required params satisfied, use default d.setdefault('version', '') d.setdefault('release', d['version']) - d2 = DEFAULT_VALUE.copy() - d2.update(dict(("ext_" + ext, False) for ext in EXTENSIONS)) + d2 = DEFAULTS.copy() d2.update(d) d = d2 - if 'no_makefile' in d: - d['makefile'] = False - if 'no_batchfile' in d: - d['batchfile'] = False if not valid_dir(d): print() @@ -692,13 +653,12 @@ def main(argv=sys.argv[1:]): if isinstance(value, binary_type): d[key] = term_decode(value) - # parse extensions list + # handle use of CSV-style extension values d.setdefault('extensions', []) for ext in d['extensions'][:]: if ',' in ext: d['extensions'].remove(ext) - for modname in ext.split(','): - d['extensions'].append(modname) + d['extensions'].extend(ext.split(',')) for variable in d.get('variables', []): try: @@ -707,7 +667,7 @@ 
def main(argv=sys.argv[1:]): except ValueError: print('Invalid template variable: %s' % variable) - generate(d, templatedir=opts.templatedir) + generate(d, templatedir=args.templatedir) return 0 diff --git a/sphinx/cmdline.py b/sphinx/cmdline.py index 54e4dcb78..f18bbb286 100644 --- a/sphinx/cmdline.py +++ b/sphinx/cmdline.py @@ -10,14 +10,13 @@ """ from __future__ import print_function +import argparse import sys -import optparse import traceback from os import path -from six import text_type, binary_type - from docutils.utils import SystemMessage +from six import text_type, binary_type from sphinx import __display_version__ from sphinx.errors import SphinxError @@ -33,39 +32,9 @@ if False: from typing import Any, IO, List, Union # NOQA -USAGE = """\ -Sphinx v%s -Usage: %%prog [options] sourcedir outdir [filenames...] - -Filename arguments: - without -a and without filenames, write new and changed files. - with -a, write all files. - with filenames, write these. -""" % __display_version__ - -EPILOG = """\ -For more information, visit . -""" - - -class MyFormatter(optparse.IndentedHelpFormatter): - def format_usage(self, usage): - # type: (Any) -> Any - return usage - - def format_help(self, formatter): - # type: (Any) -> unicode - result = [] # type: List[unicode] - if self.description: # type: ignore - result.append(self.format_description(formatter)) - if self.option_list: # type: ignore - result.append(self.format_option_help(formatter)) # type: ignore - return "\n".join(result) - - -def handle_exception(app, opts, exception, stderr=sys.stderr): +def handle_exception(app, args, exception, stderr=sys.stderr): # type: (Sphinx, Any, Union[Exception, KeyboardInterrupt], IO) -> None - if opts.pdb: + if args.pdb: import pdb print(red('Exception occurred while building, starting debugger:'), file=stderr) @@ -73,7 +42,7 @@ def handle_exception(app, opts, exception, stderr=sys.stderr): pdb.post_mortem(sys.exc_info()[2]) else: print(file=stderr) - if opts.verbosity or opts.traceback: + if args.verbosity or args.traceback: traceback.print_exc(None, stderr) print(file=stderr) if isinstance(exception, KeyboardInterrupt): @@ -114,119 +83,135 @@ def handle_exception(app, opts, exception, stderr=sys.stderr): file=stderr) +def get_parser(): + # type: () -> argparse.ArgumentParser + parser = argparse.ArgumentParser( + usage='usage: %(prog)s [OPTIONS] SOURCEDIR OUTPUTDIR [FILENAMES...]', + epilog='For more information, visit .', + description=""" +Generate documentation from source files. + +sphinx-build generates documentation from the files in SOURCEDIR and places it +in OUTPUTDIR. It looks for 'conf.py' in SOURCEDIR for the configuration +settings. The 'sphinx-quickstart' tool may be used to generate template files, +including 'conf.py' + +sphinx-build can create documentation in different formats. A format is +selected by specifying the builder name on the command line; it defaults to +HTML. Builders can also perform other tasks related to documentation +processing. + +By default, everything that is outdated is built. Output only for selected +files can be built by specifying individual filenames. +""") + + parser.add_argument('--version', action='version', dest='show_version', + version='%%(prog)s %s' % __display_version__) + + parser.add_argument('sourcedir', + help='path to documentation source files') + parser.add_argument('outputdir', + help='path to output directory') + parser.add_argument('filenames', nargs='*', + help='a list of specific files to rebuild. 
Ignored ' + 'if -a is specified') + + group = parser.add_argument_group('general options') + group.add_argument('-b', metavar='BUILDER', dest='builder', + default='html', + help='builder to use (default: html)') + group.add_argument('-a', action='store_true', dest='force_all', + help='write all files (default: only write new and ' + 'changed files)') + group.add_argument('-E', action='store_true', dest='freshenv', + help='don\'t use a saved environment, always read ' + 'all files') + group.add_argument('-d', metavar='PATH', dest='doctreedir', + help='path for the cached environment and doctree ' + 'files (default: OUTPUTDIR/.doctrees)') + group.add_argument('-j', metavar='N', default=1, type=int, dest='jobs', + help='build in parallel with N processes where ' + 'possible') + + group = parser.add_argument_group('build configuration options') + group.add_argument('-c', metavar='PATH', dest='confdir', + help='path where configuration file (conf.py) is ' + 'located (default: same as SOURCEDIR)') + group.add_argument('-C', action='store_true', dest='noconfig', + help='use no config file at all, only -D options') + group.add_argument('-D', metavar='setting=value', action='append', + dest='define', default=[], + help='override a setting in configuration file') + group.add_argument('-A', metavar='name=value', action='append', + dest='htmldefine', default=[], + help='pass a value into HTML templates') + group.add_argument('-t', metavar='TAG', action='append', + dest='tags', default=[], + help='define tag: include "only" blocks with TAG') + group.add_argument('-n', action='store_true', dest='nitpicky', + help='nit-picky mode, warn about all missing ' + 'references') + + group = parser.add_argument_group('console output options') + group.add_argument('-v', action='count', dest='verbosity', default=0, + help='increase verbosity (can be repeated)') + group.add_argument('-q', action='store_true', dest='quiet', + help='no output on stdout, just warnings on stderr') + group.add_argument('-Q', action='store_true', dest='really_quiet', + help='no output at all, not even warnings') + group.add_argument('--color', action='store_const', const='yes', + default='auto', + help='do emit colored output (default: auto-detect)') + group.add_argument('-N', '--no-color', dest='color', action='store_const', + const='no', + help='do not emit colored output (default: ' + 'auto-detect)') + group.add_argument('-w', metavar='FILE', dest='warnfile', + help='write warnings (and errors) to given file') + group.add_argument('-W', action='store_true', dest='warningiserror', + help='turn warnings into errors') + group.add_argument('-T', action='store_true', dest='traceback', + help='show full traceback on exception') + group.add_argument('-P', action='store_true', dest='pdb', + help='run Pdb on exception') + + return parser + + def main(argv=sys.argv[1:]): # type: ignore # type: (List[unicode]) -> int - parser = optparse.OptionParser(USAGE, epilog=EPILOG, formatter=MyFormatter()) - parser.add_option('--version', action='store_true', dest='version', - help='show version information and exit') - group = parser.add_option_group('General options') - group.add_option('-b', metavar='BUILDER', dest='builder', default='html', - help='builder to use; default is html') - group.add_option('-a', action='store_true', dest='force_all', - help='write all files; default is to only write new and ' - 'changed files') - group.add_option('-E', action='store_true', dest='freshenv', - help='don\'t use a saved environment, always read ' - 'all files') - 
group.add_option('-d', metavar='PATH', default=None, dest='doctreedir', - help='path for the cached environment and doctree files ' - '(default: outdir/.doctrees)') - group.add_option('-j', metavar='N', default=1, type='int', dest='jobs', - help='build in parallel with N processes where possible') - # this option never gets through to this point (it is intercepted earlier) - # group.add_option('-M', metavar='BUILDER', dest='make_mode', - # help='"make" mode -- as used by Makefile, like ' - # '"sphinx-build -M html"') - - group = parser.add_option_group('Build configuration options') - group.add_option('-c', metavar='PATH', dest='confdir', - help='path where configuration file (conf.py) is located ' - '(default: same as sourcedir)') - group.add_option('-C', action='store_true', dest='noconfig', - help='use no config file at all, only -D options') - group.add_option('-D', metavar='setting=value', action='append', - dest='define', default=[], - help='override a setting in configuration file') - group.add_option('-A', metavar='name=value', action='append', - dest='htmldefine', default=[], - help='pass a value into HTML templates') - group.add_option('-t', metavar='TAG', action='append', - dest='tags', default=[], - help='define tag: include "only" blocks with TAG') - group.add_option('-n', action='store_true', dest='nitpicky', - help='nit-picky mode, warn about all missing references') - - group = parser.add_option_group('Console output options') - group.add_option('-v', action='count', dest='verbosity', default=0, - help='increase verbosity (can be repeated)') - group.add_option('-q', action='store_true', dest='quiet', - help='no output on stdout, just warnings on stderr') - group.add_option('-Q', action='store_true', dest='really_quiet', - help='no output at all, not even warnings') - group.add_option('--color', dest='color', - action='store_const', const='yes', default='auto', - help='Do emit colored output (default: auto-detect)') - group.add_option('-N', '--no-color', dest='color', - action='store_const', const='no', - help='Do not emit colored output (default: auto-detect)') - group.add_option('-w', metavar='FILE', dest='warnfile', - help='write warnings (and errors) to given file') - group.add_option('-W', action='store_true', dest='warningiserror', - help='turn warnings into errors') - group.add_option('-T', action='store_true', dest='traceback', - help='show full traceback on exception') - group.add_option('-P', action='store_true', dest='pdb', - help='run Pdb on exception') - - # parse options - try: - opts, args = parser.parse_args(argv) - except SystemExit as err: - return err.code - - # handle basic options - if opts.version: - print('Sphinx (sphinx-build) %s' % __display_version__) - return 0 + parser = get_parser() + args = parser.parse_args(argv) # get paths (first and second positional argument) try: - srcdir = abspath(args[0]) - confdir = abspath(opts.confdir or srcdir) - if opts.noconfig: + srcdir = abspath(args.sourcedir) + confdir = abspath(args.confdir or srcdir) + if args.noconfig: confdir = None + if not path.isdir(srcdir): - print('Error: Cannot find source directory `%s\'.' 
% srcdir, - file=sys.stderr) - return 1 - if not opts.noconfig and not path.isfile(path.join(confdir, 'conf.py')): - print('Error: Config directory doesn\'t contain a conf.py file.', - file=sys.stderr) - return 1 - outdir = abspath(args[1]) + parser.error('cannot find source directory (%s)' % srcdir) + if not args.noconfig and not path.isfile(path.join(confdir, 'conf.py')): + parser.error("config directory doesn't contain a conf.py file " + "(%s)" % confdir) + + outdir = abspath(args.outputdir) if srcdir == outdir: - print('Error: source directory and destination directory are same.', - file=sys.stderr) - return 1 - except IndexError: - parser.print_help() - return 1 + parser.error('source directory and destination directory are same') except UnicodeError: - print( - 'Error: Multibyte filename not supported on this filesystem ' - 'encoding (%r).' % fs_encoding, file=sys.stderr) - return 1 + parser.error('multibyte filename not supported on this filesystem ' + 'encoding (%r)' % fs_encoding) # handle remaining filename arguments - filenames = args[2:] - errored = False + filenames = args.filenames + missing_files = [] for filename in filenames: if not path.isfile(filename): - print('Error: Cannot find file %r.' % filename, file=sys.stderr) - errored = True - if errored: - return 1 + missing_files.append(filename) + if missing_files: + parser.error('cannot find files %r' % missing_files) # likely encoding used for command-line arguments try: @@ -235,41 +220,39 @@ def main(argv=sys.argv[1:]): # type: ignore except Exception: likely_encoding = None - if opts.force_all and filenames: - print('Error: Cannot combine -a option and filenames.', file=sys.stderr) - return 1 + if args.force_all and filenames: + parser.error('cannot combine -a option and filenames') - if opts.color == 'no' or (opts.color == 'auto' and not color_terminal()): + if args.color == 'no' or (args.color == 'auto' and not color_terminal()): nocolor() - doctreedir = abspath(opts.doctreedir or path.join(outdir, '.doctrees')) + doctreedir = abspath(args.doctreedir or path.join(outdir, '.doctrees')) status = sys.stdout warning = sys.stderr error = sys.stderr - if opts.quiet: + if args.quiet: status = None - if opts.really_quiet: + + if args.really_quiet: status = warning = None - if warning and opts.warnfile: + + if warning and args.warnfile: try: - warnfp = open(opts.warnfile, 'w') + warnfp = open(args.warnfile, 'w') except Exception as exc: - print('Error: Cannot open warning file %r: %s' % - (opts.warnfile, exc), file=sys.stderr) - sys.exit(1) + parser.error('cannot open warning file %r: %s' % ( + args.warnfile, exc)) warning = Tee(warning, warnfp) # type: ignore error = warning confoverrides = {} - for val in opts.define: + for val in args.define: try: key, val = val.split('=', 1) except ValueError: - print('Error: -D option argument must be in the form name=value.', - file=sys.stderr) - return 1 + parser.error('-D option argument must be in the form name=value') if likely_encoding and isinstance(val, binary_type): try: val = val.decode(likely_encoding) @@ -277,13 +260,11 @@ def main(argv=sys.argv[1:]): # type: ignore pass confoverrides[key] = val - for val in opts.htmldefine: + for val in args.htmldefine: try: key, val = val.split('=') except ValueError: - print('Error: -A option argument must be in the form name=value.', - file=sys.stderr) - return 1 + parser.error('-A option argument must be in the form name=value') try: val = int(val) except ValueError: @@ -294,17 +275,17 @@ def main(argv=sys.argv[1:]): # type: ignore pass 
confoverrides['html_context.%s' % key] = val - if opts.nitpicky: + if args.nitpicky: confoverrides['nitpicky'] = True app = None try: with patch_docutils(), docutils_namespace(): - app = Sphinx(srcdir, confdir, outdir, doctreedir, opts.builder, - confoverrides, status, warning, opts.freshenv, - opts.warningiserror, opts.tags, opts.verbosity, opts.jobs) - app.build(opts.force_all, filenames) + app = Sphinx(srcdir, confdir, outdir, doctreedir, args.builder, + confoverrides, status, warning, args.freshenv, + args.warningiserror, args.tags, args.verbosity, args.jobs) + app.build(args.force_all, filenames) return app.statuscode except (Exception, KeyboardInterrupt) as exc: - handle_exception(app, opts, exc, error) - return 1 + handle_exception(app, args, exc, error) + return 2 diff --git a/sphinx/config.py b/sphinx/config.py index 02ee529a3..509af31e6 100644 --- a/sphinx/config.py +++ b/sphinx/config.py @@ -24,7 +24,7 @@ from sphinx.util.pycompat import execfile_, NoneType if False: # For type annotation - from typing import Any, Callable, Dict, Iterable, Iterator, List, Tuple # NOQA + from typing import Any, Callable, Dict, Iterable, Iterator, List, Tuple, Union # NOQA from sphinx.util.tags import Tags # NOQA logger = logging.getLogger(__name__) @@ -63,8 +63,11 @@ class ENUM(object): self.candidates = candidates def match(self, value): - # type: (unicode) -> bool - return value in self.candidates + # type: (Union[unicode,List,Tuple]) -> bool + if isinstance(value, (list, tuple)): + return all(item in self.candidates for item in value) + else: + return value in self.candidates string_classes = [text_type] # type: List @@ -288,7 +291,7 @@ class Config(object): logger.warning("%s", exc) for name in config: if name in self.values: - self.__dict__[name] = config[name] + self.__dict__[name] = config[name] # type: ignore if isinstance(self.source_suffix, string_types): # type: ignore self.source_suffix = [self.source_suffix] # type: ignore diff --git a/sphinx/domains/cpp.py b/sphinx/domains/cpp.py index 95b0451ba..0ed987384 100644 --- a/sphinx/domains/cpp.py +++ b/sphinx/domains/cpp.py @@ -224,13 +224,12 @@ logger = logging.getLogger(__name__) concept_object: goal: just a declaration of the name (for now) - either a variable concept or function concept grammar: only a single template parameter list, and the nested name may not have any template argument lists "template" "<" template-parameter-list ">" - nested-name-specifier "()"[opt] + nested-name-specifier type_object: goal: @@ -291,7 +290,11 @@ logger = logging.getLogger(__name__) """ # TODO: support hex, oct, etc. 
work -_integer_literal_re = re.compile(r'-?[1-9][0-9]*') +_integer_literal_re = re.compile(r'[1-9][0-9]*') +_octal_literal_re = re.compile(r'0[0-7]*') +_hex_literal_re = re.compile(r'0[xX][0-9a-fA-F][0-9a-fA-F]*') +_binary_literal_re = re.compile(r'0[bB][01][01]*') +_integer_suffix_re = re.compile(r'') _float_literal_re = re.compile(r'[+-]?[0-9]*\.[0-9]+') _identifier_re = re.compile(r'(~?\b[a-zA-Z_][a-zA-Z0-9_]*)\b') _whitespace_re = re.compile(r'(?u)\s+') @@ -552,12 +555,12 @@ class DefinitionError(UnicodeMixin, Exception): class _DuplicateSymbolError(UnicodeMixin, Exception): - def __init__(self, symbol, candSymbol): - # type: (Symbol, Symbol) -> None + def __init__(self, symbol, declaration): + # type: (Symbol, Any) -> None assert symbol - assert candSymbol + assert declaration self.symbol = symbol - self.candSymbol = candSymbol + self.declaration = declaration def __unicode__(self): # type: () -> unicode @@ -570,7 +573,7 @@ class ASTBase(UnicodeMixin): if type(self) is not type(other): return False try: - for key, value in iteritems(self.__dict__): # type: ignore + for key, value in iteritems(self.__dict__): if value != getattr(other, key): return False except AttributeError: @@ -945,6 +948,86 @@ class ASTUnaryOpExpr(ASTBase): self.expr.describe_signature(signode, mode, env, symbol) +class ASTSizeofParamPack(ASTBase): + def __init__(self, identifier): + self.identifier = identifier + + def __unicode__(self): + return "sizeof...(" + text_type(self.identifier) + ")" + + def get_id(self, version): + return 'sZ' + self.identifier.get_id(version) + + def describe_signature(self, signode, mode, env, symbol): + signode.append(nodes.Text('sizeof...(')) + self.identifier.describe_signature(signode, mode, env, + symbol=symbol, prefix="", templateArgs="") + signode.append(nodes.Text(')')) + + +class ASTSizeofType(ASTBase): + def __init__(self, typ): + self.typ = typ + + def __unicode__(self): + return "sizeof(" + text_type(self.typ) + ")" + + def get_id(self, version): + return 'st' + self.typ.get_id(version) + + def describe_signature(self, signode, mode, env, symbol): + signode.append(nodes.Text('sizeof(')) + self.typ.describe_signature(signode, mode, env, symbol) + signode.append(nodes.Text(')')) + + +class ASTSizeofExpr(ASTBase): + def __init__(self, expr): + self.expr = expr + + def __unicode__(self): + return "sizeof " + text_type(self.expr) + + def get_id(self, version): + return 'sz' + self.expr.get_id(version) + + def describe_signature(self, signode, mode, env, symbol): + signode.append(nodes.Text('sizeof ')) + self.expr.describe_signature(signode, mode, env, symbol) + + +class ASTAlignofExpr(ASTBase): + def __init__(self, typ): + self.typ = typ + + def __unicode__(self): + return "alignof(" + text_type(self.typ) + ")" + + def get_id(self, version): + return 'at' + self.typ.get_id(version) + + def describe_signature(self, signode, mode, env, symbol): + signode.append(nodes.Text('alignof(')) + self.typ.describe_signature(signode, mode, env, symbol) + signode.append(nodes.Text(')')) + + +class ASTNoexceptExpr(ASTBase): + def __init__(self, expr): + self.expr = expr + + def __unicode__(self): + return "noexcept(" + text_type(self.expr) + ")" + + def get_id(self, version): + return 'nx' + self.expr.get_id(version) + + def describe_signature(self, signode, mode, env, symbol): + signode.append(nodes.Text('noexcept(')) + self.expr.describe_signature(signode, mode, env, symbol) + signode.append(nodes.Text(')')) + + class ASTPostfixCallExpr(ASTBase): def __init__(self, exprs): self.exprs =
exprs @@ -1070,6 +1153,20 @@ class ASTPostfixExpr(ASTBase): p.describe_signature(signode, mode, env, symbol) +class ASTFallbackExpr(ASTBase): + def __init__(self, expr): + self.expr = expr + + def __unicode__(self): + return self.expr + + def get_id(self, version): + return text_type(self.expr) + + def describe_signature(self, signode, mode, env, symbol): + signode += nodes.Text(self.expr) + + ################################################################################ # The Rest ################################################################################ @@ -1099,11 +1196,11 @@ class ASTIdentifier(ASTBase): # type: () -> unicode return self.identifier - def describe_signature(self, signode, mode, env, prefix, symbol): - # type: (addnodes.desc_signature, unicode, BuildEnvironment, unicode, Symbol) -> None + def describe_signature(self, signode, mode, env, prefix, templateArgs, symbol): + # type: (Any, unicode, BuildEnvironment, unicode, unicode, Symbol) -> None _verify_description_mode(mode) if mode == 'markType': - targetText = prefix + self.identifier + targetText = prefix + self.identifier + templateArgs pnode = addnodes.pending_xref('', refdomain='cpp', reftype='identifier', reftarget=targetText, modname=None, @@ -1172,7 +1269,7 @@ class ASTTemplateKeyParamPackIdDefault(ASTBase): if self.identifier: if not self.parameterPack: signode += nodes.Text(' ') - self.identifier.describe_signature(signode, mode, env, '', symbol) + self.identifier.describe_signature(signode, mode, env, '', '', symbol) if self.default: signode += nodes.Text(' = ') self.default.describe_signature(signode, 'markType', env, symbol) @@ -1190,6 +1287,10 @@ class ASTTemplateParamType(ASTBase): id = self.get_identifier() return ASTNestedName([ASTNestedNameElement(id, None)], rooted=False) + @property + def isPack(self): + return self.data.parameterPack + def get_identifier(self): # type: () -> unicode return self.data.get_identifier() @@ -1351,6 +1452,16 @@ class ASTTemplateIntroductionParameter(ASTBase): self.identifier = identifier self.parameterPack = parameterPack + @property + def name(self): + # type: () -> ASTNestedName + id = self.get_identifier() + return ASTNestedName([ASTNestedNameElement(id, None)], rooted=False) + + @property + def isPack(self): + return self.parameterPack + def get_identifier(self): # type: () -> unicode return self.identifier @@ -1390,7 +1501,7 @@ class ASTTemplateIntroductionParameter(ASTBase): # type: (addnodes.desc_signature, unicode, BuildEnvironment, Symbol) -> None if self.parameterPack: signode += nodes.Text('...') - self.identifier.describe_signature(signode, mode, env, '', symbol) + self.identifier.describe_signature(signode, mode, env, '', '', symbol) class ASTTemplateIntroduction(ASTBase): @@ -1448,8 +1559,7 @@ class ASTTemplateIntroduction(ASTBase): class ASTTemplateDeclarationPrefix(ASTBase): def __init__(self, templates): # type: (List[Any]) -> None - assert templates is not None - assert len(templates) > 0 + # template is None means it's an explicit instantiation of a variable self.templates = templates def get_id(self, version): @@ -1665,8 +1775,9 @@ class ASTNestedNameElement(ASTBase): def describe_signature(self, signode, mode, env, prefix, symbol): # type: (addnodes.desc_signature, unicode, BuildEnvironment, unicode, Symbol) -> None - self.identifier.describe_signature(signode, mode, env, prefix, symbol) - if self.templateArgs: + tArgs = text_type(self.templateArgs) if self.templateArgs is not None else '' + self.identifier.describe_signature(signode, mode, env, 
prefix, tArgs, symbol) + if self.templateArgs is not None: self.templateArgs.describe_signature(signode, mode, env, symbol) @@ -1723,36 +1834,43 @@ class ASTNestedName(ASTBase): # type: (addnodes.desc_signature, unicode, BuildEnvironment, Symbol) -> None _verify_description_mode(mode) # just print the name part, with template args, not template params - if mode == 'lastIsName': - addname = [] # type: List[unicode] - if self.rooted: - addname.append('') - for n in self.names[:-1]: - addname.append(text_type(n)) - addname = '::'.join(addname) # type: ignore - if len(self.names) > 1: - addname += '::' - signode += addnodes.desc_addname(addname, addname) - self.names[-1].describe_signature(signode, mode, env, '', symbol) - elif mode == 'noneIsName': + if mode == 'noneIsName': signode += nodes.Text(text_type(self)) elif mode == 'param': name = text_type(self) signode += nodes.emphasis(name, name) - elif mode == 'markType': - # each element should be a pending xref targeting the complete + elif mode == 'markType' or mode == 'lastIsName': + # Each element should be a pending xref targeting the complete # prefix. however, only the identifier part should be a link, such # that template args can be a link as well. + # For 'lastIsName' we should also prepend template parameter lists. + templateParams = [] # type: List[Any] + if mode == 'lastIsName': + assert symbol is not None + if symbol.declaration.templatePrefix is not None: + templateParams = symbol.declaration.templatePrefix.templates + iTemplateParams = 0 + templateParamsPrefix = u'' prefix = '' # type: unicode first = True - for name in self.names: + names = self.names[:-1] if mode == 'lastIsName' else self.names + for name in names: if not first: signode += nodes.Text('::') prefix += '::' first = False if name != '': - name.describe_signature(signode, mode, env, prefix, symbol) # type: ignore + if (name.templateArgs and # type: ignore + iTemplateParams < len(templateParams)): + templateParamsPrefix += text_type(templateParams[iTemplateParams]) + iTemplateParams += 1 + name.describe_signature(signode, 'markType', # type: ignore + env, templateParamsPrefix + prefix, symbol) prefix += text_type(name) + if mode == 'lastIsName': + if len(self.names) > 1: + signode += addnodes.desc_addname('::', '::') + self.names[-1].describe_signature(signode, mode, env, '', symbol) else: raise Exception('Unknown description mode: %s' % mode) @@ -1835,14 +1953,37 @@ class ASTTrailingTypeSpecDecltypeAuto(ASTBase): signode.append(nodes.Text(text_type(self))) +class ASTTrailingTypeSpecDecltype(ASTBase): + def __init__(self, expr): + self.expr = expr + + def __unicode__(self): + return u'decltype(' + text_type(self.expr) + ')' + + def get_id(self, version): + if version == 1: + raise NoOldIdError() + return 'DT' + self.expr.get_id(version) + "E" + + def describe_signature(self, signode, mode, env, symbol): + signode.append(nodes.Text('decltype(')) + self.expr.describe_signature(signode, mode, env, symbol) + signode.append(nodes.Text(')')) + + class ASTFunctionParameter(ASTBase): def __init__(self, arg, ellipsis=False): # type: (Any, bool) -> None self.arg = arg self.ellipsis = ellipsis - def get_id(self, version): - # type: (int) -> unicode + def get_id(self, version, objectType=None, symbol=None): + # type: (int, unicode, Symbol) -> unicode + # this is not part of the normal name mangling in C++ + if symbol: + # the anchor will be our parent + return symbol.parent.declaration.get_id(version, prefixed=None) + # else, do the usual if self.ellipsis: return 'z' else: @@ 
-1877,6 +2018,11 @@ class ASTParametersQualifiers(ASTBase): self.final = final self.initializer = initializer + @property + def function_params(self): + # type: () -> Any + return self.args + def get_modifiers_id(self, version): # type: (int) -> unicode res = [] @@ -2180,6 +2326,11 @@ class ASTDeclaratorPtr(ASTBase): # type: () -> unicode return self.next.name + @property + def function_params(self): + # type: () -> Any + return self.next.function_params + def require_space_after_declSpecs(self): # type: () -> bool # TODO: if has paramPack, then False ? @@ -2272,6 +2423,11 @@ class ASTDeclaratorRef(ASTBase): # type: () -> unicode return self.next.name + @property + def function_params(self): + # type: () -> Any + return self.next.function_params + def require_space_after_declSpecs(self): # type: () -> bool return self.next.require_space_after_declSpecs() @@ -2323,6 +2479,11 @@ class ASTDeclaratorParamPack(ASTBase): # type: () -> unicode return self.next.name + @property + def function_params(self): + # type: () -> Any + return self.next.function_params + def require_space_after_declSpecs(self): # type: () -> bool return False @@ -2383,6 +2544,11 @@ class ASTDeclaratorMemPtr(ASTBase): # type: () -> unicode return self.next.name + @property + def function_params(self): + # type: () -> Any + return self.next.function_params + def require_space_after_declSpecs(self): # type: () -> bool return True @@ -2475,6 +2641,11 @@ class ASTDeclaratorParen(ASTBase): # type: () -> unicode return self.inner.name + @property + def function_params(self): + # type: () -> Any + return self.inner.function_params + def require_space_after_declSpecs(self): # type: () -> bool return True @@ -2537,6 +2708,11 @@ class ASTDeclaratorNameParamQual(ASTBase): # type: () -> unicode return self.declId + @property + def function_params(self): + # type: () -> Any + return self.paramQual.function_params + def get_modifiers_id(self, version): # only the modifiers for a function, e.g., # type: (int) -> unicode # cv-qualifiers @@ -2633,6 +2809,11 @@ class ASTType(ASTBase): name = self.decl.name return name + @property + def function_params(self): + # type: () -> Any + return self.decl.function_params + def get_id(self, version, objectType=None, symbol=None): # type: (int, unicode, Symbol) -> unicode if version == 1: @@ -2780,10 +2961,9 @@ class ASTTypeUsing(ASTBase): class ASTConcept(ASTBase): - def __init__(self, nestedName, isFunction, initializer): - # type: (Any, bool, Any) -> None + def __init__(self, nestedName, initializer): + # type: (Any, Any) -> None self.nestedName = nestedName - self.isFunction = isFunction # otherwise it's a variable concept self.initializer = initializer @property @@ -2800,18 +2980,13 @@ class ASTConcept(ASTBase): def __unicode__(self): # type: () -> unicode res = text_type(self.nestedName) - if self.isFunction: - res += "()" if self.initializer: res += text_type(self.initializer) return res def describe_signature(self, signode, mode, env, symbol): # type: (addnodes.desc_signature, unicode, BuildEnvironment, Symbol) -> None - signode += nodes.Text(text_type("bool ")) self.nestedName.describe_signature(signode, mode, env, symbol) - if self.isFunction: - signode += nodes.Text("()") if self.initializer: self.initializer.describe_signature(signode, mode, env, symbol) @@ -2985,6 +3160,13 @@ class ASTDeclaration(ASTBase): # type: () -> unicode return self.declaration.name + @property + def function_params(self): + # type: () -> Any + if self.objectType != 'function': + return None + return 
self.declaration.function_params + def get_id(self, version, prefixed=True): # type: (int, bool) -> unicode if version == 1: @@ -3023,15 +3205,15 @@ class ASTDeclaration(ASTBase): def describe_signature(self, signode, mode, env, options): # type: (addnodes.desc_signature, unicode, BuildEnvironment, Dict) -> None _verify_description_mode(mode) + assert self.symbol # The caller of the domain added a desc_signature node. # Always enable multiline: signode['is_multiline'] = True # Put each line in a desc_signature_line node. mainDeclNode = addnodes.desc_signature_line() mainDeclNode.sphinx_cpp_tagname = 'declarator' - mainDeclNode['add_permalink'] = True + mainDeclNode['add_permalink'] = not self.symbol.isRedeclaration - assert self.symbol if self.templatePrefix: self.templatePrefix.describe_signature(signode, mode, env, symbol=self.symbol, @@ -3098,6 +3280,7 @@ class Symbol(object): self.templateArgs = templateArgs # identifier self.declaration = declaration self.docname = docname + self.isRedeclaration = False self._assert_invariants() self.children = [] # type: List[Any] @@ -3111,7 +3294,7 @@ class Symbol(object): for p in self.templateParams.params: if not p.get_identifier(): continue - # only add a declaration if we our selfs from a declaration + # only add a declaration if we our selfs are from a declaration if declaration: decl = ASTDeclaration('templateParam', None, None, p) else: @@ -3119,6 +3302,22 @@ class Symbol(object): nne = ASTNestedNameElement(p.get_identifier(), None) nn = ASTNestedName([nne], rooted=False) self._add_symbols(nn, [], decl, docname) + # add symbols for function parameters, if any + if declaration is not None and declaration.function_params is not None: + for p in declaration.function_params: + if p.arg is None: + continue + nn = p.arg.name + if nn is None: + continue + # (comparing to the template params: we have checked that we are a declaration) + decl = ASTDeclaration('functionParam', None, None, p) + assert not nn.rooted + assert len(nn.names) == 1 + identifier = nn.names[0].identifier + Symbol(parent=self, identifier=identifier, + templateParams=None, templateArgs=None, + declaration=decl, docname=docname) def _fill_empty(self, declaration, docname): # type: (Any, unicode) -> None @@ -3186,6 +3385,31 @@ class Symbol(object): # type: (Any, Any, Any, Any, Any, bool) -> Symbol assert (identifier is None) != (operator is None) + def isSpecialization(): + # the names of the template parameters must be given exactly as args + # and params that are packs must in the args be the name expanded + if len(templateParams.params) != len(templateArgs.args): + return True + for i in range(len(templateParams.params)): + param = templateParams.params[i] + arg = templateArgs.args[i] + # TODO: doing this by string manipulation is probably not the most efficient + paramName = text_type(param.name) + argTxt = text_type(arg) + isArgPackExpansion = argTxt.endswith('...') + if param.isPack != isArgPackExpansion: + return True + argName = argTxt[:-3] if isArgPackExpansion else argTxt + if paramName != argName: + return True + return False + if templateParams is not None and templateArgs is not None: + # If both are given, but it's not a specialization, then do lookup as if + # there is no argument list. + # For example: template int A::var; + if not isSpecialization(): + templateArgs = None + def matches(s): if s.identifier != identifier: return False @@ -3297,23 +3521,27 @@ class Symbol(object): # .. 
class:: Test symbol._fill_empty(declaration, docname) return symbol - # It may simply be a functin overload, so let's compare ids. + # It may simply be a function overload, so let's compare ids. + isRedeclaration = True candSymbol = Symbol(parent=parentSymbol, identifier=identifier, templateParams=templateParams, templateArgs=templateArgs, declaration=declaration, docname=docname) - newId = declaration.get_newest_id() - oldId = symbol.declaration.get_newest_id() - if newId != oldId: - # we already inserted the symbol, so return the new one - symbol = candSymbol - else: + if declaration.objectType == "function": + newId = declaration.get_newest_id() + oldId = symbol.declaration.get_newest_id() + if newId != oldId: + # we already inserted the symbol, so return the new one + symbol = candSymbol + isRedeclaration = False + if isRedeclaration: # Redeclaration of the same symbol. # Let the new one be there, but raise an error to the client # so it can use the real symbol as subscope. # This will probably result in a duplicate id warning. - raise _DuplicateSymbolError(symbol, candSymbol) + candSymbol.isRedeclaration = True + raise _DuplicateSymbolError(symbol, declaration) else: symbol = Symbol(parent=parentSymbol, identifier=identifier, templateParams=templateParams, @@ -3415,8 +3643,8 @@ class Symbol(object): return None return s - def find_name(self, nestedName, templateDecls, templateShorthand, matchSelf): - # type: (Any, Any, Any, bool) -> Symbol + def find_name(self, nestedName, templateDecls, typ, templateShorthand, matchSelf): + # type: (Any, Any, Any, Any, bool) -> Symbol # templateShorthand: missing template parameter lists for templates is ok # TODO: unify this with the _add_symbols @@ -3434,9 +3662,15 @@ class Symbol(object): while parentSymbol.parent: if parentSymbol.find_identifier(firstName.identifier, matchSelf=matchSelf): - break + # if we are in the scope of a constructor but wants to reference the class + # we need to walk one extra up + if (len(names) == 1 and typ == 'class' and matchSelf and + parentSymbol.parent and parentSymbol.parent.identifier and + parentSymbol.parent.identifier == firstName.identifier): + pass + else: + break parentSymbol = parentSymbol.parent - iTemplateDecl = 0 for iName in range(len(names)): name = names[iName] @@ -3456,15 +3690,19 @@ class Symbol(object): assert iTemplateDecl == len(templateDecls) templateParams = None symbol = parentSymbol._find_named_symbol(identifier, - templateParams, - templateArgs, + templateParams, templateArgs, operator, templateShorthand=templateShorthand, matchSelf=matchSelf) - if symbol: + if symbol is not None: return symbol - else: - return None + # try without template params and args + symbol = parentSymbol._find_named_symbol(identifier, + None, None, + operator, + templateShorthand=templateShorthand, + matchSelf=matchSelf) + return symbol else: # there shouldn't be anything inside an operator assert not name.is_operator() @@ -3788,10 +4026,14 @@ class DefinitionParser(object): return ASTBooleanLiteral(True) if self.skip_word('false'): return ASTBooleanLiteral(False) - if self.match(_float_literal_re): - return ASTNumberLiteral(self.matched_text) - if self.match(_integer_literal_re): - return ASTNumberLiteral(self.matched_text) + for regex in [_float_literal_re, _binary_literal_re, _hex_literal_re, + _integer_literal_re, _octal_literal_re]: + pos = self.pos + if self.match(regex): + while self.current_char in 'uUlLfF': + self.pos += 1 + return ASTNumberLiteral(self.definition[pos:self.pos]) + string = 
self._parse_string() if string is not None: return ASTStringLiteral(string) @@ -3947,6 +4189,7 @@ class DefinitionParser(object): # | "++" cast # | "--" cast # | unary-operator cast -> (* | & | + | - | ! | ~) cast + # The rest: # | "sizeof" unary # | "sizeof" "(" type-id ")" # | "sizeof" "..." "(" identifier ")" @@ -3960,6 +4203,41 @@ class DefinitionParser(object): if self.skip_string(op): expr = self._parse_cast_expression() return ASTUnaryOpExpr(op, expr) + if self.skip_word_and_ws('sizeof'): + if self.skip_string_and_ws('...'): + if not self.skip_string_and_ws('('): + self.fail("Expecting '(' after 'sizeof...'.") + if not self.match(_identifier_re): + self.fail("Expecting identifier for 'sizeof...'.") + ident = ASTIdentifier(self.matched_text) + self.skip_ws() + if not self.skip_string(")"): + self.fail("Expecting ')' to end 'sizeof...'.") + return ASTSizeofParamPack(ident) + if self.skip_string_and_ws('('): + typ = self._parse_type(named=False) + self.skip_ws() + if not self.skip_string(')'): + self.fail("Expecting ')' to end 'sizeof'.") + return ASTSizeofType(typ) + expr = self._parse_unary_expression() + return ASTSizeofExpr(expr) + if self.skip_word_and_ws('alignof'): + if not self.skip_string_and_ws('('): + self.fail("Expecting '(' after 'alignof'.") + typ = self._parse_type(named=False) + self.skip_ws() + if not self.skip_string(')'): + self.fail("Expecting ')' to end 'alignof'.") + return ASTAlignofExpr(typ) + if self.skip_word_and_ws('noexcept'): + if not self.skip_string_and_ws('('): + self.fail("Expecting '(' after 'noexcept'.") + expr = self._parse_expression(inTemplate=False) + self.skip_ws() + if not self.skip_string(')'): + self.fail("Expecting ')' to end 'noexcept'.") + return ASTNoexceptExpr(expr) # TODO: the rest return self._parse_postfix_expression() @@ -4083,6 +4361,42 @@ class DefinitionParser(object): # TODO: actually parse the second production return self._parse_assignment_expression(inTemplate=inTemplate) + def _parse_expression_fallback(self, end, parser): + # Stupidly "parse" an expression. + # 'end' should be a list of characters which ends the expression. + + # first try to use the provided parser + prevPos = self.pos + try: + return parser() + except DefinitionError as e: + self.warn("Parsing of expression failed. Using fallback parser." + " Error was:\n%s" % e.description) + self.pos = prevPos + # and then the fallback scanning + assert end is not None + self.skip_ws() + startPos = self.pos + if self.match(_string_re): + value = self.matched_text + else: + # TODO: add handling of more bracket-like things, and quote handling + brackets = {'(': ')', '[': ']', '<': '>'} # type: Dict[unicode, unicode] + symbols = [] # type: List[unicode] + while not self.eof: + if (len(symbols) == 0 and self.current_char in end): + break + if self.current_char in brackets.keys(): + symbols.append(brackets[self.current_char]) + elif len(symbols) > 0 and self.current_char == symbols[-1]: + symbols.pop() + self.pos += 1 + if len(end) > 0 and self.eof: + self.fail("Could not find end of expression starting at %d." 
+ % startPos) + value = self.definition[startPos:self.pos].strip() + return ASTFallbackExpr(value.strip()) + def _parse_operator(self): # type: () -> Any self.skip_ws() @@ -4143,7 +4457,9 @@ class DefinitionParser(object): prevErrors.append((e, "If type argument")) self.pos = pos try: - value = self._parse_constant_expression(inTemplate=True) + def parser(): + return self._parse_constant_expression(inTemplate=True) + value = self._parse_expression_fallback([',', '>'], parser) self.skip_ws() if self.skip_string('>'): parsedEnd = True @@ -4245,8 +4561,11 @@ class DefinitionParser(object): if not self.skip_string(')'): self.fail("Expected ')' after 'decltype(auto'.") return ASTTrailingTypeSpecDecltypeAuto() - self.fail('"decltype()" in trailing_type_spec not implemented') - # return ASTTrailingTypeSpecDecltype() + expr = self._parse_expression(inTemplate=False) + self.skip_ws() + if not self.skip_string(')'): + self.fail("Expected ')' after 'decltype('.") + return ASTTrailingTypeSpecDecltype(expr) # prefixed prefix = None @@ -4485,7 +4804,10 @@ class DefinitionParser(object): if self.skip_string(']'): arrayOps.append(ASTArray(None)) continue - value = self._parse_expression(inTemplate=False) + + def parser(): + return self._parse_expression(inTemplate=False) + value = self._parse_expression_fallback([']'], parser) if not self.skip_string(']'): self.fail("Expected ']' in end of array operator.") arrayOps.append(ASTArray(value)) @@ -4605,11 +4927,17 @@ class DefinitionParser(object): return None else: if outer == 'member': - value = self._parse_assignment_expression(inTemplate=False) + def parser(): + return self._parse_assignment_expression(inTemplate=False) + value = self._parse_expression_fallback([], parser) elif outer == 'templateParam': - value = self._parse_assignment_expression(inTemplate=True) + def parser(): + return self._parse_assignment_expression(inTemplate=True) + value = self._parse_expression_fallback([',', '>'], parser) elif outer is None: # function parameter - value = self._parse_assignment_expression(inTemplate=False) + def parser(): + return self._parse_assignment_expression(inTemplate=False) + value = self._parse_expression_fallback([',', ')'], parser) else: self.fail("Internal error, initializer for outer '%s' not " "implemented." 
% outer) @@ -4718,20 +5046,9 @@ class DefinitionParser(object): def _parse_concept(self): # type: () -> ASTConcept nestedName = self._parse_nested_name() - isFunction = False - self.skip_ws() - if self.skip_string('('): - isFunction = True - self.skip_ws() - if not self.skip_string(')'): - self.fail("Expected ')' in function concept declaration.") - initializer = self._parse_initializer('member') - if initializer and isFunction: - self.fail("Function concept with initializer.") - - return ASTConcept(nestedName, isFunction, initializer) + return ASTConcept(nestedName, initializer) def _parse_class(self): # type: () -> ASTClass @@ -4782,7 +5099,10 @@ class DefinitionParser(object): init = None if self.skip_string('='): self.skip_ws() - initVal = self._parse_constant_expression(inTemplate=False) + + def parser(): + return self._parse_constant_expression(inTemplate=False) + initVal = self._parse_expression_fallback([], parser) init = ASTInitializer(initVal) return ASTEnumerator(name, init) @@ -4862,7 +5182,7 @@ class DefinitionParser(object): pos = self.pos try: concept = self._parse_nested_name() - except: + except Exception: self.pos = pos return None self.skip_ws() @@ -4904,7 +5224,13 @@ class DefinitionParser(object): # the saved position is only used to provide a better error message pos = self.pos if self.skip_word("template"): - params = self._parse_template_parameter_list() # type: Any + try: + params = self._parse_template_parameter_list() # type: Any + except DefinitionError as e: + if objectType == 'member' and len(templates) == 0: + return ASTTemplateDeclarationPrefix(None) + else: + raise e else: params = self._parse_template_introduction() if not params: @@ -4921,20 +5247,25 @@ class DefinitionParser(object): return ASTTemplateDeclarationPrefix(templates) def _check_template_consistency(self, nestedName, templatePrefix, - fullSpecShorthand): - # type: (Any, Any, bool) -> ASTTemplateDeclarationPrefix + fullSpecShorthand, isMember=False): + # type: (Any, Any, Any, bool) -> ASTTemplateDeclarationPrefix numArgs = nestedName.num_templates() + isMemberInstantiation = False if not templatePrefix: numParams = 0 else: - numParams = len(templatePrefix.templates) + if isMember and templatePrefix.templates is None: + numParams = 0 + isMemberInstantiation = True + else: + numParams = len(templatePrefix.templates) if numArgs + 1 < numParams: self.fail("Too few template argument lists comapred to parameter" " lists. Argument lists: %d, Parameter lists: %d." % (numArgs, numParams)) if numArgs > numParams: numExtra = numArgs - numParams - if not fullSpecShorthand: + if not fullSpecShorthand and not isMemberInstantiation: msg = "Too many template argument lists compared to parameter" \ " lists. Argument lists: %d, Parameter lists: %d," \ " Extra empty parameters lists prepended: %d." 
\ @@ -4948,7 +5279,7 @@ class DefinitionParser(object): newTemplates = [] for i in range(numExtra): newTemplates.append(ASTTemplateParams([])) - if templatePrefix: + if templatePrefix and not isMemberInstantiation: newTemplates.extend(templatePrefix.templates) templatePrefix = ASTTemplateDeclarationPrefix(newTemplates) return templatePrefix @@ -5003,7 +5334,8 @@ class DefinitionParser(object): assert False templatePrefix = self._check_template_consistency(declaration.name, templatePrefix, - fullSpecShorthand=False) + fullSpecShorthand=False, + isMember=objectType == 'member') return ASTDeclaration(objectType, visibility, templatePrefix, declaration) @@ -5142,13 +5474,25 @@ class CPPObject(ObjectDescription): 'report as bug (id=%s).' % (text_type(ast), newestId)) name = text_type(ast.symbol.get_full_nested_name()).lstrip(':') - strippedName = name - for prefix in self.env.config.cpp_index_common_prefix: - if name.startswith(prefix): - strippedName = strippedName[len(prefix):] + # Add index entry, but not if it's a declaration inside a concept + isInConcept = False + s = ast.symbol.parent + while s is not None: + decl = s.declaration + s = s.parent + if decl is None: + continue + if decl.objectType == 'concept': + isInConcept = True break - indexText = self.get_index_text(strippedName) - self.indexnode['entries'].append(('single', indexText, newestId, '', None)) + if not isInConcept: + strippedName = name + for prefix in self.env.config.cpp_index_common_prefix: + if name.startswith(prefix): + strippedName = strippedName[len(prefix):] + break + indexText = self.get_index_text(strippedName) + self.indexnode['entries'].append(('single', indexText, newestId, '', None)) if newestId not in self.state.document.ids: # if the name is not unique, the first one will win @@ -5207,6 +5551,7 @@ class CPPObject(ObjectDescription): # Assume we are actually in the old symbol, # instead of the newly created duplicate. 
self.env.temp_data['cpp:last_symbol'] = e.symbol + self.warn("Duplicate declaration.") if ast.objectType == 'enumerator': self._add_enumerator_to_parent(ast) @@ -5548,6 +5893,7 @@ class CPPDomain(Domain): def _resolve_xref_inner(self, env, fromdocname, builder, typ, target, node, contnode, emitWarnings=True): # type: (BuildEnvironment, unicode, Builder, unicode, unicode, nodes.Node, nodes.Node, bool) -> nodes.Node # NOQA + class Warner(object): def warn(self, msg): if emitWarnings: @@ -5593,7 +5939,7 @@ class CPPDomain(Domain): templateDecls = ast.templatePrefix.templates else: templateDecls = [] - s = parentSymbol.find_name(name, templateDecls, + s = parentSymbol.find_name(name, templateDecls, typ, templateShorthand=True, matchSelf=True) if s is None or s.declaration is None: diff --git a/sphinx/domains/python.py b/sphinx/domains/python.py index a55747ab4..d64938452 100644 --- a/sphinx/domains/python.py +++ b/sphinx/domains/python.py @@ -348,6 +348,10 @@ class PyObject(ObjectDescription): if self.allow_nesting: classes = self.env.ref_context.setdefault('py:classes', []) classes.append(prefix) + if 'module' in self.options: + modules = self.env.ref_context.setdefault('py:modules', []) + modules.append(self.env.ref_context.get('py:module')) + self.env.ref_context['py:module'] = self.options['module'] def after_content(self): # type: () -> None @@ -368,6 +372,12 @@ class PyObject(ObjectDescription): pass self.env.ref_context['py:class'] = (classes[-1] if len(classes) > 0 else None) + if 'module' in self.options: + modules = self.env.ref_context.setdefault('py:modules', []) + if modules: + self.env.ref_context['py:module'] = modules.pop() + else: + self.env.ref_context.pop('py:module') class PyModulelevel(PyObject): diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py index 92f16d7cf..5ebf9d4d5 100644 --- a/sphinx/environment/__init__.py +++ b/sphinx/environment/__init__.py @@ -14,7 +14,6 @@ import os import sys import time import types -import codecs import fnmatch import warnings from os import path @@ -24,17 +23,13 @@ from collections import defaultdict from six import BytesIO, itervalues, class_types, next from six.moves import cPickle as pickle -from docutils.io import NullOutput -from docutils.core import Publisher from docutils.utils import Reporter, get_source_line, normalize_language_tag from docutils.utils.smartquotes import smartchars -from docutils.parsers.rst import roles -from docutils.parsers.rst.languages import en as english from docutils.frontend import OptionParser -from sphinx import addnodes -from sphinx.io import SphinxStandaloneReader, SphinxDummyWriter, SphinxFileInput -from sphinx.util import logging +from sphinx import addnodes, versioning +from sphinx.io import read_doc +from sphinx.util import logging, rst from sphinx.util import get_matching_docs, FilenameUniqDict, status_iterator from sphinx.util.nodes import is_translatable from sphinx.util.osutil import SEP, ensuredir @@ -47,7 +42,6 @@ from sphinx.util.websupport import is_commentable from sphinx.errors import SphinxError, ExtensionError from sphinx.locale import __ from sphinx.transforms import SphinxTransformer -from sphinx.versioning import add_uids, merge_doctrees from sphinx.deprecation import RemovedInSphinx20Warning from sphinx.environment.adapters.indexentries import IndexEntries from sphinx.environment.adapters.toctree import TocTree @@ -82,8 +76,6 @@ default_settings = { ENV_VERSION = 52 + (sys.version_info[0] - 2) -dummy_reporter = Reporter('', 4, 4) - versioning_conditions = 
{ 'none': False, 'text': is_translatable, @@ -110,7 +102,12 @@ class BuildEnvironment(object): @staticmethod def load(f, app=None): # type: (IO, Sphinx) -> BuildEnvironment - env = pickle.load(f) + try: + env = pickle.load(f) + except Exception as exc: + # This can happen for example when the pickle is from a + # different version of Sphinx. + raise IOError(exc) if env.version != ENV_VERSION: raise IOError('build environment version not current') if app: @@ -643,25 +640,9 @@ class BuildEnvironment(object): # --------- SINGLE FILE READING -------------------------------------------- - def warn_and_replace(self, error): - # type: (Any) -> Tuple - """Custom decoding error handler that warns and replaces.""" - linestart = error.object.rfind(b'\n', 0, error.start) - lineend = error.object.find(b'\n', error.start) - if lineend == -1: - lineend = len(error.object) - lineno = error.object.count(b'\n', 0, error.start) + 1 - logger.warning('undecodable source characters, replacing with "?": %r', - (error.object[linestart + 1:error.start] + b'>>>' + - error.object[error.start:error.end] + b'<<<' + - error.object[error.end:lineend]), - location=(self.docname, lineno)) - return (u'?', error.end) - - def read_doc(self, docname, app=None): - # type: (unicode, Sphinx) -> None - """Parse a file and add/update inventory entries for the doctree.""" - + def prepare_settings(self, docname): + # type: (unicode) -> None + """Prepare to set up environment for reading.""" self.temp_data['docname'] = docname # defaults to the global default, but can be re-set in a document self.temp_data['default_role'] = self.config.default_role @@ -677,11 +658,18 @@ class BuildEnvironment(object): self.settings['language_code'] = language if 'smart_quotes' not in self.settings: self.settings['smart_quotes'] = True - for tag in normalize_language_tag(language): - if tag in smartchars.quotes: - break - else: - self.settings['smart_quotes'] = False + + # confirm selected language supports smart_quotes or not + for tag in normalize_language_tag(language): + if tag in smartchars.quotes: + break + else: + self.settings['smart_quotes'] = False + + def read_doc(self, docname, app=None): + # type: (unicode, Sphinx) -> None + """Parse a file and add/update inventory entries for the doctree.""" + self.prepare_settings(docname) docutilsconf = path.join(self.srcdir, 'docutils.conf') # read docutils.conf from source dir, not from current dir @@ -689,34 +677,8 @@ class BuildEnvironment(object): if path.isfile(docutilsconf): self.note_dependency(docutilsconf) - with sphinx_domains(self): - if self.config.default_role: - role_fn, messages = roles.role(self.config.default_role, english, - 0, dummy_reporter) - if role_fn: - roles._roles[''] = role_fn - else: - logger.warning('default role %s not found', self.config.default_role, - location=docname) - - codecs.register_error('sphinx', self.warn_and_replace) # type: ignore - - # publish manually - reader = SphinxStandaloneReader(self.app, - parsers=self.app.registry.get_source_parsers()) - pub = Publisher(reader=reader, - writer=SphinxDummyWriter(), - destination_class=NullOutput) - pub.set_components(None, 'restructuredtext', None) - pub.process_programmatic_settings(None, self.settings, None) - src_path = self.doc2path(docname) - source = SphinxFileInput(app, self, source=None, source_path=src_path, - encoding=self.config.source_encoding) - pub.source = source - pub.settings._source = src_path - pub.set_destination(None, None) - pub.publish() - doctree = pub.document + with sphinx_domains(self), 
rst.default_role(docname, self.config.default_role): + doctree = read_doc(self.app, self, self.doc2path(docname)) # post-processing for domain in itervalues(self.domains): @@ -734,41 +696,14 @@ class BuildEnvironment(object): time.time(), path.getmtime(self.doc2path(docname))) if self.versioning_condition: - old_doctree = None - if self.versioning_compare: - # get old doctree - try: - with open(self.doc2path(docname, - self.doctreedir, '.doctree'), 'rb') as f: - old_doctree = pickle.load(f) - except EnvironmentError: - pass - # add uids for versioning - if not self.versioning_compare or old_doctree is None: - list(add_uids(doctree, self.versioning_condition)) - else: - list(merge_doctrees( - old_doctree, doctree, self.versioning_condition)) - - # make it picklable - doctree.reporter = None - doctree.transformer = None - doctree.settings.warning_stream = None - doctree.settings.env = None - doctree.settings.record_dependencies = None + versioning.prepare(doctree) # cleanup self.temp_data.clear() self.ref_context.clear() - roles._roles.pop('', None) # if a document has set a local default role - # save the parsed doctree - doctree_filename = self.doc2path(docname, self.doctreedir, - '.doctree') - ensuredir(path.dirname(doctree_filename)) - with open(doctree_filename, 'wb') as f: - pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL) + self.write_doctree(docname, doctree) # utilities to use while reading a document @@ -872,6 +807,21 @@ class BuildEnvironment(object): doctree.reporter = Reporter(self.doc2path(docname), 2, 5, stream=WarningStream()) return doctree + def write_doctree(self, docname, doctree): + # type: (unicode, nodes.Node) -> None + """Write the doctree to a file.""" + # make it picklable + doctree.reporter = None + doctree.transformer = None + doctree.settings.warning_stream = None + doctree.settings.env = None + doctree.settings.record_dependencies = None + + doctree_filename = self.doc2path(docname, self.doctreedir, '.doctree') + ensuredir(path.dirname(doctree_filename)) + with open(doctree_filename, 'wb') as f: + pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL) + def get_and_resolve_doctree(self, docname, builder, doctree=None, prune_toctrees=True, includehidden=False): # type: (unicode, Builder, nodes.Node, bool, bool) -> nodes.Node diff --git a/sphinx/environment/collectors/toctree.py b/sphinx/environment/collectors/toctree.py index f19fd5d26..91aa21f2e 100644 --- a/sphinx/environment/collectors/toctree.py +++ b/sphinx/environment/collectors/toctree.py @@ -262,7 +262,7 @@ class TocTreeCollector(EnvironmentCollector): continue - figtype = env.get_domain('std').get_figtype(subnode) # type: ignore + figtype = env.get_domain('std').get_figtype(subnode) if figtype and subnode['ids']: register_fignumber(docname, secnum, figtype, subnode) diff --git a/sphinx/ext/apidoc.py b/sphinx/ext/apidoc.py index 4a5c56850..0bdeb9865 100644 --- a/sphinx/ext/apidoc.py +++ b/sphinx/ext/apidoc.py @@ -17,9 +17,9 @@ from __future__ import print_function +import argparse import os import sys -import optparse from os import path from six import binary_type from fnmatch import fnmatch @@ -265,12 +265,6 @@ def recurse_tree(rootpath, excludes, opts): return toplevels -def normalize_excludes(rootpath, excludes): - # type: (unicode, List[unicode]) -> List[unicode] - """Normalize the excluded directory list.""" - return [path.abspath(exclude) for exclude in excludes] - - def is_excluded(root, excludes): # type: (unicode, List[unicode]) -> bool """Check if the directory is in the exclude list. 
@@ -284,106 +278,116 @@ def is_excluded(root, excludes): return False -def main(argv=sys.argv[1:]): - # type: (List[str]) -> int - """Parse and check the command line arguments.""" - parser = optparse.OptionParser( - usage="""\ -usage: %prog [options] -o <output_path> <module_path> [exclude_pattern, ...] +def get_parser(): + # type: () -> argparse.ArgumentParser + parser = argparse.ArgumentParser( + usage='usage: %(prog)s [OPTIONS] -o <OUTPUT_PATH> <MODULE_PATH> ' + '[EXCLUDE_PATTERN, ...]', + epilog='For more information, visit <http://sphinx-doc.org/>.', + description=""" +Look recursively in <MODULE_PATH> for Python modules and packages and create +one reST file with automodule directives per package in the <OUTPUT_PATH>. -Look recursively in <module_path> for Python modules and packages and create -one reST file with automodule directives per package in the <output_path>. - -The <exclude_pattern>s can be file and/or directory patterns that will be +The <EXCLUDE_PATTERN>s can be file and/or directory patterns that will be excluded from generation. Note: By default this script will not overwrite already created files.""") - parser.add_option('-o', '--output-dir', action='store', dest='destdir', - help='Directory to place all output', default='') - parser.add_option('-d', '--maxdepth', action='store', dest='maxdepth', - help='Maximum depth of submodules to show in the TOC ' - '(default: 4)', type='int', default=4) - parser.add_option('-f', '--force', action='store_true', dest='force', - help='Overwrite existing files') - parser.add_option('-l', '--follow-links', action='store_true', - dest='followlinks', default=False, - help='Follow symbolic links. Powerful when combined ' - 'with collective.recipe.omelette.') - parser.add_option('-n', '--dry-run', action='store_true', dest='dryrun', - help='Run the script without creating files') - parser.add_option('-e', '--separate', action='store_true', - dest='separatemodules', - help='Put documentation for each module on its own page') - parser.add_option('-P', '--private', action='store_true', - dest='includeprivate', - help='Include "_private" modules') - parser.add_option('-T', '--no-toc', action='store_true', dest='notoc', - help='Don\'t create a table of contents file') - parser.add_option('-E', '--no-headings', action='store_true', - dest='noheadings', - help='Don\'t create headings for the module/package ' - 'packages (e.g.
when the docstrings already contain ' - 'them)') - parser.add_option('-M', '--module-first', action='store_true', - dest='modulefirst', - help='Put module documentation before submodule ' - 'documentation') - parser.add_option('--implicit-namespaces', action='store_true', - dest='implicit_namespaces', - help='Interpret module paths according to PEP-0420 ' - 'implicit namespaces specification') - parser.add_option('-s', '--suffix', action='store', dest='suffix', - help='file suffix (default: rst)', default='rst') - parser.add_option('-F', '--full', action='store_true', dest='full', - help='Generate a full project with sphinx-quickstart') - parser.add_option('-a', '--append-syspath', action='store_true', - dest='append_syspath', - help='Append module_path to sys.path, used when --full is given') - parser.add_option('-H', '--doc-project', action='store', dest='header', - help='Project name (default: root module name)') - parser.add_option('-A', '--doc-author', action='store', dest='author', - type='str', - help='Project author(s), used when --full is given') - parser.add_option('-V', '--doc-version', action='store', dest='version', - help='Project version, used when --full is given') - parser.add_option('-R', '--doc-release', action='store', dest='release', - help='Project release, used when --full is given, ' - 'defaults to --doc-version') - parser.add_option('--version', action='store_true', dest='show_version', - help='Show version information and exit') - group = parser.add_option_group('Extension options') + parser.add_argument('--version', action='version', dest='show_version', + version='%%(prog)s %s' % __display_version__) + + parser.add_argument('module_path', + help='path to module to document') + parser.add_argument('exclude_pattern', nargs='*', + help='fnmatch-style file and/or directory patterns ' + 'to exclude from generation') + + parser.add_argument('-o', '--output-dir', action='store', dest='destdir', + required=True, + help='directory to place all output') + parser.add_argument('-d', '--maxdepth', action='store', dest='maxdepth', + type=int, default=4, + help='maximum depth of submodules to show in the TOC ' + '(default: 4)') + parser.add_argument('-f', '--force', action='store_true', dest='force', + help='overwrite existing files') + parser.add_argument('-l', '--follow-links', action='store_true', + dest='followlinks', default=False, + help='follow symbolic links. Powerful when combined ' + 'with collective.recipe.omelette.') + parser.add_argument('-n', '--dry-run', action='store_true', dest='dryrun', + help='run the script without creating files') + parser.add_argument('-e', '--separate', action='store_true', + dest='separatemodules', + help='put documentation for each module on its own page') + parser.add_argument('-P', '--private', action='store_true', + dest='includeprivate', + help='include "_private" modules') + parser.add_argument('-T', '--no-toc', action='store_true', dest='notoc', + help="don't create a table of contents file") + parser.add_argument('-E', '--no-headings', action='store_true', + dest='noheadings', + help="don't create headings for the module/package " + "packages (e.g. 
when the docstrings already " + "contain them)") + parser.add_argument('-M', '--module-first', action='store_true', + dest='modulefirst', + help='put module documentation before submodule ' + 'documentation') + parser.add_argument('--implicit-namespaces', action='store_true', + dest='implicit_namespaces', + help='interpret module paths according to PEP-0420 ' + 'implicit namespaces specification') + parser.add_argument('-s', '--suffix', action='store', dest='suffix', + default='rst', + help='file suffix (default: rst)') + parser.add_argument('-F', '--full', action='store_true', dest='full', + help='generate a full project with sphinx-quickstart') + parser.add_argument('-a', '--append-syspath', action='store_true', + dest='append_syspath', + help='append module_path to sys.path, used when --full is given') + parser.add_argument('-H', '--doc-project', action='store', dest='header', + help='project name (default: root module name)') + parser.add_argument('-A', '--doc-author', action='store', dest='author', + help='project author(s), used when --full is given') + parser.add_argument('-V', '--doc-version', action='store', dest='version', + help='project version, used when --full is given') + parser.add_argument('-R', '--doc-release', action='store', dest='release', + help='project release, used when --full is given, ' + 'defaults to --doc-version') + + group = parser.add_argument_group('extension options') for ext in EXTENSIONS: - group.add_option('--ext-' + ext, action='store_true', - dest='ext_' + ext, default=False, - help='enable %s extension' % ext) + group.add_argument('--ext-%s' % ext, action='append_const', + const='sphinx.ext.%s' % ext, dest='extensions', + help='enable %s extension' % ext) - (opts, args) = parser.parse_args(argv) + return parser - if opts.show_version: - print('Sphinx (sphinx-apidoc) %s' % __display_version__) - return 0 - if not args: - parser.error('A package path is required.') +def main(argv=sys.argv[1:]): + # type: (List[str]) -> int + """Parse and check the command line arguments.""" + parser = get_parser() + args = parser.parse_args(argv) - rootpath, excludes = args[0], args[1:] - if not opts.destdir: - parser.error('An output directory is required.') - if opts.header is None: - opts.header = path.abspath(rootpath).split(path.sep)[-1] - if opts.suffix.startswith('.'): - opts.suffix = opts.suffix[1:] + rootpath = path.abspath(args.module_path) + + # normalize opts + + if args.header is None: + args.header = rootpath.split(path.sep)[-1] + if args.suffix.startswith('.'): + args.suffix = args.suffix[1:] if not path.isdir(rootpath): print('%s is not a directory.' 
% rootpath, file=sys.stderr) sys.exit(1) - if not path.isdir(opts.destdir): - if not opts.dryrun: - os.makedirs(opts.destdir) - rootpath = path.abspath(rootpath) - excludes = normalize_excludes(rootpath, excludes) - modules = recurse_tree(rootpath, excludes, opts) - if opts.full: + if not path.isdir(args.destdir) and not args.dryrun: + os.makedirs(args.destdir) + excludes = [path.abspath(exclude) for exclude in args.exclude_pattern] + modules = recurse_tree(rootpath, excludes, args) + + if args.full: from sphinx.cmd import quickstart as qs modules.sort() prev_module = '' # type: unicode @@ -394,44 +398,44 @@ Note: By default this script will not overwrite already created files.""") prev_module = module text += ' %s\n' % module d = dict( - path = opts.destdir, + path = args.destdir, sep = False, dot = '_', - project = opts.header, - author = opts.author or 'Author', - version = opts.version or '', - release = opts.release or opts.version or '', - suffix = '.' + opts.suffix, + project = args.header, + author = args.author or 'Author', + version = args.version or '', + release = args.release or args.version or '', + suffix = '.' + args.suffix, master = 'index', epub = True, - ext_autodoc = True, - ext_viewcode = True, - ext_todo = True, + extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', + 'sphinx.ext.todo'], makefile = True, batchfile = True, - mastertocmaxdepth = opts.maxdepth, + make_mode = True, + mastertocmaxdepth = args.maxdepth, mastertoctree = text, language = 'en', module_path = rootpath, - append_syspath = opts.append_syspath, + append_syspath = args.append_syspath, ) - enabled_exts = {'ext_' + ext: getattr(opts, 'ext_' + ext) - for ext in EXTENSIONS if getattr(opts, 'ext_' + ext)} - d.update(enabled_exts) + if args.extensions: + d['extensions'].extend(args.extensions) - if isinstance(opts.header, binary_type): + if isinstance(args.header, binary_type): d['project'] = d['project'].decode('utf-8') - if isinstance(opts.author, binary_type): + if isinstance(args.author, binary_type): d['author'] = d['author'].decode('utf-8') - if isinstance(opts.version, binary_type): + if isinstance(args.version, binary_type): d['version'] = d['version'].decode('utf-8') - if isinstance(opts.release, binary_type): + if isinstance(args.release, binary_type): d['release'] = d['release'].decode('utf-8') - if not opts.dryrun: - qs.generate(d, silent=True, overwrite=opts.force) - elif not opts.notoc: - create_modules_toc_file(modules, opts) + if not args.dryrun: + qs.generate(d, silent=True, overwrite=args.force) + elif not args.notoc: + create_modules_toc_file(modules, args) + return 0 diff --git a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py index e8586943f..ff161565c 100644 --- a/sphinx/ext/autodoc/__init__.py +++ b/sphinx/ext/autodoc/__init__.py @@ -15,7 +15,6 @@ import re import sys import inspect import traceback -import warnings from six import PY2, iterkeys, iteritems, itervalues, text_type, class_types, string_types @@ -25,7 +24,8 @@ from docutils.parsers.rst import Directive from docutils.statemachine import ViewList import sphinx -from sphinx.ext.autodoc.importer import _MockImporter +from sphinx.ext.autodoc.importer import mock, import_module +from sphinx.ext.autodoc.importer import _MockImporter # to keep compatibility # NOQA from sphinx.ext.autodoc.inspector import format_annotation, formatargspec # to keep compatibility # NOQA from sphinx.util import rpartition, force_decode from sphinx.locale import _ @@ -389,48 +389,53 @@ class Documenter(object): self.modname, 
'.'.join(self.objpath)) # always enable mock import hook # it will do nothing if autodoc_mock_imports is empty - import_hook = _MockImporter(self.env.config.autodoc_mock_imports) - try: - logger.debug('[autodoc] import %s', self.modname) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=ImportWarning) - with logging.skip_warningiserror(not self.env.config.autodoc_warningiserror): - __import__(self.modname) - parent = None - obj = self.module = sys.modules[self.modname] - logger.debug('[autodoc] => %r', obj) - for part in self.objpath: - parent = obj - logger.debug('[autodoc] getattr(_, %r)', part) - obj = self.get_attr(obj, part) + with mock(self.env.config.autodoc_mock_imports): + try: + logger.debug('[autodoc] import %s', self.modname) + obj = import_module(self.modname, self.env.config.autodoc_warningiserror) + parent = None + self.module = obj logger.debug('[autodoc] => %r', obj) - self.object_name = part - self.parent = parent - self.object = obj - return True - # this used to only catch SyntaxError, ImportError and AttributeError, - # but importing modules with side effects can raise all kinds of errors - except (Exception, SystemExit) as e: - if self.objpath: - errmsg = 'autodoc: failed to import %s %r from module %r' % \ - (self.objtype, '.'.join(self.objpath), self.modname) - else: - errmsg = 'autodoc: failed to import %s %r' % \ - (self.objtype, self.fullname) - if isinstance(e, SystemExit): - errmsg += ('; the module executes module level statement ' + - 'and it might call sys.exit().') - else: - errmsg += '; the following exception was raised:\n%s' % \ - traceback.format_exc() - if PY2: - errmsg = errmsg.decode('utf-8') # type: ignore - logger.debug(errmsg) - self.directive.warn(errmsg) - self.env.note_reread() - return False - finally: - import_hook.disable() + for part in self.objpath: + parent = obj + logger.debug('[autodoc] getattr(_, %r)', part) + obj = self.get_attr(obj, part) + logger.debug('[autodoc] => %r', obj) + self.object_name = part + self.parent = parent + self.object = obj + return True + except (AttributeError, ImportError) as exc: + if self.objpath: + errmsg = 'autodoc: failed to import %s %r from module %r' % \ + (self.objtype, '.'.join(self.objpath), self.modname) + else: + errmsg = 'autodoc: failed to import %s %r' % \ + (self.objtype, self.fullname) + + if isinstance(exc, ImportError): + # import_module() raises ImportError having real exception obj and + # traceback + real_exc, traceback_msg = exc.args + if isinstance(real_exc, SystemExit): + errmsg += ('; the module executes module level statement ' + + 'and it might call sys.exit().') + elif isinstance(real_exc, ImportError): + errmsg += ('; the following exception was raised:\n%s' % + real_exc.args[0]) + else: + errmsg += ('; the following exception was raised:\n%s' % + traceback_msg) + else: + errmsg += ('; the following exception was raised:\n%s' % + traceback.format_exc()) + + if PY2: + errmsg = errmsg.decode('utf-8') # type: ignore + logger.debug(errmsg) + self.directive.warn(errmsg) + self.env.note_reread() + return False def get_real_modname(self): # type: () -> str @@ -1269,6 +1274,17 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: return ModuleLevelDocumenter.document_members(self, all_members) + def generate(self, more_content=None, real_modname=None, + check_module=False, all_members=False): + # Do not pass real_modname and use the name from the __module__ + # attribute of the class. 
+ # If a class gets imported into the module real_modname + # the analyzer won't find the source of the class, if + # it looks in real_modname. + return super(ClassDocumenter, self).generate(more_content=more_content, + check_module=check_module, + all_members=all_members) + class ExceptionDocumenter(ClassDocumenter): """ diff --git a/sphinx/ext/autodoc/importer.py b/sphinx/ext/autodoc/importer.py index 1234d716a..5c28f490d 100644 --- a/sphinx/ext/autodoc/importer.py +++ b/sphinx/ext/autodoc/importer.py @@ -10,13 +10,16 @@ """ import sys +import warnings +import traceback +import contextlib from types import FunctionType, MethodType, ModuleType from sphinx.util import logging if False: # For type annotation - from typing import Any, List, Set # NOQA + from typing import Any, Generator, List, Set # NOQA logger = logging.getLogger(__name__) @@ -75,7 +78,6 @@ class _MockModule(ModuleType): class _MockImporter(object): - def __init__(self, names): # type: (List[str]) -> None self.base_packages = set() # type: Set[str] @@ -116,3 +118,29 @@ class _MockImporter(object): sys.modules[name] = module self.mocked_modules.append(name) return module + + +@contextlib.contextmanager +def mock(names): + # type: (List[str]) -> Generator + try: + importer = _MockImporter(names) + yield + finally: + importer.disable() + + +def import_module(modname, warningiserror=False): + """ + Call __import__(modname), convert exceptions to ImportError + """ + try: + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=ImportWarning) + with logging.skip_warningiserror(not warningiserror): + __import__(modname) + return sys.modules[modname] + except BaseException as exc: + # Importing modules may cause any side effects, including + # SystemExit, so we need to catch all errors. 
+ raise ImportError(exc, traceback.format_exc()) diff --git a/sphinx/ext/autodoc/inspector.py b/sphinx/ext/autodoc/inspector.py index f1faf2043..5d157c797 100644 --- a/sphinx/ext/autodoc/inspector.py +++ b/sphinx/ext/autodoc/inspector.py @@ -65,7 +65,7 @@ def format_annotation(annotation): elif (hasattr(typing, 'UnionMeta') and isinstance(annotation, typing.UnionMeta) and # type: ignore hasattr(annotation, '__union_params__')): - params = annotation.__union_params__ # type: ignore + params = annotation.__union_params__ if params is not None: param_str = ', '.join(format_annotation(p) for p in params) return '%s[%s]' % (qualified_name, param_str) @@ -74,7 +74,7 @@ def format_annotation(annotation): getattr(annotation, '__args__', None) is not None and hasattr(annotation, '__result__')): # Skipped in the case of plain typing.Callable - args = annotation.__args__ # type: ignore + args = annotation.__args__ if args is None: return qualified_name elif args is Ellipsis: @@ -84,15 +84,15 @@ def format_annotation(annotation): args_str = '[%s]' % ', '.join(formatted_args) return '%s[%s, %s]' % (qualified_name, args_str, - format_annotation(annotation.__result__)) # type: ignore + format_annotation(annotation.__result__)) elif (hasattr(typing, 'TupleMeta') and isinstance(annotation, typing.TupleMeta) and # type: ignore hasattr(annotation, '__tuple_params__') and hasattr(annotation, '__tuple_use_ellipsis__')): - params = annotation.__tuple_params__ # type: ignore + params = annotation.__tuple_params__ if params is not None: param_strings = [format_annotation(p) for p in params] - if annotation.__tuple_use_ellipsis__: # type: ignore + if annotation.__tuple_use_ellipsis__: param_strings.append('...') return '%s[%s]' % (qualified_name, ', '.join(param_strings)) diff --git a/sphinx/ext/autosummary/__init__.py b/sphinx/ext/autosummary/__init__.py index 67bbf6d91..21bfe7b13 100644 --- a/sphinx/ext/autosummary/__init__.py +++ b/sphinx/ext/autosummary/__init__.py @@ -73,6 +73,7 @@ from sphinx.environment.adapters.toctree import TocTree from sphinx.util import import_object, rst, logging from sphinx.pycode import ModuleAnalyzer, PycodeError from sphinx.ext.autodoc import Options +from sphinx.ext.autodoc.importer import import_module if False: # For type annotation @@ -512,8 +513,7 @@ def _import_by_name(name): modname = '.'.join(name_parts[:-1]) if modname: try: - __import__(modname) - mod = sys.modules[modname] + mod = import_module(modname) return getattr(mod, name_parts[-1]), mod, modname except (ImportError, IndexError, AttributeError): pass @@ -525,9 +525,10 @@ def _import_by_name(name): last_j = j modname = '.'.join(name_parts[:j]) try: - __import__(modname) + import_module(modname) except ImportError: continue + if modname in sys.modules: break diff --git a/sphinx/ext/autosummary/generate.py b/sphinx/ext/autosummary/generate.py index 13b463e87..f02c50692 100644 --- a/sphinx/ext/autosummary/generate.py +++ b/sphinx/ext/autosummary/generate.py @@ -19,16 +19,17 @@ """ from __future__ import print_function +import argparse +import codecs import os +import pydoc import re import sys -import pydoc -import optparse -import codecs from jinja2 import FileSystemLoader, TemplateNotFound from jinja2.sandbox import SandboxedEnvironment +from sphinx import __display_version__ from sphinx import package_dir from sphinx.ext.autosummary import import_by_name, get_documenter from sphinx.jinja2glue import BuiltinTemplateLoader @@ -59,33 +60,6 @@ if False: from sphinx.environment import BuildEnvironment # NOQA -def 
main(argv=sys.argv[1:]): - # type: (List[str]) -> None - usage = """%prog [OPTIONS] SOURCEFILE ...""" - p = optparse.OptionParser(usage.strip()) - p.add_option("-o", "--output-dir", action="store", type="string", - dest="output_dir", default=None, - help="Directory to place all output in") - p.add_option("-s", "--suffix", action="store", type="string", - dest="suffix", default="rst", - help="Default suffix for files (default: %default)") - p.add_option("-t", "--templates", action="store", type="string", - dest="templates", default=None, - help="Custom template directory (default: %default)") - p.add_option("-i", "--imported-members", action="store_true", - dest="imported_members", default=False, - help="Document imported members (default: %default)") - options, args = p.parse_args(argv) - - if len(args) < 1: - p.error('no input files given') - - generate_autosummary_docs(args, options.output_dir, - "." + options.suffix, - template_dir=options.templates, - imported_members=options.imported_members) - - def _simple_info(msg): # type: (unicode) -> None print(msg) @@ -373,5 +347,57 @@ def find_autosummary_in_lines(lines, module=None, filename=None): return documented +def get_parser(): + # type: () -> argparse.ArgumentParser + parser = argparse.ArgumentParser( + usage='%(prog)s [OPTIONS] ...', + epilog='For more information, visit .', + description=""" +Generate ReStructuredText using autosummary directives. + +sphinx-autogen is a frontend to sphinx.ext.autosummary.generate. It generates +the reStructuredText files from the autosummary directives contained in the +given input files. + +The format of the autosummary directive is documented in the +``sphinx.ext.autosummary`` Python module and can be read using:: + + pydoc sphinx.ext.autosummary +""") + + parser.add_argument('--version', action='version', dest='show_version', + version='%%(prog)s %s' % __display_version__) + + parser.add_argument('source_file', nargs='+', + help='source files to generate rST files for') + + parser.add_argument('-o', '--output-dir', action='store', + dest='output_dir', + help='directory to place all output in') + parser.add_argument('-s', '--suffix', action='store', dest='suffix', + default='rst', + help='default suffix for files (default: ' + '%(default)s)') + parser.add_argument('-t', '--templates', action='store', dest='templates', + default=None, + help='custom template directory (default: ' + '%(default)s)') + parser.add_argument('-i', '--imported-members', action='store_true', + dest='imported_members', default=False, + help='document imported members (default: ' + '%(default)s)') + + return parser + + +def main(argv=sys.argv[1:]): + # type: (List[str]) -> None + args = get_parser().parse_args(argv) + generate_autosummary_docs(args.source_file, args.output_dir, + '.' + args.suffix, + template_dir=args.templates, + imported_members=args.imported_members) + + if __name__ == '__main__': main() diff --git a/sphinx/ext/intersphinx.py b/sphinx/ext/intersphinx.py index 73078330b..1ee58353c 100644 --- a/sphinx/ext/intersphinx.py +++ b/sphinx/ext/intersphinx.py @@ -217,7 +217,7 @@ def load_mappings(app): if isinstance(value, (list, tuple)): # new format - name, (uri, inv) = key, value # type: ignore + name, (uri, inv) = key, value if not isinstance(name, string_types): logger.warning('intersphinx identifier %r is not string. 
Ignored', name) continue diff --git a/sphinx/ext/napoleon/__init__.py b/sphinx/ext/napoleon/__init__.py index f319a18c4..7aca9b629 100644 --- a/sphinx/ext/napoleon/__init__.py +++ b/sphinx/ext/napoleon/__init__.py @@ -27,13 +27,12 @@ class Config(object): Listed below are all the settings used by napoleon and their default values. These settings can be changed in the Sphinx `conf.py` file. Make - sure that both "sphinx.ext.autodoc" and "sphinx.ext.napoleon" are - enabled in `conf.py`:: + sure that "sphinx.ext.napoleon" is enabled in `conf.py`:: # conf.py # Add any Sphinx extension module names here, as strings - extensions = ['sphinx.ext.autodoc', 'sphinx.ext.napoleon'] + extensions = ['sphinx.ext.napoleon'] # Napoleon settings napoleon_google_docstring = True @@ -294,6 +293,7 @@ def setup(app): _patch_python_domain() + app.setup_extension('sphinx.ext.autodoc') app.connect('autodoc-process-docstring', _process_docstring) app.connect('autodoc-skip-member', _skip_member) diff --git a/sphinx/ext/napoleon/docstring.py b/sphinx/ext/napoleon/docstring.py index c77598ef1..d3a64049b 100644 --- a/sphinx/ext/napoleon/docstring.py +++ b/sphinx/ext/napoleon/docstring.py @@ -194,7 +194,7 @@ class GoogleDocstring(UnicodeMixin): line = self._line_iter.peek() while(not self._is_section_break() and (not line or self._is_indented(line, indent))): - lines.append(next(self._line_iter)) # type: ignore + lines.append(next(self._line_iter)) line = self._line_iter.peek() return lines @@ -204,7 +204,7 @@ class GoogleDocstring(UnicodeMixin): while (self._line_iter.has_next() and self._line_iter.peek() and not self._is_section_header()): - lines.append(next(self._line_iter)) # type: ignore + lines.append(next(self._line_iter)) return lines def _consume_empty(self): @@ -212,13 +212,13 @@ class GoogleDocstring(UnicodeMixin): lines = [] line = self._line_iter.peek() while self._line_iter.has_next() and not line: - lines.append(next(self._line_iter)) # type: ignore + lines.append(next(self._line_iter)) line = self._line_iter.peek() return lines def _consume_field(self, parse_type=True, prefer_type=False): # type: (bool, bool) -> Tuple[unicode, unicode, List[unicode]] - line = next(self._line_iter) # type: ignore + line = next(self._line_iter) before, colon, after = self._partition_field_on_colon(line) _name, _type, _desc = before, '', after # type: unicode, unicode, unicode @@ -250,7 +250,7 @@ class GoogleDocstring(UnicodeMixin): def _consume_inline_attribute(self): # type: () -> Tuple[unicode, List[unicode]] - line = next(self._line_iter) # type: ignore + line = next(self._line_iter) _type, colon, _desc = self._partition_field_on_colon(line) if not colon: _type, _desc = _desc, _type @@ -285,7 +285,7 @@ class GoogleDocstring(UnicodeMixin): def _consume_section_header(self): # type: () -> unicode - section = next(self._line_iter) # type: ignore + section = next(self._line_iter) stripped_section = section.strip(':') if stripped_section.lower() in self._sections: section = stripped_section @@ -295,7 +295,7 @@ class GoogleDocstring(UnicodeMixin): # type: () -> List[unicode] lines = [] while self._line_iter.has_next(): - lines.append(next(self._line_iter)) # type: ignore + lines.append(next(self._line_iter)) return lines def _consume_to_next_section(self): @@ -303,7 +303,7 @@ class GoogleDocstring(UnicodeMixin): self._consume_empty() lines = [] while not self._is_section_break(): - lines.append(next(self._line_iter)) # type: ignore + lines.append(next(self._line_iter)) return lines + self._consume_empty() def _dedent(self, 
lines, full=False): @@ -886,7 +886,7 @@ class NumpyDocstring(GoogleDocstring): def _consume_field(self, parse_type=True, prefer_type=False): # type: (bool, bool) -> Tuple[unicode, unicode, List[unicode]] - line = next(self._line_iter) # type: ignore + line = next(self._line_iter) if parse_type: _name, _, _type = self._partition_field_on_colon(line) else: @@ -907,10 +907,10 @@ class NumpyDocstring(GoogleDocstring): def _consume_section_header(self): # type: () -> unicode - section = next(self._line_iter) # type: ignore + section = next(self._line_iter) if not _directive_regex.match(section): # Consume the header underline - next(self._line_iter) # type: ignore + next(self._line_iter) return section def _is_section_break(self): diff --git a/sphinx/ext/todo.py b/sphinx/ext/todo.py index 24e9beec1..a58422793 100644 --- a/sphinx/ext/todo.py +++ b/sphinx/ext/todo.py @@ -69,6 +69,8 @@ class Todo(BaseAdmonition): env = self.state.document.settings.env targetid = 'index-%s' % env.new_serialno('index') + # Stash the target to be retrieved later in latex_visit_todo_node. + todo['targetref'] = '%s:%s' % (env.docname, targetid) targetnode = nodes.target('', '', ids=[targetid]) return [targetnode, todo] @@ -137,11 +139,14 @@ def process_todo_nodes(app, doctree, fromdocname): env.todo_all_todos = [] # type: ignore for node in doctree.traverse(todolist): - if not app.config['todo_include_todos']: - node.replace_self([]) - continue + if node.get('ids'): + content = [nodes.target()] + else: + content = [] - content = [] + if not app.config['todo_include_todos']: + node.replace_self(content) + continue for todo_info in env.todo_all_todos: # type: ignore para = nodes.paragraph(classes=['todo-source']) @@ -170,8 +175,12 @@ def process_todo_nodes(app, doctree, fromdocname): para += newnode para += nodes.Text(desc2, desc2) - # (Recursively) resolve references in the todo content todo_entry = todo_info['todo'] + # Remove targetref from the (copied) node to avoid emitting a + # duplicate label of the original entry when we walk this node. + del todo_entry['targetref'] + + # (Recursively) resolve references in the todo content env.resolve_references(todo_entry, todo_info['docname'], app.builder) @@ -213,7 +222,13 @@ def depart_todo_node(self, node): def latex_visit_todo_node(self, node): # type: (nodes.NodeVisitor, todo_node) -> None title = node.pop(0).astext().translate(tex_escape_map) - self.body.append(u'\n\\begin{sphinxadmonition}{note}{%s:}' % title) + self.body.append(u'\n\\begin{sphinxadmonition}{note}{') + # If this is the original todo node, emit a label that will be referenced by + # a hyperref in the todolist. + target = node.get('targetref') + if target is not None: + self.body.append(u'\\label{%s}' % target) + self.body.append('%s:}' % title) def latex_depart_todo_node(self, node): diff --git a/sphinx/io.py b/sphinx/io.py index 8813cb3b6..8a41069db 100644 --- a/sphinx/io.py +++ b/sphinx/io.py @@ -8,10 +8,15 @@ :copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" -from docutils.io import FileInput +import re +import codecs + +from docutils.io import FileInput, NullOutput +from docutils.core import Publisher from docutils.readers import standalone +from docutils.statemachine import StringList, string2lines from docutils.writers import UnfilteredWriter -from six import string_types, text_type, iteritems +from six import text_type from typing import Any, Union # NOQA from sphinx.transforms import ( @@ -24,7 +29,7 @@ from sphinx.transforms.compact_bullet_list import RefOnlyBulletListTransform from sphinx.transforms.i18n import ( PreserveTranslatableMessages, Locale, RemoveTranslatableInline, ) -from sphinx.util import import_object, split_docinfo +from sphinx.util import logging from sphinx.util.docutils import LoggingReporter if False: @@ -38,41 +43,18 @@ if False: from sphinx.builders import Builder # NOQA from sphinx.environment import BuildEnvironment # NOQA +docinfo_re = re.compile(':\\w+:.*?') + + +logger = logging.getLogger(__name__) + class SphinxBaseReader(standalone.Reader): """ - Add our source parsers + A base class of readers for Sphinx. + + This replaces reporter by Sphinx's on generating document. """ - def __init__(self, app, parsers={}, *args, **kwargs): - # type: (Sphinx, Dict[unicode, Parser], Any, Any) -> None - standalone.Reader.__init__(self, *args, **kwargs) - self.parser_map = {} # type: Dict[unicode, Parser] - for suffix, parser_class in parsers.items(): - if isinstance(parser_class, string_types): - parser_class = import_object(parser_class, 'source parser') # type: ignore - parser = parser_class() - if hasattr(parser, 'set_application'): - parser.set_application(app) - self.parser_map[suffix] = parser - - def read(self, source, parser, settings): - # type: (Input, Parser, Dict) -> nodes.document - self.source = source - - for suffix in self.parser_map: - if source.source_path.endswith(suffix): - self.parser = self.parser_map[suffix] - break - else: - # use special parser for unknown file-extension '*' (if exists) - self.parser = self.parser_map.get('*') - - if not self.parser: - self.parser = parser - self.settings = settings - self.input = self.source.read() - self.parse() - return self.document def get_transforms(self): # type: () -> List[Transform] @@ -80,17 +62,19 @@ class SphinxBaseReader(standalone.Reader): def new_document(self): # type: () -> nodes.document + """Creates a new document object which having a special reporter object good + for logging. + """ document = standalone.Reader.new_document(self) reporter = document.reporter - document.reporter = LoggingReporter(reporter.source, reporter.report_level, - reporter.halt_level, reporter.debug_flag, - reporter.error_handler) + document.reporter = LoggingReporter.from_reporter(reporter) + document.reporter.set_source(self.source) return document class SphinxStandaloneReader(SphinxBaseReader): """ - Add our own transforms. + A basic document reader for Sphinx. """ transforms = [ApplySourceWorkaround, ExtraTranslatableNodes, PreserveTranslatableMessages, Locale, CitationReferences, DefaultSubstitutions, MoveModuleTargets, @@ -101,29 +85,30 @@ class SphinxStandaloneReader(SphinxBaseReader): class SphinxI18nReader(SphinxBaseReader): """ - Replacer for document.reporter.get_source_and_line method. + A document reader for i18n. - reST text lines for translation do not have the original source line number. - This class provides the correct line numbers when reporting. 
+ This returns the source line number of original text as current source line number + to let users know where the error happened. + Because the translated texts are partial and they don't have correct line numbers. """ + lineno = None # type: int transforms = [ApplySourceWorkaround, ExtraTranslatableNodes, CitationReferences, DefaultSubstitutions, MoveModuleTargets, HandleCodeBlocks, AutoNumbering, SortIds, RemoveTranslatableInline, FilterSystemMessages, RefOnlyBulletListTransform, UnreferencedFootnotesDetector] - def __init__(self, *args, **kwargs): - # type: (Any, Any) -> None - SphinxBaseReader.__init__(self, *args, **kwargs) - self.lineno = None # type: int - def set_lineno_for_reporter(self, lineno): # type: (int) -> None + """Stores the source line number of original text.""" self.lineno = lineno def new_document(self): # type: () -> nodes.document + """Creates a new document object which having a special reporter object for + translation. + """ document = SphinxBaseReader.new_document(self) reporter = document.reporter @@ -136,6 +121,8 @@ class SphinxI18nReader(SphinxBaseReader): class SphinxDummyWriter(UnfilteredWriter): + """Dummy writer module used for generating doctree.""" + supported = ('html',) # needed to keep "meta" nodes def translate(self): @@ -143,11 +130,26 @@ class SphinxDummyWriter(UnfilteredWriter): pass -class SphinxFileInput(FileInput): +def SphinxDummySourceClass(source, *args, **kwargs): + """Bypass source object as is to cheat Publisher.""" + return source + + +class SphinxBaseFileInput(FileInput): + """A base class of SphinxFileInput. + + It supports to replace unknown Unicode characters to '?'. And it also emits + Sphinx events ``source-read`` on reading. + """ + def __init__(self, app, env, *args, **kwds): # type: (Sphinx, BuildEnvironment, Any, Any) -> None self.app = app self.env = env + + # set up error handler + codecs.register_error('sphinx', self.warn_and_replace) # type: ignore + kwds['error_handler'] = 'sphinx' # py3: handle error on open. FileInput.__init__(self, *args, **kwds) @@ -159,24 +161,121 @@ class SphinxFileInput(FileInput): def read(self): # type: () -> unicode - def get_parser_type(source_path): - # type: (unicode) -> Tuple[unicode] - for suffix, parser_class in iteritems(self.app.registry.get_source_parsers()): - if source_path.endswith(suffix): - if isinstance(parser_class, string_types): - parser_class = import_object(parser_class, 'source parser') # type: ignore # NOQA - return parser_class.supported - return ('restructuredtext',) + """Reads the contents from file. + After reading, it emits Sphinx event ``source-read``. 
+ """ data = FileInput.read(self) - if self.app: - arg = [data] - self.app.emit('source-read', self.env.docname, arg) - data = arg[0] - docinfo, data = split_docinfo(data) - if 'restructuredtext' in get_parser_type(self.source_path): - if self.env.config.rst_epilog: - data = data + '\n' + self.env.config.rst_epilog + '\n' - if self.env.config.rst_prolog: - data = self.env.config.rst_prolog + '\n' + data - return docinfo + data + + # emit source-read event + arg = [data] + self.app.emit('source-read', self.env.docname, arg) + return arg[0] + + def warn_and_replace(self, error): + # type: (Any) -> Tuple + """Custom decoding error handler that warns and replaces.""" + linestart = error.object.rfind(b'\n', 0, error.start) + lineend = error.object.find(b'\n', error.start) + if lineend == -1: + lineend = len(error.object) + lineno = error.object.count(b'\n', 0, error.start) + 1 + logger.warning('undecodable source characters, replacing with "?": %r', + (error.object[linestart + 1:error.start] + b'>>>' + + error.object[error.start:error.end] + b'<<<' + + error.object[error.end:lineend]), + location=(self.env.docname, lineno)) + return (u'?', error.end) + + +class SphinxFileInput(SphinxBaseFileInput): + """A basic FileInput for Sphinx.""" + pass + + +class SphinxRSTFileInput(SphinxBaseFileInput): + """A reST FileInput for Sphinx. + + This FileInput automatically prepends and appends text by :confval:`rst_prolog` and + :confval:`rst_epilog`. + + .. important:: + + This FileInput uses an instance of ``StringList`` as a return value of ``read()`` + method to indicate original source filename and line numbers after prepending and + appending. + For that reason, ``sphinx.parsers.RSTParser`` should be used with this to parse + a content correctly. + """ + + def prepend_prolog(self, text, prolog): + # type: (StringList, unicode) -> None + docinfo = self.count_docinfo_lines(text) + if docinfo: + # insert a blank line after docinfo + text.insert(docinfo, '', '', 0) + docinfo += 1 + + # insert prolog (after docinfo if exists) + for lineno, line in enumerate(prolog.splitlines()): + text.insert(docinfo + lineno, line, '', lineno) + + text.insert(docinfo + lineno + 1, '', '', 0) + + def append_epilog(self, text, epilog): + # type: (StringList, unicode) -> None + # append a blank line and rst_epilog + text.append('', '', 0) + for lineno, line in enumerate(epilog.splitlines()): + text.append(line, '', lineno) + + def read(self): + # type: () -> StringList + inputstring = SphinxBaseFileInput.read(self) + lines = string2lines(inputstring, convert_whitespace=True) + content = StringList() + for lineno, line in enumerate(lines): + content.append(line, self.source_path, lineno) + + if self.env.config.rst_prolog: + self.prepend_prolog(content, self.env.config.rst_prolog) + if self.env.config.rst_epilog: + self.append_epilog(content, self.env.config.rst_epilog) + + return content + + def count_docinfo_lines(self, content): + # type: (StringList) -> int + if len(content) == 0: + return 0 + else: + for lineno, line in enumerate(content.data): + if not docinfo_re.match(line): + break + return lineno + + +def read_doc(app, env, filename): + # type: (Sphinx, BuildEnvironment, unicode) -> nodes.document + """Parse a document and convert to doctree.""" + input_class = app.registry.get_source_input(filename) + reader = SphinxStandaloneReader() + source = input_class(app, env, source=None, source_path=filename, + encoding=env.config.source_encoding) + parser = app.registry.create_source_parser(app, filename) + + pub = 
Publisher(reader=reader, + parser=parser, + writer=SphinxDummyWriter(), + source_class=SphinxDummySourceClass, + destination=NullOutput()) + pub.set_components(None, 'restructuredtext', None) + pub.process_programmatic_settings(None, env.settings, None) + pub.set_source(source, filename) + pub.publish() + return pub.document + + +def setup(app): + app.registry.add_source_input('*', SphinxFileInput) + app.registry.add_source_input('restructuredtext', SphinxRSTFileInput) diff --git a/sphinx/locale/__init__.py b/sphinx/locale/__init__.py index 1b4ed02fe..e148f2c12 100644 --- a/sphinx/locale/__init__.py +++ b/sphinx/locale/__init__.py @@ -171,7 +171,7 @@ class _TranslationProxy(UserString, object): # type: () -> str try: return 'i' + repr(text_type(self.data)) - except: + except Exception: return '<%s broken>' % self.__class__.__name__ diff --git a/sphinx/parsers.py b/sphinx/parsers.py index 33556e487..92bea9461 100644 --- a/sphinx/parsers.py +++ b/sphinx/parsers.py @@ -11,6 +11,8 @@ import docutils.parsers import docutils.parsers.rst +from docutils.parsers.rst import states +from docutils.statemachine import StringList from docutils.transforms.universal import SmartQuotes from sphinx.transforms import SphinxSmartQuotes @@ -18,6 +20,7 @@ from sphinx.transforms import SphinxSmartQuotes if False: # For type annotation from typing import Any, Dict, List, Type # NOQA + from docutils import nodes # NOQA from docutils.transforms import Transform # NOQA from sphinx.application import Sphinx # NOQA @@ -56,7 +59,7 @@ class Parser(docutils.parsers.Parser): class RSTParser(docutils.parsers.rst.Parser): - """A reST parser customized for Sphinx.""" + """A reST parser for Sphinx.""" def get_transforms(self): # type: () -> List[Type[Transform]] @@ -66,6 +69,26 @@ class RSTParser(docutils.parsers.rst.Parser): transforms.append(SphinxSmartQuotes) return transforms + def parse(self, inputstring, document): + # type: (Any, nodes.document) -> None + """Parse text and generate a document tree. + + This accepts StringList as an inputstring parameter. + It enables to handle mixed contents (cf. :confval:`rst_prolog`) correctly. + """ + if isinstance(inputstring, StringList): + self.setup_parse(inputstring, document) + self.statemachine = states.RSTStateMachine( + state_classes=self.state_classes, + initial_state=self.initial_state, + debug=document.reporter.debug_flag) + # Give inputstring directly to statemachine. + self.statemachine.run(inputstring, document, inliner=self.inliner) + self.finish_parse() + else: + # otherwise, inputstring might be a string. It will be handled by superclass. 
+ docutils.parsers.rst.Parser.parse(self, inputstring, document) + def setup(app): # type: (Sphinx) -> Dict[unicode, Any] diff --git a/sphinx/pycode/__init__.py b/sphinx/pycode/__init__.py index 66544f073..20de2a656 100644 --- a/sphinx/pycode/__init__.py +++ b/sphinx/pycode/__init__.py @@ -36,11 +36,11 @@ class ModuleAnalyzer(object): if ('file', filename) in cls.cache: return cls.cache['file', filename] try: - fileobj = open(filename, 'rb') + with open(filename, 'rb') as f: + obj = cls(f, modname, filename) + cls.cache['file', filename] = obj except Exception as err: raise PycodeError('error opening %r' % filename, err) - obj = cls(fileobj, modname, filename) - cls.cache['file', filename] = obj return obj @classmethod diff --git a/sphinx/pycode/parser.py b/sphinx/pycode/parser.py index d488842e9..7460dcfce 100644 --- a/sphinx/pycode/parser.py +++ b/sphinx/pycode/parser.py @@ -52,8 +52,13 @@ def get_lvar_names(node, self=None): else: raise TypeError('The assignment %r is not instance variable' % node) elif node_name in ('Tuple', 'List'): - members = [get_lvar_names(elt) for elt in node.elts] # type: ignore - return sum(members, []) + members = [] + for elt in node.elts: # type: ignore + try: + members.extend(get_lvar_names(elt, self)) + except TypeError: + pass + return members elif node_name == 'Attribute': if node.value.__class__.__name__ == 'Name' and self and node.value.id == self_id: # type: ignore # NOQA # instance variable @@ -62,6 +67,8 @@ def get_lvar_names(node, self=None): raise TypeError('The assignment %r is not instance variable' % node) elif node_name == 'str': return [node] # type: ignore + elif node_name == 'Starred': + return get_lvar_names(node.value, self) # type: ignore else: raise NotImplementedError('Unexpected node name %r' % node_name) @@ -277,7 +284,7 @@ class VariableCommentPicker(ast.NodeVisitor): # type: (ast.Assign) -> None """Handles Assign node and pick up a variable comment.""" try: - varnames = sum([get_lvar_names(t, self=self.get_self()) for t in node.targets], []) # type: ignore # NOQA + varnames = sum([get_lvar_names(t, self=self.get_self()) for t in node.targets], []) current_line = self.get_line(node.lineno) except TypeError: return # this assignment is not new definition! 
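The ``get_lvar_names()`` hunks above change how assignment targets are unpacked: instead of aborting on the first element that is not a plain variable, each element of a tuple or list target is tried individually, and ``Starred`` nodes are unwrapped. A minimal standalone sketch of that idea (not the real ``sphinx.pycode.parser`` code, which additionally tracks ``self.attr`` targets)::

    import ast

    def collect_target_names(node):
        # Name -> [name]; Tuple/List -> try each element, skipping the ones that
        # raise; Starred -> unwrap its value (e.g. the *b in ``a, *b = ...``).
        kind = node.__class__.__name__
        if kind == 'Name':
            return [node.id]
        elif kind in ('Tuple', 'List'):
            names = []
            for elt in node.elts:
                try:
                    names.extend(collect_target_names(elt))
                except TypeError:
                    pass  # not a plain variable (e.g. obj.attr); ignore it
            return names
        elif kind == 'Starred':
            return collect_target_names(node.value)
        else:
            raise TypeError('unsupported assignment target: %r' % node)

    target = ast.parse('a, *b = 1, 2, 3').body[0].targets[0]
    print(collect_target_names(target))  # -> ['a', 'b']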
@@ -336,6 +343,7 @@ class VariableCommentPicker(ast.NodeVisitor): self.current_classes.append(node.name) self.add_entry(node.name) self.context.append(node.name) + self.previous = node for child in node.body: self.visit(child) self.context.pop() diff --git a/sphinx/registry.py b/sphinx/registry.py index 0861575db..b627f23af 100644 --- a/sphinx/registry.py +++ b/sphinx/registry.py @@ -13,21 +13,24 @@ from __future__ import print_function import traceback from pkg_resources import iter_entry_points -from six import itervalues +from six import iteritems, itervalues, string_types from sphinx.errors import ExtensionError, SphinxError, VersionRequirementError from sphinx.extension import Extension from sphinx.domains import ObjType from sphinx.domains.std import GenericObject, Target from sphinx.locale import __ +from sphinx.parsers import Parser as SphinxParser from sphinx.roles import XRefRole from sphinx.util import logging +from sphinx.util import import_object from sphinx.util.docutils import directive_helper if False: # For type annotation from typing import Any, Callable, Dict, Iterator, List, Type # NOQA from docutils import nodes # NOQA + from docutils.io import Input # NOQA from docutils.parsers import Parser # NOQA from sphinx.application import Sphinx # NOQA from sphinx.builders import Builder # NOQA @@ -48,6 +51,7 @@ class SphinxComponentRegistry(object): self.builders = {} # type: Dict[unicode, Type[Builder]] self.domains = {} # type: Dict[unicode, Type[Domain]] self.source_parsers = {} # type: Dict[unicode, Parser] + self.source_inputs = {} # type: Dict[unicode, Input] self.translators = {} # type: Dict[unicode, nodes.NodeVisitor] def add_builder(self, builder): @@ -155,15 +159,61 @@ class SphinxComponentRegistry(object): stddomain.object_types[directivename] = ObjType(objname or directivename, rolename) def add_source_parser(self, suffix, parser): - # type: (unicode, Parser) -> None + # type: (unicode, Type[Parser]) -> None if suffix in self.source_parsers: raise ExtensionError(__('source_parser for %r is already registered') % suffix) self.source_parsers[suffix] = parser + def get_source_parser(self, filename): + # type: (unicode) -> Type[Parser] + for suffix, parser_class in iteritems(self.source_parsers): + if filename.endswith(suffix): + break + else: + # use special parser for unknown file-extension '*' (if exists) + parser_class = self.source_parsers.get('*') + + if parser_class is None: + raise SphinxError(__('Source parser for %s not registered') % filename) + else: + if isinstance(parser_class, string_types): + parser_class = import_object(parser_class, 'source parser') # type: ignore + return parser_class + def get_source_parsers(self): # type: () -> Dict[unicode, Parser] return self.source_parsers + def create_source_parser(self, app, filename): + # type: (Sphinx, unicode) -> Parser + parser_class = self.get_source_parser(filename) + parser = parser_class() + if isinstance(parser, SphinxParser): + parser.set_application(app) + return parser + + def add_source_input(self, filetype, input_class): + # type: (unicode, Type[Input]) -> None + if filetype in self.source_inputs: + raise ExtensionError(__('source_input for %r is already registered') % filetype) + self.source_inputs[filetype] = input_class + + def get_source_input(self, filename): + # type: (unicode) -> Type[Input] + parser = self.get_source_parser(filename) + for filetype in parser.supported: + if filetype in self.source_inputs: + input_class = self.source_inputs[filetype] + break + else: + # use special 
source_input for unknown file-type '*' (if exists) + input_class = self.source_inputs.get('*') + + if input_class is None: + raise SphinxError(__('source_input for %s not registered') % filename) + else: + return input_class + def add_translator(self, name, translator): # type: (unicode, Type[nodes.NodeVisitor]) -> None self.translators[name] = translator diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py index 813a6dd51..a6074a863 100644 --- a/sphinx/search/__init__.py +++ b/sphinx/search/__init__.py @@ -265,6 +265,11 @@ class IndexBuilder(object): # objtype index -> (domain, type, objname (localized)) lang_class = languages.get(lang) # type: Type[SearchLanguage] # add language-specific SearchLanguage instance + + # fallback; try again with language-code + if lang_class is None and '_' in lang: + lang_class = languages.get(lang.split('_')[0]) + if lang_class is None: self.lang = SearchEnglish(options) # type: SearchLanguage elif isinstance(lang_class, str): diff --git a/sphinx/search/zh.py b/sphinx/search/zh.py index 5d4f87b88..5ef4b5888 100644 --- a/sphinx/search/zh.py +++ b/sphinx/search/zh.py @@ -240,7 +240,7 @@ class SearchChinese(SearchLanguage): if JIEBA: dict_path = options.get('dict') if dict_path and os.path.isfile(dict_path): - jieba.set_dictionary(dict_path) + jieba.load_userdict(dict_path) self.stemmer = get_stemmer() diff --git a/sphinx/setup_command.py b/sphinx/setup_command.py index d219a14d9..8c00a2ff8 100644 --- a/sphinx/setup_command.py +++ b/sphinx/setup_command.py @@ -136,8 +136,8 @@ class BuildDoc(Command): # type: () -> None if self.source_dir is None: self.source_dir = self._guess_source_dir() - self.announce('Using source directory %s' % self.source_dir) # type: ignore - self.ensure_dirname('source_dir') # type: ignore + self.announce('Using source directory %s' % self.source_dir) + self.ensure_dirname('source_dir') if self.source_dir is None: self.source_dir = os.curdir self.source_dir = abspath(self.source_dir) @@ -145,10 +145,10 @@ class BuildDoc(Command): self.config_dir = self.source_dir self.config_dir = abspath(self.config_dir) - self.ensure_string_list('builder') # type: ignore + self.ensure_string_list('builder') if self.build_dir is None: - build = self.get_finalized_command('build') # type: ignore - self.build_dir = os.path.join(abspath(build.build_base), 'sphinx') + build = self.get_finalized_command('build') + self.build_dir = os.path.join(abspath(build.build_base), 'sphinx') # type: ignore self.mkpath(self.build_dir) # type: ignore self.build_dir = abspath(self.build_dir) self.doctree_dir = os.path.join(self.build_dir, 'doctrees') diff --git a/sphinx/templates/quickstart/Makefile.new_t b/sphinx/templates/quickstart/Makefile.new_t index bba767a4c..c7cd62dda 100644 --- a/sphinx/templates/quickstart/Makefile.new_t +++ b/sphinx/templates/quickstart/Makefile.new_t @@ -3,7 +3,7 @@ # You can set these variables from the command line. SPHINXOPTS = -SPHINXBUILD = python -msphinx +SPHINXBUILD = sphinx-build SPHINXPROJ = {{ project_fn }} SOURCEDIR = {{ rsrcdir }} BUILDDIR = {{ rbuilddir }} diff --git a/sphinx/templates/quickstart/Makefile_t b/sphinx/templates/quickstart/Makefile_t index fdcf05691..4639a982b 100644 --- a/sphinx/templates/quickstart/Makefile_t +++ b/sphinx/templates/quickstart/Makefile_t @@ -2,9 +2,9 @@ # # You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = python -msphinx -PAPER = +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +PAPER ?= BUILDDIR = {{ rbuilddir }} # Internal variables. 
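The ``IndexBuilder`` hunk above adds a locale fallback for search languages: a full locale such as ``zh_CN`` that has no exact entry in the language table is retried with its bare language code. A toy illustration of the lookup, using an assumed, much-reduced table (the real code resolves entries from ``sphinx.search.languages``, which may hold dotted class names)::

    languages = {'en': 'SearchEnglish', 'ja': 'SearchJapanese', 'zh': 'SearchChinese'}

    def pick_search_language(lang):
        lang_class = languages.get(lang)
        if lang_class is None and '_' in lang:
            # fallback: retry with the bare language code, e.g. 'zh_CN' -> 'zh'
            lang_class = languages.get(lang.split('_')[0])
        return lang_class or 'SearchEnglish'

    print(pick_search_language('zh_CN'))  # -> 'SearchChinese'
    print(pick_search_language('xx_YY'))  # -> 'SearchEnglish' (no entry in this toy table)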
diff --git a/sphinx/templates/quickstart/conf.py_t b/sphinx/templates/quickstart/conf.py_t index 8300e626f..c42861c28 100644 --- a/sphinx/templates/quickstart/conf.py_t +++ b/sphinx/templates/quickstart/conf.py_t @@ -42,7 +42,11 @@ sys.path.insert(0, u'{{ module_path }}') # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = [{{ extensions }}] +extensions = [ +{%- for ext in extensions %} + '{{ ext }}', +{%- endfor %} +] # Add any paths that contain templates here, relative to this directory. templates_path = ['{{ dot }}templates'] @@ -85,9 +89,6 @@ exclude_patterns = [{{ exclude_patterns }}] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = {{ ext_todo }} - # -- Options for HTML output ---------------------------------------------- @@ -110,14 +111,12 @@ html_static_path = ['{{ dot }}static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # -# This is required for the alabaster theme -# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars -html_sidebars = { - '**': [ - 'relations.html', # needs 'show_related': True theme option to display - 'searchbox.html', - ] -} +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} # -- Options for HTMLHelp output ------------------------------------------ @@ -175,8 +174,8 @@ texinfo_documents = [ author, '{{ project_fn }}', 'One line description of project.', 'Miscellaneous'), ] +{%- if epub %} -{% if epub %} # -- Options for Epub output ---------------------------------------------- @@ -197,9 +196,23 @@ epub_copyright = copyright # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html'] -{% endif %} +{%- endif %} +{%- if extensions %} + + +# -- Extension configuration ---------------------------------------------- +{%- endif %} +{%- if 'sphinx.ext.intersphinx' in extensions %} + +# -- Options for intersphinx extension ------------------------------------ -{% if ext_intersphinx %} # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'https://docs.python.org/': None} -{% endif %} +{%- endif %} +{%- if 'sphinx.ext.todo' in extensions %} + +# -- Options for todo extension ------------------------------------------- + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True +{%- endif %} diff --git a/sphinx/templates/quickstart/make.bat.new_t b/sphinx/templates/quickstart/make.bat.new_t index a52951ebb..e49ffbe78 100644 --- a/sphinx/templates/quickstart/make.bat.new_t +++ b/sphinx/templates/quickstart/make.bat.new_t @@ -5,7 +5,7 @@ pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=python -msphinx + set SPHINXBUILD=sphinx-build ) set SOURCEDIR={{ rsrcdir }} set BUILDDIR={{ rbuilddir }} @@ -16,10 +16,10 @@ if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. - echo.The Sphinx module was not found. 
Make sure you have Sphinx installed, - echo.then set the SPHINXBUILD environment variable to point to the full - echo.path of the 'sphinx-build' executable. Alternatively you may add the - echo.Sphinx directory to PATH. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ diff --git a/sphinx/templates/quickstart/make.bat_t b/sphinx/templates/quickstart/make.bat_t index 03ae9d423..8438b5f7e 100644 --- a/sphinx/templates/quickstart/make.bat_t +++ b/sphinx/templates/quickstart/make.bat_t @@ -5,7 +5,7 @@ REM Command file for Sphinx documentation pushd %~dp0 if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=python -msphinx + set SPHINXBUILD=sphinx-build ) set BUILDDIR={{ rbuilddir }} set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% {{ rsrcdir }} @@ -52,20 +52,29 @@ if "%1" == "clean" ( ) -REM Check if sphinx-build is available +REM Check if sphinx-build is available and fallback to Python version if any %SPHINXBUILD% 1>NUL 2>NUL -if errorlevel 1 ( +if errorlevel 9009 goto sphinx_python +goto sphinx_ok + +:sphinx_python + +set SPHINXBUILD=python -m sphinx.__init__ +%SPHINXBUILD% 2> nul +if errorlevel 9009 ( echo. - echo.The Sphinx module was not found. Make sure you have Sphinx installed, - echo.then set the SPHINXBUILD environment variable to point to the full - echo.path of the 'sphinx-build' executable. Alternatively you may add the - echo.Sphinx directory to PATH. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) +:sphinx_ok + if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html diff --git a/sphinx/testing/util.py b/sphinx/testing/util.py index 807adfcfe..91ae821ac 100644 --- a/sphinx/testing/util.py +++ b/sphinx/testing/util.py @@ -132,7 +132,7 @@ class SphinxTestApp(application.Sphinx): application.Sphinx.__init__(self, srcdir, confdir, outdir, doctreedir, buildername, confoverrides, status, warning, freshenv, warningiserror, tags) - except: + except Exception: self.cleanup() raise diff --git a/sphinx/texinputs/footnotehyper-sphinx.sty b/sphinx/texinputs/footnotehyper-sphinx.sty index ff23f6ebe..5995f012d 100644 --- a/sphinx/texinputs/footnotehyper-sphinx.sty +++ b/sphinx/texinputs/footnotehyper-sphinx.sty @@ -1,6 +1,6 @@ \NeedsTeXFormat{LaTeX2e} \ProvidesPackage{footnotehyper-sphinx}% - [2017/03/07 v1.6 hyperref aware footnote.sty for sphinx (JFB)] + [2017/10/27 v1.7 hyperref aware footnote.sty for sphinx (JFB)] %% %% Package: footnotehyper-sphinx %% Version: based on footnotehyper.sty 2017/03/07 v1.0 @@ -16,6 +16,7 @@ %% 3. use of \sphinxunactivateextrasandspace from sphinx.sty, %% 4. macro definition \sphinxfootnotemark, %% 5. macro definition \sphinxlongtablepatch +%% 6. 
replaced an \undefined by \@undefined \DeclareOption*{\PackageWarning{footnotehyper-sphinx}{Option `\CurrentOption' is unknown}}% \ProcessOptions\relax \newbox\FNH@notes @@ -197,7 +198,7 @@ }% \AtBeginDocument{% \let\FNH@@makefntext\@makefntext - \ifx\@makefntextFB\undefined + \ifx\@makefntextFB\@undefined \expandafter\@gobble\else\expandafter\@firstofone\fi {\ifFBFrenchFootnotes \let\FNH@@makefntext\@makefntextFB \else \let\FNH@@makefntext\@makefntextORI\fi}% diff --git a/sphinx/texinputs/sphinx.sty b/sphinx/texinputs/sphinx.sty index 28f0091ff..80b81f6d6 100644 --- a/sphinx/texinputs/sphinx.sty +++ b/sphinx/texinputs/sphinx.sty @@ -6,7 +6,7 @@ % \NeedsTeXFormat{LaTeX2e}[1995/12/01] -\ProvidesPackage{sphinx}[2017/07/24 v1.6.4 LaTeX package (Sphinx markup)] +\ProvidesPackage{sphinx}[2017/12/12 v1.7 LaTeX package (Sphinx markup)] % provides \ltx@ifundefined % (many packages load ltxcmds: graphicx does for pdftex and lualatex but @@ -39,7 +39,7 @@ \@ifclassloaded{memoir}{}{\RequirePackage{fancyhdr}} % for \text macro and \iffirstchoice@ conditional even if amsmath not loaded \RequirePackage{amstext} -\RequirePackage{textcomp} +\RequirePackage[warn]{textcomp} \RequirePackage{titlesec} \@ifpackagelater{titlesec}{2016/03/15}% {\@ifpackagelater{titlesec}{2016/03/21}% @@ -165,6 +165,7 @@ % For highlighted code. \RequirePackage{fancyvrb} \fvset{fontsize=\small} +\define@key{FV}{hllines}{\def\sphinx@verbatim@checkifhl##1{\in@{, ##1,}{#1}}} % For hyperlinked footnotes in tables; also for gathering footnotes from % topic and warning blocks. Also to allow code-blocks in footnotes. \RequirePackage{footnotehyper-sphinx} @@ -185,12 +186,12 @@ % to make pdf with correct encoded bookmarks in Japanese % this should precede the hyperref package -\ifx\kanjiskip\undefined +\ifx\kanjiskip\@undefined % for non-Japanese: make sure bookmarks are ok also with lualatex \PassOptionsToPackage{pdfencoding=unicode}{hyperref} \else \RequirePackage{atbegshi} - \ifx\ucs\undefined + \ifx\ucs\@undefined \ifnum 42146=\euc"A4A2 \AtBeginShipoutFirst{\special{pdf:tounicode EUC-UCS2}} \else @@ -201,7 +202,7 @@ \fi \fi -\ifx\@jsc@uplatextrue\undefined\else +\ifx\@jsc@uplatextrue\@undefined\else \PassOptionsToPackage{setpagesize=false}{hyperref} \fi @@ -214,6 +215,17 @@ % stylesheet for highlighting with pygments \RequirePackage{sphinxhighlight} +% fix baseline increase from Pygments latex formatter in case of error tokens +% and keep \fboxsep's scope local via added braces +\def\PYG@tok@err{% + \def\PYG@bc##1{{\setlength{\fboxsep}{-\fboxrule}% + \fcolorbox[rgb]{1.00,0.00,0.00}{1,1,1}{\strut ##1}}}% +} +\def\PYG@tok@cs{% + \def\PYG@tc##1{\textcolor[rgb]{0.25,0.50,0.56}{##1}}% + \def\PYG@bc##1{{\setlength{\fboxsep}{0pt}% + \colorbox[rgb]{1.00,0.94,0.94}{\strut ##1}}}% +}% %% OPTIONS @@ -223,7 +235,7 @@ \SetupKeyvalOptions{prefix=spx@opt@} % use \spx@opt@ prefix % Sphinx legacy text layout: 1in margins on all four sides -\ifx\@jsc@uplatextrue\undefined +\ifx\@jsc@uplatextrue\@undefined \DeclareStringOption[1in]{hmargin} \DeclareStringOption[1in]{vmargin} \DeclareStringOption[.5in]{marginpar} @@ -314,6 +326,8 @@ % set the key handler. The "value" ##1 must be acceptable by \definecolor. \define@key{sphinx}{#1}{\definecolor{sphinx#1}##1}% }% +% Default color chosen to be as in minted.sty LaTeX package! 
+\sphinxDeclareSphinxColorOption{VerbatimHighlightColor}{{rgb}{0.878,1,1}} % admonition boxes, "light" style \sphinxDeclareSphinxColorOption{noteBorderColor}{{rgb}{0,0,0}} \sphinxDeclareSphinxColorOption{hintBorderColor}{{rgb}{0,0,0}} @@ -370,7 +384,7 @@ \expandafter\let \csname @list\romannumeral\the\count@\expandafter\endcsname \csname @list\romannumeral\the\numexpr\count@-\@ne\endcsname - % work around 2.6--3.2d babel-french issue (fixed in 3.2e; no change needed) + % workaround 2.6--3.2d babel-french issue (fixed in 3.2e; no change needed) \ltx@ifundefined{leftmargin\romannumeral\the\count@} {\expandafter\let \csname leftmargin\romannumeral\the\count@\expandafter\endcsname @@ -419,7 +433,7 @@ % % fix the double index and bibliography on the table of contents % in jsclasses (Japanese standard document classes) -\ifx\@jsc@uplatextrue\undefined\else +\ifx\@jsc@uplatextrue\@undefined\else \renewenvironment{sphinxtheindex} {\cleardoublepage\phantomsection \begin{theindex}} @@ -432,7 +446,7 @@ \fi % disable \@chappos in Appendix in pTeX -\ifx\kanjiskip\undefined\else +\ifx\kanjiskip\@undefined\else \let\py@OldAppendix=\appendix \renewcommand{\appendix}{ \py@OldAppendix @@ -446,10 +460,10 @@ {\newenvironment {sphinxthebibliography}{\begin{thebibliography}}{\end{thebibliography}}% } - {}% else clause of ifundefined + {}% else clause of \ltx@ifundefined \ltx@ifundefined{sphinxtheindex} {\newenvironment{sphinxtheindex}{\begin{theindex}}{\end{theindex}}}% - {}% else clause of ifundefined + {}% else clause of \ltx@ifundefined %% COLOR (general) @@ -503,7 +517,7 @@ } % geometry -\ifx\kanjiskip\undefined +\ifx\kanjiskip\@undefined \PassOptionsToPackage{% hmargin={\unexpanded{\spx@opt@hmargin}},% vmargin={\unexpanded{\spx@opt@vmargin}},% @@ -525,7 +539,7 @@ \newcommand*\sphinxtextlinesja[1]{% \numexpr\@ne+\dimexpr\paperheight-\topskip-\tw@\dimexpr#1\relax\relax/ \baselineskip\relax}% - \ifx\@jsc@uplatextrue\undefined\else + \ifx\@jsc@uplatextrue\@undefined\else % the way we found in order for the papersize special written by % geometry in the dvi file to be correct in case of jsbook class \ifnum\mag=\@m\else % do nothing special if nomag class option or 10pt @@ -542,7 +556,7 @@ }{geometry}% \AtBeginDocument {% update a dimension used by the jsclasses - \ifx\@jsc@uplatextrue\undefined\else\fullwidth\textwidth\fi + \ifx\@jsc@uplatextrue\@undefined\else\fullwidth\textwidth\fi % for some reason, jreport normalizes all dimensions with \@settopoint \@ifclassloaded{jreport} {\@settopoint\textwidth\@settopoint\textheight\@settopoint\marginparwidth} @@ -851,6 +865,34 @@ % needed to create wrapper environments of fancyvrb's Verbatim \newcommand*{\sphinxVerbatimEnvironment}{\gdef\FV@EnvironName{sphinxVerbatim}} \newcommand*{\sphinxverbatimsmallskipamount}{\smallskipamount} +% serves to implement line highlighting and line wrapping +\newcommand\sphinxFancyVerbFormatLine[1]{% + \expandafter\sphinx@verbatim@checkifhl\expandafter{\the\FV@CodeLineNo}% + \ifin@ + \sphinxVerbatimHighlightLine{#1}% + \else + \sphinxVerbatimFormatLine{#1}% + \fi +}% +\newcommand\sphinxVerbatimHighlightLine[1]{% + \edef\sphinxrestorefboxsep{\fboxsep\the\fboxsep\relax}% + \fboxsep0pt\relax % cf LaTeX bug graphics/4524 + \colorbox{sphinxVerbatimHighlightColor}% + {\sphinxrestorefboxsep\sphinxVerbatimFormatLine{#1}}% + % no need to restore \fboxsep here, as this ends up in a \hbox from fancyvrb +}% +% \sphinxVerbatimFormatLine will be set locally to one of those two: +\newcommand\sphinxVerbatimFormatLineWrap[1]{% + \hsize\linewidth + 
\vtop{\raggedright\hyphenpenalty\z@\exhyphenpenalty\z@ + \doublehyphendemerits\z@\finalhyphendemerits\z@ + \strut #1\strut}% +}% +\newcommand\sphinxVerbatimFormatLineNoWrap[1]{\hb@xt@\linewidth{\strut #1\hss}}% +\g@addto@macro\FV@SetupFont{% + \sbox\sphinxcontinuationbox {\spx@opt@verbatimcontinued}% + \sbox\sphinxvisiblespacebox {\spx@opt@verbatimvisiblespace}% +}% \newenvironment{sphinxVerbatim}{% % first, let's check if there is a caption \ifx\sphinxVerbatimTitle\empty @@ -905,23 +947,19 @@ % to achieve this without extensive rewrite of fancyvrb. % - The (not used in sphinx) obeytabs option to Verbatim is % broken by this change (showtabs and tabspace work). - \expandafter\def\expandafter\FV@SetupFont\expandafter - {\FV@SetupFont\sbox\sphinxcontinuationbox {\spx@opt@verbatimcontinued}% - \sbox\sphinxvisiblespacebox {\spx@opt@verbatimvisiblespace}}% - \def\FancyVerbFormatLine ##1{\hsize\linewidth - \vtop{\raggedright\hyphenpenalty\z@\exhyphenpenalty\z@ - \doublehyphendemerits\z@\finalhyphendemerits\z@ - \strut ##1\strut}% - }% - \let\FV@Space\spx@verbatim@space + \let\sphinxVerbatimFormatLine\sphinxVerbatimFormatLineWrap + \let\FV@Space\spx@verbatim@space % Allow breaks at special characters using \PYG... macros. - \sphinxbreaksatspecials + \sphinxbreaksatspecials % Breaks at punctuation characters . , ; ? ! and / (needs catcode activation) - \def\FancyVerbCodes{\sphinxbreaksviaactive}% - \fi % end of conditional code for wrapping long code lines - % go around fancyvrb's check of \@currenvir + \fvset{codes*=\sphinxbreaksviaactive}% + \else % end of conditional code for wrapping long code lines + \let\sphinxVerbatimFormatLine\sphinxVerbatimFormatLineNoWrap + \fi + \let\FancyVerbFormatLine\sphinxFancyVerbFormatLine + % workaround to fancyvrb's check of \@currenvir \let\VerbatimEnvironment\sphinxVerbatimEnvironment - % go around fancyvrb's check of current list depth + % workaround to fancyvrb's check of current list depth \def\@toodeep {\advance\@listdepth\@ne}% % The list environment is needed to control perfectly the vertical space. % Note: \OuterFrameSep used by framed.sty is later set to \topsep hence 0pt. 
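The ``sphinx.sty`` additions above (the ``hllines`` fancyvrb key, the ``VerbatimHighlightColor`` option and ``\sphinxVerbatimHighlightLine``) are what back ``:emphasize-lines:`` highlighting in PDF output. On the project side the color can be adjusted through the existing ``'sphinxsetup'`` entry of ``latex_elements``; a minimal ``conf.py`` sketch with a purely illustrative color value (the default, ``{rgb}{0.878,1,1}``, matches minted.sty as noted above)::

    # conf.py (sketch)
    latex_elements = {
        'sphinxsetup': 'VerbatimHighlightColor={rgb}{0.90,0.98,0.90}',
    }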
@@ -1018,7 +1056,7 @@ \sphinxunactivateextras}% % now for the modified alltt environment \newenvironment{sphinxalltt} -{% at start of next line to work around Emacs/AUCTeX issue with this file +{% at start of next line to workaround Emacs/AUCTeX issue with this file \begin{alltt}% \ifspx@opt@parsedliteralwraps \sbox\sphinxcontinuationbox {\spx@opt@verbatimcontinued}% @@ -1243,7 +1281,7 @@ \spx@notice@border \dimexpr\csname spx@opt@#1border\endcsname\relax % start specific environment, passing the heading as argument \begin{sphinx#1}{#2}} - % in end part, need to go around a LaTeX's "feature" + % workaround some LaTeX "feature" of \end command {\edef\spx@temp{\noexpand\end{sphinx\spx@noticetype}}\spx@temp} diff --git a/sphinx/themes/basic/static/doctools.js_t b/sphinx/themes/basic/static/doctools.js_t index 9ceecef79..326856cfc 100644 --- a/sphinx/themes/basic/static/doctools.js_t +++ b/sphinx/themes/basic/static/doctools.js_t @@ -206,7 +206,7 @@ var Documentation = { * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 */ fixFirefoxAnchorBug : function() { - if (document.location.hash) + if (document.location.hash && $.browser.mozilla) window.setTimeout(function() { document.location.href += ''; }, 10); diff --git a/sphinx/themes/basic/theme.conf b/sphinx/themes/basic/theme.conf index 3248070bc..25495e8c6 100644 --- a/sphinx/themes/basic/theme.conf +++ b/sphinx/themes/basic/theme.conf @@ -2,6 +2,7 @@ inherit = none stylesheet = basic.css pygments_style = none +sidebars = localtoc.html, relations.html, sourcelink.html, searchbox.html [options] nosidebar = false diff --git a/sphinx/theming.py b/sphinx/theming.py index c7ac7500b..78c73b63f 100644 --- a/sphinx/theming.py +++ b/sphinx/theming.py @@ -157,7 +157,7 @@ def is_archived_theme(filename): try: with ZipFile(filename) as f: # type: ignore return THEMECONF in f.namelist() - except: + except Exception: return False diff --git a/sphinx/transforms/compact_bullet_list.py b/sphinx/transforms/compact_bullet_list.py index 006ae7161..8c930c8bc 100644 --- a/sphinx/transforms/compact_bullet_list.py +++ b/sphinx/transforms/compact_bullet_list.py @@ -14,6 +14,10 @@ from docutils import nodes from sphinx import addnodes from sphinx.transforms import SphinxTransform +if False: + # For type annotation + from typing import List # NOQA + class RefOnlyListChecker(nodes.GenericNodeVisitor): """Raise `nodes.NodeFound` if non-simple list item is encountered. 
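With the ``sidebars`` entry added to ``basic/theme.conf`` above, themes now declare their own default sidebar list, so a project-level ``html_sidebars`` only needs entries for documents that should differ from the theme's defaults. A small ``conf.py`` sketch; the pattern and template names are only an example::

    # conf.py (sketch)
    # Documents not matched here keep the sidebars declared by the theme, e.g.
    # basic's 'localtoc.html, relations.html, sourcelink.html, searchbox.html'.
    html_sidebars = {
        'index': ['globaltoc.html', 'searchbox.html'],
    }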
@@ -32,7 +36,7 @@ class RefOnlyListChecker(nodes.GenericNodeVisitor): def visit_list_item(self, node): # type: (nodes.Node) -> None - children = [] + children = [] # type: List[nodes.Node] for child in node.children: if not isinstance(child, nodes.Invisible): children.append(child) diff --git a/sphinx/transforms/i18n.py b/sphinx/transforms/i18n.py index 4c1fbc2a7..d08cc81f4 100644 --- a/sphinx/transforms/i18n.py +++ b/sphinx/transforms/i18n.py @@ -50,15 +50,12 @@ def publish_msgstr(app, source, source_path, source_line, config, settings): :rtype: docutils.nodes.document """ from sphinx.io import SphinxI18nReader - reader = SphinxI18nReader( - app=app, - parsers=app.registry.get_source_parsers(), - parser_name='restructuredtext', # default parser - ) + reader = SphinxI18nReader() reader.set_lineno_for_reporter(source_line) + parser = app.registry.create_source_parser(app, '') doc = reader.read( source=StringInput(source=source, source_path=source_path), - parser=reader.parser, + parser=parser, settings=settings, ) try: diff --git a/sphinx/util/__init__.py b/sphinx/util/__init__.py index 03f8ce6a3..4b62dc5f0 100644 --- a/sphinx/util/__init__.py +++ b/sphinx/util/__init__.py @@ -398,10 +398,8 @@ def parselinenos(spec, total): elif len(begend) == 1: items.append(int(begend[0]) - 1) elif len(begend) == 2: - start = int(begend[0] or 1) # type: ignore - # left half open (cf. -10) - end = int(begend[1] or max(start, total)) # type: ignore - # right half open (cf. 10-) + start = int(begend[0] or 1) # left half open (cf. -10) + end = int(begend[1] or max(start, total)) # right half open (cf. 10-) if start > end: # invalid range (cf. 10-1) raise ValueError items.extend(range(start - 1, end)) @@ -528,7 +526,7 @@ class PeekableIterator(object): def peek(self): # type: () -> Any """Return the next item without changing the state of the iterator.""" - item = next(self) # type: ignore + item = next(self) self.push(item) return item @@ -564,16 +562,6 @@ def encode_uri(uri): return urlunsplit(split) -def split_docinfo(text): - # type: (unicode) -> Sequence[unicode] - docinfo_re = re.compile('\\A((?:\\s*:\\w+:.*?\n(?:[ \\t]+.*?\n)*)+)', re.M) - result = docinfo_re.split(text, 1) # type: ignore - if len(result) == 1: - return '', result[0] - else: - return result[1:] - - def display_chunk(chunk): # type: (Any) -> unicode if isinstance(chunk, (list, tuple)): diff --git a/sphinx/util/docutils.py b/sphinx/util/docutils.py index c984bcfaf..00ea5919e 100644 --- a/sphinx/util/docutils.py +++ b/sphinx/util/docutils.py @@ -18,22 +18,23 @@ from contextlib import contextmanager import docutils from docutils.languages import get_language -from docutils.utils import Reporter +from docutils.statemachine import ViewList from docutils.parsers.rst import directives, roles, convert_directive_function +from docutils.utils import Reporter from sphinx.errors import ExtensionError from sphinx.locale import __ from sphinx.util import logging logger = logging.getLogger(__name__) -report_re = re.compile('^(.+?:(?:\\d+)?): \\((DEBUG|INFO|WARNING|ERROR|SEVERE)/(\\d+)?\\) ' - '(.+?)\n?$') +report_re = re.compile('^(.+?:(?:\\d+)?): \\((DEBUG|INFO|WARNING|ERROR|SEVERE)/(\\d+)?\\) ') if False: # For type annotation from typing import Any, Callable, Iterator, List, Tuple # NOQA from docutils import nodes # NOQA from sphinx.environment import BuildEnvironment # NOQA + from sphinx.io import SphinxFileInput # NOQA __version_info__ = tuple(LooseVersion(docutils.__version__).version) @@ -162,21 +163,40 @@ class WarningStream(object): if not 
matched: logger.warning(text.rstrip("\r\n")) else: - location, type, level, message = matched.groups() + location, type, level = matched.groups() + message = report_re.sub('', text).rstrip() # type: ignore logger.log(type, message, location=location) class LoggingReporter(Reporter): + @classmethod + def from_reporter(cls, reporter): + # type: (Reporter) -> LoggingReporter + """Create an instance of LoggingReporter from other reporter object.""" + return cls(reporter.source, reporter.report_level, reporter.halt_level, + reporter.debug_flag, reporter.error_handler) + def __init__(self, source, report_level, halt_level, debug=False, error_handler='backslashreplace'): # type: (unicode, int, int, bool, unicode) -> None stream = WarningStream() Reporter.__init__(self, source, report_level, halt_level, stream, debug, error_handler=error_handler) + self.source_and_line = None # type: SphinxFileInput - def set_conditions(self, category, report_level, halt_level, debug=False): - # type: (unicode, int, int, bool) -> None - Reporter.set_conditions(self, category, report_level, halt_level, debug=debug) + def set_source(self, source): + # type: (SphinxFileInput) -> None + self.source_and_line = source + + def system_message(self, *args, **kwargs): + # type: (Any, Any) -> Any + if kwargs.get('line') and isinstance(self.source_and_line, ViewList): + # replace source parameter if source is set + source, lineno = self.source_and_line.info(kwargs.get('line')) + kwargs['source'] = source + kwargs['line'] = lineno + + return Reporter.system_message(self, *args, **kwargs) def is_html5_writer_available(): diff --git a/sphinx/util/images.py b/sphinx/util/images.py index eba295a3c..1c2b4033a 100644 --- a/sphinx/util/images.py +++ b/sphinx/util/images.py @@ -64,7 +64,7 @@ def get_image_size(filename): pass return size - except: + except Exception: return None diff --git a/sphinx/util/inspect.py b/sphinx/util/inspect.py index a2928fc7e..da13b8af0 100644 --- a/sphinx/util/inspect.py +++ b/sphinx/util/inspect.py @@ -204,6 +204,14 @@ def safe_getmembers(object, predicate=None, attr_getter=safe_getattr): def object_description(object): # type: (Any) -> unicode """A repr() implementation that returns text safe to use in reST context.""" + if isinstance(object, dict): + try: + sorted_keys = sorted(object) + except TypeError: + pass # Cannot sort dict keys, fall back to generic repr + else: + items = ("%r: %r" % (key, object[key]) for key in sorted_keys) + return "{%s}" % ", ".join(items) try: s = repr(object) except Exception: @@ -273,7 +281,7 @@ class Signature(object): try: self.annotations = typing.get_type_hints(subject) # type: ignore - except: + except Exception: self.annotations = {} if bound_method: @@ -426,7 +434,7 @@ class Signature(object): elif (hasattr(typing, 'UnionMeta') and # for py35 or below isinstance(annotation, typing.UnionMeta) and # type: ignore hasattr(annotation, '__union_params__')): - params = annotation.__union_params__ # type: ignore + params = annotation.__union_params__ if params is not None: param_str = ', '.join(self.format_annotation(p) for p in params) return '%s[%s]' % (qualified_name, param_str) @@ -434,7 +442,7 @@ class Signature(object): getattr(annotation, '__args__', None) is not None and hasattr(annotation, '__result__')): # Skipped in the case of plain typing.Callable - args = annotation.__args__ # type: ignore + args = annotation.__args__ if args is None: return qualified_name elif args is Ellipsis: @@ -444,14 +452,14 @@ class Signature(object): args_str = '[%s]' % ', 
'.join(formatted_args) return '%s[%s, %s]' % (qualified_name, args_str, - self.format_annotation(annotation.__result__)) # type: ignore # NOQA + self.format_annotation(annotation.__result__)) elif (isinstance(annotation, typing.TupleMeta) and # type: ignore hasattr(annotation, '__tuple_params__') and hasattr(annotation, '__tuple_use_ellipsis__')): - params = annotation.__tuple_params__ # type: ignore + params = annotation.__tuple_params__ if params is not None: param_strings = [self.format_annotation(p) for p in params] - if annotation.__tuple_use_ellipsis__: # type: ignore + if annotation.__tuple_use_ellipsis__: param_strings.append('...') return '%s[%s]' % (qualified_name, ', '.join(param_strings)) diff --git a/sphinx/util/logging.py b/sphinx/util/logging.py index 17ee88a01..00c12ec4f 100644 --- a/sphinx/util/logging.py +++ b/sphinx/util/logging.py @@ -29,6 +29,7 @@ if False: from sphinx.application import Sphinx # NOQA +NAMESPACE = 'sphinx' VERBOSE = 15 LEVEL_NAMES = defaultdict(lambda: logging.WARNING) # type: Dict[str, int] @@ -59,8 +60,18 @@ COLOR_MAP.update({ def getLogger(name): # type: (str) -> SphinxLoggerAdapter - """Get logger wrapped by SphinxLoggerAdapter.""" - return SphinxLoggerAdapter(logging.getLogger(name), {}) + """Get logger wrapped by SphinxLoggerAdapter. + + Sphinx logger always uses ``sphinx.*`` namesapce to be independent from + settings of root logger. It enables to log stably even if 3rd party + extension or imported application resets logger settings. + """ + # add sphinx prefix to name forcely + logger = logging.getLogger(NAMESPACE + '.' + name) + # Forcely enable logger + logger.disabled = False + # wrap logger by SphinxLoggerAdapter + return SphinxLoggerAdapter(logger, {}) def convert_serializable(records): @@ -71,6 +82,10 @@ def convert_serializable(records): r.msg = r.getMessage() r.args = () + location = getattr(r, 'location', None) + if isinstance(location, nodes.Node): + r.location = get_node_location(location) # type: ignore + class SphinxWarningLogRecord(logging.LogRecord): """Log record class supporting location""" @@ -141,8 +156,8 @@ class NewLineStreamHandlerPY2(logging.StreamHandler): # remove return code forcely when nonl=True self.stream = StringIO() super(NewLineStreamHandlerPY2, self).emit(record) - stream.write(self.stream.getvalue()[:-1]) # type: ignore - stream.flush() # type: ignore + stream.write(self.stream.getvalue()[:-1]) + stream.flush() else: super(NewLineStreamHandlerPY2, self).emit(record) finally: @@ -203,7 +218,7 @@ class MemoryHandler(logging.handlers.BufferingHandler): def pending_warnings(): # type: () -> Generator """contextmanager to pend logging warnings temporary.""" - logger = logging.getLogger() + logger = logging.getLogger(NAMESPACE) memhandler = MemoryHandler() memhandler.setLevel(logging.WARNING) @@ -229,7 +244,7 @@ def pending_warnings(): def pending_logging(): # type: () -> Generator """contextmanager to pend logging all logs temporary.""" - logger = logging.getLogger() + logger = logging.getLogger(NAMESPACE) memhandler = MemoryHandler() try: @@ -253,7 +268,7 @@ def pending_logging(): def skip_warningiserror(skip=True): # type: (bool) -> Generator """contextmanager to skip WarningIsErrorFilter for a while.""" - logger = logging.getLogger() + logger = logging.getLogger(NAMESPACE) if skip is False: yield @@ -404,21 +419,26 @@ class WarningLogRecordTranslator(logging.Filter): else: record.location = None elif isinstance(location, nodes.Node): - (source, line) = get_source_line(location) - if source and line: - 
record.location = "%s:%s" % (source, line) - elif source: - record.location = "%s:" % source - elif line: - record.location = ":%s" % line - else: - record.location = None + record.location = get_node_location(location) elif location and ':' not in location: record.location = '%s' % self.app.env.doc2path(location) return True +def get_node_location(node): + # type: (nodes.Node) -> str + (source, line) = get_source_line(node) + if source and line: + return "%s:%s" % (source, line) + elif source: + return "%s:" % source + elif line: + return ":%s" % line + else: + return None + + class ColorizeFormatter(logging.Formatter): def format(self, record): # type: (logging.LogRecord) -> str @@ -469,8 +489,9 @@ class LastMessagesWriter(object): def setup(app, status, warning): # type: (Sphinx, IO, IO) -> None """Setup root logger for Sphinx""" - logger = logging.getLogger() - logger.setLevel(logging.NOTSET) + logger = logging.getLogger(NAMESPACE) + logger.setLevel(logging.DEBUG) + logger.propagate = False # clear all handlers for handler in logger.handlers[:]: diff --git a/sphinx/util/rst.py b/sphinx/util/rst.py index 8186130cf..6977cda96 100644 --- a/sphinx/util/rst.py +++ b/sphinx/util/rst.py @@ -8,12 +8,41 @@ :copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. """ +from __future__ import absolute_import import re +from contextlib import contextmanager + +from docutils.parsers.rst import roles +from docutils.parsers.rst.languages import en as english +from docutils.utils import Reporter + +from sphinx.util import logging + +if False: + # For type annotation + from typing import Generator # NOQA symbols_re = re.compile(r'([!-/:-@\[-`{-~])') +logger = logging.getLogger(__name__) def escape(text): # type: (unicode) -> unicode return symbols_re.sub(r'\\\1', text) # type: ignore + + +@contextmanager +def default_role(docname, name): + # type: (unicode, unicode) -> Generator + if name: + dummy_reporter = Reporter('', 4, 4) + role_fn, _ = roles.role(name, english, 0, dummy_reporter) + if role_fn: + roles._roles[''] = role_fn + else: + logger.warning('default role %s not found', name, location=docname) + + yield + + roles._roles.pop('', None) # if a document has set a local default role diff --git a/sphinx/util/smartypants.py b/sphinx/util/smartypants.py index c368d7d0f..03771d168 100644 --- a/sphinx/util/smartypants.py +++ b/sphinx/util/smartypants.py @@ -139,7 +139,7 @@ def educateQuotes(text, language='en'): smart = smartquotes.smartchars(language) try: apostrophe = smart.apostrophe - except: + except Exception: apostrophe = u'’' # oldtext = text diff --git a/sphinx/versioning.py b/sphinx/versioning.py index 97a013135..1e7c452bd 100644 --- a/sphinx/versioning.py +++ b/sphinx/versioning.py @@ -15,6 +15,9 @@ from itertools import product from six import iteritems from six.moves import range, zip_longest +from six.moves import cPickle as pickle + +from sphinx.transforms import SphinxTransform if False: # For type annotation @@ -148,3 +151,32 @@ def levenshtein_distance(a, b): current_row.append(min(insertions, deletions, substitutions)) previous_row = current_row # type: ignore return previous_row[-1] + + +class UIDTransform(SphinxTransform): + """Add UIDs to doctree for versioning.""" + default_priority = 100 + + def apply(self): + env = self.env + old_doctree = None + if env.versioning_compare: + # get old doctree + try: + filename = env.doc2path(env.docname, env.doctreedir, '.doctree') + with open(filename, 'rb') as f: + old_doctree = pickle.load(f) + 
except EnvironmentError: + pass + + # add uids for versioning + if not env.versioning_compare or old_doctree is None: + list(add_uids(self.document, env.versioning_condition)) + else: + list(merge_doctrees(old_doctree, self.document, env.versioning_condition)) + + +def prepare(document): + """Simple wrapper for UIDTransform.""" + transform = UIDTransform(document) + transform.apply() diff --git a/sphinx/writers/html.py b/sphinx/writers/html.py index 8d02793e3..b3419b70a 100644 --- a/sphinx/writers/html.py +++ b/sphinx/writers/html.py @@ -491,14 +491,21 @@ class HTMLTranslator(BaseTranslator): # overwritten def visit_literal(self, node): # type: (nodes.Node) -> None - self.body.append(self.starttag(node, 'code', '', - CLASS='docutils literal')) - self.protect_literal_text += 1 + if 'kbd' in node['classes']: + self.body.append(self.starttag(node, 'kbd', '', + CLASS='docutils literal')) + else: + self.body.append(self.starttag(node, 'code', '', + CLASS='docutils literal')) + self.protect_literal_text += 1 def depart_literal(self, node): # type: (nodes.Node) -> None - self.protect_literal_text -= 1 - self.body.append('') + if 'kbd' in node['classes']: + self.body.append('') + else: + self.protect_literal_text -= 1 + self.body.append('') def visit_productionlist(self, node): # type: (nodes.Node) -> None diff --git a/sphinx/writers/html5.py b/sphinx/writers/html5.py index 51c4e8ecb..1efd060f2 100644 --- a/sphinx/writers/html5.py +++ b/sphinx/writers/html5.py @@ -437,14 +437,21 @@ class HTML5Translator(BaseTranslator): # overwritten def visit_literal(self, node): # type: (nodes.Node) -> None - self.body.append(self.starttag(node, 'code', '', - CLASS='docutils literal')) - self.protect_literal_text += 1 + if 'kbd' in node['classes']: + self.body.append(self.starttag(node, 'kbd', '', + CLASS='docutils literal')) + else: + self.body.append(self.starttag(node, 'code', '', + CLASS='docutils literal')) + self.protect_literal_text += 1 def depart_literal(self, node): # type: (nodes.Node) -> None - self.protect_literal_text -= 1 - self.body.append('') + if 'kbd' in node['classes']: + self.body.append('') + else: + self.protect_literal_text -= 1 + self.body.append('') def visit_productionlist(self, node): # type: (nodes.Node) -> None diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py index 98d42c4ff..751aabf4e 100644 --- a/sphinx/writers/latex.py +++ b/sphinx/writers/latex.py @@ -25,6 +25,7 @@ from sphinx import addnodes from sphinx import highlighting from sphinx.errors import SphinxError from sphinx.locale import admonitionlabels, _ +from sphinx.transforms import SphinxTransform from sphinx.util import split_into, logging from sphinx.util.i18n import format_date from sphinx.util.nodes import clean_astext, traverse_parent @@ -222,12 +223,11 @@ class ExtBabel(Babel): return language -class ShowUrlsTransform(object): - expanded = False - - def __init__(self, document): - # type: (nodes.Node) -> None - self.document = document +class ShowUrlsTransform(SphinxTransform, object): + def __init__(self, document, startnode=None): + # type: (nodes.document, nodes.Node) -> None + super(ShowUrlsTransform, self).__init__(document, startnode) + self.expanded = False def apply(self): # type: () -> None @@ -1917,6 +1917,12 @@ class LaTeXTranslator(nodes.NodeVisitor): # will be generated differently if id.startswith('index-'): return + + # insert blank line, if the target follows a paragraph node + index = node.parent.index(node) + if index > 0 and isinstance(node.parent[index - 1], nodes.paragraph): + 
self.body.append('\n') + # do not generate \phantomsection in \section{} anchor = not self.in_title self.body.append(self.hypertarget(id, anchor=anchor)) @@ -2262,6 +2268,8 @@ class LaTeXTranslator(nodes.NodeVisitor): lang = self.hlsettingstack[-1][0] linenos = code.count('\n') >= self.hlsettingstack[-1][1] - 1 highlight_args = node.get('highlight_args', {}) + hllines = '\\fvset{hllines={, %s,}}%%' %\ + str(highlight_args.get('hl_lines', []))[1:-1] if 'language' in node: # code-block directives lang = node['language'] @@ -2300,7 +2308,7 @@ class LaTeXTranslator(nodes.NodeVisitor): hlcode += '\\end{sphinxVerbatimintable}' else: hlcode += '\\end{sphinxVerbatim}' - self.body.append('\n' + hlcode + '\n') + self.body.append('\n' + hllines + '\n' + hlcode + '\n') raise nodes.SkipNode def depart_literal_block(self, node): diff --git a/tests/roots/test-basic/index.rst b/tests/roots/test-basic/index.rst index 8c4ca7d80..48407e643 100644 --- a/tests/roots/test-basic/index.rst +++ b/tests/roots/test-basic/index.rst @@ -12,6 +12,9 @@ Sphinx uses reStructuredText as its markup language, and many of its strengths come from the power and straightforwardness of reStructuredText and its parsing and translating suite, the Docutils. +features +-------- + Among its features are the following: * Output formats: HTML (including derivative formats such as HTML Help, Epub diff --git a/tests/roots/test-root/bom.po b/tests/roots/test-builder-gettext-dont-rebuild-mo/bom.po similarity index 100% rename from tests/roots/test-root/bom.po rename to tests/roots/test-builder-gettext-dont-rebuild-mo/bom.po diff --git a/tests/roots/test-builder-gettext-dont-rebuild-mo/bom.rst b/tests/roots/test-builder-gettext-dont-rebuild-mo/bom.rst new file mode 100644 index 000000000..3fea824f8 --- /dev/null +++ b/tests/roots/test-builder-gettext-dont-rebuild-mo/bom.rst @@ -0,0 +1,5 @@ +File with UTF-8 BOM +=================== + +This file has a UTF-8 "BOM". + diff --git a/tests/roots/test-builder-gettext-dont-rebuild-mo/conf.py b/tests/roots/test-builder-gettext-dont-rebuild-mo/conf.py new file mode 100644 index 000000000..31e7a6ed4 --- /dev/null +++ b/tests/roots/test-builder-gettext-dont-rebuild-mo/conf.py @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- + +master_doc = 'index' + +latex_documents = [ + (master_doc, 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report') +] diff --git a/tests/roots/test-builder-gettext-dont-rebuild-mo/index.rst b/tests/roots/test-builder-gettext-dont-rebuild-mo/index.rst new file mode 100644 index 000000000..7ff38c5a8 --- /dev/null +++ b/tests/roots/test-builder-gettext-dont-rebuild-mo/index.rst @@ -0,0 +1,6 @@ +The basic Sphinx documentation for testing +========================================== + +.. toctree:: + + bom diff --git a/tests/roots/test-directive-code/emphasize.rst b/tests/roots/test-directive-code/emphasize.rst new file mode 100644 index 000000000..95db574ce --- /dev/null +++ b/tests/roots/test-directive-code/emphasize.rst @@ -0,0 +1,7 @@ +Literal Includes with Highlighted Lines +======================================= + +.. literalinclude:: target.py + :language: python + :emphasize-lines: 5-6, 13-15, 24- + diff --git a/tests/roots/test-domain-py/module_option.rst b/tests/roots/test-domain-py/module_option.rst new file mode 100644 index 000000000..1dec2ce0c --- /dev/null +++ b/tests/roots/test-domain-py/module_option.rst @@ -0,0 +1,25 @@ +module_option +============= + +.. py:class:: B + :module: test.extra + + This is also a test. + + + .. 
py:method:: B.baz() + :module: test.extra + + Does something similar to :meth:`foo`. + + + .. py:method:: B.foo() + :module: test.extra + + Does something. + + + .. py:method:: B.test() + :module: test.extra + + Does something completely unrelated to :meth:`foo` diff --git a/tests/roots/test-ext-autosummary/autosummary_importfail.py b/tests/roots/test-ext-autosummary/autosummary_importfail.py new file mode 100644 index 000000000..9e3f9f195 --- /dev/null +++ b/tests/roots/test-ext-autosummary/autosummary_importfail.py @@ -0,0 +1,4 @@ +import sys + +# Fail module import in a catastrophic way +sys.exit(1) diff --git a/tests/roots/test-ext-autosummary/contents.rst b/tests/roots/test-ext-autosummary/contents.rst index 3b43086a2..fc84927bb 100644 --- a/tests/roots/test-ext-autosummary/contents.rst +++ b/tests/roots/test-ext-autosummary/contents.rst @@ -1,6 +1,11 @@ +:autolink:`autosummary_dummy_module.Foo` + +:autolink:`autosummary_importfail` + .. autosummary:: :toctree: generated autosummary_dummy_module autosummary_dummy_module.Foo + autosummary_importfail diff --git a/tests/roots/test-doctest/conf.py b/tests/roots/test-ext-doctest/conf.py similarity index 100% rename from tests/roots/test-doctest/conf.py rename to tests/roots/test-ext-doctest/conf.py diff --git a/tests/roots/test-doctest/doctest.txt b/tests/roots/test-ext-doctest/doctest.txt similarity index 100% rename from tests/roots/test-doctest/doctest.txt rename to tests/roots/test-ext-doctest/doctest.txt diff --git a/tests/roots/test-ext-todo/conf.py b/tests/roots/test-ext-todo/conf.py index c67a86c5a..5d5619245 100644 --- a/tests/roots/test-ext-todo/conf.py +++ b/tests/roots/test-ext-todo/conf.py @@ -2,3 +2,8 @@ extensions = ['sphinx.ext.todo'] master_doc = 'index' + +latex_documents = [ + (master_doc, 'TodoTests.tex', 'Todo Tests Documentation', + 'Robin Banks', 'manual'), +] diff --git a/tests/roots/test-ext-viewcode/index.rst b/tests/roots/test-ext-viewcode/index.rst index b5776cfa7..e7956e723 100644 --- a/tests/roots/test-ext-viewcode/index.rst +++ b/tests/roots/test-ext-viewcode/index.rst @@ -28,6 +28,11 @@ viewcode :language: python :pyobject: func1 +.. autoclass:: spam.mod3.Class3 + :members: + +.. automodule:: spam.mod3 + :members: .. 
toctree:: diff --git a/tests/roots/test-ext-viewcode/spam/mod1.py b/tests/roots/test-ext-viewcode/spam/mod1.py index 94fceff7a..f876d0134 100644 --- a/tests/roots/test-ext-viewcode/spam/mod1.py +++ b/tests/roots/test-ext-viewcode/spam/mod1.py @@ -18,3 +18,10 @@ class Class1(object): """ this is Class1 """ + +class Class3(object): + """ + this is Class3 + """ + class_attr = 42 + """this is the class attribute class_attr""" diff --git a/tests/roots/test-ext-viewcode/spam/mod3.py b/tests/roots/test-ext-viewcode/spam/mod3.py new file mode 100644 index 000000000..f7b6afbe0 --- /dev/null +++ b/tests/roots/test-ext-viewcode/spam/mod3.py @@ -0,0 +1,2 @@ +from spam.mod1 import Class3 +__all__ = ('Class3',) diff --git a/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex b/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex index 097449cd9..e1628a9bd 100644 --- a/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex +++ b/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex @@ -27,6 +27,7 @@ header2 \endlastfoot +\fvset{hllines={, ,}}% \begin{sphinxVerbatimintable}[commandchars=\\\{\}] \PYG{n}{hello} \PYG{n}{world} \end{sphinxVerbatimintable} diff --git a/tests/roots/test-latex-table/expects/table_having_verbatim.tex b/tests/roots/test-latex-table/expects/table_having_verbatim.tex index 2e2b1dc9a..40d2f424c 100644 --- a/tests/roots/test-latex-table/expects/table_having_verbatim.tex +++ b/tests/roots/test-latex-table/expects/table_having_verbatim.tex @@ -10,6 +10,7 @@ header1 header2 \\ \hline +\fvset{hllines={, ,}}% \begin{sphinxVerbatimintable}[commandchars=\\\{\}] \PYG{n}{hello} \PYG{n}{world} \end{sphinxVerbatimintable} diff --git a/tests/roots/test-root/_static/README b/tests/roots/test-root/_static/README deleted file mode 100644 index 9e1ec3569..000000000 --- a/tests/roots/test-root/_static/README +++ /dev/null @@ -1 +0,0 @@ -This whole directory is there to test html_static_path. diff --git a/tests/roots/test-root/_static/excluded.css b/tests/roots/test-root/_static/excluded.css deleted file mode 100644 index 03c941a44..000000000 --- a/tests/roots/test-root/_static/excluded.css +++ /dev/null @@ -1 +0,0 @@ -/* This file should be excluded from being copied over */ diff --git a/tests/roots/test-root/_static/subdir/foo.css b/tests/roots/test-root/_static/subdir/foo.css deleted file mode 100644 index 9427981d6..000000000 --- a/tests/roots/test-root/_static/subdir/foo.css +++ /dev/null @@ -1 +0,0 @@ -/* Stub file */ diff --git a/tests/roots/test-root/conf.py b/tests/roots/test-root/conf.py index a23aec482..0753fe19c 100644 --- a/tests/roots/test-root/conf.py +++ b/tests/roots/test-root/conf.py @@ -29,15 +29,11 @@ numfig = True rst_epilog = '.. 
|subst| replace:: global substitution' -html_theme = 'testtheme' -html_theme_path = ['.'] -html_theme_options = {'testopt': 'testoverride'} -html_sidebars = {'**': 'customsb.html', +html_sidebars = {'**': ['localtoc.html', 'relations.html', 'sourcelink.html', + 'customsb.html', 'searchbox.html'], 'contents': ['contentssb.html', 'localtoc.html', 'globaltoc.html']} html_style = 'default.css' -html_static_path = ['_static', 'templated.css_t'] -html_extra_path = ['robots.txt'] html_last_updated_fmt = '%b %d, %Y' html_context = {'hckey': 'hcval', 'hckey_co': 'wrong_hcval_co'} diff --git a/tests/roots/test-root/robots.txt b/tests/roots/test-root/robots.txt deleted file mode 100644 index 1b425ee0f..000000000 --- a/tests/roots/test-root/robots.txt +++ /dev/null @@ -1,2 +0,0 @@ -User-agent: * -Disallow: /cgi-bin/ diff --git a/tests/roots/test-root/subdir.po b/tests/roots/test-root/subdir.po deleted file mode 100644 index f515f2207..000000000 --- a/tests/roots/test-root/subdir.po +++ /dev/null @@ -1,9 +0,0 @@ -#, fuzzy -msgid "" -msgstr "" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" - -msgid "Including in subdir" -msgstr "translation" diff --git a/tests/roots/test-root/templated.css_t b/tests/roots/test-root/templated.css_t deleted file mode 100644 index 72ddb807c..000000000 --- a/tests/roots/test-root/templated.css_t +++ /dev/null @@ -1,2 +0,0 @@ -/* Stub file, templated */ -{{ sphinx_version }} diff --git a/tests/roots/test-root/testtheme/layout.html b/tests/roots/test-theming/test_theme/staticfiles/layout.html similarity index 100% rename from tests/roots/test-root/testtheme/layout.html rename to tests/roots/test-theming/test_theme/staticfiles/layout.html diff --git a/tests/roots/test-root/testtheme/static/staticimg.png b/tests/roots/test-theming/test_theme/staticfiles/static/staticimg.png similarity index 100% rename from tests/roots/test-root/testtheme/static/staticimg.png rename to tests/roots/test-theming/test_theme/staticfiles/static/staticimg.png diff --git a/tests/roots/test-root/testtheme/static/statictmpl.html_t b/tests/roots/test-theming/test_theme/staticfiles/static/statictmpl.html_t similarity index 100% rename from tests/roots/test-root/testtheme/static/statictmpl.html_t rename to tests/roots/test-theming/test_theme/staticfiles/static/statictmpl.html_t diff --git a/tests/roots/test-root/testtheme/theme.conf b/tests/roots/test-theming/test_theme/staticfiles/theme.conf similarity index 100% rename from tests/roots/test-root/testtheme/theme.conf rename to tests/roots/test-theming/test_theme/staticfiles/theme.conf diff --git a/tests/roots/test-theming/test_theme/test-theme/theme.conf b/tests/roots/test-theming/test_theme/test-theme/theme.conf index 0d8403f0b..b7518bc9c 100644 --- a/tests/roots/test-theming/test_theme/test-theme/theme.conf +++ b/tests/roots/test-theming/test_theme/test-theme/theme.conf @@ -1,2 +1,3 @@ [theme] inherit = classic +sidebars = globaltoc.html, searchbox.html diff --git a/tests/roots/test-root/ziptheme.zip b/tests/roots/test-theming/ziptheme.zip similarity index 100% rename from tests/roots/test-root/ziptheme.zip rename to tests/roots/test-theming/ziptheme.zip diff --git a/tests/test_build.py b/tests/test_build.py index 185b5cda4..387e308a8 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -59,13 +59,14 @@ def nonascii_srcdir(request, rootdir, sphinx_test_tempdir): return srcdir +# note: this test skips building docs for some builders because they have independent testcase. 
+# (html, latex, texinfo and manpage) @pytest.mark.parametrize( "buildername", [ # note: no 'html' - if it's ok with dirhtml it's ok with html - 'dirhtml', 'singlehtml', 'latex', 'texinfo', 'pickle', 'json', 'text', - 'htmlhelp', 'qthelp', 'epub', 'applehelp', 'changes', 'xml', - 'pseudoxml', 'man', 'linkcheck', + 'dirhtml', 'singlehtml', 'pickle', 'json', 'text', 'htmlhelp', 'qthelp', + 'epub', 'applehelp', 'changes', 'xml', 'pseudoxml', 'linkcheck', ], ) @mock.patch('sphinx.builders.linkcheck.requests.head', diff --git a/tests/test_build_html.py b/tests/test_build_html.py index ceeb5f01c..b4fec18ba 100644 --- a/tests/test_build_html.py +++ b/tests/test_build_html.py @@ -126,24 +126,6 @@ def check_xpath(etree, fname, path, check, be_found=True): [node.text for node in nodes])) -def check_static_entries(outdir): - staticdir = outdir / '_static' - assert staticdir.isdir() - # a file from a directory entry in html_static_path - assert (staticdir / 'README').isfile() - # a directory from a directory entry in html_static_path - assert (staticdir / 'subdir' / 'foo.css').isfile() - # a file from a file entry in html_static_path - assert (staticdir / 'templated.css').isfile() - assert (staticdir / 'templated.css').text().splitlines()[1] == __display_version__ - # a file from _static, but matches exclude_patterns - assert not (staticdir / 'excluded.css').exists() - - -def check_extra_entries(outdir): - assert (outdir / 'robots.txt').isfile() - - @pytest.mark.sphinx('html', testroot='warnings') def test_html_warnings(app, warning): app.build() @@ -156,15 +138,6 @@ def test_html_warnings(app, warning): '--- Got:\n' + html_warnings -@pytest.mark.sphinx('html', tags=['testtag'], confoverrides={ - 'html_context.hckey_co': 'hcval_co'}) -@pytest.mark.test_params(shared_result='test_build_html_output') -def test_static_output(app): - app.build() - check_static_entries(app.builder.outdir) - check_extra_entries(app.builder.outdir) - - @pytest.mark.parametrize("fname,expect", flat_dict({ 'images.html': [ (".//img[@src='_images/img.png']", ''), @@ -238,7 +211,7 @@ def test_static_output(app): (".//li/strong", r'^command\\n$'), (".//li/strong", r'^program\\n$'), (".//li/em", r'^dfn\\n$'), - (".//li/code/span[@class='pre']", r'^kbd\\n$'), + (".//li/kbd", r'^kbd\\n$'), (".//li/span", u'File \N{TRIANGULAR BULLET} Close'), (".//li/code/span[@class='pre']", '^a/$'), (".//li/code/em/span[@class='pre']", '^varpart$'), @@ -377,7 +350,6 @@ def test_static_output(app): 'contents.html': [ (".//meta[@name='hc'][@content='hcval']", ''), (".//meta[@name='hc_co'][@content='hcval_co']", ''), - (".//meta[@name='testopt'][@content='testoverride']", ''), (".//td[@class='label']", r'\[Ref1\]'), (".//td[@class='label']", ''), (".//li[@class='toctree-l1']/a", 'Testing various markup'), @@ -410,9 +382,6 @@ def test_static_output(app): (".//a[@href='http://bugs.python.org/issue1000']", "issue 1000"), (".//a[@href='http://bugs.python.org/issue1042']", "explicit caption"), ], - '_static/statictmpl.html': [ - (".//project", 'Sphinx '), - ], 'genindex.html': [ # index entries (".//a/strong", "Main"), @@ -1145,16 +1114,28 @@ def test_html_assets(app): assert not (app.outdir / 'subdir' / '.htpasswd').exists() -@pytest.mark.sphinx('html', confoverrides={'html_sourcelink_suffix': ''}) +@pytest.mark.sphinx('html', testroot='basic', confoverrides={'html_copy_source': False}) +def test_html_copy_source(app): + app.builder.build_all() + assert not (app.outdir / '_sources' / 'index.rst.txt').exists() + + +@pytest.mark.sphinx('html', 
testroot='basic', confoverrides={'html_sourcelink_suffix': '.txt'}) def test_html_sourcelink_suffix(app): app.builder.build_all() - content_otherext = (app.outdir / 'otherext.html').text() - content_images = (app.outdir / 'images.html').text() + assert (app.outdir / '_sources' / 'index.rst.txt').exists() - assert '' in result) assert not (app.outdir / 'python-logo.png').exists() + + +@pytest.mark.sphinx('html', testroot='basic') +def test_html_sidebar(app, status, warning): + app.builder.build_all() + result = (app.outdir / 'index.html').text(encoding='utf8') + assert '
<h3><a href="#">Table Of Contents</a></h3>' in result + assert '<h3>Related Topics</h3>' in result + assert '<h3>This Page</h3>' in result + assert '<h3>Quick search</h3>' in result + + app.config.html_sidebars = {'**': []} + app.builder.build_all() + result = (app.outdir / 'index.html').text(encoding='utf8') + assert '<h3><a href="#">Table Of Contents</a></h3>' not in result + assert '<h3>Related Topics</h3>' not in result + assert '<h3>This Page</h3>' not in result + assert '<h3>Quick search</h3>
' not in result diff --git a/tests/test_build_html5.py b/tests/test_build_html5.py index 12386705e..39b064b1c 100644 --- a/tests/test_build_html5.py +++ b/tests/test_build_html5.py @@ -119,7 +119,7 @@ def cached_etree_parse(): (".//li/p/strong", r'^command\\n$'), (".//li/p/strong", r'^program\\n$'), (".//li/p/em", r'^dfn\\n$'), - (".//li/p/code/span[@class='pre']", r'^kbd\\n$'), + (".//li/p/kbd", r'^kbd\\n$'), (".//li/p/span", u'File \N{TRIANGULAR BULLET} Close'), (".//li/p/code/span[@class='pre']", '^a/$'), (".//li/p/code/em/span[@class='pre']", '^varpart$'), @@ -251,7 +251,6 @@ def cached_etree_parse(): 'contents.html': [ (".//meta[@name='hc'][@content='hcval']", ''), (".//meta[@name='hc_co'][@content='hcval_co']", ''), - (".//meta[@name='testopt'][@content='testoverride']", ''), (".//dt[@class='label']/span[@class='brackets']", r'Ref1'), (".//dt[@class='label']", ''), (".//li[@class='toctree-l1']/a", 'Testing various markup'), @@ -284,9 +283,6 @@ def cached_etree_parse(): (".//a[@href='http://bugs.python.org/issue1000']", "issue 1000"), (".//a[@href='http://bugs.python.org/issue1042']", "explicit caption"), ], - '_static/statictmpl.html': [ - (".//project", 'Sphinx '), - ], 'genindex.html': [ # index entries (".//a/strong", "Main"), diff --git a/tests/test_build_latex.py b/tests/test_build_latex.py index b58ee09c9..b78bcf637 100644 --- a/tests/test_build_latex.py +++ b/tests/test_build_latex.py @@ -1035,6 +1035,6 @@ def test_latex_image_in_parsed_literal(app, status, warning): app.builder.build_all() result = (app.outdir / 'Python.tex').text(encoding='utf8') - assert ('{\\sphinxunactivateextrasandspace \\raisebox{-0.5\height}' + assert ('{\\sphinxunactivateextrasandspace \\raisebox{-0.5\\height}' '{\\scalebox{2.000000}{\\sphinxincludegraphics[height=1cm]{{pic}.png}}}' '}AFTER') in result diff --git a/tests/test_config.py b/tests/test_config.py index f9300c30e..578f6e55c 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -240,3 +240,15 @@ def test_check_enum(app, status, warning): def test_check_enum_failed(app, status, warning): assert "The config value `value17` has to be a one of ('default', 'one', 'two'), " \ "but `invalid` is given." in warning.getvalue() + + +@pytest.mark.sphinx(testroot='config', confoverrides={'value17': ['one', 'two']}) +def test_check_enum_for_list(app, status, warning): + assert "The config value `value17` has to be a one of ('default', 'one', 'two'), " \ + not in warning.getvalue() + + +@pytest.mark.sphinx(testroot='config', confoverrides={'value17': ['one', 'two', 'invalid']}) +def test_check_enum_for_list_failed(app, status, warning): + assert "The config value `value17` has to be a one of ('default', 'one', 'two'), " \ + "but `['one', 'two', 'invalid']` is given." 
in warning.getvalue() diff --git a/tests/test_directive_code.py b/tests/test_directive_code.py index 548a5d72c..e3069061b 100644 --- a/tests/test_directive_code.py +++ b/tests/test_directive_code.py @@ -349,6 +349,14 @@ def test_code_block_namedlink_latex(app, status, warning): assert link2 in latex +@pytest.mark.sphinx('latex', testroot='directive-code') +def test_code_block_emphasize_latex(app, status, warning): + app.builder.build(['emphasize']) + latex = (app.outdir / 'Python.tex').text(encoding='utf-8').replace('\r\n', '\n') + includes = '\\fvset{hllines={, 5, 6, 13, 14, 15, 24, 25, 26, 27,}}%\n' + assert includes in latex + + @pytest.mark.sphinx('xml', testroot='directive-code') def test_literal_include(app, status, warning): app.builder.build(['index']) diff --git a/tests/test_domain_cpp.py b/tests/test_domain_cpp.py index 8c0fae2e3..f3f0037f5 100644 --- a/tests/test_domain_cpp.py +++ b/tests/test_domain_cpp.py @@ -111,9 +111,20 @@ def test_expressions(): exprCheck('nullptr', 'LDnE') exprCheck('true', 'L1E') exprCheck('false', 'L0E') - exprCheck('5', 'L5E') - exprCheck('5.0', 'L5.0E') - exprCheck('"abc\\"cba"', 'LA8_KcE') + ints = ['5', '0', '075', '0xF', '0XF', '0b1', '0B1'] + unsignedSuffix = ['', 'u', 'U'] + longSuffix = ['', 'l', 'L', 'll', 'LL'] + for i in ints: + for u in unsignedSuffix: + for l in longSuffix: + expr = i + u + l; + exprCheck(expr, 'L' + expr + 'E') + expr = i + l + u; + exprCheck(expr, 'L' + expr + 'E') + for suffix in ['', 'f', 'F', 'l', 'L']: + expr = '5.0' + suffix + exprCheck(expr, 'L' + expr + 'E') + exprCheck('"abc\\"cba"', 'LA8_KcE') # string # TODO: test the rest exprCheck('(... + Ns)', '(... + Ns)') exprCheck('(5)', 'L5E') @@ -135,6 +146,11 @@ def test_expressions(): exprCheck('-5', 'ngL5E') exprCheck('!5', 'ntL5E') exprCheck('~5', 'coL5E') + exprCheck('sizeof...(a)', 'sZ1a') + exprCheck('sizeof(T)', 'st1T') + exprCheck('sizeof -42', 'szngL42E') + exprCheck('alignof(T)', 'at1T') + exprCheck('noexcept(-42)', 'nxngL42E') # cast exprCheck('(int)2', 'cviL2E') # binary op @@ -219,6 +235,7 @@ def test_type_definitions(): check("type", "bool ::B::b", {1:"B::b", 2:"N1B1bE"}) check('type', 'A = B', {2:'1A'}) + check('type', 'A = decltype(b)', {2:'1A'}) # from breathe#267 (named function parameters for function pointers check('type', 'void (*gpio_callback_t)(struct device *port, uint32_t pin)', @@ -231,10 +248,6 @@ def test_concept_definitions(): {2:'I0EN1A1B7ConceptE'}) check('concept', 'template Foo', {2:'I00DpE3Foo'}) - check('concept', 'template A::B::Concept()', - {2:'I0EN1A1B7ConceptE'}) - check('concept', 'template Foo()', - {2:'I00DpE3Foo'}) with pytest.raises(DefinitionError): parse('concept', 'Foo') with pytest.raises(DefinitionError): @@ -377,6 +390,8 @@ def test_function_definitions(): check('function', 'extern int f()', {1:'f', 2:'1fv'}) + check('function', 'decltype(auto) f()', {1: 'f', 2:"1fv"}) + # TODO: make tests for functions in a template, e.g., Test # such that the id generation for function type types is correct. 
@@ -454,6 +469,10 @@ def test_class_definitions(): check('class', 'A : B, C...', {1:'A', 2:'1A'}) check('class', 'A : B..., C', {1:'A', 2:'1A'}) + # from #4094 + check('class', 'template> has_var', {2:'I00E7has_var'}) + check('class', 'template has_var>', {2:'I0E7has_varI1TNSt6void_tIDTadN1T3varEEEEE'}) + def test_enum_definitions(): check('enum', 'A', {2:"1A"}) @@ -489,6 +508,10 @@ def test_templates(): check('class', "template A", {2:"I0E1A"}) check('class', "template typename T> A", {2:"II0E0E1A"}) + check('class', "template typename> A", {2: "II0E0E1A"}) + check('class', "template typename ...T> A", {2:"II0EDpE1A"}) + check('class', "template typename...> A", {2: "II0EDpE1A"}) + check('class', "template A", {2:"I_iE1A"}) check('class', "template A", {2:"I_iE1A"}) check('class', "template A", {2:"I_DpiE1A"}) @@ -538,6 +561,13 @@ def test_templates(): check('concept', 'template Numerics = (... && Numeric)', {2:'IDpE8Numerics'}) + # explicit specializations of members + check('member', 'template<> int A::a', {2:'IEN1AIiE1aE'}) + check('member', 'template int A::a', {2: 'IEN1AIiE1aE'}, + output='template<> int A::a') # same as above + check('member', 'template<> template<> int A::B::b', {2:'IEIEN1AIiE1BIiE1bE'}) + check('member', 'template int A::B::b', {2: 'IEIEN1AIiE1BIiE1bE'}, + output='template<> template<> int A::B::b') # same as above def test_template_args(): diff --git a/tests/test_domain_py.py b/tests/test_domain_py.py index 38327c697..bf391053f 100644 --- a/tests/test_domain_py.py +++ b/tests/test_domain_py.py @@ -116,6 +116,15 @@ def test_domain_py_xrefs(app, status, warning): assert_refnode(refnodes[11], False, False, 'list', 'class') assert len(refnodes) == 12 + doctree = app.env.get_doctree('module_option') + refnodes = list(doctree.traverse(addnodes.pending_xref)) + print(refnodes) + print(refnodes[0]) + print(refnodes[1]) + assert_refnode(refnodes[0], 'test.extra', 'B', 'foo', 'meth') + assert_refnode(refnodes[1], 'test.extra', 'B', 'foo', 'meth') + assert len(refnodes) == 2 + @pytest.mark.sphinx('dummy', testroot='domain-py') def test_domain_py_objects(app, status, warning): diff --git a/tests/test_ext_apidoc.py b/tests/test_ext_apidoc.py index 794591aa6..d98dbabb6 100644 --- a/tests/test_ext_apidoc.py +++ b/tests/test_ext_apidoc.py @@ -152,10 +152,10 @@ def test_trailing_underscore(make_app, apidoc): @pytest.mark.apidoc( coderoot='test-root', options=[ - '--doc-project', u'プロジェクト名'.encode('utf-8'), - '--doc-author', u'著者名'.encode('utf-8'), - '--doc-version', u'バージョン'.encode('utf-8'), - '--doc-release', u'リリース'.encode('utf-8'), + '--doc-project', u'プロジェクト名', + '--doc-author', u'著者名', + '--doc-version', u'バージョン', + '--doc-release', u'リリース', ], ) def test_multibyte_parameters(make_app, apidoc): diff --git a/tests/test_ext_autosummary.py b/tests/test_ext_autosummary.py index 81fd35762..b59f0cbc8 100644 --- a/tests/test_ext_autosummary.py +++ b/tests/test_ext_autosummary.py @@ -11,7 +11,7 @@ from six import iteritems, StringIO -from sphinx.ext.autosummary import mangle_signature +from sphinx.ext.autosummary import mangle_signature, import_by_name from sphinx.testing.util import etree_parse @@ -145,3 +145,26 @@ def test_autosummary_generate(app, status, warning): ' ~Foo.__init__\n' ' ~Foo.bar\n' ' \n' in Foo) + + +def test_import_by_name(): + import sphinx + import sphinx.ext.autosummary + + prefixed_name, obj, parent, modname = import_by_name('sphinx') + assert prefixed_name == 'sphinx' + assert obj is sphinx + assert parent is None + assert modname == 'sphinx' + + 
prefixed_name, obj, parent, modname = import_by_name('sphinx.ext.autosummary.__name__') + assert prefixed_name == 'sphinx.ext.autosummary.__name__' + assert obj is sphinx.ext.autosummary.__name__ + assert parent is sphinx.ext.autosummary + assert modname == 'sphinx.ext.autosummary' + + prefixed_name, obj, parent, modname = import_by_name('sphinx.ext.autosummary.Autosummary.get_items') + assert prefixed_name == 'sphinx.ext.autosummary.Autosummary.get_items' + assert obj == sphinx.ext.autosummary.Autosummary.get_items + assert parent is sphinx.ext.autosummary.Autosummary + assert modname == 'sphinx.ext.autosummary' diff --git a/tests/test_ext_doctest.py b/tests/test_ext_doctest.py index 705f6262a..fa3ad6bc4 100644 --- a/tests/test_ext_doctest.py +++ b/tests/test_ext_doctest.py @@ -14,7 +14,7 @@ from sphinx.ext.doctest import compare_version cleanup_called = 0 -@pytest.mark.sphinx('doctest', testroot='doctest') +@pytest.mark.sphinx('doctest', testroot='ext-doctest') def test_build(app, status, warning): global cleanup_called cleanup_called = 0 diff --git a/tests/test_ext_todo.py b/tests/test_ext_todo.py index 77d657adc..4f01a07ab 100644 --- a/tests/test_ext_todo.py +++ b/tests/test_ext_todo.py @@ -84,3 +84,31 @@ def test_todo_not_included(app, status, warning): # check handled event assert len(todos) == 2 assert set(todo[1].astext() for todo in todos) == set(['todo in foo', 'todo in bar']) + +@pytest.mark.sphinx('latex', testroot='ext-todo', freshenv=True, + confoverrides={'todo_include_todos': True, 'todo_emit_warnings': True}) +def test_todo_valid_link(app, status, warning): + """ + Test that the inserted "original entry" links for todo items have a target + that exists in the LaTeX output. The target was previously incorrectly + omitted (GitHub issue #1020). + """ + + # Ensure the LaTeX output is built. + app.builder.build_all() + + content = (app.outdir / 'TodoTests.tex').text() + + # Look for the link to foo. We could equally well look for the link to bar. + link = r'\{\\hyperref\[\\detokenize\{(.*?foo.*?)}]\{\\sphinxcrossref{' \ + r'\\sphinxstyleemphasis{original entry}}}}' + m = re.findall(link, content) + assert len(m) == 1 + target = m[0] + + # Look for the targets of this link. + labels = [m for m in re.findall(r'\\label\{([^}]*)}', content) + if m == target] + + # If everything is correct we should have exactly one target. 
+ assert len(labels) == 1 diff --git a/tests/test_ext_viewcode.py b/tests/test_ext_viewcode.py index 4a3fb550f..4dceaa488 100644 --- a/tests/test_ext_viewcode.py +++ b/tests/test_ext_viewcode.py @@ -32,6 +32,12 @@ def test_viewcode(app, status, warning): assert result.count('href="_modules/spam/mod2.html#Class2"') == 2 assert result.count('@decorator') == 1 + # test that the class attribute is correctly documented + assert result.count('this is Class3') == 2 + assert 'this is the class attribute class_attr' in result + # the next assert fails, until the autodoc bug gets fixed + assert result.count('this is the class attribute class_attr') == 2 + @pytest.mark.sphinx(testroot='ext-viewcode', tags=['test_linkcode']) def test_linkcode(app, status, warning): diff --git a/tests/test_intl.py b/tests/test_intl.py index f3952e9a1..6b72438bd 100644 --- a/tests/test_intl.py +++ b/tests/test_intl.py @@ -197,28 +197,28 @@ def test_text_inconsistency_warnings(app, warning): expected_warning_expr = ( warning_fmt % { u'reftype': u'footnote references', - u'original': u"\[u?'\[#\]_'\]", - u'translated': u"\[\]" + u'original': u"\\[u?'\\[#\\]_'\\]", + u'translated': u"\\[\\]" } + warning_fmt % { u'reftype': u'footnote references', - u'original': u"\[u?'\[100\]_'\]", - u'translated': u"\[\]" + u'original': u"\\[u?'\\[100\\]_'\\]", + u'translated': u"\\[\\]" } + warning_fmt % { u'reftype': u'references', - u'original': u"\[u?'reference_'\]", - u'translated': u"\[u?'reference_', u?'reference_'\]" + u'original': u"\\[u?'reference_'\\]", + u'translated': u"\\[u?'reference_', u?'reference_'\\]" } + warning_fmt % { u'reftype': u'references', - u'original': u"\[\]", - u'translated': u"\[u?'`I18N WITH REFS INCONSISTENCY`_'\]" + u'original': u"\\[\\]", + u'translated': u"\\[u?'`I18N WITH REFS INCONSISTENCY`_'\\]" }) assert_re_search(expected_warning_expr, warnings) expected_citation_warning_expr = ( - u'.*/refs_inconsistency.txt:\\d+: WARNING: Citation \[ref2\] is not referenced.\n' + + u'.*/refs_inconsistency.txt:\\d+: WARNING: Citation \\[ref2\\] is not referenced.\n' + u'.*/refs_inconsistency.txt:\\d+: WARNING: citation not found: ref3') assert_re_search(expected_citation_warning_expr, warnings) @@ -300,8 +300,8 @@ def test_text_glossary_term_inconsistencies(app, warning): expected_warning_expr = ( u'.*/glossary_terms_inconsistency.txt:\\d+: ' u'WARNING: inconsistent term references in translated message.' 
- u" original: \[u?':term:`Some term`', u?':term:`Some other term`'\]," - u" translated: \[u?':term:`SOME NEW TERM`'\]\n") + u" original: \\[u?':term:`Some term`', u?':term:`Some other term`'\\]," + u" translated: \\[u?':term:`SOME NEW TERM`'\\]\n") assert_re_search(expected_warning_expr, warnings) @@ -520,7 +520,7 @@ def test_gettext_buildr_ignores_only_directive(app): @sphinx_intl # use individual shared_result directory to avoid "incompatible doctree" error -@pytest.mark.test_params(shared_result='test_gettext_dont_rebuild_mo') +@pytest.mark.sphinx(testroot='builder-gettext-dont-rebuild-mo') def test_gettext_dont_rebuild_mo(make_app, app_params, build_mo): # --- don't rebuild by .mo mtime def get_number_of_update_targets(app_): @@ -533,7 +533,7 @@ def test_gettext_dont_rebuild_mo(make_app, app_params, build_mo): app0 = make_app('dummy', *args, **kwargs) build_mo(app0.srcdir) app0.build() - assert (app0.srcdir / 'bom.mo') + assert (app0.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').exists() # Since it is after the build, the number of documents to be updated is 0 assert get_number_of_update_targets(app0) == 0 # When rewriting the timestamp of mo file, the number of documents to be diff --git a/tests/test_io.py b/tests/test_io.py new file mode 100644 index 000000000..ecd4a1009 --- /dev/null +++ b/tests/test_io.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +""" + test_sphinx_io + ~~~~~~~~~~~~~~ + + Tests io modules. + + :copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import pytest +from six import StringIO + +from sphinx.io import SphinxRSTFileInput + + +@pytest.mark.sphinx(testroot='basic') +def test_SphinxRSTFileInput(app): + app.env.temp_data['docname'] = 'index' + + # normal case + text = ('hello Sphinx world\n' + 'Sphinx is a document generator') + source = SphinxRSTFileInput(app, app.env, source=StringIO(text), + source_path='dummy.rst', encoding='utf-8') + result = source.read() + assert result.data == ['hello Sphinx world', + 'Sphinx is a document generator'] + assert result.info(0) == ('dummy.rst', 0) + assert result.info(1) == ('dummy.rst', 1) + assert result.info(2) == ('dummy.rst', None) # out of range + + # having rst_prolog ends without CR + app.env.config.rst_prolog = 'this is rst_prolog\nhello reST!' + source = SphinxRSTFileInput(app, app.env, source=StringIO(text), + source_path='dummy.rst', encoding='utf-8') + result = source.read() + assert result.data == ['this is rst_prolog', + 'hello reST!', + '', + 'hello Sphinx world', + 'Sphinx is a document generator'] + assert result.info(0) == ('', 0) + assert result.info(1) == ('', 1) + assert result.info(2) == ('', 0) + assert result.info(3) == ('dummy.rst', 0) + assert result.info(4) == ('dummy.rst', 1) + + # having rst_prolog ends with CR + app.env.config.rst_prolog = 'this is rst_prolog\nhello reST!\n' + source = SphinxRSTFileInput(app, app.env, source=StringIO(text), + source_path='dummy.rst', encoding='utf-8') + result = source.read() + assert result.data == ['this is rst_prolog', + 'hello reST!', + '', + 'hello Sphinx world', + 'Sphinx is a document generator'] + + # having docinfo and rst_prolog + docinfo_text = (':title: test of SphinxFileInput\n' + ':author: Sphinx team\n' + '\n' + 'hello Sphinx world\n' + 'Sphinx is a document generator\n') + app.env.config.rst_prolog = 'this is rst_prolog\nhello reST!' 
+ source = SphinxRSTFileInput(app, app.env, source=StringIO(docinfo_text), + source_path='dummy.rst', encoding='utf-8') + result = source.read() + assert result.data == [':title: test of SphinxFileInput', + ':author: Sphinx team', + '', + 'this is rst_prolog', + 'hello reST!', + '', + '', + 'hello Sphinx world', + 'Sphinx is a document generator'] + assert result.info(0) == ('dummy.rst', 0) + assert result.info(1) == ('dummy.rst', 1) + assert result.info(2) == ('', 0) + assert result.info(3) == ('', 0) + assert result.info(4) == ('', 1) + assert result.info(5) == ('', 0) + assert result.info(6) == ('dummy.rst', 2) + assert result.info(7) == ('dummy.rst', 3) + assert result.info(8) == ('dummy.rst', 4) + assert result.info(9) == ('dummy.rst', None) # out of range + + # having rst_epilog + app.env.config.rst_prolog = None + app.env.config.rst_epilog = 'this is rst_epilog\ngood-bye reST!' + source = SphinxRSTFileInput(app, app.env, source=StringIO(text), + source_path='dummy.rst', encoding='utf-8') + result = source.read() + assert result.data == ['hello Sphinx world', + 'Sphinx is a document generator', + '', + 'this is rst_epilog', + 'good-bye reST!'] + assert result.info(0) == ('dummy.rst', 0) + assert result.info(1) == ('dummy.rst', 1) + assert result.info(2) == ('', 0) + assert result.info(3) == ('', 0) + assert result.info(4) == ('', 1) + assert result.info(5) == ('', None) # out of range + + # expandtabs / convert whitespaces + app.env.config.rst_prolog = None + app.env.config.rst_epilog = None + text = ('\thello Sphinx world\n' + '\v\fSphinx is a document generator') + source = SphinxRSTFileInput(app, app.env, source=StringIO(text), + source_path='dummy.rst', encoding='utf-8') + result = source.read() + assert result.data == [' hello Sphinx world', + ' Sphinx is a document generator'] diff --git a/tests/test_markup.py b/tests/test_markup.py index dfa4d74cf..9c41845fc 100644 --- a/tests/test_markup.py +++ b/tests/test_markup.py @@ -211,7 +211,8 @@ def get_verifier(verify, verify_re): 'verify', u'::\n\n @Γ\\∞${}', None, - (u'\\begin{sphinxVerbatim}[commandchars=\\\\\\{\\}]\n' + (u'\\fvset{hllines={, ,}}%\n' + u'\\begin{sphinxVerbatim}[commandchars=\\\\\\{\\}]\n' u'@\\(\\Gamma\\)\\PYGZbs{}\\(\\infty\\)\\PYGZdl{}\\PYGZob{}\\PYGZcb{}\n' u'\\end{sphinxVerbatim}'), ), diff --git a/tests/test_pycode_parser.py b/tests/test_pycode_parser.py index cfea2ca00..b9327999b 100644 --- a/tests/test_pycode_parser.py +++ b/tests/test_pycode_parser.py @@ -9,6 +9,9 @@ :license: BSD, see LICENSE for details. 
""" +import pytest +from six import PY2 + from sphinx.pycode.parser import Parser @@ -116,6 +119,23 @@ def test_complex_assignment(): assert parser.definitions == {} +@pytest.mark.skipif(PY2, reason='tests for py3 syntax') +def test_complex_assignment_py3(): + source = ('a, *b, c = (1, 2, 3, 4) #: unpack assignment\n' + 'd, *self.attr = (5, 6, 7) #: unpack assignment2\n' + 'e, *f[0] = (8, 9, 0) #: unpack assignment3\n' + ) + parser = Parser(source) + parser.parse() + assert parser.comments == {('', 'a'): 'unpack assignment', + ('', 'b'): 'unpack assignment', + ('', 'c'): 'unpack assignment', + ('', 'd'): 'unpack assignment2', + ('', 'e'): 'unpack assignment3', + } + assert parser.definitions == {} + + def test_obj_assignment(): source = ('obj = SomeObject() #: some object\n' 'obj.attr = 1 #: attr1\n' @@ -226,6 +246,18 @@ def test_nested_class(): 'Foo.Bar.attr2': 3} +def test_class_comment(): + source = ('import logging\n' + 'logger = logging.getLogger(__name__)\n' + '\n' + 'class Foo(object):\n' + ' """Bar"""\n') + parser = Parser(source) + parser.parse() + assert parser.comments == {} + assert parser.definitions == {'Foo': ('class', 4, 5)} + + def test_comment_picker_multiline_string(): source = ('class Foo(object):\n' ' a = None\n' diff --git a/tests/test_quickstart.py b/tests/test_quickstart.py index 46ed3a0c1..f69a0a58e 100644 --- a/tests/test_quickstart.py +++ b/tests/test_quickstart.py @@ -61,27 +61,7 @@ def teardown_module(): coloron() -def test_quickstart_inputstrip(): - d = {} - answers = { - 'Q1': 'Y', - 'Q2': ' Yes ', - 'Q3': 'N', - 'Q4': 'N ', - } - qs.term_input = mock_input(answers) - qs.do_prompt(d, 'k1', 'Q1') - assert d['k1'] == 'Y' - qs.do_prompt(d, 'k2', 'Q2') - assert d['k2'] == 'Yes' - qs.do_prompt(d, 'k3', 'Q3') - assert d['k3'] == 'N' - qs.do_prompt(d, 'k4', 'Q4') - assert d['k4'] == 'N' - - def test_do_prompt(): - d = {} answers = { 'Q2': 'v2', 'Q3': 'v3', @@ -90,24 +70,29 @@ def test_do_prompt(): 'Q6': 'foo', } qs.term_input = mock_input(answers) - try: - qs.do_prompt(d, 'k1', 'Q1') - except AssertionError: - assert 'k1' not in d - else: - assert False, 'AssertionError not raised' - qs.do_prompt(d, 'k1', 'Q1', default='v1') - assert d['k1'] == 'v1' - qs.do_prompt(d, 'k3', 'Q3', default='v3_default') - assert d['k3'] == 'v3' - qs.do_prompt(d, 'k2', 'Q2') - assert d['k2'] == 'v2' - qs.do_prompt(d, 'k4', 'Q4', validator=qs.boolean) - assert d['k4'] is True - qs.do_prompt(d, 'k5', 'Q5', validator=qs.boolean) - assert d['k5'] is False + + assert qs.do_prompt('Q1', default='v1') == 'v1' + assert qs.do_prompt('Q3', default='v3_default') == 'v3' + assert qs.do_prompt('Q2') == 'v2' + assert qs.do_prompt('Q4', validator=qs.boolean) is True + assert qs.do_prompt('Q5', validator=qs.boolean) is False with pytest.raises(AssertionError): - qs.do_prompt(d, 'k6', 'Q6', validator=qs.boolean) + qs.do_prompt('Q6', validator=qs.boolean) + + +def test_do_prompt_inputstrip(): + answers = { + 'Q1': 'Y', + 'Q2': ' Yes ', + 'Q3': 'N', + 'Q4': 'N ', + } + qs.term_input = mock_input(answers) + + assert qs.do_prompt('Q1') == 'Y' + assert qs.do_prompt('Q2') == 'Yes' + assert qs.do_prompt('Q3') == 'N' + assert qs.do_prompt('Q4') == 'N' def test_do_prompt_with_nonascii(): @@ -117,12 +102,12 @@ def test_do_prompt_with_nonascii(): } qs.term_input = mock_input(answers) try: - qs.do_prompt(d, 'k1', 'Q1', default=u'\u65e5\u672c') + result = qs.do_prompt('Q1', default=u'\u65e5\u672c') except UnicodeEncodeError: raise pytest.skip.Exception( 'non-ASCII console input not supported on this encoding: %s', 
qs.TERM_ENCODING) - assert d['k1'] == u'\u30c9\u30a4\u30c4' + assert result == u'\u30c9\u30a4\u30c4' def test_quickstart_defaults(tempdir): @@ -149,7 +134,6 @@ def test_quickstart_defaults(tempdir): assert ns['copyright'] == '%s, Georg Brandl' % time.strftime('%Y') assert ns['version'] == '0.1' assert ns['release'] == '0.1' - assert ns['todo_include_todos'] is False assert ns['html_static_path'] == ['_static'] assert ns['latex_documents'] == [ ('index', 'SphinxTest.tex', 'Sphinx Test Documentation', diff --git a/tests/test_search.py b/tests/test_search.py index 21c0badb7..f1825dfa4 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -228,3 +228,15 @@ def test_IndexBuilder(): } assert index._objtypes == {('dummy', 'objtype'): 0} assert index._objnames == {0: ('dummy', 'objtype', 'objtype')} + + +def test_IndexBuilder_lookup(): + env = DummyEnvironment('1.0', {}) + + # zh + index = IndexBuilder(env, 'zh', {}, None) + assert index.lang.lang == 'zh' + + # zh_CN + index = IndexBuilder(env, 'zh_CN', {}, None) + assert index.lang.lang == 'zh' diff --git a/tests/test_theming.py b/tests/test_theming.py index 408a6503f..0977e1274 100644 --- a/tests/test_theming.py +++ b/tests/test_theming.py @@ -17,6 +17,7 @@ from sphinx.theming import ThemeError @pytest.mark.sphinx( + testroot='theming', confoverrides={'html_theme': 'ziptheme', 'html_theme_options.testopt': 'foo'}) def test_theme_api(app, status, warning): @@ -25,10 +26,11 @@ def test_theme_api(app, status, warning): # test Theme class API assert set(app.html_themes.keys()) == \ set(['basic', 'default', 'scrolls', 'agogo', 'sphinxdoc', 'haiku', - 'traditional', 'testtheme', 'ziptheme', 'epub', 'nature', - 'pyramid', 'bizstyle', 'classic', 'nonav']) - assert app.html_themes['testtheme'] == app.srcdir / 'testtheme' + 'traditional', 'epub', 'nature', 'pyramid', 'bizstyle', 'classic', 'nonav', + 'test-theme', 'ziptheme', 'staticfiles', 'parent', 'child']) + assert app.html_themes['test-theme'] == app.srcdir / 'test_theme' / 'test-theme' assert app.html_themes['ziptheme'] == app.srcdir / 'ziptheme.zip' + assert app.html_themes['staticfiles'] == app.srcdir / 'test_theme' / 'staticfiles' # test Theme instance API theme = app.builder.theme @@ -95,3 +97,30 @@ def test_double_inheriting_theme(app, status, warning): def test_nested_zipped_theme(app, status, warning): assert app.builder.theme.name == 'child' app.build() # => not raises TemplateNotFound + + +@pytest.mark.sphinx(testroot='theming', + confoverrides={'html_theme': 'staticfiles'}) +def test_staticfiles(app, status, warning): + app.build() + assert (app.outdir / '_static' / 'staticimg.png').exists() + assert (app.outdir / '_static' / 'statictmpl.html').exists() + assert (app.outdir / '_static' / 'statictmpl.html').text() == ( + '<!-- testing static templates -->\n' + '<html><project>Python</project></html>' + ) + + result = (app.outdir / 'index.html').text() + assert '<meta name="testopt" content="optdefault" />' in result + + +@pytest.mark.sphinx(testroot='theming') +def test_theme_sidebars(app, status, warning): + app.build() + + # test-theme specifies globaltoc and searchbox as default sidebars + result = (app.outdir / 'index.html').text(encoding='utf8') + assert '<h3><a href="#">Table Of Contents</a></h3>' in result + assert '<h3>Related Topics</h3>' not in result + assert '<h3>This Page</h3>' not in result + assert '<h3>Quick search</h3>
' in result diff --git a/tests/test_util.py b/tests/test_util.py index 84ce44007..aae54eaf0 100644 --- a/tests/test_util.py +++ b/tests/test_util.py @@ -14,8 +14,7 @@ from mock import patch from sphinx.util import logging from sphinx.util import ( - display_chunk, encode_uri, parselinenos, split_docinfo, status_iterator, - xmlname_checker + display_chunk, encode_uri, parselinenos, status_iterator, xmlname_checker ) from sphinx.testing.util import strip_escseq @@ -36,28 +35,6 @@ def test_encode_uri(): assert expected, encode_uri(uri) -def test_splitdocinfo(): - source = "Hello world.\n" - docinfo, content = split_docinfo(source) - assert docinfo == '' - assert content == 'Hello world.\n' - - source = ":orphan:\n\nHello world.\n" - docinfo, content = split_docinfo(source) - assert docinfo == ':orphan:\n' - assert content == '\nHello world.\n' - - source = ":author: Georg Brandl\n:title: Manual of Sphinx\n\nHello world.\n" - docinfo, content = split_docinfo(source) - assert docinfo == ':author: Georg Brandl\n:title: Manual of Sphinx\n' - assert content == '\nHello world.\n' - - source = ":multiline: one\n\ttwo\n\tthree\n\nHello world.\n" - docinfo, content = split_docinfo(source) - assert docinfo == ":multiline: one\n\ttwo\n\tthree\n" - assert content == '\nHello world.\n' - - def test_display_chunk(): assert display_chunk('hello') == 'hello' assert display_chunk(['hello']) == 'hello' diff --git a/tests/test_util_inspect.py b/tests/test_util_inspect.py index 6176449b8..63e04ee76 100644 --- a/tests/test_util_inspect.py +++ b/tests/test_util_inspect.py @@ -113,6 +113,7 @@ def test_getargspec_bound_methods(): assert expected_bound == inspect.getargspec(wrapped_bound_method) + def test_Signature(): # literals with pytest.raises(TypeError): @@ -310,3 +311,23 @@ def test_safe_getattr_with___dict___override(): assert exc.args[0] == 'bar' else: pytest.fail('AttributeError not raised') + + +def test_dictionary_sorting(): + dictionary = {"c": 3, "a": 1, "d": 2, "b": 4} + description = inspect.object_description(dictionary) + assert description == "{'a': 1, 'b': 4, 'c': 3, 'd': 2}" + + +def test_dict_customtype(): + class CustomType(object): + def __init__(self, value): + self._value = value + + def __repr__(self): + return "<CustomType(%r)>" % self._value + + dictionary = {CustomType(2): 2, CustomType(1): 1} + description = inspect.object_description(dictionary) + # Type is unsortable, just check that it does not crash + assert "<CustomType(2)>: 2" in description diff --git a/tests/test_writer_latex.py b/tests/test_writer_latex.py index 228161886..b026f8d17 100644 --- a/tests/test_writer_latex.py +++ b/tests/test_writer_latex.py @@ -27,7 +27,7 @@ def test_rstdim_to_latexdim(): assert rstdim_to_latexdim('30%') == '0.300\\linewidth' assert rstdim_to_latexdim('160') == '160\\sphinxpxdimen' - # flaot values + # float values assert rstdim_to_latexdim('160.0em') == '160.0em' assert rstdim_to_latexdim('.5em') == '.5em' diff --git a/utils/bump_version.py b/utils/bump_version.py index da193a3de..9033aee70 100755 --- a/utils/bump_version.py +++ b/utils/bump_version.py @@ -81,7 +81,7 @@ def processing(message): yield except Skip as exc: print('skip: %s' % exc) - except: + except Exception: print('error') raise else: diff --git a/utils/reindent.py b/utils/reindent.py deleted file mode 100755 index b79657636..000000000 --- a/utils/reindent.py +++ /dev/null @@ -1,320 +0,0 @@ -#! /usr/bin/env python - -# Released to the public domain, by Tim Peters, 03 October 2000. - -"""reindent [-d][-r][-v] [ path ... ] - --d (--dryrun) Dry run.
Analyze, but don't make any changes to, files. --r (--recurse) Recurse. Search for all .py files in subdirectories too. --n (--nobackup) No backup. Does not make a ".bak" file before reindenting. --v (--verbose) Verbose. Print informative msgs; else no output. --h (--help) Help. Print this usage information and exit. - -Change Python (.py) files to use 4-space indents and no hard tab characters. -Also trim excess spaces and tabs from ends of lines, and remove empty lines -at the end of files. Also ensure the last line ends with a newline. - -If no paths are given on the command line, reindent operates as a filter, -reading a single source file from standard input and writing the transformed -source to standard output. In this case, the -d, -r and -v flags are -ignored. - -You can pass one or more file and/or directory paths. When a directory -path, all .py files within the directory will be examined, and, if the -r -option is given, likewise recursively for subdirectories. - -If output is not to standard output, reindent overwrites files in place, -renaming the originals with a .bak extension. If it finds nothing to -change, the file is left alone. If reindent does change a file, the changed -file is a fixed-point for future runs (i.e., running reindent on the -resulting .py file won't change it again). - -The hard part of reindenting is figuring out what to do with comment -lines. So long as the input files get a clean bill of health from -tabnanny.py, reindent should do a good job. - -The backup file is a copy of the one that is being reindented. The ".bak" -file is generated with shutil.copy(), but some corner cases regarding -user/group and permissions could leave the backup file more readable that -you'd prefer. You can always use the --nobackup option to prevent this. 
-""" -from __future__ import print_function - -import os -import sys -import shutil -import tokenize -from six.moves import range - -__version__ = "1" - -if sys.version_info >= (3, 0): - def tokens(readline, tokeneater): - for token in tokenize.tokenize(readline): - yield tokeneater(*token) - else: - tokens = tokenize.tokenize - -verbose = 0 -recurse = 0 -dryrun = 0 -makebackup = True - - -def usage(msg=None): - if msg is not None: - print(msg, file=sys.stderr) - print(__doc__, file=sys.stderr) - - -def errprint(*args): - sep = "" - for arg in args: - sys.stderr.write(sep + str(arg)) - sep = " " - sys.stderr.write("\n") - - -def main(): - import getopt - global verbose, recurse, dryrun, makebackup - try: - opts, args = getopt.getopt(sys.argv[1:], "drnvh", - ["dryrun", "recurse", "nobackup", "verbose", "help"]) - except getopt.error as msg: - usage(msg) - return - for o, a in opts: - if o in ('-d', '--dryrun'): - dryrun += 1 - elif o in ('-r', '--recurse'): - recurse += 1 - elif o in ('-n', '--nobackup'): - makebackup = False - elif o in ('-v', '--verbose'): - verbose += 1 - elif o in ('-h', '--help'): - usage() - return - if not args: - r = Reindenter(sys.stdin) - r.run() - r.write(sys.stdout) - return - for arg in args: - check(arg) - - -def check(file): - if os.path.isdir(file) and not os.path.islink(file): - if verbose: - print("listing directory", file) - names = os.listdir(file) - for name in names: - fullname = os.path.join(file, name) - if ((recurse and os.path.isdir(fullname) and - not os.path.islink(fullname) and - not os.path.split(fullname)[1].startswith(".")) or - name.lower().endswith(".py")): - check(fullname) - return - - if verbose: - print("checking", file, "...", end=' ') - try: - f = open(file) - except IOError as msg: - errprint("%s: I/O Error: %s" % (file, str(msg))) - return - - with f: - r = Reindenter(f) - if r.run(): - if verbose: - print("changed.") - if dryrun: - print("But this is a dry run, so leaving it alone.") - if not dryrun: - bak = file + ".bak" - if makebackup: - shutil.copyfile(file, bak) - if verbose: - print("backed up", file, "to", bak) - with open(file, "w") as f: - r.write(f) - if verbose: - print("wrote new", file) - return True - else: - if verbose: - print("unchanged.") - return False - - -def _rstrip(line, JUNK='\n \t'): - """Return line stripped of trailing spaces, tabs, newlines. - - Note that line.rstrip() instead also strips sundry control characters, - but at least one known Emacs user expects to keep junk like that, not - mentioning Barry by name or anything <wink>. - """ - - i = len(line) - while i > 0 and line[i - 1] in JUNK: - i -= 1 - return line[:i] - - -class Reindenter: - def __init__(self, f): - self.find_stmt = 1 # next token begins a fresh stmt? - self.level = 0 # current indent level - - # Raw file lines. - self.raw = f.readlines() - - # File lines, rstripped & tab-expanded. Dummy at start is so - # that we can use tokenize's 1-based line numbering easily. - # Note that a line is all-blank iff it's "\n". - self.lines = [_rstrip(line).expandtabs() + "\n" - for line in self.raw] - self.lines.insert(0, None) - self.index = 1 # index into self.lines of next line - - # List of (lineno, indentlevel) pairs, one for each stmt and - # comment line. indentlevel is -1 for comment lines, as a - # signal that tokenize doesn't know what to do about them; - # indeed, they're our headache! - self.stats = [] - - def run(self): - tokens(self.getline, self.tokeneater) - # Remove trailing empty lines.
- lines = self.lines - while lines and lines[-1] == "\n": - lines.pop() - # Sentinel. - stats = self.stats - stats.append((len(lines), 0)) - # Map count of leading spaces to # we want. - have2want = {} - # Program after transformation. - after = self.after = [] - # Copy over initial empty lines -- there's nothing to do until - # we see a line with *something* on it. - i = stats[0][0] - after.extend(lines[1:i]) - for i in range(len(stats) - 1): - thisstmt, thislevel = stats[i] - nextstmt = stats[i + 1][0] - have = getlspace(lines[thisstmt]) - want = thislevel * 4 - if want < 0: - # A comment line. - if have: - # An indented comment line. If we saw the same - # indentation before, reuse what it most recently - # mapped to. - want = have2want.get(have, -1) - if want < 0: - # Then it probably belongs to the next real stmt. - for j in range(i + 1, len(stats) - 1): - jline, jlevel = stats[j] - if jlevel >= 0: - if have == getlspace(lines[jline]): - want = jlevel * 4 - break - if want < 0: # Maybe it's a hanging - # comment like this one, - # in which case we should shift it like its base - # line got shifted. - for j in range(i - 1, -1, -1): - jline, jlevel = stats[j] - if jlevel >= 0: - want = (have + getlspace(after[jline - 1]) - - getlspace(lines[jline])) - break - if want < 0: - # Still no luck -- leave it alone. - want = have - else: - want = 0 - assert want >= 0 - have2want[have] = want - diff = want - have - if diff == 0 or have == 0: - after.extend(lines[thisstmt:nextstmt]) - else: - for line in lines[thisstmt:nextstmt]: - if diff > 0: - if line == "\n": - after.append(line) - else: - after.append(" " * diff + line) - else: - remove = min(getlspace(line), -diff) - after.append(line[remove:]) - return self.raw != self.after - - def write(self, f): - f.writelines(self.after) - - # Line-getter for tokenize. - def getline(self): - if self.index >= len(self.lines): - line = "" - else: - line = self.lines[self.index] - self.index += 1 - return line - - # Line-eater for tokenize. - def tokeneater(self, type, token, position, end, line, - INDENT=tokenize.INDENT, - DEDENT=tokenize.DEDENT, - NEWLINE=tokenize.NEWLINE, - COMMENT=tokenize.COMMENT, - NL=tokenize.NL): - - if type == NEWLINE: - # A program statement, or ENDMARKER, will eventually follow, - # after some (possibly empty) run of tokens of the form - # (NL | COMMENT)* (INDENT | DEDENT+)? - self.find_stmt = 1 - - elif type == INDENT: - self.find_stmt = 1 - self.level += 1 - - elif type == DEDENT: - self.find_stmt = 1 - self.level -= 1 - - elif type == COMMENT: - if self.find_stmt: - self.stats.append((position[0], -1)) - # but we're still looking for a new stmt, so leave - # find_stmt alone - - elif type == NL: - pass - - elif self.find_stmt: - # This is the first "real token" following a NEWLINE, so it - # must be the first token of the next program statement, or an - # ENDMARKER. - self.find_stmt = 0 - if line: # not endmarker - self.stats.append((position[0], self.level)) - - -# Count number of leading blanks. -def getlspace(line): - i, n = 0, len(line) - while i < n and line[i] == " ": - i += 1 - return i - - -if __name__ == '__main__': - main()
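With utils/reindent.py deleted and the Makefile's reindent target reduced to a notice, nothing in-tree performs that cleanup any more. As a minimal, hypothetical sketch only — assuming all that is wanted is the whitespace normalization described in the removed docstring (tab expansion, trailing-whitespace trimming, no blank lines at end of file, and a final newline), not the comment re-indentation logic — a stdin/stdout filter could look like this:

#!/usr/bin/env python
# Hypothetical sketch, not part of this change set: covers only the
# whitespace cleanup of the removed utils/reindent.py, not its
# comment re-indentation logic.
from __future__ import print_function

import sys


def normalize(text, tabsize=8):
    # Expand hard tabs and strip trailing spaces/tabs on every line.
    lines = [line.expandtabs(tabsize).rstrip(" \t") for line in text.splitlines()]
    # Drop empty lines at the end of the file.
    while lines and not lines[-1]:
        lines.pop()
    # Ensure the result ends with exactly one newline.
    return "\n".join(lines) + "\n"


if __name__ == '__main__':
    sys.stdout.write(normalize(sys.stdin.read()))

Used as a filter reading stdin and writing stdout, this mirrors the removed script's no-argument mode.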