Merge branch '3.x' into 8183

commit 87121c3de0
Takeshi KOMIYA, 2020-11-01 11:38:39 +09:00
173 changed files with 1500 additions and 801 deletions


@ -2,5 +2,8 @@
blank_issues_enabled: false # default: true
contact_links:
- name: Question
url: https://stackoverflow.com/questions/tagged/python-sphinx
about: For Q&A purpose, please use Stackoverflow with the tag python-sphinx
- name: Discussion
url: https://groups.google.com/forum/#!forum/sphinx-users
about: For Q&A purpose, please use sphinx-users mailing list.
about: For general discussion, please use sphinx-users mailing list.


@ -24,9 +24,6 @@ jobs:
env:
- TOXENV=du15
- PYTEST_ADDOPTS="--cov ./ --cov-append --cov-config setup.cfg"
- python: 'nightly'
env:
- TOXENV=du16
- language: node_js
node_js: '10.7'

CHANGES

@ -1,3 +1,86 @@
Release 3.3.0 (in development)
==============================
Dependencies
------------
Incompatible changes
--------------------
Deprecated
----------
* ``sphinx.builders.latex.LaTeXBuilder.usepackages``
* ``sphinx.builders.latex.LaTeXBuilder.usepackages_after_hyperref``
* ``sphinx.ext.autodoc.SingledispatchFunctionDocumenter``
* ``sphinx.ext.autodoc.SingledispatchMethodDocumenter``
Features added
--------------
* #8100: html: Show a better error message for failures on copying
html_static_files
* #8141: C: added a ``maxdepth`` option to :rst:dir:`c:alias` to insert
nested declarations.
* #8081: LaTeX: Allow adding a LaTeX package via ``app.add_latex_package()`` until
  just before the .tex file is written (a sketch follows this list)
* #7996: manpage: Add :confval:`man_make_section_directory` to create a section
  directory when building man pages
* #8289: epub: Allow suppressing "duplicated ToC entry found" warnings from the epub
  builder using :confval:`suppress_warnings`.
* #8298: sphinx-quickstart: Add :option:`sphinx-quickstart --no-sep` option
* #8304: sphinx.testing: Register public markers in sphinx.testing.fixtures
* #8051: napoleon: use the obj role for all See Also items
* #8050: napoleon: Apply :confval:`napoleon_preprocess_types` to every field
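A minimal sketch of the #8081 behaviour mentioned above (the extension and the
package names are illustrative, not part of this commit)::

    # Hypothetical extension: register LaTeX packages from a builder-inited
    # handler; since #8081 the package list is read just before the .tex file
    # is written, so this late registration is still honoured.
    from sphinx.application import Sphinx

    def on_builder_inited(app: Sphinx) -> None:
        if app.builder.name == 'latex':
            app.add_latex_package('xcolor')  # emits \usepackage{xcolor}
            app.add_latex_package('pxjahyper', after_hyperref=True)

    def setup(app: Sphinx):
        app.connect('builder-inited', on_builder_inited)
        return {'version': '0.1', 'parallel_read_safe': True}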
Bugs fixed
----------
* #8085: i18n: Add support for using a single text domain
* #6640: i18n: Failed to override system message translation
* #8143: autodoc: AttributeError is raised when False value is passed to
autodoc_default_options
* #8103: autodoc: functools.cached_property is not considered as a property
* #8190: autodoc: a parsing error is raised if an extension replaces the docstring
  with a string that does not end with a blank line
* #8142: autodoc: Wrong constructor signature for the class derived from
typing.Generic
* #8157: autodoc: TypeError is raised when annotation has invalid __args__
* #7964: autodoc: Tuple in default value is wrongly rendered
* #8200: autodoc: type aliases break type formatting of autoattribute
* #7786: autodoc: can't detect overloaded methods defined in other file
* #8294: autodoc: single-string __slots__ is not handled correctly
* #7785: autodoc: autodoc_typehints='none' has no effect on overloaded functions
* #8192: napoleon: description disappears when it contains inline literals
* #8142: napoleon: potential regex denial of service in Google-style docs
* #8169: LaTeX: pxjahyper loaded even when latex_engine is not platex
* #8215: LaTeX: 'oneside' classoption causes build warning
* #8175: intersphinx: potential regex denial of service via a broken inventory
* #8277: sphinx-build: missing and redundant spacing (etc.) in console output
  during the build
* #7973: imgconverter: Check availability of imagemagick many times
* #8255: py domain: number in default argument value is changed from hexadecimal
to decimal
* #8316: html: Prevent arrow keys changing page when button elements are focused
* #8343: html search: Fix unnecessary load of images when parsing the document
* #8254: html theme: Line numbers misalign with code lines
* #8093: The highlight warning has wrong location in some builders (LaTeX,
singlehtml and so on)
* #8215: Eliminate Fancyhdr build warnings for oneside documents
* #8239: Failed to refer to a token in a productionlist if it is indented
* #8268: linkcheck: Report HTTP errors when ``linkcheck_anchors`` is ``True``
* #8245: linkcheck: take source directory into account for local files
* #8321: linkcheck: ``tel:`` schema hyperlinks are detected as errors
* #8323: linkcheck: the exit status is incorrect when links with an unsupported
  schema are found
* #6914: figure numbers are unexpectedly assigned to uncaptioned items
* #8320: make "inline" line numbers un-selectable
Testing
--------
* #8257: Support parallel build in sphinx.testing
Release 3.2.2 (in development)
==============================
@ -16,6 +99,15 @@ Features added
Bugs fixed
----------
* #8188: C, add missing items to the internal object types dictionary, which,
  for example, prevented intersphinx from resolving them.
* C, fix anon objects in intersphinx.
* #8270, C++, properly reject functions as duplicate declarations if a
non-function declaration of the same name already exists.
* C, fix references to function parameters.
Link to the function instead of a non-existing anchor.
Testing
--------
@ -118,7 +210,7 @@ Bugs fixed
contains a hyperlink target
* #7469: autosummary: "Module attributes" header is not translatable
* #7940: apidoc: An extra newline is generated at the end of the rst file if a
module has submodules
* #4258: napoleon: decorated special methods are not shown
* #7799: napoleon: parameters are not escaped for combined params in numpydoc
* #7780: napoleon: multiple parameters declaration in numpydoc was wrongly
@ -285,7 +377,7 @@ Features added
* #7543: html theme: Add top and bottom margins to tables
* #7695: html theme: Add viewport meta tag for basic theme
* #7721: html theme: classic: default codetextcolor/codebgcolor doesn't override
Pygments
* C and C++: allow semicolon in the end of declarations.
* C++, parse parameterized noexcept specifiers.
* #7294: C++, parse expressions with user-defined literals.


@ -230,6 +230,7 @@ Documentation using sphinx_rtd_theme
* `MyHDL <http://docs.myhdl.org/>`__
* `Nextflow <https://www.nextflow.io/docs/latest/index.html>`__
* `NICOS <https://forge.frm2.tum.de/nicos/doc/nicos-master/>`__ (customized)
* `OpenFAST <https://openfast.readthedocs.io/>`__
* `Pelican <http://docs.getpelican.com/>`__
* `picamera <https://picamera.readthedocs.io/>`__
* `Pillow <https://pillow.readthedocs.io/>`__
@ -330,6 +331,7 @@ Documentation using a custom theme or integrated in a website
* `Lasso <http://lassoguide.com/>`__
* `Mako <http://docs.makotemplates.org/>`__
* `MirrorBrain <http://mirrorbrain.org/docs/>`__
* `Mitiq <https://mitiq.readthedocs.io/>`__
* `MongoDB <https://docs.mongodb.com/>`__
* `Music21 <https://web.mit.edu/music21/doc/>`__
* `MyHDL <http://docs.myhdl.org/>`__


@ -64,10 +64,6 @@ type-check:
doclinter:
python utils/doclinter.py CHANGES *.rst doc/
.PHONY: pylint
pylint:
@pylint --rcfile utils/pylintrc sphinx
.PHONY: test
test:
@$(PYTHON) -m pytest -v $(TEST)

doc/_static/favicon.svg (vendored, new file)

@ -0,0 +1,8 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 100 100">
<style>
@media (prefers-color-scheme: dark) {
svg { fill: white; }
}
</style>
<path d="m 67.780707,71.526216 c 0,-2.720856 0.735772,-7.633735 1.635035,-10.917507 2.076574,-7.582764 3.222746,-16.97568 2.071477,-16.97568 -0.485619,0 -3.994408,3.173002 -7.797313,7.051115 -14.448869,14.734603 -29.952812,23.068339 -42.915946,23.068339 -7.400211,0 -12.4298817,-1.871115 -17.2867007,-6.430912 -2.94436186,-2.764297 -3.47532146,-4.129685 -3.47532146,-8.936928 0,-4.94488 0.4862322,-6.108589 3.78321146,-9.054437 2.987989,-2.669773 4.875111,-3.380296 8.9779137,-3.380296 3.163221,0.711278 5.032659,0.664017 6.063532,1.917191 1.045041,1.231842 1.406892,5.262673 0.143323,7.623675 -0.674746,1.260763 -2.435471,2.043539 -4.5966,2.043539 -2.040303,0 -3.203991,-0.483702 -2.786976,-1.15844 1.31395,-2.126021 -0.560952,-3.566616 -2.9664067,-2.279256 -2.907025,1.555792 -2.957418,7.069066 -0.08839,9.665535 4.0345357,3.651203 15.1912207,5.023925 21.9019857,2.694828 7.250749,-2.516503 16.739014,-8.578986 24.30831,-15.531674 l 6.657407,-6.115083 -8.688303,-0.05007 C 43.622519,44.707714 37.702703,43.621524 18.54695,38.489741 12.175528,36.782852 6.0502733,35.306342 4.9352743,35.208608 3.6710803,35.097791 2.841723,34.067882 2.9080043,32.476074 3.0199286,29.788108 4.4800823,27.78768 6.2067673,27.033038 7.2437505,26.579828 14.43583,25.894406 22.0605,23.866486 c 29.699148,-7.899023 31.502043,-6.781254 51.28707,-1.772167 6.461504,1.635896 13.942408,3.414988 17.256961,3.474566 5.106245,0.09178 6.211825,0.514653 7.240255,2.76932 0.66758,1.46355 1.21378,2.858905 1.21378,3.10079 0,0.241884 -2.89333,1.764397 -6.429613,3.383363 -12.984983,5.944723 -17.083271,9.093943 -12.855172,15.130399 1.753219,2.503069 1.718037,2.768923 -0.57922,4.37799 -1.345193,0.942203 -2.457238,2.856456 -2.471232,4.253898 -0.03777,3.776976 -2.424786,11.884847 -5.893734,15.080164 l -3.048923,2.808424 z m 6.632814,-34.658372 c 5.169656,-1.440693 8.302047,-3.07045 14.72913,-6.500861 -5.292267,-1.548658 -18.570782,-3.724097 -18.570782,-3.724097 -9.796513,-1.964547 -8.76916,-1.865132 -9.21348,0.29669 -0.176673,0.859598 -0.702644,2.763948 -1.872329,4.596663 -2.251474,3.527711 -10.489307,4.271075 -15.214327,2.009703 -1.482367,-0.709454 -2.971272,-3.416276 -2.950606,-5.336922 0.02911,-2.705486 -1.505386,-3.336055 -2.486689,-2.975309 -0.796428,0.292781 -3.384665,0.330004 -9.071284,1.864262 -18.784765,5.068157 -21.3552119,4.487473 -9.110967,6.223299 1.472409,0.208739 9.252992,2.381926 13.052028,3.39412 9.318588,2.482796 11.064717,2.665087 23.125496,2.414247 8.385835,-0.174409 11.891174,-0.675356 17.58381,-2.261795 z M 3.0589449,14.916483 C 3.2921927,12.514245 3.424378,11.992797 10.100599,10.647894 13.924923,9.8774962 23.355266,7.3808108 31.056903,5.0997052 c 17.703937,-5.2436279 22.73392,-5.2565016 41.092202,-0.105175 7.923233,2.2232606 16.798382,4.047803 19.72254,4.054541 4.567242,0.01054 6.941892,2.0284768 6.941892,2.0284768 2.101843,4.825342 1.718463,5.158474 -6.484103,5.158474 -5.714193,0 -10.641875,-0.963081 -18.245438,-3.565943 C 68.300078,10.69012 60.060462,8.8316882 55.557963,8.4915615 47.342337,7.8709375 47.353713,7.8687835 21.963188,14.855617 17.503192,16.082896 11.34213,17.454164 8.2719268,17.902883 l -5.5821654,0.81585 z" />
</svg>



@ -239,7 +239,7 @@ div.footer a {
/* -- body styles ----------------------------------------------------------- */
p {
margin: 0.8em 0 0.5em 0;
}


@ -28,6 +28,7 @@ html_sidebars = {'index': ['indexsidebar.html', 'searchbox.html']}
html_additional_pages = {'index': 'index.html'}
html_use_opensearch = 'https://www.sphinx-doc.org/en/master'
html_baseurl = 'https://www.sphinx-doc.org/en/master/'
html_favicon = '_static/favicon.svg'
htmlhelp_basename = 'Sphinxdoc'
@ -110,8 +111,6 @@ texinfo_documents = [
1),
]
# We're not using intersphinx right now, but if we did, this would be part of
# the mapping:
intersphinx_mapping = {'python': ('https://docs.python.org/3/', None)}
# Sphinx document translation with sphinx gettext feature uses these settings:


@ -140,14 +140,14 @@ started with writing your own extensions.
.. _slideshare: https://www.slideshare.net/
.. _TikZ/PGF LaTeX package: https://sourceforge.net/projects/pgf/
.. _MATLAB: https://www.mathworks.com/products/matlab.html
.. _swf: https://bitbucket.org/klorenz/sphinxcontrib-swf
.. _findanything: https://bitbucket.org/klorenz/sphinxcontrib-findanything
.. _cmakedomain: https://bitbucket.org/klorenz/sphinxcontrib-cmakedomain
.. _swf: https://github.com/sphinx-contrib/swf
.. _findanything: https://github.com/sphinx-contrib/findanything
.. _cmakedomain: https://github.com/sphinx-contrib/cmakedomain
.. _GNU Make: https://www.gnu.org/software/make/
.. _makedomain: https://bitbucket.org/klorenz/sphinxcontrib-makedomain
.. _makedomain: https://github.com/sphinx-contrib/makedomain
.. _inlinesyntaxhighlight: https://sphinxcontrib-inlinesyntaxhighlight.readthedocs.io/
.. _CMake: https://cmake.org
.. _domaintools: https://bitbucket.org/klorenz/sphinxcontrib-domaintools
.. _domaintools: https://github.com/sphinx-contrib/domaintools
.. _restbuilder: https://pypi.org/project/sphinxcontrib-restbuilder/
.. _Lasso: http://www.lassosoft.com/
.. _beamer: https://pypi.org/project/sphinxcontrib-beamer/


@ -177,17 +177,18 @@ type for that event::
9. (if running in parallel mode, for each process) event.env-merged-info(app, env, docnames, other)
10. event.env-updated(app, env)
11. event.env-get-updated(app, env)
11. event.env-check-consistency(app, env)
12. event.env-check-consistency(app, env)
# The updated-docs list can be builder dependent, but generally includes all new/changed documents,
# plus any output from `env-get-updated`, and then all "parent" documents in the ToC tree
# For builders that output a single page, they are first joined into a single doctree before post-transforms/doctree-resolved
for docname in docnames:
12. apply post-transforms (by priority): docutils.document -> docutils.document
13. event.doctree-resolved(app, doctree, docname)
for docname in updated-docs:
13. apply post-transforms (by priority): docutils.document -> docutils.document
14. event.doctree-resolved(app, doctree, docname)
- (for any reference node that fails to resolve) event.missing-reference(env, node, contnode)
14. Generate output files
15. event.build-finished(app, exception)
15. Generate output files
16. event.build-finished(app, exception)
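The event names above are what an extension passes to ``Sphinx.connect()``. As a
hedged illustration (the handler names here are invented), hooking into this
sequence looks like this::

    # Sketch only: print a line per resolved document and at the end of the build.
    from typing import Any
    from docutils import nodes
    from sphinx.application import Sphinx

    def on_doctree_resolved(app: Sphinx, doctree: nodes.document, docname: str) -> None:
        # Step 14 above: runs once per updated document, after post-transforms.
        print('resolved:', docname)

    def on_build_finished(app: Sphinx, exception: Any) -> None:
        # Step 16 above: runs last, even if the build raised an exception.
        if exception is None:
            print('build finished cleanly')

    def setup(app: Sphinx):
        app.connect('doctree-resolved', on_doctree_resolved)
        app.connect('build-finished', on_build_finished)
        return {'version': '0.1'}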
Here is a more detailed list of these events.


@ -26,6 +26,26 @@ The following is a list of deprecated interfaces.
- (will be) Removed
- Alternatives
* - ``sphinx.builders.latex.LaTeXBuilder.usepackages``
- 3.3
- 5.0
- N/A
* - ``sphinx.builders.latex.LaTeXBuilder.usepackages_after_hyperref``
- 3.3
- 5.0
- N/A
* - ``sphinx.ext.autodoc.SingledispatchFunctionDocumenter``
- 3.3
- 5.0
- ``sphinx.ext.autodoc.FunctionDocumenter``
* - ``sphinx.ext.autodoc.SingledispatchMethodDocumenter``
- 3.3
- 5.0
- ``sphinx.ext.autodoc.MethodDocumenter``
* - ``sphinx.ext.autodoc.members_set_option()``
- 3.2
- 5.0


@ -9,8 +9,8 @@ Glossary
A class (inheriting from :class:`~sphinx.builders.Builder`) that takes
parsed documents and performs an action on them. Normally, builders
translate the documents to an output format, but it is also possible to
use the builder builders that e.g. check for broken links in the
documentation, or build coverage information.
use builders that e.g. check for broken links in the documentation, or
build coverage information.
See :doc:`/usage/builders/index` for an overview of Sphinx's built-in
builders.
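A minimal, hedged sketch of such a class (the ``noop`` name and its behaviour are
invented for illustration)::

    from typing import Iterator, Set

    from docutils import nodes
    from sphinx.builders import Builder

    class NoopBuilder(Builder):
        name = 'noop'

        def get_outdated_docs(self) -> Iterator[str]:
            # Rebuild everything; a real builder would compare timestamps.
            yield from self.env.found_docs

        def get_target_uri(self, docname: str, typ: str = None) -> str:
            return ''

        def prepare_writing(self, docnames: Set[str]) -> None:
            pass

        def write_doc(self, docname: str, doctree: nodes.document) -> None:
            # Act on each parsed document here, e.g. check links or collect stats.
            pass

    def setup(app):
        app.add_builder(NoopBuilder)
        return {'version': '0.1'}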


@ -12,6 +12,9 @@ Getting help
The Sphinx community maintains a number of mailing lists and IRC channels.
Stack Overflow with tag `python-sphinx`_
Questions and answers about use and development.
sphinx-users <sphinx-users@googlegroups.com>
Mailing list for user support.
@ -21,6 +24,7 @@ sphinx-dev <sphinx-dev@googlegroups.com>
#sphinx-doc on irc.freenode.net
IRC channel for development questions and user support.
.. _python-sphinx: https://stackoverflow.com/questions/tagged/python-sphinx
Bug Reports and Feature Requests
--------------------------------


@ -33,6 +33,10 @@ Options
If specified, separate source and build directories.
.. option:: --no-sep
If specified, create the build directory inside the source directory.
.. option:: --dot=DOT
Inside the root directory, two more directories will be created;


@ -316,6 +316,7 @@ General configuration
* ``toc.circular``
* ``toc.secnum``
* ``epub.unknown_project_files``
* ``epub.duplicated_toc_entry``
* ``autosectionlabel.*``
You can choose from these types.
@ -340,6 +341,10 @@ General configuration
Added ``autosectionlabel.*``
.. versionchanged:: 3.3.0
Added ``epub.duplicated_toc_entry``
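For example, a hedged ``conf.py`` sketch that silences the new epub warning type
added above (the chosen value is only an example)::

    # conf.py
    suppress_warnings = ['epub.duplicated_toc_entry']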
.. confval:: needs_sphinx
If set to a ``major.minor`` version string like ``'1.1'``, Sphinx will
@ -756,9 +761,15 @@ documentation on :ref:`intl` for details.
If true, a document's text domain is its docname if it is a top-level
project file and its very base directory otherwise.
If set to a string, every document's text domain is that string, so all
documents share a single text domain.
By default, the document ``markup/code.rst`` ends up in the ``markup`` text
domain. With this option set to ``False``, it is ``markup/code``.
.. versionchanged:: 3.3
The string value is now accepted.
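A hedged ``conf.py`` sketch of the new string form (the domain name is arbitrary)::

    # conf.py
    gettext_compact = 'docs'   # every document goes into the single "docs" text domain
    # gettext_compact = True   # the boolean behaviour described above is unchanged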
.. confval:: gettext_uuid
If true, Sphinx generates uuid information for version tracking in message
@ -2239,6 +2250,12 @@ These options influence manual page output.
.. versionadded:: 1.1
.. confval:: man_make_section_directory
If true, create a section directory when building man pages. Default is False.
.. versionadded:: 3.3
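A hedged ``conf.py`` sketch (the project values are illustrative); with the option
enabled, the page is written to ``<outdir>/1/mytool.1`` instead of ``<outdir>/mytool.1``::

    # conf.py
    man_pages = [('index', 'mytool', 'My Tool manual', ['Author Name'], 1)]
    man_make_section_directory = True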
.. _texinfo-options:


@ -229,7 +229,7 @@ inserting them into the page source under a suitable :rst:dir:`py:module`,
.. versionchanged:: 3.0
It takes an anchestor class name as an argument.
It takes an ancestor class name as an argument.
* It's possible to override the signature for explicitly documented callable
objects (functions, methods, classes) with the regular syntax that will
@ -515,6 +515,44 @@ There are also config values that you can set:
New option ``'description'`` is added.
.. confval:: autodoc_type_aliases
A dictionary of user-defined `type aliases`__ that maps a type name to the
fully-qualified object name. It is used to keep type aliases unevaluated in
the document. Defaults to empty (``{}``).
The type aliases are only available if your program enables the `Postponed
Evaluation of Annotations (PEP 563)`__ feature via ``from __future__ import
annotations``.
For example, consider code using a type alias::
from __future__ import annotations
AliasType = Union[List[Dict[Tuple[int, str], Set[int]]], Tuple[str, List[str]]]
def f() -> AliasType:
...
If ``autodoc_type_aliases`` is not set, autodoc will generate internal mark-up
from this code as follows::
.. py:function:: f() -> Union[List[Dict[Tuple[int, str], Set[int]]], Tuple[str, List[str]]]
...
If you set ``autodoc_type_aliases`` to
``{'AliasType': 'your.module.AliasType'}``, it generates the following document
internally::
.. py:function:: f() -> your.module.AliasType:
...
.. __: https://www.python.org/dev/peps/pep-0563/
.. __: https://mypy.readthedocs.io/en/latest/kinds_of_types.html#type-aliases
.. versionadded:: 3.3
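Putting the pieces above into ``conf.py`` (a hedged sketch; the module path is
hypothetical)::

    # conf.py
    extensions = ['sphinx.ext.autodoc']
    autodoc_type_aliases = {
        'AliasType': 'your.module.AliasType',
    }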
.. confval:: autodoc_warningiserror
This value controls the behavior of :option:`sphinx-build -W` during


@ -51,7 +51,7 @@ should check:
.. versionadded:: 1.1
.. confval:: coverage_show_missing_items
Also print objects that are missing to standard output.
``False`` by default.
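A hedged ``conf.py`` sketch::

    # conf.py
    extensions = ['sphinx.ext.coverage']
    coverage_show_missing_items = True   # also echo missing objects to stdout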


@ -171,7 +171,7 @@ Docker images for Sphinx are published on the `Docker Hub <https://hub.docker.co
- `sphinxdoc/sphinx <https://hub.docker.com/repository/docker/sphinxdoc/sphinx>`_
- `sphinxdoc/sphinx-latexpdf <https://hub.docker.com/repository/docker/sphinxdoc/sphinx-latexpdf>`_
The former is for standard Sphinx usage; the latter is mainly for PDF builds using LaTeX.
Please choose one for your purpose.
.. note::


@ -15,7 +15,7 @@ Much of Sphinx's power comes from the richness of its default plain-text markup
format, :doc:`reStructuredText </usage/restructuredtext/index>`, along with
its :doc:`significant extensibility capabilities </development/index>`.
The goal of this document is to give you a quick taste of what Sphinx it is and
The goal of this document is to give you a quick taste of what Sphinx is and
how you might use it. When you're done here, you can check out the
:doc:`installation guide </usage/installation>` followed by the intro to the
default markup format used by Sphinx, :doc:`reStructuredText


@ -665,7 +665,7 @@ __ http://pygments.org/docs/lexers
.. note::
If you want to select only ``[second-section]`` of ini file like the
following, you can use ``:start-at: [second-section]`` and
``:end-before: [third-section]``:
.. code-block:: ini
@ -692,7 +692,7 @@ __ http://pygments.org/docs/lexers
# [initialize]
app.start(":8000")
# [initialize]
When lines have been selected in any of the ways described above, the line
numbers in ``emphasize-lines`` refer to those selected lines, counted


@ -744,6 +744,18 @@ The following directive can be used for this purpose.
.. versionadded:: 3.2
.. rubric:: Options
.. rst:directive:option:: maxdepth: int
Insert nested declarations as well, up to the total depth given.
Use 0 for infinite depth and 1 for just the mentioned declaration.
Defaults to 1.
.. versionadded:: 3.3
.. c:namespace-pop::

package-lock.json (generated)

@ -385,12 +385,6 @@
"integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=",
"dev": true
},
"eventemitter3": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-3.1.2.tgz",
"integrity": "sha512-tvtQIeLVHjDkJYnzf2dgVMxfuSGJeM/7UCG17TT4EumTfNtF+0nebF/4zWOIkCreAbtNqhGEboB6BWrwqNaw4Q==",
"dev": true
},
"extend": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
@ -535,14 +529,22 @@
}
},
"http-proxy": {
"version": "1.17.0",
"resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.17.0.tgz",
"integrity": "sha512-Taqn+3nNvYRfJ3bGvKfBSRwy1v6eePlm3oc/aWVxZp57DQr5Eq3xhKJi7Z4hZpS8PC3H4qI+Yly5EmFacGuA/g==",
"version": "1.18.1",
"resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz",
"integrity": "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==",
"dev": true,
"requires": {
"eventemitter3": "^3.0.0",
"eventemitter3": "^4.0.0",
"follow-redirects": "^1.0.0",
"requires-port": "^1.0.0"
},
"dependencies": {
"eventemitter3": {
"version": "4.0.7",
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz",
"integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==",
"dev": true
}
}
},
"iconv-lite": {


@ -57,10 +57,8 @@ filterwarnings =
ignore::DeprecationWarning:pyximport.pyximport
ignore::PendingDeprecationWarning:sphinx.util.pycompat
markers =
sphinx
apidoc
setup_command
test_params
testpaths = tests
[coverage:run]


@ -44,7 +44,7 @@ extras_require = {
'lint': [
'flake8>=3.5.0',
'flake8-import-order',
'mypy>=0.780',
'mypy>=0.790',
'docutils-stubs',
],
'test': [


@ -32,8 +32,8 @@ if 'PYTHONWARNINGS' not in os.environ:
warnings.filterwarnings('ignore', "'U' mode is deprecated",
DeprecationWarning, module='docutils.io')
__version__ = '3.2.2+'
__released__ = '3.2.2' # used when Sphinx builds its own docs
__version__ = '3.3.0+'
__released__ = '3.3.0' # used when Sphinx builds its own docs
#: Version info for better programmatic use.
#:
@ -43,7 +43,7 @@ __released__ = '3.2.2' # used when Sphinx builds its own docs
#:
#: .. versionadded:: 1.2
#: Before version 1.2, check the string ``sphinx.__version__``.
version_info = (3, 2, 2, 'beta', 0)
version_info = (3, 3, 0, 'beta', 0)
package_dir = path.abspath(path.dirname(__file__))


@ -18,10 +18,11 @@ import warnings
from collections import deque
from io import StringIO
from os import path
from typing import Any, Callable, Dict, IO, List, Tuple, Union
from typing import Any, Callable, Dict, IO, List, Optional, Tuple, Union
from docutils import nodes
from docutils.nodes import Element, TextElement
from docutils.parsers import Parser
from docutils.parsers.rst import Directive, roles
from docutils.transforms import Transform
from pygments.lexer import Lexer
@ -293,7 +294,10 @@ class Sphinx:
if catalog.domain == 'sphinx' and catalog.is_outdated():
catalog.write_mo(self.config.language)
locale_dirs = [None, path.join(package_dir, 'locale')] + list(repo.locale_dirs)
locale_dirs = [None] # type: List[Optional[str]]
locale_dirs += list(repo.locale_dirs)
locale_dirs += [path.join(package_dir, 'locale')]
self.translator, has_translation = locale.init(locale_dirs, self.config.language)
if has_translation or self.config.language == 'en':
# "en" never needs to be translated
@ -468,8 +472,10 @@ class Sphinx:
def add_builder(self, builder: "Type[Builder]", override: bool = False) -> None:
"""Register a new builder.
*builder* must be a class that inherits from
:class:`~sphinx.builders.Builder`.
*builder* must be a class that inherits from :class:`~sphinx.builders.Builder`.
If *override* is True, the given *builder* is forcedly installed even if
a builder having the same name is already installed.
.. versionchanged:: 1.8
Add *override* keyword.
@ -526,6 +532,9 @@ class Sphinx:
builtin translator. This allows extensions to use custom translator
and define custom nodes for the translator (see :meth:`add_node`).
If *override* is True, the given *translator_class* is forcedly installed even if
a translator for *name* is already installed.
.. versionadded:: 1.3
.. versionchanged:: 1.8
Add *override* keyword.
@ -560,6 +569,9 @@ class Sphinx:
Obviously, translators for which you don't specify visitor methods will
choke on the node when encountered in a document to translate.
If *override* is True, the given *node* is forcedly installed even if
a node having the same name is already installed.
.. versionchanged:: 0.5
Added the support for keyword arguments giving visit functions.
"""
@ -595,6 +607,9 @@ class Sphinx:
Other keyword arguments are used for node visitor functions. See the
:meth:`.Sphinx.add_node` for details.
If *override* is True, the given *node* is forcedly installed even if
a node having the same name is already installed.
.. versionadded:: 1.4
"""
self.registry.add_enumerable_node(node, figtype, title_getter, override=override)
@ -608,14 +623,14 @@ class Sphinx:
details, see `the Docutils docs
<http://docutils.sourceforge.net/docs/howto/rst-directives.html>`_ .
For example, the (already existing) :rst:dir:`literalinclude` directive
would be added like this:
For example, a custom directive named ``my-directive`` would be added
like this:
.. code-block:: python
from docutils.parsers.rst import Directive, directives
class LiteralIncludeDirective(Directive):
class MyDirective(Directive):
has_content = True
required_arguments = 1
optional_arguments = 0
@ -628,7 +643,11 @@ class Sphinx:
def run(self):
...
add_directive('literalinclude', LiteralIncludeDirective)
def setup(app):
app.add_directive('my-directive', MyDirective)
If *override* is True, the given *cls* is forcedly installed even if
a directive named as *name* is already installed.
.. versionchanged:: 0.6
Docutils 0.5-style directive classes are now supported.
@ -652,6 +671,9 @@ class Sphinx:
<http://docutils.sourceforge.net/docs/howto/rst-roles.html>`_ for
more information.
If *override* is True, the given *role* is forcedly installed even if
a role named as *name* is already installed.
.. versionchanged:: 1.8
Add *override* keyword.
"""
@ -667,6 +689,9 @@ class Sphinx:
Register a Docutils role that does nothing but wrap its contents in the
node given by *nodeclass*.
If *override* is True, the given *nodeclass* is forcedly installed even if
a role named as *name* is already installed.
.. versionadded:: 0.6
.. versionchanged:: 1.8
Add *override* keyword.
@ -686,6 +711,9 @@ class Sphinx:
Make the given *domain* (which must be a class; more precisely, a
subclass of :class:`~sphinx.domains.Domain`) known to Sphinx.
If *override* is True, the given *domain* is forcedly installed even if
a domain having the same name is already installed.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
@ -699,6 +727,9 @@ class Sphinx:
Like :meth:`add_directive`, but the directive is added to the domain
named *domain*.
If *override* is True, the given *directive* is forcedly installed even if
a directive named as *name* is already installed.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
@ -712,6 +743,9 @@ class Sphinx:
Like :meth:`add_role`, but the role is added to the domain named
*domain*.
If *override* is True, the given *role* is forcedly installed even if
a role named as *name* is already installed.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
@ -725,6 +759,9 @@ class Sphinx:
Add a custom *index* class to the domain named *domain*. *index* must
be a subclass of :class:`~sphinx.domains.Index`.
If *override* is True, the given *index* is forcedly installed even if
an index having the same name is already installed.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
@ -788,6 +825,9 @@ class Sphinx:
For the role content, you have the same syntactical possibilities as
for standard Sphinx roles (see :ref:`xref-syntax`).
If *override* is True, the given object_type is forcedly installed even if
an object_type having the same name is already installed.
.. versionchanged:: 1.8
Add *override* keyword.
"""
@ -824,6 +864,9 @@ class Sphinx:
(Of course, the element following the ``topic`` directive needn't be a
section.)
If *override* is True, the given crossref_type is forcedly installed even if
a crossref_type having the same name is already installed.
.. versionchanged:: 1.8
Add *override* keyword.
"""
@ -1004,7 +1047,7 @@ class Sphinx:
logger.debug('[app] adding lexer: %r', (alias, lexer))
if isinstance(lexer, Lexer):
warnings.warn('app.add_lexer() API changed; '
'Please give lexer class instead instance',
'Please give lexer class instead of instance',
RemovedInSphinx40Warning, stacklevel=2)
lexers[alias] = lexer
else:
@ -1019,6 +1062,9 @@ class Sphinx:
new types of objects. See the source of the autodoc module for
examples on how to subclass :class:`Documenter`.
If *override* is True, the given *cls* is forcedly installed even if
a documenter having the same name is already installed.
.. todo:: Add real docs for Documenter and subclassing
.. versionadded:: 0.6
@ -1067,13 +1113,19 @@ class Sphinx:
Same as :confval:`source_suffix`. The users can override this
using the setting.
If *override* is True, the given *suffix* is forcedly installed even if
a same suffix is already installed.
.. versionadded:: 1.8
"""
self.registry.add_source_suffix(suffix, filetype, override=override)
def add_source_parser(self, *args: Any, **kwargs: Any) -> None:
def add_source_parser(self, parser: "Type[Parser]", override: bool = False) -> None:
"""Register a parser class.
If *override* is True, the given *parser* is forcedly installed even if
a parser for the same suffix is already installed.
.. versionadded:: 1.4
.. versionchanged:: 1.8
*suffix* argument is deprecated. It only accepts *parser* argument.
@ -1081,7 +1133,7 @@ class Sphinx:
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_source_parser(*args, **kwargs)
self.registry.add_source_parser(parser, override=override)
def add_env_collector(self, collector: "Type[EnvironmentCollector]") -> None:
"""Register an environment collector class.


@ -208,7 +208,12 @@ class EpubBuilder(StandaloneHTMLBuilder):
appeared = set() # type: Set[str]
for node in nodes:
if node['refuri'] in appeared:
logger.warning(__('duplicated ToC entry found: %s'), node['refuri'])
logger.warning(
__('duplicated ToC entry found: %s'),
node['refuri'],
type="epub",
subtype="duplicated_toc_entry",
)
else:
appeared.add(node['refuri'])


@ -316,7 +316,7 @@ class MessageCatalogBuilder(I18nBuilder):
def setup(app: Sphinx) -> Dict[str, Any]:
app.add_builder(MessageCatalogBuilder)
app.add_config_value('gettext_compact', True, 'gettext')
app.add_config_value('gettext_compact', True, 'gettext', Any)
app.add_config_value('gettext_location', True, 'gettext')
app.add_config_value('gettext_uuid', False, 'gettext')
app.add_config_value('gettext_auto_build', True, 'env')


@ -641,17 +641,17 @@ class StandaloneHTMLBuilder(Builder):
def gen_additional_pages(self) -> None:
# additional pages from conf.py
for pagename, template in self.config.html_additional_pages.items():
logger.info(' ' + pagename, nonl=True)
logger.info(pagename + ' ', nonl=True)
self.handle_page(pagename, {}, template)
# the search page
if self.search:
logger.info(' search', nonl=True)
logger.info('search ', nonl=True)
self.handle_page('search', {}, 'search.html')
# the opensearch xml file
if self.config.html_use_opensearch and self.search:
logger.info(' opensearch', nonl=True)
logger.info('opensearch ', nonl=True)
fn = path.join(self.outdir, '_static', 'opensearch.xml')
self.handle_page('opensearch', {}, 'opensearch.xml', outfilename=fn)
@ -669,7 +669,7 @@ class StandaloneHTMLBuilder(Builder):
'genindexcounts': indexcounts,
'split_index': self.config.html_split_index,
}
logger.info(' genindex', nonl=True)
logger.info('genindex ', nonl=True)
if self.config.html_split_index:
self.handle_page('genindex', genindexcontext,
@ -691,7 +691,7 @@ class StandaloneHTMLBuilder(Builder):
'content': content,
'collapse_index': collapse,
}
logger.info(' ' + indexname, nonl=True)
logger.info(indexname + ' ', nonl=True)
self.handle_page(indexname, indexcontext, 'domainindex.html')
def copy_image_files(self) -> None:
@ -751,18 +751,27 @@ class StandaloneHTMLBuilder(Builder):
copyfile(jsfile, path.join(self.outdir, '_static', '_stemmer.js'))
def copy_theme_static_files(self, context: Dict) -> None:
def onerror(filename: str, error: Exception) -> None:
logger.warning(__('Failed to copy a file in html_static_file: %s: %r'),
filename, error)
if self.theme:
for entry in self.theme.get_theme_dirs()[::-1]:
copy_asset(path.join(entry, 'static'),
path.join(self.outdir, '_static'),
excluded=DOTFILES, context=context, renderer=self.templates)
excluded=DOTFILES, context=context,
renderer=self.templates, onerror=onerror)
def copy_html_static_files(self, context: Dict) -> None:
def onerror(filename: str, error: Exception) -> None:
logger.warning(__('Failed to copy a file in html_static_file: %s: %r'),
filename, error)
excluded = Matcher(self.config.exclude_patterns + ["**/.*"])
for entry in self.config.html_static_path:
copy_asset(path.join(self.confdir, entry),
path.join(self.outdir, '_static'),
excluded, context=context, renderer=self.templates)
excluded, context=context, renderer=self.templates, onerror=onerror)
def copy_html_logo(self) -> None:
if self.config.html_logo:
@ -776,7 +785,7 @@ class StandaloneHTMLBuilder(Builder):
def copy_static_files(self) -> None:
try:
with progress_message(__('copying static files... ')):
with progress_message(__('copying static files')):
ensuredir(path.join(self.outdir, '_static'))
# prepare context for templates


@ -24,7 +24,7 @@ from sphinx.builders.latex.constants import ADDITIONAL_SETTINGS, DEFAULT_SETTING
from sphinx.builders.latex.theming import Theme, ThemeFactory
from sphinx.builders.latex.util import ExtBabel
from sphinx.config import Config, ENUM
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.deprecation import RemovedInSphinx40Warning, RemovedInSphinx50Warning
from sphinx.environment.adapters.asset import ImageAdapter
from sphinx.errors import NoUri, SphinxError
from sphinx.locale import _, __
@ -128,8 +128,6 @@ class LaTeXBuilder(Builder):
self.docnames = [] # type: Iterable[str]
self.document_data = [] # type: List[Tuple[str, str, str, str, str, bool]]
self.themes = ThemeFactory(self.app)
self.usepackages = self.app.registry.latex_packages
self.usepackages_after_hyperref = self.app.registry.latex_packages_after_hyperref
texescape.init()
self.init_context()
@ -179,10 +177,6 @@ class LaTeXBuilder(Builder):
key = (self.config.latex_engine, self.config.language[:2])
self.context.update(ADDITIONAL_SETTINGS.get(key, {}))
# Apply extension settings to context
self.context['packages'] = self.usepackages
self.context['packages_after_hyperref'] = self.usepackages_after_hyperref
# Apply user settings to context
self.context.update(self.config.latex_elements)
self.context['release'] = self.config.release
@ -203,6 +197,13 @@ class LaTeXBuilder(Builder):
# Show the release label only if release value exists
self.context.setdefault('releasename', _('Release'))
def update_context(self) -> None:
"""Update template variables for .tex file just before writing."""
# Apply extension settings to context
registry = self.app.registry
self.context['packages'] = registry.latex_packages
self.context['packages_after_hyperref'] = registry.latex_packages_after_hyperref
def init_babel(self) -> None:
self.babel = ExtBabel(self.config.language, not self.context['babel'])
if self.config.language and not self.babel.is_supported_language():
@ -290,6 +291,7 @@ class LaTeXBuilder(Builder):
doctree['tocdepth'] = tocdepth
self.post_process_images(doctree)
self.update_doc_context(title, author, theme)
self.update_context()
with progress_message(__("writing")):
docsettings._author = author
@ -448,6 +450,18 @@ class LaTeXBuilder(Builder):
filename = path.join(package_dir, 'templates', 'latex', 'sphinxmessages.sty_t')
copy_asset_file(filename, self.outdir, context=context, renderer=LaTeXRenderer())
@property
def usepackages(self) -> List[Tuple[str, str]]:
warnings.warn('LaTeXBuilder.usepackages is deprecated.',
RemovedInSphinx50Warning, stacklevel=2)
return self.app.registry.latex_packages
@property
def usepackages_after_hyperref(self) -> List[Tuple[str, str]]:
warnings.warn('LaTeXBuilder.usepackages_after_hyperref is deprecated.',
RemovedInSphinx50Warning, stacklevel=2)
return self.app.registry.latex_packages_after_hyperref
def patch_settings(settings: Any) -> Any:
"""Make settings object to show deprecation messages."""
@ -503,9 +517,9 @@ def validate_latex_theme_options(app: Sphinx, config: Config) -> None:
config.latex_theme_options.pop(key)
def install_pakcages_for_ja(app: Sphinx) -> None:
def install_packages_for_ja(app: Sphinx) -> None:
"""Install packages for Japanese."""
if app.config.language == 'ja':
if app.config.language == 'ja' and app.config.latex_engine in ('platex', 'uplatex'):
app.add_latex_package('pxjahyper', after_hyperref=True)
@ -556,7 +570,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:
app.add_builder(LaTeXBuilder)
app.connect('config-inited', validate_config_values, priority=800)
app.connect('config-inited', validate_latex_theme_options, priority=800)
app.connect('builder-inited', install_pakcages_for_ja)
app.connect('builder-inited', install_packages_for_ja)
app.add_config_value('latex_engine', default_latex_engine, None,
ENUM('pdflatex', 'xelatex', 'lualatex', 'platex', 'uplatex'))


@ -106,8 +106,7 @@ class CheckExternalLinksBuilder(Builder):
self.rqueue = queue.Queue() # type: queue.Queue
self.workers = [] # type: List[threading.Thread]
for i in range(self.app.config.linkcheck_workers):
thread = threading.Thread(target=self.check_thread)
thread.setDaemon(True)
thread = threading.Thread(target=self.check_thread, daemon=True)
thread.start()
self.workers.append(thread)
@ -166,6 +165,7 @@ class CheckExternalLinksBuilder(Builder):
# Read the whole document and see if #anchor exists
response = requests.get(req_url, stream=True, config=self.app.config,
auth=auth_info, **kwargs)
response.raise_for_status()
found = check_anchor(response, unquote(anchor))
if not found:
@ -210,16 +210,17 @@ class CheckExternalLinksBuilder(Builder):
else:
return 'redirected', new_url, 0
def check() -> Tuple[str, str, int]:
def check(docname: str) -> Tuple[str, str, int]:
# check for various conditions without bothering the network
if len(uri) == 0 or uri.startswith(('#', 'mailto:')):
if len(uri) == 0 or uri.startswith(('#', 'mailto:', 'tel:')):
return 'unchecked', '', 0
elif not uri.startswith(('http:', 'https:')):
if uri_re.match(uri):
# non supported URI schemes (ex. ftp)
return 'unchecked', '', 0
else:
if path.exists(path.join(self.srcdir, uri)):
srcdir = path.dirname(self.env.doc2path(docname))
if path.exists(path.join(srcdir, uri)):
return 'working', '', 0
else:
for rex in self.to_ignore:
@ -256,7 +257,7 @@ class CheckExternalLinksBuilder(Builder):
uri, docname, lineno = self.wqueue.get()
if uri is None:
break
status, info, code = check()
status, info, code = check(docname)
self.rqueue.put((uri, docname, lineno, status, info, code))
def process_result(self, result: Tuple[str, str, int, str, str, int]) -> None:


@ -24,7 +24,7 @@ from sphinx.util import logging
from sphinx.util import progress_message
from sphinx.util.console import darkgreen # type: ignore
from sphinx.util.nodes import inline_all_toctrees
from sphinx.util.osutil import make_filename_from_project
from sphinx.util.osutil import ensuredir, make_filename_from_project
from sphinx.writers.manpage import ManualPageWriter, ManualPageTranslator
@ -80,7 +80,12 @@ class ManualPageBuilder(Builder):
docsettings.authors = authors
docsettings.section = section
targetname = '%s.%s' % (name, section)
if self.config.man_make_section_directory:
ensuredir(path.join(self.outdir, str(section)))
targetname = '%s/%s.%s' % (section, name, section)
else:
targetname = '%s.%s' % (name, section)
logger.info(darkgreen(targetname) + ' { ', nonl=True)
destination = FileOutput(
destination_path=path.join(self.outdir, targetname),
@ -115,6 +120,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:
app.add_config_value('man_pages', default_man_pages, None)
app.add_config_value('man_show_urls', False, None)
app.add_config_value('man_make_section_directory', False, None)
return {
'version': 'builtin',


@ -489,8 +489,10 @@ def get_parser() -> argparse.ArgumentParser:
help=__('project root'))
group = parser.add_argument_group(__('Structure options'))
group.add_argument('--sep', action='store_true', default=None,
group.add_argument('--sep', action='store_true', dest='sep', default=None,
help=__('if specified, separate source and build dirs'))
group.add_argument('--no-sep', action='store_false', dest='sep',
help=__('if specified, create build dir under source dir'))
group.add_argument('--dot', metavar='DOT', default='_',
help=__('replacement for dot in _templates etc.'))


@ -72,7 +72,7 @@ def dedent_lines(lines: List[str], dedent: int, location: Tuple[str, int] = None
return lines
if any(s[:dedent].strip() for s in lines):
logger.warning(__('Over dedent has detected'), location=location)
logger.warning(__('non-whitespace stripped by dedent'), location=location)
new_lines = []
for line in lines:


@ -10,9 +10,8 @@
import re
from typing import (
Any, Callable, Dict, Generator, Iterator, List, Type, TypeVar, Tuple, Union
Any, Callable, cast, Dict, Generator, Iterator, List, Type, TypeVar, Tuple, Union
)
from typing import cast
from docutils import nodes
from docutils.nodes import Element, Node, TextElement, system_message
@ -47,6 +46,11 @@ from sphinx.util.nodes import make_refnode
logger = logging.getLogger(__name__)
T = TypeVar('T')
DeclarationType = Union[
"ASTStruct", "ASTUnion", "ASTEnum", "ASTEnumerator",
"ASTType", "ASTTypeWithInit", "ASTMacro",
]
# https://en.cppreference.com/w/c/keyword
_keywords = [
'auto', 'break', 'case', 'char', 'const', 'continue', 'default', 'do', 'double',
@ -136,8 +140,8 @@ class ASTIdentifier(ASTBaseBase):
reftype='identifier',
reftarget=targetText, modname=None,
classname=None)
# key = symbol.get_lookup_key()
# pnode['c:parent_key'] = key
key = symbol.get_lookup_key()
pnode['c:parent_key'] = key
if self.is_anon():
pnode += nodes.strong(text="[anonymous]")
else:
@ -636,6 +640,10 @@ class ASTFunctionParameter(ASTBase):
self.arg = arg
self.ellipsis = ellipsis
def get_id(self, version: int, objectType: str, symbol: "Symbol") -> str:
# the anchor will be our parent
return symbol.parent.declaration.get_id(version, prefixed=False)
def _stringify(self, transform: StringifyTransform) -> str:
if self.ellipsis:
return '...'
@ -1149,6 +1157,9 @@ class ASTType(ASTBase):
def name(self) -> ASTNestedName:
return self.decl.name
def get_id(self, version: int, objectType: str, symbol: "Symbol") -> str:
return symbol.get_full_nested_name().get_id(version)
@property
def function_params(self) -> List[ASTFunctionParameter]:
return self.decl.function_params
@ -1191,6 +1202,9 @@ class ASTTypeWithInit(ASTBase):
def name(self) -> ASTNestedName:
return self.type.name
def get_id(self, version: int, objectType: str, symbol: "Symbol") -> str:
return self.type.get_id(version, objectType, symbol)
def _stringify(self, transform: StringifyTransform) -> str:
res = []
res.append(transform(self.type))
@ -1242,6 +1256,9 @@ class ASTMacro(ASTBase):
def name(self) -> ASTNestedName:
return self.ident
def get_id(self, version: int, objectType: str, symbol: "Symbol") -> str:
return symbol.get_full_nested_name().get_id(version)
def _stringify(self, transform: StringifyTransform) -> str:
res = []
res.append(transform(self.ident))
@ -1342,7 +1359,8 @@ class ASTEnumerator(ASTBase):
class ASTDeclaration(ASTBaseBase):
def __init__(self, objectType: str, directiveType: str, declaration: Any,
def __init__(self, objectType: str, directiveType: str,
declaration: Union[DeclarationType, ASTFunctionParameter],
semicolon: bool = False) -> None:
self.objectType = objectType
self.directiveType = directiveType
@ -1359,18 +1377,20 @@ class ASTDeclaration(ASTBaseBase):
@property
def name(self) -> ASTNestedName:
return self.declaration.name
decl = cast(DeclarationType, self.declaration)
return decl.name
@property
def function_params(self) -> List[ASTFunctionParameter]:
if self.objectType != 'function':
return None
return self.declaration.function_params
decl = cast(ASTType, self.declaration)
return decl.function_params
def get_id(self, version: int, prefixed: bool = True) -> str:
if self.objectType == 'enumerator' and self.enumeratorScopedSymbol:
return self.enumeratorScopedSymbol.declaration.get_id(version, prefixed)
id_ = self.symbol.get_full_nested_name().get_id(version)
id_ = self.declaration.get_id(version, self.objectType, self.symbol)
if prefixed:
return _id_prefix[version] + id_
else:
@ -1413,7 +1433,8 @@ class ASTDeclaration(ASTBaseBase):
elif self.objectType == 'enumerator':
mainDeclNode += addnodes.desc_annotation('enumerator ', 'enumerator ')
elif self.objectType == 'type':
prefix = self.declaration.get_type_declaration_prefix()
decl = cast(ASTType, self.declaration)
prefix = decl.get_type_declaration_prefix()
prefix += ' '
mainDeclNode += addnodes.desc_annotation(prefix, prefix)
else:
@ -1562,6 +1583,11 @@ class Symbol:
for s in sChild.get_all_symbols():
yield s
@property
def children(self) -> Iterator["Symbol"]:
for c in self._children:
yield c
@property
def children_recurse_anon(self) -> Iterator["Symbol"]:
for c in self._children:
@ -1792,7 +1818,7 @@ class Symbol:
if not declaration:
if Symbol.debug_lookup:
Symbol.debug_print("no delcaration")
Symbol.debug_print("no declaration")
Symbol.debug_indent -= 2
# good, just a scope creation
# TODO: what if we have more than one symbol?
@ -2983,7 +3009,7 @@ class DefinitionParser(BaseParser):
def parse_pre_v3_type_definition(self) -> ASTDeclaration:
self.skip_ws()
declaration = None # type: Any
declaration = None # type: DeclarationType
if self.skip_word('struct'):
typ = 'struct'
declaration = self._parse_struct()
@ -3006,7 +3032,7 @@ class DefinitionParser(BaseParser):
'macro', 'struct', 'union', 'enum', 'enumerator', 'type'):
raise Exception('Internal error, unknown directiveType "%s".' % directiveType)
declaration = None # type: Any
declaration = None # type: DeclarationType
if objectType == 'member':
declaration = self._parse_type_with_init(named=True, outer='member')
elif objectType == 'function':
@ -3153,10 +3179,6 @@ class CObject(ObjectDescription):
self.state.document.note_explicit_target(signode)
domain = cast(CDomain, self.env.get_domain('c'))
if name not in domain.objects:
domain.objects[name] = (domain.env.docname, newestId, self.objtype)
if 'noindexentry' not in self.options:
indexText = self.get_index_text(name)
self.indexnode['entries'].append(('single', indexText, newestId, '', None))
@ -3408,10 +3430,13 @@ class CNamespacePopObject(SphinxDirective):
class AliasNode(nodes.Element):
def __init__(self, sig: str, env: "BuildEnvironment" = None,
def __init__(self, sig: str, maxdepth: int, document: Any, env: "BuildEnvironment" = None,
parentKey: LookupKey = None) -> None:
super().__init__()
self.sig = sig
self.maxdepth = maxdepth
assert maxdepth >= 0
self.document = document
if env is not None:
if 'c:parent_symbol' not in env.temp_data:
root = env.domaindata['c']['root_symbol']
@ -3428,6 +3453,37 @@ class AliasNode(nodes.Element):
class AliasTransform(SphinxTransform):
default_priority = ReferencesResolver.default_priority - 1
def _render_symbol(self, s: Symbol, maxdepth: int, document: Any) -> List[Node]:
nodes = [] # type: List[Node]
options = dict() # type: ignore
signode = addnodes.desc_signature('', '')
nodes.append(signode)
s.declaration.describe_signature(signode, 'markName', self.env, options)
if maxdepth == 0:
recurse = True
elif maxdepth == 1:
recurse = False
else:
maxdepth -= 1
recurse = True
if recurse:
content = addnodes.desc_content()
desc = addnodes.desc()
content.append(desc)
desc.document = document
desc['domain'] = 'c'
# 'desctype' is a backwards compatible attribute
desc['objtype'] = desc['desctype'] = 'alias'
desc['noindex'] = True
for sChild in s.children:
childNodes = self._render_symbol(sChild, maxdepth, document)
desc.extend(childNodes)
if len(desc.children) != 0:
nodes.append(content)
return nodes
def apply(self, **kwargs: Any) -> None:
for node in self.document.traverse(AliasNode):
sig = node.sig
@ -3468,17 +3524,16 @@ class AliasTransform(SphinxTransform):
logger.warning("Could not find C declaration for alias '%s'." % name,
location=node)
node.replace_self(signode)
else:
nodes = []
options = dict() # type: ignore
signode = addnodes.desc_signature(sig, '')
nodes.append(signode)
s.declaration.describe_signature(signode, 'markName', self.env, options)
node.replace_self(nodes)
continue
nodes = self._render_symbol(s, maxdepth=node.maxdepth, document=node.document)
node.replace_self(nodes)
class CAliasObject(ObjectDescription):
option_spec = {} # type: Dict
option_spec = {
'maxdepth': directives.nonnegative_int
} # type: Dict
def run(self) -> List[Node]:
if ':' in self.name:
@ -3494,16 +3549,10 @@ class CAliasObject(ObjectDescription):
node['noindex'] = True
self.names = [] # type: List[str]
maxdepth = self.options.get('maxdepth', 1)
signatures = self.get_signatures()
for i, sig in enumerate(signatures):
node.append(AliasNode(sig, env=self.env))
contentnode = addnodes.desc_content()
node.append(contentnode)
self.before_content()
self.state.nested_parse(self.content, self.content_offset, contentnode)
self.env.temp_data['object'] = None
self.after_content()
node.append(AliasNode(sig, maxdepth, self.state.document, env=self.env))
return [node]
@ -3607,6 +3656,10 @@ class CDomain(Domain):
'macro': ObjType(_('macro'), 'macro'),
'type': ObjType(_('type'), 'type'),
'var': ObjType(_('variable'), 'data'),
'enum': ObjType(_('enum'), 'enum'),
'enumerator': ObjType(_('enumerator'), 'enumerator'),
'struct': ObjType(_('struct'), 'struct'),
'union': ObjType(_('union'), 'union'),
}
directives = {
@ -3645,10 +3698,6 @@ class CDomain(Domain):
'objects': {}, # fullname -> docname, node_id, objtype
} # type: Dict[str, Union[Symbol, Dict[str, Tuple[str, str, str]]]]
@property
def objects(self) -> Dict[str, Tuple[str, str, str]]:
return self.data.setdefault('objects', {}) # fullname -> docname, node_id, objtype
def clear_doc(self, docname: str) -> None:
if Symbol.debug_show_tree:
print("clear_doc:", docname)
@ -3664,9 +3713,6 @@ class CDomain(Domain):
print(self.data['root_symbol'].dump(1))
print("\tafter end")
print("clear_doc end:", docname)
for fullname, (fn, _id, _l) in list(self.objects.items()):
if fn == docname:
del self.objects[fullname]
def process_doc(self, env: BuildEnvironment, docname: str,
document: nodes.document) -> None:
@ -3752,8 +3798,18 @@ class CDomain(Domain):
return []
def get_objects(self) -> Iterator[Tuple[str, str, str, str, str, int]]:
for refname, (docname, node_id, objtype) in list(self.objects.items()):
yield (refname, refname, objtype, docname, node_id, 1)
rootSymbol = self.data['root_symbol']
for symbol in rootSymbol.get_all_symbols():
if symbol.declaration is None:
continue
assert symbol.docname
fullNestedName = symbol.get_full_nested_name()
name = str(fullNestedName).lstrip('.')
dispname = fullNestedName.get_display_string().lstrip('.')
objectType = symbol.declaration.objectType
docname = symbol.docname
newestId = symbol.declaration.get_newest_id()
yield (name, dispname, objectType, docname, newestId, 1)
def setup(app: Sphinx) -> Dict[str, Any]:


@ -1836,7 +1836,7 @@ class ASTFunctionParameter(ASTBase):
# this is not part of the normal name mangling in C++
if symbol:
# the anchor will be our parent
return symbol.parent.declaration.get_id(version, prefixed=None)
return symbol.parent.declaration.get_id(version, prefixed=False)
# else, do the usual
if self.ellipsis:
return 'z'
@ -4107,7 +4107,7 @@ class Symbol:
Symbol.debug_print("self:")
print(self.to_string(Symbol.debug_indent + 1), end="")
Symbol.debug_print("nestedName: ", nestedName)
Symbol.debug_print("templateDecls: ", templateDecls)
Symbol.debug_print("templateDecls: ", ",".join(str(t) for t in templateDecls))
Symbol.debug_print("strictTemplateParamArgLists:", strictTemplateParamArgLists)
Symbol.debug_print("ancestorLookupType:", ancestorLookupType)
Symbol.debug_print("templateShorthand: ", templateShorthand)
@ -4231,7 +4231,7 @@ class Symbol:
Symbol.debug_indent += 1
Symbol.debug_print("_add_symbols:")
Symbol.debug_indent += 1
Symbol.debug_print("tdecls:", templateDecls)
Symbol.debug_print("tdecls:", ",".join(str(t) for t in templateDecls))
Symbol.debug_print("nn: ", nestedName)
Symbol.debug_print("decl: ", declaration)
Symbol.debug_print("doc: ", docname)
@ -4292,7 +4292,7 @@ class Symbol:
if not declaration:
if Symbol.debug_lookup:
Symbol.debug_print("no delcaration")
Symbol.debug_print("no declaration")
Symbol.debug_indent -= 2
# good, just a scope creation
# TODO: what if we have more than one symbol?
@ -4360,6 +4360,11 @@ class Symbol:
if Symbol.debug_lookup:
Symbol.debug_print("candId:", candId)
for symbol in withDecl:
# but all existing must be functions as well,
# otherwise we declare it to be a duplicate
if symbol.declaration.objectType != 'function':
handleDuplicateDeclaration(symbol, candSymbol)
# (not reachable)
oldId = symbol.declaration.get_newest_id()
if Symbol.debug_lookup:
Symbol.debug_print("oldId: ", oldId)
@ -4370,7 +4375,11 @@ class Symbol:
# if there is an empty symbol, fill that one
if len(noDecl) == 0:
if Symbol.debug_lookup:
Symbol.debug_print("no match, no empty, candSybmol is not None?:", candSymbol is not None) # NOQA
Symbol.debug_print("no match, no empty")
if candSymbol is not None:
Symbol.debug_print("result is already created candSymbol")
else:
Symbol.debug_print("result is makeCandSymbol()")
Symbol.debug_indent -= 2
if candSymbol is not None:
return candSymbol
@ -6814,10 +6823,12 @@ class CPPObject(ObjectDescription):
parentSymbol = env.temp_data['cpp:parent_symbol']
parentDecl = parentSymbol.declaration
if parentDecl is not None and parentDecl.objectType == 'function':
logger.warning("C++ declarations inside functions are not supported." +
" Parent function is " +
str(parentSymbol.get_full_nested_name()),
location=self.get_source_info())
msg = "C++ declarations inside functions are not supported." \
" Parent function: {}\nDirective name: {}\nDirective arg: {}"
logger.warning(msg.format(
str(parentSymbol.get_full_nested_name()),
self.name, self.arguments[0]
), location=self.get_source_info())
name = _make_phony_error_name()
symbol = parentSymbol.add_name(name)
env.temp_data['cpp:last_symbol'] = symbol


@ -500,7 +500,8 @@ class ProductionList(SphinxDirective):
except ValueError:
break
subnode = addnodes.production(rule)
subnode['tokenname'] = name.strip()
name = name.strip()
subnode['tokenname'] = name
if subnode['tokenname']:
prefix = 'grammar-token-%s' % productionGroup
node_id = make_id(self.env, self.state.document, prefix, name)


@ -224,6 +224,10 @@ class TocTreeCollector(EnvironmentCollector):
def get_figtype(node: Node) -> str:
for domain in env.domains.values():
figtype = domain.get_enumerable_node_type(node)
if domain.name == 'std' and not domain.get_numfig_title(node): # type: ignore
# Skip if uncaptioned node
continue
if figtype:
return figtype


@ -94,7 +94,10 @@ def members_option(arg: Any) -> Union[object, List[str]]:
"""Used to convert the :members: option to auto directives."""
if arg is None or arg is True:
return ALL
return [x.strip() for x in arg.split(',') if x.strip()]
elif arg is False:
return None
else:
return [x.strip() for x in arg.split(',') if x.strip()]
def members_set_option(arg: Any) -> Union[object, Set[str]]:
@ -172,7 +175,7 @@ def merge_members_option(options: Dict) -> None:
members = options.setdefault('members', [])
for key in {'private-members', 'special-members'}:
if key in options and options[key] is not ALL:
if key in options and options[key] not in (ALL, None):
for member in options[key]:
if member not in members:
members.append(member)
@ -532,6 +535,11 @@ class Documenter:
self.env.app.emit('autodoc-process-docstring',
self.objtype, self.fullname, self.object,
self.options, docstringlines)
if docstringlines and docstringlines[-1] != '':
# append a blank line to the end of the docstring
docstringlines.append('')
yield from docstringlines
def get_sourcename(self) -> str:
@ -1205,7 +1213,8 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
try:
self.env.app.emit('autodoc-before-process-signature', self.object, False)
sig = inspect.signature(self.object, follow_wrapped=True)
sig = inspect.signature(self.object, follow_wrapped=True,
type_aliases=self.env.config.autodoc_type_aliases)
args = stringify_signature(sig, **kwargs)
except TypeError as exc:
logger.warning(__("Failed to get a function signature for %s: %s"),
@ -1231,7 +1240,9 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
def format_signature(self, **kwargs: Any) -> str:
sigs = []
if self.analyzer and '.'.join(self.objpath) in self.analyzer.overloads:
if (self.analyzer and
'.'.join(self.objpath) in self.analyzer.overloads and
self.env.config.autodoc_typehints == 'signature'):
# Use signatures for overloaded functions instead of the implementation function.
overloaded = True
else:
@ -1254,7 +1265,9 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
if overloaded:
__globals__ = safe_getattr(self.object, '__globals__', {})
for overload in self.analyzer.overloads.get('.'.join(self.objpath)):
overload = evaluate_signature(overload, __globals__)
overload = evaluate_signature(overload, __globals__,
self.env.config.autodoc_type_aliases)
sig = stringify_signature(overload, **kwargs)
sigs.append(sig)
@ -1263,7 +1276,7 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
def annotate_to_first_argument(self, func: Callable, typ: Type) -> None:
"""Annotate type hint to the first argument of function if needed."""
try:
sig = inspect.signature(func)
sig = inspect.signature(func, type_aliases=self.env.config.autodoc_type_aliases)
except TypeError as exc:
logger.warning(__("Failed to get a function signature for %s: %s"),
self.fullname, exc)
@ -1291,6 +1304,11 @@ class SingledispatchFunctionDocumenter(FunctionDocumenter):
Retained for backwards compatibility, now does the same as the FunctionDocumenter
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
warnings.warn("%s is deprecated." % self.__class__.__name__,
RemovedInSphinx50Warning, stacklevel=2)
super().__init__(*args, **kwargs)
class DecoratorDocumenter(FunctionDocumenter):
"""
@ -1317,6 +1335,12 @@ _METACLASS_CALL_BLACKLIST = [
]
# Types whose __new__ signature is a pass-thru.
_CLASS_NEW_BLACKLIST = [
'typing.Generic.__new__',
]
class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore
"""
Specialized Documenter subclass for classes.
@ -1378,17 +1402,24 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
if call is not None:
self.env.app.emit('autodoc-before-process-signature', call, True)
try:
sig = inspect.signature(call, bound_method=True)
sig = inspect.signature(call, bound_method=True,
type_aliases=self.env.config.autodoc_type_aliases)
return type(self.object), '__call__', sig
except ValueError:
pass
# Now we check if the 'obj' class has a '__new__' method
new = get_user_defined_function_or_method(self.object, '__new__')
if new is not None:
if "{0.__module__}.{0.__qualname__}".format(new) in _CLASS_NEW_BLACKLIST:
new = None
if new is not None:
self.env.app.emit('autodoc-before-process-signature', new, True)
try:
sig = inspect.signature(new, bound_method=True)
sig = inspect.signature(new, bound_method=True,
type_aliases=self.env.config.autodoc_type_aliases)
return self.object, '__new__', sig
except ValueError:
pass
@ -1398,7 +1429,8 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
if init is not None:
self.env.app.emit('autodoc-before-process-signature', init, True)
try:
sig = inspect.signature(init, bound_method=True)
sig = inspect.signature(init, bound_method=True,
type_aliases=self.env.config.autodoc_type_aliases)
return self.object, '__init__', sig
except ValueError:
pass
@ -1409,7 +1441,8 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
# the signature from, so just pass the object itself to our hook.
self.env.app.emit('autodoc-before-process-signature', self.object, False)
try:
sig = inspect.signature(self.object, bound_method=False)
sig = inspect.signature(self.object, bound_method=False,
type_aliases=self.env.config.autodoc_type_aliases)
return None, None, sig
except ValueError:
pass
@ -1440,23 +1473,16 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
return ''
sig = super().format_signature()
overloaded = False
qualname = None
# TODO: recreate analyzer for the module of class (To be clear, owner of the method)
if self._signature_class and self._signature_method_name and self.analyzer:
qualname = '.'.join([self._signature_class.__qualname__,
self._signature_method_name])
if qualname in self.analyzer.overloads:
overloaded = True
sigs = []
if overloaded:
overloads = self.get_overloaded_signatures()
if overloads and self.env.config.autodoc_typehints == 'signature':
# Use signatures for overloaded methods instead of the implementation method.
method = safe_getattr(self._signature_class, self._signature_method_name, None)
__globals__ = safe_getattr(method, '__globals__', {})
for overload in self.analyzer.overloads.get(qualname):
overload = evaluate_signature(overload, __globals__)
for overload in overloads:
overload = evaluate_signature(overload, __globals__,
self.env.config.autodoc_type_aliases)
parameters = list(overload.parameters.values())
overload = overload.replace(parameters=parameters[1:],
@ -1468,6 +1494,20 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
return "\n".join(sigs)
def get_overloaded_signatures(self) -> List[Signature]:
if self._signature_class and self._signature_method_name:
for cls in self._signature_class.__mro__:
try:
analyzer = ModuleAnalyzer.for_module(cls.__module__)
analyzer.parse()
qualname = '.'.join([cls.__qualname__, self._signature_method_name])
if qualname in analyzer.overloads:
return analyzer.overloads.get(qualname)
except PycodeError:
pass
return []
def add_directive_header(self, sig: str) -> None:
sourcename = self.get_sourcename()
@ -1704,7 +1744,8 @@ class GenericAliasDocumenter(DataDocumenter):
return inspect.isgenericalias(member)
def add_directive_header(self, sig: str) -> None:
self.options.annotation = SUPPRESS # type: ignore
self.options = Options(self.options)
self.options['annotation'] = SUPPRESS
super().add_directive_header(sig)
def add_content(self, more_content: Any, no_docstring: bool = False) -> None:
@ -1725,10 +1766,11 @@ class TypeVarDocumenter(DataDocumenter):
@classmethod
def can_document_member(cls, member: Any, membername: str, isattr: bool, parent: Any
) -> bool:
return isinstance(member, TypeVar) and isattr # type: ignore
return isinstance(member, TypeVar) and isattr
def add_directive_header(self, sig: str) -> None:
self.options.annotation = SUPPRESS # type: ignore
self.options = Options(self.options)
self.options['annotation'] = SUPPRESS
super().add_directive_header(sig)
def get_doc(self, encoding: str = None, ignore: int = None) -> List[List[str]]:
@ -1801,11 +1843,13 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
else:
if inspect.isstaticmethod(self.object, cls=self.parent, name=self.object_name):
self.env.app.emit('autodoc-before-process-signature', self.object, False)
sig = inspect.signature(self.object, bound_method=False)
sig = inspect.signature(self.object, bound_method=False,
type_aliases=self.env.config.autodoc_type_aliases)
else:
self.env.app.emit('autodoc-before-process-signature', self.object, True)
sig = inspect.signature(self.object, bound_method=True,
follow_wrapped=True)
follow_wrapped=True,
type_aliases=self.env.config.autodoc_type_aliases)
args = stringify_signature(sig, **kwargs)
except TypeError as exc:
logger.warning(__("Failed to get a method signature for %s: %s"),
@ -1840,7 +1884,9 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
def format_signature(self, **kwargs: Any) -> str:
sigs = []
if self.analyzer and '.'.join(self.objpath) in self.analyzer.overloads:
if (self.analyzer and
'.'.join(self.objpath) in self.analyzer.overloads and
self.env.config.autodoc_typehints == 'signature'):
# Use signatures for overloaded methods instead of the implementation method.
overloaded = True
else:
@ -1865,7 +1911,9 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
if overloaded:
__globals__ = safe_getattr(self.object, '__globals__', {})
for overload in self.analyzer.overloads.get('.'.join(self.objpath)):
overload = evaluate_signature(overload, __globals__)
overload = evaluate_signature(overload, __globals__,
self.env.config.autodoc_type_aliases)
if not inspect.isstaticmethod(self.object, cls=self.parent,
name=self.object_name):
parameters = list(overload.parameters.values())
@ -1878,7 +1926,7 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
def annotate_to_first_argument(self, func: Callable, typ: Type) -> None:
"""Annotate type hint to the first argument of function if needed."""
try:
sig = inspect.signature(func)
sig = inspect.signature(func, type_aliases=self.env.config.autodoc_type_aliases)
except TypeError as exc:
logger.warning(__("Failed to get a method signature for %s: %s"),
self.fullname, exc)
@ -1905,6 +1953,11 @@ class SingledispatchMethodDocumenter(MethodDocumenter):
Retained for backwards compatibility, now does the same as the MethodDocumenter
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
warnings.warn("%s is deprecated." % self.__class__.__name__,
RemovedInSphinx50Warning, stacklevel=2)
super().__init__(*args, **kwargs)
class AttributeDocumenter(DocstringStripSignatureMixin, ClassLevelDocumenter): # type: ignore
"""
@ -2218,6 +2271,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:
app.add_config_value('autodoc_mock_imports', [], True)
app.add_config_value('autodoc_typehints', "signature", True,
ENUM("signature", "description", "none"))
app.add_config_value('autodoc_type_aliases', {}, True)
app.add_config_value('autodoc_warningiserror', True, True)
app.add_config_value('autodoc_inherit_docstrings', True, True)
app.add_event('autodoc-before-process-signature')
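
The newly registered :confval:`autodoc_type_aliases` takes a mapping from annotation names to the target written into the documentation, and it only applies to modules that use ``from __future__ import annotations``. A minimal conf.py sketch; the alias ``JSONObject`` and the module ``example`` are placeholders for illustration:

    # conf.py -- sketch only; "JSONObject" and "example.JSONObject" are
    # hypothetical names.
    extensions = ['sphinx.ext.autodoc']

    autodoc_type_aliases = {
        'JSONObject': 'example.JSONObject',
    }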

View File

@ -206,7 +206,10 @@ def get_object_members(subject: Any, objpath: List[str], attrgetter: Callable,
if isclass(subject) and getattr(subject, '__slots__', None) is not None:
from sphinx.ext.autodoc import SLOTSATTR
for name in subject.__slots__:
slots = subject.__slots__
if isinstance(slots, str):
slots = [slots]
for name in slots:
members[name] = Attribute(name, True, SLOTSATTR)
# other members
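
The normalization above addresses #8294: a class declaring ``__slots__`` as a bare string used to be iterated character by character. A self-contained example of the pattern now handled; the class and attribute names are made up:

    class Point:
        """Illustrative class with a single-string __slots__."""

        __slots__ = 'value'   # a bare string, equivalent to ('value',)

        def __init__(self, value: int) -> None:
            self.value = value

With the string wrapped into a list, autodoc records the single ``value`` slot instead of the pseudo-attributes ``v``, ``a``, ``l``, ``u``, ``e``.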

View File

@ -66,6 +66,10 @@ module_sig_re = re.compile(r'''^(?:([\w.]*)\.)? # module names
''', re.VERBOSE)
py_builtins = [obj for obj in vars(builtins).values()
if inspect.isclass(obj)]
def try_import(objname: str) -> Any:
"""Import a object or module using *name* and *currentmodule*.
*name* should be a relative name from *currentmodule* or
@ -178,7 +182,6 @@ class InheritanceGraph:
traverse to. Multiple names can be specified separated by comma.
"""
all_classes = {}
py_builtins = vars(builtins).values()
def recurse(cls: Any) -> None:
if not show_builtins and cls in py_builtins:

View File

@ -31,12 +31,12 @@ logger = logging.getLogger(__name__)
_directive_regex = re.compile(r'\.\. \S+::')
_google_section_regex = re.compile(r'^(\s|\w)+:\s*$')
_google_typed_arg_regex = re.compile(r'\s*(.+?)\s*\(\s*(.*[^\s]+)\s*\)')
_google_typed_arg_regex = re.compile(r'(.+?)\(\s*(.*[^\s]+)\s*\)')
_numpy_section_regex = re.compile(r'^[=\-`:\'"~^_*+#<>]{2,}\s*$')
_single_colon_regex = re.compile(r'(?<!:):(?!:)')
_xref_or_code_regex = re.compile(
r'((?::(?:[a-zA-Z0-9]+[\-_+:.])*[a-zA-Z0-9]+:`.+?`)|'
r'(?:``.+``))')
r'(?:``.+?``))')
_xref_regex = re.compile(
r'(?:(?::(?:[a-zA-Z0-9]+[\-_+:.])*[a-zA-Z0-9]+:)?`.+?`)'
)
@ -254,7 +254,7 @@ class GoogleDocstring:
if parse_type:
match = _google_typed_arg_regex.match(before)
if match:
_name = match.group(1)
_name = match.group(1).strip()
_type = match.group(2)
_name = self._escape_args_and_kwargs(_name)
@ -699,6 +699,9 @@ class GoogleDocstring:
m = self._name_rgx.match(_type)
if m and m.group('name'):
_type = m.group('name')
elif _xref_regex.match(_type):
pos = _type.find('`')
_type = _type[pos + 1:-1]
_type = ' ' + _type if _type else ''
_desc = self._strip_empty(_desc)
_descs = ' ' + '\n '.join(_desc) if any(_desc) else ''
@ -1104,6 +1107,10 @@ class NumpyDocstring(GoogleDocstring):
_name, _type = line, ''
_name, _type = _name.strip(), _type.strip()
_name = self._escape_args_and_kwargs(_name)
if prefer_type and not _type:
_type, _name = _name, _type
if self._config.napoleon_preprocess_types:
_type = _convert_numpy_type_spec(
_type,
@ -1111,8 +1118,6 @@ class NumpyDocstring(GoogleDocstring):
translations=self._config.napoleon_type_aliases or {},
)
if prefer_type and not _type:
_type, _name = _name, _type
indent = self._get_indent(line) + 1
_desc = self._dedent(self._consume_indented_block(indent))
_desc = self.__class__(_desc, self._config).lines()
@ -1188,6 +1193,22 @@ class NumpyDocstring(GoogleDocstring):
items.append((name, list(rest), role))
del rest[:]
def translate(func, description, role):
translations = self._config.napoleon_type_aliases
if role is not None or not translations:
return func, description, role
translated = translations.get(func, func)
match = self._name_rgx.match(translated)
if not match:
return translated, description, role
groups = match.groupdict()
role = groups["role"]
new_func = groups["name"] or groups["name2"]
return new_func, description, role
current_func = None
rest = [] # type: List[str]
@ -1218,37 +1239,19 @@ class NumpyDocstring(GoogleDocstring):
if not items:
return []
roles = {
'method': 'meth',
'meth': 'meth',
'function': 'func',
'func': 'func',
'class': 'class',
'exception': 'exc',
'exc': 'exc',
'object': 'obj',
'obj': 'obj',
'module': 'mod',
'mod': 'mod',
'data': 'data',
'constant': 'const',
'const': 'const',
'attribute': 'attr',
'attr': 'attr'
}
if self._what is None:
func_role = 'obj'
else:
func_role = roles.get(self._what, '')
# apply type aliases
items = [
translate(func, description, role)
for func, description, role in items
]
lines = [] # type: List[str]
last_had_desc = True
for func, desc, role in items:
for name, desc, role in items:
if role:
link = ':%s:`%s`' % (role, func)
elif func_role:
link = ':%s:`%s`' % (func_role, func)
link = ':%s:`%s`' % (role, name)
else:
link = "`%s`_" % func
link = ':obj:`%s`' % name
if desc or last_had_desc:
lines += ['']
lines += [link]
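
With this change (#8051), every See Also item gets an explicit role: names matched by ``_name_rgx`` or translated through :confval:`napoleon_type_aliases` keep their detected role, and everything else falls back to the ``obj`` role instead of a bare reST link or a role guessed from the documented object type. A NumPy-style docstring sketch; the function and the referenced helper are illustrative only:

    def fetch(url):
        """Download a resource.

        See Also
        --------
        urllib.request.urlopen : lower-level helper assumed for this example.
        """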

View File

@ -106,7 +106,7 @@ class _TranslationProxy(UserString):
translators = defaultdict(NullTranslations) # type: Dict[Tuple[str, str], NullTranslations]
def init(locale_dirs: List[str], language: str,
def init(locale_dirs: List[Optional[str]], language: str,
catalog: str = 'sphinx', namespace: str = 'general') -> Tuple[NullTranslations, bool]:
"""Look for message catalogs in `locale_dirs` and *ensure* that there is at
least a NullTranslations catalog set in `translators`. If called multiple

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Mohammed Shannaq <sam@ms.per.jo>, 2018
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
msgid ""
msgstr ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# FIRST AUTHOR <EMAIL@ADDRESS>, 2009
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# FIRST AUTHOR <EMAIL@ADDRESS>, 2009
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Julien Malard <julien.malard@mail.mcgill.ca>, 2019
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# FIRST AUTHOR <EMAIL@ADDRESS>, 2008
# Vilibald W. <vilibald.wanca@gmail.com>, 2014-2015

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# FIRST AUTHOR <EMAIL@ADDRESS>, 2016
# Geraint Palmer <palmer.geraint@googlemail.com>, 2016

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# askhl <asklarsen@gmail.com>, 2010-2011
# Jakob Lykke Andersen <jakob@caput.dk>, 2014,2016

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Georg Brandl <g.brandl@gmx.net>, 2013-2015
# Jean-François B. <jfbu@free.fr>, 2018

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Stelios Vitalis <liberostelios@gmail.com>, 2015
# tzoumakers tzoumakers <tzoumakersx@gmail.com>, 2019

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Dinu Gherman <gherman@darwin.in-berlin.de>, 2014
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Edward Villegas-Pulgarin <cosmoscalibur@gmail.com>, 2018
# Edward Villegas-Pulgarin <cosmoscalibur@gmail.com>, 2019

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Aivar Annamaa <aivar.annamaa@gmail.com>, 2011
# Ivar Smolin <okul at linux ee>, 2012

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Ales Zabala Alava <shagi@gisa-elkartea.org>, 2011
# Asier Iturralde Sarasola <asier.iturralde@gmail.com>, 2018

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
msgid ""
msgstr ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# FIRST AUTHOR <EMAIL@ADDRESS>, 2009
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Christophe CHAUVET <christophe.chauvet@gmail.com>, 2017
# Christophe CHAUVET <christophe.chauvet@gmail.com>, 2013,2015

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# FIRST AUTHOR <EMAIL@ADDRESS>, 2011
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Ajay Singh <ajaysajay@gmail.com>, 2019
# Purnank H. Ghumalia <me@purnank.in>, 2015-2016

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
msgid ""
msgstr ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Mario Šarić, 2015-2020
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# FIRST AUTHOR <EMAIL@ADDRESS>, 2011
# Molnár Dénes <denes.molnar2@stud.uni-corvinus.hu>, 2017

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Arif Budiman <arifpedia@gmail.com>, 2016-2017
# FIRST AUTHOR <EMAIL@ADDRESS>, 2009

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Denis Cappellin <denis@cappell.in>, 2018
# Paolo Cavallini <cavallini@faunalia.it>, 2013-2017

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# shirou - しろう <shirou.faw@gmail.com>, 2013
# Akitoshi Ohta <fire.kuma8@gmail.com>, 2011

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Minho Ryang <minhoryang@gmail.com>, 2019
# YT H <dev@theYT.net>, 2019

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# DALIUS DOBRAVOLSKAS <DALIUS@SANDBOX.LT>, 2010
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
msgid ""
msgstr ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Vasil Vangelovski <vvangelovski@gmail.com>, 2013
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
msgid ""
msgstr ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# FIRST AUTHOR <EMAIL@ADDRESS>, 2011
# Takeshi KOMIYA <i.tkomiya@gmail.com>, 2016

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Bram Geron, 2017
# brechtm, 2016

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# m_aciek <maciej.olko@gmail.com>, 2017-2020
# Michael Gielda <michal.gielda@gmail.com>, 2014

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
msgid ""
msgstr ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Claudio Rogerio Carvalho Filho <excriptbrasil@gmail.com>, 2016
# FIRST AUTHOR <roger.demetrescu@gmail.com>, 2008

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Pedro Algarvio <pedro@algarvio.me>, 2013
# Takeshi KOMIYA <i.tkomiya@gmail.com>, 2016

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Razvan Stefanescu <razvan.stefanescu@gmail.com>, 2015-2017
# Takeshi KOMIYA <i.tkomiya@gmail.com>, 2016

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Alex Salikov <Salikvo57@gmail.com>, 2019
# Dmitry Shachnev <mitya57@gmail.com>, 2013

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# callkalpa <callkalpa@gmail.com>, 2013
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# FIRST AUTHOR <EMAIL@ADDRESS>, 2008
# Slavko <linux@slavino.sk>, 2013-2019

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
msgid ""
msgstr ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
msgid ""
msgstr ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Risto Pejasinovic <risto.pejasinovic@gmail.com>, 2019
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
msgid ""
msgstr ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
msgid ""
msgstr ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
msgid ""
msgstr ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Julien Malard <julien.malard@mail.mcgill.ca>, 2019
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
msgid ""
msgstr ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# BouRock, 2020
# Fırat Özgül <ozgulfirat@gmail.com>, 2013-2016

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Petro Sasnyk <petro@sasnyk.name>, 2009
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
msgid ""
msgstr ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Hoat Le Van <hoatlevan@gmail.com>, 2014
msgid ""

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Yinian Chin <yinian1992@live.com>, 2015,2017-2018
# Hsiaoming Yang <me@lepture.com>, 2018

View File

@ -1,7 +1,7 @@
# Translations template for Sphinx.
# Copyright (C) 2020 ORGANIZATION
# This file is distributed under the same license as the Sphinx project.
#
#
# Translators:
# Adrian Liaw <adrianliaw2000@gmail.com>, 2018
# Fred Lin <gasolin@gmail.com>, 2008

View File

@ -58,17 +58,19 @@ def parse(code: str, mode: str = 'exec') -> "ast.AST":
return ast.parse(code, mode=mode)
def unparse(node: Optional[ast.AST]) -> Optional[str]:
def unparse(node: Optional[ast.AST], code: str = '') -> Optional[str]:
"""Unparse an AST to string."""
if node is None:
return None
elif isinstance(node, str):
return node
return _UnparseVisitor().visit(node)
return _UnparseVisitor(code).visit(node)
# a greatly cut-down version of `ast._Unparser`
class _UnparseVisitor(ast.NodeVisitor):
def __init__(self, code: str = '') -> None:
self.code = code
def _visit_op(self, node: ast.AST) -> str:
return OPERATORS[node.__class__]
@ -166,14 +168,28 @@ class _UnparseVisitor(ast.NodeVisitor):
return "{" + ", ".join(self.visit(e) for e in node.elts) + "}"
def visit_Subscript(self, node: ast.Subscript) -> str:
return "%s[%s]" % (self.visit(node.value), self.visit(node.slice))
def is_simple_tuple(value: ast.AST) -> bool:
return (
isinstance(value, ast.Tuple) and
bool(value.elts) and
not any(isinstance(elt, ast.Starred) for elt in value.elts)
)
if is_simple_tuple(node.slice):
elts = ", ".join(self.visit(e) for e in node.slice.elts) # type: ignore
return "%s[%s]" % (self.visit(node.value), elts)
elif isinstance(node.slice, ast.Index) and is_simple_tuple(node.slice.value):
elts = ", ".join(self.visit(e) for e in node.slice.value.elts) # type: ignore
return "%s[%s]" % (self.visit(node.value), elts)
else:
return "%s[%s]" % (self.visit(node.value), self.visit(node.slice))
def visit_UnaryOp(self, node: ast.UnaryOp) -> str:
return "%s %s" % (self.visit(node.op), self.visit(node.operand))
def visit_Tuple(self, node: ast.Tuple) -> str:
if node.elts:
return ", ".join(self.visit(e) for e in node.elts)
return "(" + ", ".join(self.visit(e) for e in node.elts) + ")"
else:
return "()"
@ -181,6 +197,11 @@ class _UnparseVisitor(ast.NodeVisitor):
def visit_Constant(self, node: ast.Constant) -> str:
if node.value is Ellipsis:
return "..."
elif isinstance(node.value, (int, float, complex)):
if self.code and sys.version_info > (3, 8):
return ast.get_source_segment(self.code, node)
else:
return repr(node.value)
else:
return repr(node.value)
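
Together with the new ``code`` argument to ``unparse()``, this branch reproduces numeric literals as they were written, which is what #8255 reports for hexadecimal default values. A short sketch of the effect; it assumes a recent Python (the source-segment path needs 3.8+):

    from sphinx.pycode import ast as sphinx_ast

    source = "def f(flags=0xDEADBEEF): ..."
    tree = sphinx_ast.parse(source)
    default = tree.body[0].args.defaults[0]   # Constant node for the default value
    # With the source passed along, the literal is reproduced verbatim.
    print(sphinx_ast.unparse(default, source))   # 0xDEADBEEF rather than 3735928559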

View File

@ -259,12 +259,12 @@ class SphinxComponentRegistry:
else:
self.source_suffix[suffix] = filetype
def add_source_parser(self, parser: "Type[Parser]", **kwargs: Any) -> None:
def add_source_parser(self, parser: "Type[Parser]", override: bool = False) -> None:
logger.debug('[app] adding search source_parser: %r', parser)
# create a map from filetype to parser
for filetype in parser.supported:
if filetype in self.source_parsers and not kwargs.get('override'):
if filetype in self.source_parsers and not override:
raise ExtensionError(__('source_parser for %r is already registered') %
filetype)
else:
@ -367,7 +367,14 @@ class SphinxComponentRegistry:
logger.debug('[app] adding js_file: %r, %r', filename, attributes)
self.js_files.append((filename, attributes))
def has_latex_package(self, name: str) -> bool:
packages = self.latex_packages + self.latex_packages_after_hyperref
return bool([x for x in packages if x[0] == name])
def add_latex_package(self, name: str, options: str, after_hyperref: bool = False) -> None:
if self.has_latex_package(name):
logger.warn("latex package '%s' already included" % name)
logger.debug('[app] adding latex package: %r', name)
if after_hyperref:
self.latex_packages_after_hyperref.append((name, options))
@ -394,7 +401,7 @@ class SphinxComponentRegistry:
def load_extension(self, app: "Sphinx", extname: str) -> None:
"""Load a Sphinx extension."""
if extname in app.extensions: # alread loaded
if extname in app.extensions: # already loaded
return
if extname in EXTENSION_BLACKLIST:
logger.warning(__('the extension %r was already merged with Sphinx since '
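
The ``has_latex_package()`` / ``add_latex_package()`` additions above back #8081: extensions may register LaTeX packages up until just before the .tex file is written, optionally after hyperref, and duplicates are reported. A hedged sketch of an extension using the public API; the package names and options are placeholders:

    def setup(app):
        # Loaded at the regular place in the preamble.
        app.add_latex_package('mystyle')
        # Deferred until after hyperref, for packages that require it.
        app.add_latex_package('mystyle2', options='opt1,opt2', after_hyperref=True)
        return {'version': '0.1', 'parallel_read_safe': True}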

View File

@ -297,8 +297,8 @@ class IndexBuilder:
frozen.get('envversion') != self.env.version:
raise ValueError('old format')
index2fn = frozen['docnames']
self._filenames = dict(zip(index2fn, frozen['filenames'])) # type: ignore
self._titles = dict(zip(index2fn, frozen['titles'])) # type: ignore
self._filenames = dict(zip(index2fn, frozen['filenames']))
self._titles = dict(zip(index2fn, frozen['titles']))
def load_terms(mapping: Dict[str, Any]) -> Dict[str, Set[str]]:
rv = {}
@ -359,13 +359,13 @@ class IndexBuilder:
def get_terms(self, fn2index: Dict) -> Tuple[Dict[str, List[str]], Dict[str, List[str]]]:
rvs = {}, {} # type: Tuple[Dict[str, List[str]], Dict[str, List[str]]]
for rv, mapping in zip(rvs, (self._mapping, self._title_mapping)):
for k, v in mapping.items(): # type: ignore
for k, v in mapping.items():
if len(v) == 1:
fn, = v
if fn in fn2index:
rv[k] = fn2index[fn] # type: ignore
rv[k] = fn2index[fn]
else:
rv[k] = sorted([fn2index[fn] for fn in v if fn in fn2index]) # type: ignore # NOQA
rv[k] = sorted([fn2index[fn] for fn in v if fn in fn2index])
return rvs
def freeze(self) -> Dict[str, Any]:

Some files were not shown because too many files have changed in this diff