Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

Merge branch '3.x' into 3.2.x_to_3.x

.github/ISSUE_TEMPLATE/config.yml (vendored, 5 changes)
@@ -2,5 +2,8 @@
blank_issues_enabled: false  # default: true
contact_links:
  - name: Question
    url: https://stackoverflow.com/questions/tagged/python-sphinx
    about: For Q&A purpose, please use Stackoverflow with the tag python-sphinx
  - name: Discussion
    url: https://groups.google.com/forum/#!forum/sphinx-users
    about: For Q&A purpose, please use sphinx-users mailing list.
    about: For general discussion, please use sphinx-users mailing list.
CHANGES (70 changes)
@@ -1,3 +1,69 @@
Release 3.3.0 (in development)
==============================

Dependencies
------------

Incompatible changes
--------------------

Deprecated
----------

* ``sphinx.builders.latex.LaTeXBuilder.usepackages``
* ``sphinx.builders.latex.LaTeXBuilder.usepackages_after_hyperref``
* ``sphinx.ext.autodoc.SingledispatchFunctionDocumenter``
* ``sphinx.ext.autodoc.SingledispatchMethodDocumenter``

Features added
--------------

* #8100: html: Show a better error message for failures on copying
  html_static_files
* #8141: C: added a ``maxdepth`` option to :rst:dir:`c:alias` to insert
  nested declarations.
* #8081: LaTeX: Allow adding a LaTeX package via ``app.add_latex_package()``
  until just before the .tex file is written
* #7996: manpage: Add :confval:`man_make_section_directory` to make a section
  directory when building man pages

Bugs fixed
----------

* #8085: i18n: Add support for having a single text domain
* #6640: i18n: Failed to override system message translation
* #8143: autodoc: AttributeError is raised when a False value is passed to
  autodoc_default_options
* #8103: autodoc: functools.cached_property is not considered a property
* #8190: autodoc: parsing error is raised if some extension replaces the
  docstring with a string not ending with a blank line
* #8142: autodoc: Wrong constructor signature for a class derived from
  typing.Generic
* #8157: autodoc: TypeError is raised when an annotation has invalid __args__
* #7964: autodoc: Tuple in default value is wrongly rendered
* #8200: autodoc: type aliases break type formatting of autoattribute
* #7786: autodoc: can't detect overloaded methods defined in another file
* #8192: napoleon: description disappears when it contains inline literals
* #8142: napoleon: Potential regex denial of service in Google style docs
* #8169: LaTeX: pxjahyper loaded even when latex_engine is not platex
* #8175: intersphinx: Potential regex denial of service via a broken inventory
* #8277: sphinx-build: missing and redundant spacing (etc.) in console
  output during build
* #7973: imgconverter: Availability of ImageMagick is checked many times
* #8255: py domain: number in a default argument value is changed from
  hexadecimal to decimal
* #8093: The highlight warning has a wrong location in some builders (LaTeX,
  singlehtml and so on)
* #8239: Failed to refer to a token in a productionlist if it is indented
* #8268: linkcheck: Report HTTP errors when ``linkcheck_anchors`` is ``True``
* #8245: linkcheck: take the source directory into account for local files
* #6914: figure numbers are unexpectedly assigned to uncaptioned items

Testing
-------

* #8257: Support parallel build in sphinx.testing


Release 3.2.2 (in development)
==============================

@@ -127,7 +193,7 @@ Bugs fixed
  contains a hyperlink target
* #7469: autosummary: "Module attributes" header is not translatable
* #7940: apidoc: An extra newline is generated at the end of the rst file if a
  module has submodules
* #4258: napoleon: decorated special methods are not shown
* #7799: napoleon: parameters are not escaped for combined params in numpydoc
* #7780: napoleon: multiple parameters declaration in numpydoc was wrongly

@@ -294,7 +360,7 @@ Features added
* #7543: html theme: Add top and bottom margins to tables
* #7695: html theme: Add viewport meta tag for basic theme
* #7721: html theme: classic: default codetextcolor/codebgcolor doesn't override
  Pygments
* C and C++: allow semicolon in the end of declarations.
* C++, parse parameterized noexcept specifiers.
* #7294: C++, parse expressions with user-defined literals.
EXAMPLES (1 change)
@@ -330,6 +330,7 @@ Documentation using a custom theme or integrated in a website
* `Lasso <http://lassoguide.com/>`__
* `Mako <http://docs.makotemplates.org/>`__
* `MirrorBrain <http://mirrorbrain.org/docs/>`__
* `Mitiq <https://mitiq.readthedocs.io/>`__
* `MongoDB <https://docs.mongodb.com/>`__
* `Music21 <https://web.mit.edu/music21/doc/>`__
* `MyHDL <http://docs.myhdl.org/>`__
Makefile (4 changes)
@@ -64,10 +64,6 @@ type-check:
doclinter:
	python utils/doclinter.py CHANGES *.rst doc/

.PHONY: pylint
pylint:
	@pylint --rcfile utils/pylintrc sphinx

.PHONY: test
test:
	@$(PYTHON) -m pytest -v $(TEST)
doc/_themes/sphinx13/static/sphinx13.css (vendored, 2 changes)
@@ -239,7 +239,7 @@ div.footer a {

/* -- body styles ----------------------------------------------------------- */

p {
    margin: 0.8em 0 0.5em 0;
}
@@ -110,8 +110,6 @@ texinfo_documents = [
    1),
]

# We're not using intersphinx right now, but if we did, this would be part of
# the mapping:
intersphinx_mapping = {'python': ('https://docs.python.org/3/', None)}

# Sphinx document translation with sphinx gettext feature uses these settings:
@@ -140,14 +140,14 @@ started with writing your own extensions.
.. _slideshare: https://www.slideshare.net/
.. _TikZ/PGF LaTeX package: https://sourceforge.net/projects/pgf/
.. _MATLAB: https://www.mathworks.com/products/matlab.html
.. _swf: https://bitbucket.org/klorenz/sphinxcontrib-swf
.. _findanything: https://bitbucket.org/klorenz/sphinxcontrib-findanything
.. _cmakedomain: https://bitbucket.org/klorenz/sphinxcontrib-cmakedomain
.. _swf: https://github.com/sphinx-contrib/swf
.. _findanything: https://github.com/sphinx-contrib/findanything
.. _cmakedomain: https://github.com/sphinx-contrib/cmakedomain
.. _GNU Make: https://www.gnu.org/software/make/
.. _makedomain: https://bitbucket.org/klorenz/sphinxcontrib-makedomain
.. _makedomain: https://github.com/sphinx-contrib/makedomain
.. _inlinesyntaxhighlight: https://sphinxcontrib-inlinesyntaxhighlight.readthedocs.io/
.. _CMake: https://cmake.org
.. _domaintools: https://bitbucket.org/klorenz/sphinxcontrib-domaintools
.. _domaintools: https://github.com/sphinx-contrib/domaintools
.. _restbuilder: https://pypi.org/project/sphinxcontrib-restbuilder/
.. _Lasso: http://www.lassosoft.com/
.. _beamer: https://pypi.org/project/sphinxcontrib-beamer/
@@ -177,17 +177,18 @@ type for that event::

   9. (if running in parallel mode, for each process) event.env-merged-info(app, env, docnames, other)
   10. event.env-updated(app, env)
   11. event.env-get-updated(app, env)
   11. event.env-check-consistency(app, env)
   12. event.env-check-consistency(app, env)

   # The updated-docs list can be builder dependent, but generally includes all new/changed documents,
   # plus any output from `env-get-updated`, and then all "parent" documents in the ToC tree
   # For builders that output a single page, they are first joined into a single doctree before post-transforms/doctree-resolved
   for docname in docnames:
   12. apply post-transforms (by priority): docutils.document -> docutils.document
   13. event.doctree-resolved(app, doctree, docname)
   for docname in updated-docs:
   13. apply post-transforms (by priority): docutils.document -> docutils.document
   14. event.doctree-resolved(app, doctree, docname)
      - (for any reference node that fails to resolve) event.missing-reference(env, node, contnode)

   14. Generate output files
   15. event.build-finished(app, exception)
   15. Generate output files
   16. event.build-finished(app, exception)
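For extension authors, the practical reading of this ordering is that
``env-get-updated`` is now listed before ``env-check-consistency``, and output
generation happens before ``build-finished``.  A minimal, hypothetical
extension hooking two of these events might look like this (names are
illustrative)::

    from sphinx.application import Sphinx

    def check_env(app: Sphinx, env) -> None:
        # called at the "event.env-check-consistency(app, env)" step above
        pass

    def on_build_finished(app: Sphinx, exception) -> None:
        # called last, at "event.build-finished(app, exception)";
        # exception is None when the build succeeded
        pass

    def setup(app: Sphinx):
        app.connect('env-check-consistency', check_env)
        app.connect('build-finished', on_build_finished)
        return {'version': '0.1', 'parallel_read_safe': True}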

Here is a more detailed list of these events.

@@ -26,6 +26,26 @@ The following is a list of deprecated interfaces.
     - (will be) Removed
     - Alternatives

   * - ``sphinx.builders.latex.LaTeXBuilder.usepackages``
     - 3.3
     - 5.0
     - N/A

   * - ``sphinx.builders.latex.LaTeXBuilder.usepackages_after_hyperref``
     - 3.3
     - 5.0
     - N/A

   * - ``sphinx.ext.autodoc.SingledispatchFunctionDocumenter``
     - 3.3
     - 5.0
     - ``sphinx.ext.autodoc.FunctionDocumenter``

   * - ``sphinx.ext.autodoc.SingledispatchMethodDocumenter``
     - 3.3
     - 5.0
     - ``sphinx.ext.autodoc.MethodDocumenter``

   * - ``sphinx.ext.autodoc.members_set_option()``
     - 3.2
     - 5.0
@@ -12,6 +12,9 @@ Getting help

The Sphinx community maintains a number of mailing lists and IRC channels.

Stack Overflow with tag `python-sphinx`_
    Questions and answers about use and development.

sphinx-users <sphinx-users@googlegroups.com>
    Mailing list for user support.

@@ -21,6 +24,7 @@ sphinx-dev <sphinx-dev@googlegroups.com>
#sphinx-doc on irc.freenode.net
    IRC channel for development questions and user support.

.. _python-sphinx: https://stackoverflow.com/questions/tagged/python-sphinx

Bug Reports and Feature Requests
--------------------------------
@@ -756,9 +756,15 @@ documentation on :ref:`intl` for details.

   If true, a document's text domain is its docname if it is a top-level
   project file and its very base directory otherwise.

   If set to a string, every document uses that string as its text domain,
   so all documents share a single text domain.

   By default, the document ``markup/code.rst`` ends up in the ``markup`` text
   domain.  With this option set to ``False``, it is ``markup/code``.

   .. versionchanged:: 3.3
      The string value is now accepted.
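A short ``conf.py`` sketch of the new string form described above (the domain
name is illustrative)::

    # String form (new in 3.3): every document is compiled into one "docs" catalog.
    gettext_compact = "docs"

    # The boolean forms keep their old meaning:
    # gettext_compact = True    # "markup/code.rst" -> domain "markup"
    # gettext_compact = False   # "markup/code.rst" -> domain "markup/code"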

.. confval:: gettext_uuid

   If true, Sphinx generates uuid information for version tracking in message

@@ -2239,6 +2245,12 @@ These options influence manual page output.

   .. versionadded:: 1.1

.. confval:: man_make_section_directory

   If true, make a section directory when building the man pages.
   Default is False.

   .. versionadded:: 3.3
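A short ``conf.py`` sketch (the man page entry is illustrative); with the
option enabled, the page below is written to ``<outdir>/1/mytool.1`` instead
of ``<outdir>/mytool.1``::

    man_pages = [
        ('index', 'mytool', 'MyTool Documentation', ['The Authors'], 1),
    ]
    man_make_section_directory = True  # new in 3.3; default is False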


.. _texinfo-options:
@@ -229,7 +229,7 @@ inserting them into the page source under a suitable :rst:dir:`py:module`,

   .. versionchanged:: 3.0

      It takes an anchestor class name as an argument.
      It takes an ancestor class name as an argument.

* It's possible to override the signature for explicitly documented callable
  objects (functions, methods, classes) with the regular syntax that will
@@ -515,6 +515,44 @@ There are also config values that you can set:

      New option ``'description'`` is added.

.. confval:: autodoc_type_aliases

   A dictionary of user-defined `type aliases`__ that maps a type name to the
   fully-qualified object name.  It is used to keep type aliases unevaluated in
   the document.  Defaults to empty (``{}``).

   The type aliases are only available if your program enables the `Postponed
   Evaluation of Annotations (PEP 563)`__ feature via ``from __future__ import
   annotations``.

   For example, there is code using a type alias::

     from __future__ import annotations

     AliasType = Union[List[Dict[Tuple[int, str], Set[int]]], Tuple[str, List[str]]]

     def f() -> AliasType:
         ...

   If ``autodoc_type_aliases`` is not set, autodoc will generate internal mark-up
   from this code as follows::

     .. py:function:: f() -> Union[List[Dict[Tuple[int, str], Set[int]]], Tuple[str, List[str]]]

        ...

   If you set ``autodoc_type_aliases`` to
   ``{'AliasType': 'your.module.TypeAlias'}``, it generates the following document
   internally::

     .. py:function:: f() -> your.module.TypeAlias

        ...

   .. __: https://www.python.org/dev/peps/pep-0563/
   .. __: https://mypy.readthedocs.io/en/latest/kinds_of_types.html#type-aliases
   .. versionadded:: 3.3
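The matching ``conf.py`` entry for the example above would be (the module path
follows the mapping shown and is purely illustrative)::

    extensions = ['sphinx.ext.autodoc']

    # Keep "AliasType" unevaluated in generated signatures (new in 3.3).
    autodoc_type_aliases = {
        'AliasType': 'your.module.TypeAlias',
    }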

.. confval:: autodoc_warningiserror

   This value controls the behavior of :option:`sphinx-build -W` during
@@ -51,7 +51,7 @@ should check:

   .. versionadded:: 1.1

.. confval:: coverage_show_missing_items

   Print objects that are missing to standard output also.
   ``False`` by default.
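A minimal ``conf.py`` sketch for the option above::

    extensions = ['sphinx.ext.coverage']

    # Also print undocumented objects to standard output when running the
    # coverage builder; they are still written to python.txt in the output dir.
    coverage_show_missing_items = True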
@@ -171,7 +171,7 @@ Docker images for Sphinx are published on the `Docker Hub <https://hub.docker.co
- `sphinxdoc/sphinx <https://hub.docker.com/repository/docker/sphinxdoc/sphinx>`_
- `sphinxdoc/sphinx-latexpdf <https://hub.docker.com/repository/docker/sphinxdoc/sphinx-latexpdf>`_

The former is used for standard usage of Sphinx; the latter is mainly used for PDF builds using LaTeX.
Please choose one for your purpose.

.. note::
@@ -15,7 +15,7 @@ Much of Sphinx's power comes from the richness of its default plain-text markup
format, :doc:`reStructuredText </usage/restructuredtext/index>`, along with
it's :doc:`significant extensibility capabilities </development/index>`.

The goal of this document is to give you a quick taste of what Sphinx it is and
The goal of this document is to give you a quick taste of what Sphinx is and
how you might use it. When you're done here, you can check out the
:doc:`installation guide </usage/installation>` followed by the intro to the
default markup format used by Sphinx, :doc:`reStucturedText
@@ -665,7 +665,7 @@ __ http://pygments.org/docs/lexers
.. note::

   If you want to select only ``[second-section]`` of ini file like the
   following, you can use ``:start-at: [second-section]`` and
   ``:end-before: [third-section]``:

   .. code-block:: ini

@@ -692,7 +692,7 @@ __ http://pygments.org/docs/lexers
      # [initialize]
      app.start(":8000")
      # [initialize]


When lines have been selected in any of the ways described above, the line
numbers in ``emphasize-lines`` refer to those selected lines, counted
@@ -744,6 +744,18 @@ The following directive can be used for this purpose.

   .. versionadded:: 3.2

   .. rubric:: Options

   .. rst:directive:option:: maxdepth: int

      Insert nested declarations as well, up to the total depth given.
      Use 0 for infinite depth and 1 for just the mentioned declaration.
      Defaults to 1.

      .. versionadded:: 3.3


.. c:namespace-pop::
package-lock.json (generated, 22 changes)
@@ -385,12 +385,6 @@
|
||||
"integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=",
|
||||
"dev": true
|
||||
},
|
||||
"eventemitter3": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-3.1.2.tgz",
|
||||
"integrity": "sha512-tvtQIeLVHjDkJYnzf2dgVMxfuSGJeM/7UCG17TT4EumTfNtF+0nebF/4zWOIkCreAbtNqhGEboB6BWrwqNaw4Q==",
|
||||
"dev": true
|
||||
},
|
||||
"extend": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
|
||||
@@ -535,14 +529,22 @@
|
||||
}
|
||||
},
|
||||
"http-proxy": {
|
||||
"version": "1.17.0",
|
||||
"resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.17.0.tgz",
|
||||
"integrity": "sha512-Taqn+3nNvYRfJ3bGvKfBSRwy1v6eePlm3oc/aWVxZp57DQr5Eq3xhKJi7Z4hZpS8PC3H4qI+Yly5EmFacGuA/g==",
|
||||
"version": "1.18.1",
|
||||
"resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz",
|
||||
"integrity": "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"eventemitter3": "^3.0.0",
|
||||
"eventemitter3": "^4.0.0",
|
||||
"follow-redirects": "^1.0.0",
|
||||
"requires-port": "^1.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"eventemitter3": {
|
||||
"version": "4.0.7",
|
||||
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz",
|
||||
"integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"iconv-lite": {
|
||||
|
||||
@@ -32,8 +32,8 @@ if 'PYTHONWARNINGS' not in os.environ:
|
||||
warnings.filterwarnings('ignore', "'U' mode is deprecated",
|
||||
DeprecationWarning, module='docutils.io')
|
||||
|
||||
__version__ = '3.2.2+'
|
||||
__released__ = '3.2.2' # used when Sphinx builds its own docs
|
||||
__version__ = '3.3.0+'
|
||||
__released__ = '3.3.0' # used when Sphinx builds its own docs
|
||||
|
||||
#: Version info for better programmatic use.
|
||||
#:
|
||||
@@ -43,7 +43,7 @@ __released__ = '3.2.2' # used when Sphinx builds its own docs
|
||||
#:
|
||||
#: .. versionadded:: 1.2
|
||||
#: Before version 1.2, check the string ``sphinx.__version__``.
|
||||
version_info = (3, 2, 2, 'beta', 0)
|
||||
version_info = (3, 3, 0, 'beta', 0)
|
||||
|
||||
package_dir = path.abspath(path.dirname(__file__))
|
||||
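As the comment notes, the tuple is meant for programmatic checks; a small,
illustrative use in an extension:

    import sphinx

    # Gate a feature on the running Sphinx version (3.3+ in this sketch).
    HAS_MAN_SECTION_DIRS = sphinx.version_info >= (3, 3, 0)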
|
||||
|
||||
@@ -18,10 +18,11 @@ import warnings
|
||||
from collections import deque
|
||||
from io import StringIO
|
||||
from os import path
|
||||
from typing import Any, Callable, Dict, IO, List, Tuple, Union
|
||||
from typing import Any, Callable, Dict, IO, List, Optional, Tuple, Union
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Element, TextElement
|
||||
from docutils.parsers import Parser
|
||||
from docutils.parsers.rst import Directive, roles
|
||||
from docutils.transforms import Transform
|
||||
from pygments.lexer import Lexer
|
||||
@@ -293,7 +294,10 @@ class Sphinx:
|
||||
if catalog.domain == 'sphinx' and catalog.is_outdated():
|
||||
catalog.write_mo(self.config.language)
|
||||
|
||||
locale_dirs = [None, path.join(package_dir, 'locale')] + list(repo.locale_dirs)
|
||||
locale_dirs = [None] # type: List[Optional[str]]
|
||||
locale_dirs += list(repo.locale_dirs)
|
||||
locale_dirs += [path.join(package_dir, 'locale')]
|
||||
|
||||
self.translator, has_translation = locale.init(locale_dirs, self.config.language)
|
||||
if has_translation or self.config.language == 'en':
|
||||
# "en" never needs to be translated
|
||||
@@ -468,8 +472,10 @@ class Sphinx:
|
||||
def add_builder(self, builder: "Type[Builder]", override: bool = False) -> None:
|
||||
"""Register a new builder.
|
||||
|
||||
*builder* must be a class that inherits from
|
||||
:class:`~sphinx.builders.Builder`.
|
||||
*builder* must be a class that inherits from :class:`~sphinx.builders.Builder`.
|
||||
|
||||
If *override* is True, the given *builder* is forcedly installed even if
|
||||
a builder having the same name is already installed.
|
||||
|
||||
.. versionchanged:: 1.8
|
||||
Add *override* keyword.
|
||||
@@ -526,6 +532,9 @@ class Sphinx:
|
||||
builtin translator. This allows extensions to use custom translator
|
||||
and define custom nodes for the translator (see :meth:`add_node`).
|
||||
|
||||
If *override* is True, the given *translator_class* is forcedly installed even if
|
||||
a translator for *name* is already installed.
|
||||
|
||||
.. versionadded:: 1.3
|
||||
.. versionchanged:: 1.8
|
||||
Add *override* keyword.
|
||||
@@ -560,6 +569,9 @@ class Sphinx:
|
||||
Obviously, translators for which you don't specify visitor methods will
|
||||
choke on the node when encountered in a document to translate.
|
||||
|
||||
If *override* is True, the given *node* is forcedly installed even if
|
||||
a node having the same name is already installed.
|
||||
|
||||
.. versionchanged:: 0.5
|
||||
Added the support for keyword arguments giving visit functions.
|
||||
"""
|
||||
@@ -595,6 +607,9 @@ class Sphinx:
|
||||
Other keyword arguments are used for node visitor functions. See the
|
||||
:meth:`.Sphinx.add_node` for details.
|
||||
|
||||
If *override* is True, the given *node* is forcedly installed even if
|
||||
a node having the same name is already installed.
|
||||
|
||||
.. versionadded:: 1.4
|
||||
"""
|
||||
self.registry.add_enumerable_node(node, figtype, title_getter, override=override)
|
||||
@@ -608,14 +623,14 @@ class Sphinx:
|
||||
details, see `the Docutils docs
|
||||
<http://docutils.sourceforge.net/docs/howto/rst-directives.html>`_ .
|
||||
|
||||
For example, the (already existing) :rst:dir:`literalinclude` directive
|
||||
would be added like this:
|
||||
For example, a custom directive named ``my-directive`` would be added
|
||||
like this:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from docutils.parsers.rst import Directive, directives
|
||||
|
||||
class LiteralIncludeDirective(Directive):
|
||||
class MyDirective(Directive):
|
||||
has_content = True
|
||||
required_arguments = 1
|
||||
optional_arguments = 0
|
||||
@@ -628,7 +643,11 @@ class Sphinx:
|
||||
def run(self):
|
||||
...
|
||||
|
||||
add_directive('literalinclude', LiteralIncludeDirective)
|
||||
def setup(app):
|
||||
app.add_directive('my-directive', MyDirective)
|
||||
|
||||
If *override* is True, the given *cls* is forcedly installed even if
|
||||
a directive named as *name* is already installed.
|
||||
|
||||
.. versionchanged:: 0.6
|
||||
Docutils 0.5-style directive classes are now supported.
|
||||
@@ -652,6 +671,9 @@ class Sphinx:
|
||||
<http://docutils.sourceforge.net/docs/howto/rst-roles.html>`_ for
|
||||
more information.
|
||||
|
||||
If *override* is True, the given *role* is forcedly installed even if
|
||||
a role named as *name* is already installed.
|
||||
|
||||
.. versionchanged:: 1.8
|
||||
Add *override* keyword.
|
||||
"""
|
||||
@@ -667,6 +689,9 @@ class Sphinx:
|
||||
Register a Docutils role that does nothing but wrap its contents in the
|
||||
node given by *nodeclass*.
|
||||
|
||||
If *override* is True, the given *nodeclass* is forcedly installed even if
|
||||
a role named as *name* is already installed.
|
||||
|
||||
.. versionadded:: 0.6
|
||||
.. versionchanged:: 1.8
|
||||
Add *override* keyword.
|
||||
@@ -686,6 +711,9 @@ class Sphinx:
|
||||
Make the given *domain* (which must be a class; more precisely, a
|
||||
subclass of :class:`~sphinx.domains.Domain`) known to Sphinx.
|
||||
|
||||
If *override* is True, the given *domain* is forcedly installed even if
|
||||
a domain having the same name is already installed.
|
||||
|
||||
.. versionadded:: 1.0
|
||||
.. versionchanged:: 1.8
|
||||
Add *override* keyword.
|
||||
@@ -699,6 +727,9 @@ class Sphinx:
|
||||
Like :meth:`add_directive`, but the directive is added to the domain
|
||||
named *domain*.
|
||||
|
||||
If *override* is True, the given *directive* is forcedly installed even if
|
||||
a directive named as *name* is already installed.
|
||||
|
||||
.. versionadded:: 1.0
|
||||
.. versionchanged:: 1.8
|
||||
Add *override* keyword.
|
||||
@@ -712,6 +743,9 @@ class Sphinx:
|
||||
Like :meth:`add_role`, but the role is added to the domain named
|
||||
*domain*.
|
||||
|
||||
If *override* is True, the given *role* is forcedly installed even if
|
||||
a role named as *name* is already installed.
|
||||
|
||||
.. versionadded:: 1.0
|
||||
.. versionchanged:: 1.8
|
||||
Add *override* keyword.
|
||||
@@ -725,6 +759,9 @@ class Sphinx:
|
||||
Add a custom *index* class to the domain named *domain*. *index* must
|
||||
be a subclass of :class:`~sphinx.domains.Index`.
|
||||
|
||||
If *override* is True, the given *index* is forcedly installed even if
|
||||
an index having the same name is already installed.
|
||||
|
||||
.. versionadded:: 1.0
|
||||
.. versionchanged:: 1.8
|
||||
Add *override* keyword.
|
||||
@@ -788,6 +825,9 @@ class Sphinx:
|
||||
For the role content, you have the same syntactical possibilities as
|
||||
for standard Sphinx roles (see :ref:`xref-syntax`).
|
||||
|
||||
If *override* is True, the given object_type is forcedly installed even if
|
||||
an object_type having the same name is already installed.
|
||||
|
||||
.. versionchanged:: 1.8
|
||||
Add *override* keyword.
|
||||
"""
|
||||
@@ -824,6 +864,9 @@ class Sphinx:
|
||||
(Of course, the element following the ``topic`` directive needn't be a
|
||||
section.)
|
||||
|
||||
If *override* is True, the given crossref_type is forcedly installed even if
|
||||
a crossref_type having the same name is already installed.
|
||||
|
||||
.. versionchanged:: 1.8
|
||||
Add *override* keyword.
|
||||
"""
|
||||
@@ -1004,7 +1047,7 @@ class Sphinx:
|
||||
logger.debug('[app] adding lexer: %r', (alias, lexer))
|
||||
if isinstance(lexer, Lexer):
|
||||
warnings.warn('app.add_lexer() API changed; '
|
||||
'Please give lexer class instead instance',
|
||||
'Please give lexer class instead of instance',
|
||||
RemovedInSphinx40Warning, stacklevel=2)
|
||||
lexers[alias] = lexer
|
||||
else:
|
||||
@@ -1019,6 +1062,9 @@ class Sphinx:
|
||||
new types of objects. See the source of the autodoc module for
|
||||
examples on how to subclass :class:`Documenter`.
|
||||
|
||||
If *override* is True, the given *cls* is forcedly installed even if
|
||||
a documenter having the same name is already installed.
|
||||
|
||||
.. todo:: Add real docs for Documenter and subclassing
|
||||
|
||||
.. versionadded:: 0.6
|
||||
@@ -1067,13 +1113,19 @@ class Sphinx:
|
||||
Same as :confval:`source_suffix`. The users can override this
|
||||
using the setting.
|
||||
|
||||
If *override* is True, the given *suffix* is forcedly installed even if
|
||||
a same suffix is already installed.
|
||||
|
||||
.. versionadded:: 1.8
|
||||
"""
|
||||
self.registry.add_source_suffix(suffix, filetype, override=override)
|
||||
|
||||
def add_source_parser(self, *args: Any, **kwargs: Any) -> None:
|
||||
def add_source_parser(self, parser: "Type[Parser]", override: bool = False) -> None:
|
||||
"""Register a parser class.
|
||||
|
||||
If *override* is True, the given *parser* is forcedly installed even if
|
||||
a parser for the same suffix is already installed.
|
||||
|
||||
.. versionadded:: 1.4
|
||||
.. versionchanged:: 1.8
|
||||
*suffix* argument is deprecated. It only accepts *parser* argument.
|
||||
@@ -1081,7 +1133,7 @@ class Sphinx:
|
||||
.. versionchanged:: 1.8
|
||||
Add *override* keyword.
|
||||
"""
|
||||
self.registry.add_source_parser(*args, **kwargs)
|
||||
self.registry.add_source_parser(parser, override=override)
|
||||
|
||||
def add_env_collector(self, collector: "Type[EnvironmentCollector]") -> None:
|
||||
"""Register an environment collector class.
|
||||
|
||||
@@ -316,7 +316,7 @@ class MessageCatalogBuilder(I18nBuilder):
|
||||
def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
app.add_builder(MessageCatalogBuilder)
|
||||
|
||||
app.add_config_value('gettext_compact', True, 'gettext')
|
||||
app.add_config_value('gettext_compact', True, 'gettext', Any)
|
||||
app.add_config_value('gettext_location', True, 'gettext')
|
||||
app.add_config_value('gettext_uuid', False, 'gettext')
|
||||
app.add_config_value('gettext_auto_build', True, 'env')
|
||||
|
||||
@@ -641,17 +641,17 @@ class StandaloneHTMLBuilder(Builder):
|
||||
def gen_additional_pages(self) -> None:
|
||||
# additional pages from conf.py
|
||||
for pagename, template in self.config.html_additional_pages.items():
|
||||
logger.info(' ' + pagename, nonl=True)
|
||||
logger.info(pagename + ' ', nonl=True)
|
||||
self.handle_page(pagename, {}, template)
|
||||
|
||||
# the search page
|
||||
if self.search:
|
||||
logger.info(' search', nonl=True)
|
||||
logger.info('search ', nonl=True)
|
||||
self.handle_page('search', {}, 'search.html')
|
||||
|
||||
# the opensearch xml file
|
||||
if self.config.html_use_opensearch and self.search:
|
||||
logger.info(' opensearch', nonl=True)
|
||||
logger.info('opensearch ', nonl=True)
|
||||
fn = path.join(self.outdir, '_static', 'opensearch.xml')
|
||||
self.handle_page('opensearch', {}, 'opensearch.xml', outfilename=fn)
|
||||
|
||||
@@ -669,7 +669,7 @@ class StandaloneHTMLBuilder(Builder):
|
||||
'genindexcounts': indexcounts,
|
||||
'split_index': self.config.html_split_index,
|
||||
}
|
||||
logger.info(' genindex', nonl=True)
|
||||
logger.info('genindex ', nonl=True)
|
||||
|
||||
if self.config.html_split_index:
|
||||
self.handle_page('genindex', genindexcontext,
|
||||
@@ -691,7 +691,7 @@ class StandaloneHTMLBuilder(Builder):
|
||||
'content': content,
|
||||
'collapse_index': collapse,
|
||||
}
|
||||
logger.info(' ' + indexname, nonl=True)
|
||||
logger.info(indexname + ' ', nonl=True)
|
||||
self.handle_page(indexname, indexcontext, 'domainindex.html')
|
||||
|
||||
def copy_image_files(self) -> None:
|
||||
@@ -751,18 +751,27 @@ class StandaloneHTMLBuilder(Builder):
|
||||
copyfile(jsfile, path.join(self.outdir, '_static', '_stemmer.js'))
|
||||
|
||||
def copy_theme_static_files(self, context: Dict) -> None:
|
||||
def onerror(filename: str, error: Exception) -> None:
|
||||
logger.warning(__('Failed to copy a file in html_static_file: %s: %r'),
|
||||
filename, error)
|
||||
|
||||
if self.theme:
|
||||
for entry in self.theme.get_theme_dirs()[::-1]:
|
||||
copy_asset(path.join(entry, 'static'),
|
||||
path.join(self.outdir, '_static'),
|
||||
excluded=DOTFILES, context=context, renderer=self.templates)
|
||||
excluded=DOTFILES, context=context,
|
||||
renderer=self.templates, onerror=onerror)
|
||||
|
||||
def copy_html_static_files(self, context: Dict) -> None:
|
||||
def onerror(filename: str, error: Exception) -> None:
|
||||
logger.warning(__('Failed to copy a file in html_static_file: %s: %r'),
|
||||
filename, error)
|
||||
|
||||
excluded = Matcher(self.config.exclude_patterns + ["**/.*"])
|
||||
for entry in self.config.html_static_path:
|
||||
copy_asset(path.join(self.confdir, entry),
|
||||
path.join(self.outdir, '_static'),
|
||||
excluded, context=context, renderer=self.templates)
|
||||
excluded, context=context, renderer=self.templates, onerror=onerror)
|
||||
|
||||
def copy_html_logo(self) -> None:
|
||||
if self.config.html_logo:
|
||||
@@ -776,7 +785,7 @@ class StandaloneHTMLBuilder(Builder):
|
||||
|
||||
def copy_static_files(self) -> None:
|
||||
try:
|
||||
with progress_message(__('copying static files... ')):
|
||||
with progress_message(__('copying static files')):
|
||||
ensuredir(path.join(self.outdir, '_static'))
|
||||
|
||||
# prepare context for templates
|
||||
|
||||
@@ -24,7 +24,7 @@ from sphinx.builders.latex.constants import ADDITIONAL_SETTINGS, DEFAULT_SETTING
|
||||
from sphinx.builders.latex.theming import Theme, ThemeFactory
|
||||
from sphinx.builders.latex.util import ExtBabel
|
||||
from sphinx.config import Config, ENUM
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning, RemovedInSphinx50Warning
|
||||
from sphinx.environment.adapters.asset import ImageAdapter
|
||||
from sphinx.errors import NoUri, SphinxError
|
||||
from sphinx.locale import _, __
|
||||
@@ -128,8 +128,6 @@ class LaTeXBuilder(Builder):
|
||||
self.docnames = [] # type: Iterable[str]
|
||||
self.document_data = [] # type: List[Tuple[str, str, str, str, str, bool]]
|
||||
self.themes = ThemeFactory(self.app)
|
||||
self.usepackages = self.app.registry.latex_packages
|
||||
self.usepackages_after_hyperref = self.app.registry.latex_packages_after_hyperref
|
||||
texescape.init()
|
||||
|
||||
self.init_context()
|
||||
@@ -179,10 +177,6 @@ class LaTeXBuilder(Builder):
|
||||
key = (self.config.latex_engine, self.config.language[:2])
|
||||
self.context.update(ADDITIONAL_SETTINGS.get(key, {}))
|
||||
|
||||
# Apply extension settings to context
|
||||
self.context['packages'] = self.usepackages
|
||||
self.context['packages_after_hyperref'] = self.usepackages_after_hyperref
|
||||
|
||||
# Apply user settings to context
|
||||
self.context.update(self.config.latex_elements)
|
||||
self.context['release'] = self.config.release
|
||||
@@ -203,6 +197,13 @@ class LaTeXBuilder(Builder):
|
||||
# Show the release label only if release value exists
|
||||
self.context.setdefault('releasename', _('Release'))
|
||||
|
||||
def update_context(self) -> None:
|
||||
"""Update template variables for .tex file just before writing."""
|
||||
# Apply extension settings to context
|
||||
registry = self.app.registry
|
||||
self.context['packages'] = registry.latex_packages
|
||||
self.context['packages_after_hyperref'] = registry.latex_packages_after_hyperref
|
||||
|
||||
def init_babel(self) -> None:
|
||||
self.babel = ExtBabel(self.config.language, not self.context['babel'])
|
||||
if self.config.language and not self.babel.is_supported_language():
|
||||
@@ -290,6 +291,7 @@ class LaTeXBuilder(Builder):
|
||||
doctree['tocdepth'] = tocdepth
|
||||
self.post_process_images(doctree)
|
||||
self.update_doc_context(title, author, theme)
|
||||
self.update_context()
|
||||
|
||||
with progress_message(__("writing")):
|
||||
docsettings._author = author
|
||||
@@ -448,6 +450,18 @@ class LaTeXBuilder(Builder):
|
||||
filename = path.join(package_dir, 'templates', 'latex', 'sphinxmessages.sty_t')
|
||||
copy_asset_file(filename, self.outdir, context=context, renderer=LaTeXRenderer())
|
||||
|
||||
@property
|
||||
def usepackages(self) -> List[Tuple[str, str]]:
|
||||
warnings.warn('LaTeXBuilder.usepackages is deprecated.',
|
||||
RemovedInSphinx50Warning, stacklevel=2)
|
||||
return self.app.registry.latex_packages
|
||||
|
||||
@property
|
||||
def usepackages_after_hyperref(self) -> List[Tuple[str, str]]:
|
||||
warnings.warn('LaTeXBuilder.usepackages_after_hyperref is deprecated.',
|
||||
RemovedInSphinx50Warning, stacklevel=2)
|
||||
return self.app.registry.latex_packages_after_hyperref
|
||||
|
||||
|
||||
def patch_settings(settings: Any) -> Any:
|
||||
"""Make settings object to show deprecation messages."""
|
||||
@@ -503,9 +517,9 @@ def validate_latex_theme_options(app: Sphinx, config: Config) -> None:
|
||||
config.latex_theme_options.pop(key)
|
||||
|
||||
|
||||
def install_pakcages_for_ja(app: Sphinx) -> None:
|
||||
def install_packages_for_ja(app: Sphinx) -> None:
|
||||
"""Install packages for Japanese."""
|
||||
if app.config.language == 'ja':
|
||||
if app.config.language == 'ja' and app.config.latex_engine in ('platex', 'uplatex'):
|
||||
app.add_latex_package('pxjahyper', after_hyperref=True)
|
||||
|
||||
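Because the template context is now refreshed in ``update_context()`` just
before the .tex file is written (#8081), packages registered from a later
event are still honoured.  A hypothetical extension sketch:

    from sphinx.application import Sphinx

    def add_packages(app: Sphinx) -> None:
        # Runs once the builder exists; the package still ends up in the
        # generated preamble because the LaTeX context is rebuilt at write time.
        if app.builder.name == 'latex':
            app.add_latex_package('booktabs')

    def setup(app: Sphinx):
        app.connect('builder-inited', add_packages)
        return {'version': '0.1'}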
|
||||
@@ -556,7 +570,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
app.add_builder(LaTeXBuilder)
|
||||
app.connect('config-inited', validate_config_values, priority=800)
|
||||
app.connect('config-inited', validate_latex_theme_options, priority=800)
|
||||
app.connect('builder-inited', install_pakcages_for_ja)
|
||||
app.connect('builder-inited', install_packages_for_ja)
|
||||
|
||||
app.add_config_value('latex_engine', default_latex_engine, None,
|
||||
ENUM('pdflatex', 'xelatex', 'lualatex', 'platex', 'uplatex'))
|
||||
|
||||
@@ -166,6 +166,7 @@ class CheckExternalLinksBuilder(Builder):
|
||||
# Read the whole document and see if #anchor exists
|
||||
response = requests.get(req_url, stream=True, config=self.app.config,
|
||||
auth=auth_info, **kwargs)
|
||||
response.raise_for_status()
|
||||
found = check_anchor(response, unquote(anchor))
|
||||
|
||||
if not found:
|
||||
@@ -210,7 +211,7 @@ class CheckExternalLinksBuilder(Builder):
|
||||
else:
|
||||
return 'redirected', new_url, 0
|
||||
|
||||
def check() -> Tuple[str, str, int]:
|
||||
def check(docname: str) -> Tuple[str, str, int]:
|
||||
# check for various conditions without bothering the network
|
||||
if len(uri) == 0 or uri.startswith(('#', 'mailto:')):
|
||||
return 'unchecked', '', 0
|
||||
@@ -219,7 +220,8 @@ class CheckExternalLinksBuilder(Builder):
|
||||
# non supported URI schemes (ex. ftp)
|
||||
return 'unchecked', '', 0
|
||||
else:
|
||||
if path.exists(path.join(self.srcdir, uri)):
|
||||
srcdir = path.dirname(self.env.doc2path(docname))
|
||||
if path.exists(path.join(srcdir, uri)):
|
||||
return 'working', '', 0
|
||||
else:
|
||||
for rex in self.to_ignore:
|
||||
@@ -256,7 +258,7 @@ class CheckExternalLinksBuilder(Builder):
|
||||
uri, docname, lineno = self.wqueue.get()
|
||||
if uri is None:
|
||||
break
|
||||
status, info, code = check()
|
||||
status, info, code = check(docname)
|
||||
self.rqueue.put((uri, docname, lineno, status, info, code))
|
||||
|
||||
def process_result(self, result: Tuple[str, str, int, str, str, int]) -> None:
|
||||
|
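The ``check(docname)`` change above (#8245) resolves a relative local target
against the directory of the document that references it rather than against
the project source root; a standalone sketch of the same path logic (paths are
illustrative):

    from os import path

    def resolve_local_target(srcdir: str, docname: str, uri: str) -> str:
        # e.g. srcdir="/project/docs", docname="guide/install", uri="images/a.png"
        # -> "/project/docs/guide/images/a.png"
        docdir = path.dirname(path.join(srcdir, docname))
        return path.normpath(path.join(docdir, uri))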
||||
@@ -24,7 +24,7 @@ from sphinx.util import logging
|
||||
from sphinx.util import progress_message
|
||||
from sphinx.util.console import darkgreen # type: ignore
|
||||
from sphinx.util.nodes import inline_all_toctrees
|
||||
from sphinx.util.osutil import make_filename_from_project
|
||||
from sphinx.util.osutil import ensuredir, make_filename_from_project
|
||||
from sphinx.writers.manpage import ManualPageWriter, ManualPageTranslator
|
||||
|
||||
|
||||
@@ -80,7 +80,12 @@ class ManualPageBuilder(Builder):
|
||||
docsettings.authors = authors
|
||||
docsettings.section = section
|
||||
|
||||
targetname = '%s.%s' % (name, section)
|
||||
if self.config.man_make_section_directory:
|
||||
ensuredir(path.join(self.outdir, str(section)))
|
||||
targetname = '%s/%s.%s' % (section, name, section)
|
||||
else:
|
||||
targetname = '%s.%s' % (name, section)
|
||||
|
||||
logger.info(darkgreen(targetname) + ' { ', nonl=True)
|
||||
destination = FileOutput(
|
||||
destination_path=path.join(self.outdir, targetname),
|
||||
@@ -115,6 +120,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
|
||||
app.add_config_value('man_pages', default_man_pages, None)
|
||||
app.add_config_value('man_show_urls', False, None)
|
||||
app.add_config_value('man_make_section_directory', False, None)
|
||||
|
||||
return {
|
||||
'version': 'builtin',
|
||||
|
||||
@@ -72,7 +72,7 @@ def dedent_lines(lines: List[str], dedent: int, location: Tuple[str, int] = None
|
||||
return lines
|
||||
|
||||
if any(s[:dedent].strip() for s in lines):
|
||||
logger.warning(__('Over dedent has detected'), location=location)
|
||||
logger.warning(__('non-whitespace stripped by dedent'), location=location)
|
||||
|
||||
new_lines = []
|
||||
for line in lines:
|
||||
|
||||
@@ -140,8 +140,8 @@ class ASTIdentifier(ASTBaseBase):
|
||||
reftype='identifier',
|
||||
reftarget=targetText, modname=None,
|
||||
classname=None)
|
||||
# key = symbol.get_lookup_key()
|
||||
# pnode['c:parent_key'] = key
|
||||
key = symbol.get_lookup_key()
|
||||
pnode['c:parent_key'] = key
|
||||
if self.is_anon():
|
||||
pnode += nodes.strong(text="[anonymous]")
|
||||
else:
|
||||
@@ -1583,6 +1583,11 @@ class Symbol:
|
||||
for s in sChild.get_all_symbols():
|
||||
yield s
|
||||
|
||||
@property
|
||||
def children(self) -> Iterator["Symbol"]:
|
||||
for c in self._children:
|
||||
yield c
|
||||
|
||||
@property
|
||||
def children_recurse_anon(self) -> Iterator["Symbol"]:
|
||||
for c in self._children:
|
||||
@@ -3425,10 +3430,13 @@ class CNamespacePopObject(SphinxDirective):
|
||||
|
||||
|
||||
class AliasNode(nodes.Element):
|
||||
def __init__(self, sig: str, env: "BuildEnvironment" = None,
|
||||
def __init__(self, sig: str, maxdepth: int, document: Any, env: "BuildEnvironment" = None,
|
||||
parentKey: LookupKey = None) -> None:
|
||||
super().__init__()
|
||||
self.sig = sig
|
||||
self.maxdepth = maxdepth
|
||||
assert maxdepth >= 0
|
||||
self.document = document
|
||||
if env is not None:
|
||||
if 'c:parent_symbol' not in env.temp_data:
|
||||
root = env.domaindata['c']['root_symbol']
|
||||
@@ -3445,6 +3453,37 @@ class AliasNode(nodes.Element):
|
||||
class AliasTransform(SphinxTransform):
|
||||
default_priority = ReferencesResolver.default_priority - 1
|
||||
|
||||
def _render_symbol(self, s: Symbol, maxdepth: int, document: Any) -> List[Node]:
|
||||
nodes = [] # type: List[Node]
|
||||
options = dict() # type: ignore
|
||||
signode = addnodes.desc_signature('', '')
|
||||
nodes.append(signode)
|
||||
s.declaration.describe_signature(signode, 'markName', self.env, options)
|
||||
if maxdepth == 0:
|
||||
recurse = True
|
||||
elif maxdepth == 1:
|
||||
recurse = False
|
||||
else:
|
||||
maxdepth -= 1
|
||||
recurse = True
|
||||
if recurse:
|
||||
content = addnodes.desc_content()
|
||||
desc = addnodes.desc()
|
||||
content.append(desc)
|
||||
desc.document = document
|
||||
desc['domain'] = 'c'
|
||||
# 'desctype' is a backwards compatible attribute
|
||||
desc['objtype'] = desc['desctype'] = 'alias'
|
||||
desc['noindex'] = True
|
||||
|
||||
for sChild in s.children:
|
||||
childNodes = self._render_symbol(sChild, maxdepth, document)
|
||||
desc.extend(childNodes)
|
||||
|
||||
if len(desc.children) != 0:
|
||||
nodes.append(content)
|
||||
return nodes
|
||||
|
||||
def apply(self, **kwargs: Any) -> None:
|
||||
for node in self.document.traverse(AliasNode):
|
||||
sig = node.sig
|
||||
@@ -3485,17 +3524,16 @@ class AliasTransform(SphinxTransform):
|
||||
logger.warning("Could not find C declaration for alias '%s'." % name,
|
||||
location=node)
|
||||
node.replace_self(signode)
|
||||
else:
|
||||
nodes = []
|
||||
options = dict() # type: ignore
|
||||
signode = addnodes.desc_signature(sig, '')
|
||||
nodes.append(signode)
|
||||
s.declaration.describe_signature(signode, 'markName', self.env, options)
|
||||
node.replace_self(nodes)
|
||||
continue
|
||||
|
||||
nodes = self._render_symbol(s, maxdepth=node.maxdepth, document=node.document)
|
||||
node.replace_self(nodes)
|
||||
|
||||
|
||||
class CAliasObject(ObjectDescription):
|
||||
option_spec = {} # type: Dict
|
||||
option_spec = {
|
||||
'maxdepth': directives.nonnegative_int
|
||||
} # type: Dict
|
||||
|
||||
def run(self) -> List[Node]:
|
||||
if ':' in self.name:
|
||||
@@ -3511,16 +3549,10 @@ class CAliasObject(ObjectDescription):
|
||||
node['noindex'] = True
|
||||
|
||||
self.names = [] # type: List[str]
|
||||
maxdepth = self.options.get('maxdepth', 1)
|
||||
signatures = self.get_signatures()
|
||||
for i, sig in enumerate(signatures):
|
||||
node.append(AliasNode(sig, env=self.env))
|
||||
|
||||
contentnode = addnodes.desc_content()
|
||||
node.append(contentnode)
|
||||
self.before_content()
|
||||
self.state.nested_parse(self.content, self.content_offset, contentnode)
|
||||
self.env.temp_data['object'] = None
|
||||
self.after_content()
|
||||
node.append(AliasNode(sig, maxdepth, self.state.document, env=self.env))
|
||||
return [node]
|
||||
|
||||
|
||||
|
||||
@@ -500,7 +500,8 @@ class ProductionList(SphinxDirective):
|
||||
except ValueError:
|
||||
break
|
||||
subnode = addnodes.production(rule)
|
||||
subnode['tokenname'] = name.strip()
|
||||
name = name.strip()
|
||||
subnode['tokenname'] = name
|
||||
if subnode['tokenname']:
|
||||
prefix = 'grammar-token-%s' % productionGroup
|
||||
node_id = make_id(self.env, self.state.document, prefix, name)
|
||||
|
||||
@@ -224,6 +224,10 @@ class TocTreeCollector(EnvironmentCollector):
|
||||
def get_figtype(node: Node) -> str:
|
||||
for domain in env.domains.values():
|
||||
figtype = domain.get_enumerable_node_type(node)
|
||||
if domain.name == 'std' and not domain.get_numfig_title(node): # type: ignore
|
||||
# Skip if uncaptioned node
|
||||
continue
|
||||
|
||||
if figtype:
|
||||
return figtype
|
||||
|
||||
|
||||
@@ -94,7 +94,10 @@ def members_option(arg: Any) -> Union[object, List[str]]:
|
||||
"""Used to convert the :members: option to auto directives."""
|
||||
if arg is None or arg is True:
|
||||
return ALL
|
||||
return [x.strip() for x in arg.split(',') if x.strip()]
|
||||
elif arg is False:
|
||||
return None
|
||||
else:
|
||||
return [x.strip() for x in arg.split(',') if x.strip()]
|
||||
|
||||
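With the ``elif arg is False`` branch added above, a ``False`` value maps to
``None`` (no members) instead of reaching ``arg.split()`` and raising
AttributeError (#8143).  A hedged ``conf.py`` illustration of a configuration
that previously failed:

    extensions = ['sphinx.ext.autodoc']
    autodoc_default_options = {
        'members': True,           # document all members
        'special-members': False,  # now means "none" instead of raising
    }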
|
||||
def members_set_option(arg: Any) -> Union[object, Set[str]]:
|
||||
@@ -172,7 +175,7 @@ def merge_members_option(options: Dict) -> None:
|
||||
|
||||
members = options.setdefault('members', [])
|
||||
for key in {'private-members', 'special-members'}:
|
||||
if key in options and options[key] is not ALL:
|
||||
if key in options and options[key] not in (ALL, None):
|
||||
for member in options[key]:
|
||||
if member not in members:
|
||||
members.append(member)
|
||||
@@ -532,6 +535,11 @@ class Documenter:
|
||||
self.env.app.emit('autodoc-process-docstring',
|
||||
self.objtype, self.fullname, self.object,
|
||||
self.options, docstringlines)
|
||||
|
||||
if docstringlines and docstringlines[-1] != '':
|
||||
# append a blank line to the end of the docstring
|
||||
docstringlines.append('')
|
||||
|
||||
yield from docstringlines
|
||||
|
||||
def get_sourcename(self) -> str:
|
||||
@@ -1205,7 +1213,8 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
|
||||
|
||||
try:
|
||||
self.env.app.emit('autodoc-before-process-signature', self.object, False)
|
||||
sig = inspect.signature(self.object, follow_wrapped=True)
|
||||
sig = inspect.signature(self.object, follow_wrapped=True,
|
||||
type_aliases=self.env.config.autodoc_type_aliases)
|
||||
args = stringify_signature(sig, **kwargs)
|
||||
except TypeError as exc:
|
||||
logger.warning(__("Failed to get a function signature for %s: %s"),
|
||||
@@ -1254,7 +1263,9 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
|
||||
if overloaded:
|
||||
__globals__ = safe_getattr(self.object, '__globals__', {})
|
||||
for overload in self.analyzer.overloads.get('.'.join(self.objpath)):
|
||||
overload = evaluate_signature(overload, __globals__)
|
||||
overload = evaluate_signature(overload, __globals__,
|
||||
self.env.config.autodoc_type_aliases)
|
||||
|
||||
sig = stringify_signature(overload, **kwargs)
|
||||
sigs.append(sig)
|
||||
|
||||
@@ -1263,7 +1274,7 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
|
||||
def annotate_to_first_argument(self, func: Callable, typ: Type) -> None:
|
||||
"""Annotate type hint to the first argument of function if needed."""
|
||||
try:
|
||||
sig = inspect.signature(func)
|
||||
sig = inspect.signature(func, type_aliases=self.env.config.autodoc_type_aliases)
|
||||
except TypeError as exc:
|
||||
logger.warning(__("Failed to get a function signature for %s: %s"),
|
||||
self.fullname, exc)
|
||||
@@ -1291,6 +1302,11 @@ class SingledispatchFunctionDocumenter(FunctionDocumenter):
|
||||
Retained for backwards compatibility, now does the same as the FunctionDocumenter
|
||||
"""
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
warnings.warn("%s is deprecated." % self.__class__.__name__,
|
||||
RemovedInSphinx50Warning, stacklevel=2)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class DecoratorDocumenter(FunctionDocumenter):
|
||||
"""
|
||||
@@ -1317,6 +1333,12 @@ _METACLASS_CALL_BLACKLIST = [
|
||||
]
|
||||
|
||||
|
||||
# Types whose __new__ signature is a pass-thru.
|
||||
_CLASS_NEW_BLACKLIST = [
|
||||
'typing.Generic.__new__',
|
||||
]
|
||||
|
||||
|
||||
class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore
|
||||
"""
|
||||
Specialized Documenter subclass for classes.
|
||||
@@ -1378,17 +1400,24 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
|
||||
if call is not None:
|
||||
self.env.app.emit('autodoc-before-process-signature', call, True)
|
||||
try:
|
||||
sig = inspect.signature(call, bound_method=True)
|
||||
sig = inspect.signature(call, bound_method=True,
|
||||
type_aliases=self.env.config.autodoc_type_aliases)
|
||||
return type(self.object), '__call__', sig
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
# Now we check if the 'obj' class has a '__new__' method
|
||||
new = get_user_defined_function_or_method(self.object, '__new__')
|
||||
|
||||
if new is not None:
|
||||
if "{0.__module__}.{0.__qualname__}".format(new) in _CLASS_NEW_BLACKLIST:
|
||||
new = None
|
||||
|
||||
if new is not None:
|
||||
self.env.app.emit('autodoc-before-process-signature', new, True)
|
||||
try:
|
||||
sig = inspect.signature(new, bound_method=True)
|
||||
sig = inspect.signature(new, bound_method=True,
|
||||
type_aliases=self.env.config.autodoc_type_aliases)
|
||||
return self.object, '__new__', sig
|
||||
except ValueError:
|
||||
pass
|
||||
@@ -1398,7 +1427,8 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
|
||||
if init is not None:
|
||||
self.env.app.emit('autodoc-before-process-signature', init, True)
|
||||
try:
|
||||
sig = inspect.signature(init, bound_method=True)
|
||||
sig = inspect.signature(init, bound_method=True,
|
||||
type_aliases=self.env.config.autodoc_type_aliases)
|
||||
return self.object, '__init__', sig
|
||||
except ValueError:
|
||||
pass
|
||||
@@ -1409,7 +1439,8 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
|
||||
# the signature from, so just pass the object itself to our hook.
|
||||
self.env.app.emit('autodoc-before-process-signature', self.object, False)
|
||||
try:
|
||||
sig = inspect.signature(self.object, bound_method=False)
|
||||
sig = inspect.signature(self.object, bound_method=False,
|
||||
type_aliases=self.env.config.autodoc_type_aliases)
|
||||
return None, None, sig
|
||||
except ValueError:
|
||||
pass
|
||||
@@ -1440,23 +1471,16 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
|
||||
return ''
|
||||
|
||||
sig = super().format_signature()
|
||||
|
||||
overloaded = False
|
||||
qualname = None
|
||||
# TODO: recreate analyzer for the module of class (To be clear, owner of the method)
|
||||
if self._signature_class and self._signature_method_name and self.analyzer:
|
||||
qualname = '.'.join([self._signature_class.__qualname__,
|
||||
self._signature_method_name])
|
||||
if qualname in self.analyzer.overloads:
|
||||
overloaded = True
|
||||
|
||||
sigs = []
|
||||
if overloaded:
|
||||
|
||||
overloads = self.get_overloaded_signatures()
|
||||
if overloads:
|
||||
# Use signatures for overloaded methods instead of the implementation method.
|
||||
method = safe_getattr(self._signature_class, self._signature_method_name, None)
|
||||
__globals__ = safe_getattr(method, '__globals__', {})
|
||||
for overload in self.analyzer.overloads.get(qualname):
|
||||
overload = evaluate_signature(overload, __globals__)
|
||||
for overload in overloads:
|
||||
overload = evaluate_signature(overload, __globals__,
|
||||
self.env.config.autodoc_type_aliases)
|
||||
|
||||
parameters = list(overload.parameters.values())
|
||||
overload = overload.replace(parameters=parameters[1:],
|
||||
@@ -1468,6 +1492,20 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
|
||||
|
||||
return "\n".join(sigs)
|
||||
|
||||
def get_overloaded_signatures(self) -> List[Signature]:
|
||||
if self._signature_class and self._signature_method_name:
|
||||
for cls in self._signature_class.__mro__:
|
||||
try:
|
||||
analyzer = ModuleAnalyzer.for_module(cls.__module__)
|
||||
analyzer.parse()
|
||||
qualname = '.'.join([cls.__qualname__, self._signature_method_name])
|
||||
if qualname in analyzer.overloads:
|
||||
return analyzer.overloads.get(qualname)
|
||||
except PycodeError:
|
||||
pass
|
||||
|
||||
return []
|
||||
|
||||
def add_directive_header(self, sig: str) -> None:
|
||||
sourcename = self.get_sourcename()
|
||||
|
||||
@@ -1704,7 +1742,8 @@ class GenericAliasDocumenter(DataDocumenter):
|
||||
return inspect.isgenericalias(member)
|
||||
|
||||
def add_directive_header(self, sig: str) -> None:
|
||||
self.options.annotation = SUPPRESS # type: ignore
|
||||
        self.options = Options(self.options)
        self.options['annotation'] = SUPPRESS
        super().add_directive_header(sig)

    def add_content(self, more_content: Any, no_docstring: bool = False) -> None:
@@ -1728,7 +1767,8 @@ class TypeVarDocumenter(DataDocumenter):
        return isinstance(member, TypeVar) and isattr

    def add_directive_header(self, sig: str) -> None:
        self.options.annotation = SUPPRESS  # type: ignore
        self.options = Options(self.options)
        self.options['annotation'] = SUPPRESS
        super().add_directive_header(sig)

    def get_doc(self, encoding: str = None, ignore: int = None) -> List[List[str]]:
@@ -1801,11 +1841,13 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter):  # type:
            else:
                if inspect.isstaticmethod(self.object, cls=self.parent, name=self.object_name):
                    self.env.app.emit('autodoc-before-process-signature', self.object, False)
                    sig = inspect.signature(self.object, bound_method=False)
                    sig = inspect.signature(self.object, bound_method=False,
                                            type_aliases=self.env.config.autodoc_type_aliases)
                else:
                    self.env.app.emit('autodoc-before-process-signature', self.object, True)
                    sig = inspect.signature(self.object, bound_method=True,
                                            follow_wrapped=True)
                                            follow_wrapped=True,
                                            type_aliases=self.env.config.autodoc_type_aliases)
                args = stringify_signature(sig, **kwargs)
        except TypeError as exc:
            logger.warning(__("Failed to get a method signature for %s: %s"),
@@ -1865,7 +1907,9 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter):  # type:
        if overloaded:
            __globals__ = safe_getattr(self.object, '__globals__', {})
            for overload in self.analyzer.overloads.get('.'.join(self.objpath)):
                overload = evaluate_signature(overload, __globals__)
                overload = evaluate_signature(overload, __globals__,
                                              self.env.config.autodoc_type_aliases)

                if not inspect.isstaticmethod(self.object, cls=self.parent,
                                              name=self.object_name):
                    parameters = list(overload.parameters.values())
@@ -1878,7 +1922,7 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter):  # type:
    def annotate_to_first_argument(self, func: Callable, typ: Type) -> None:
        """Annotate type hint to the first argument of function if needed."""
        try:
            sig = inspect.signature(func)
            sig = inspect.signature(func, type_aliases=self.env.config.autodoc_type_aliases)
        except TypeError as exc:
            logger.warning(__("Failed to get a method signature for %s: %s"),
                           self.fullname, exc)
@@ -1905,6 +1949,11 @@ class SingledispatchMethodDocumenter(MethodDocumenter):
    Retained for backwards compatibility, now does the same as the MethodDocumenter
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        warnings.warn("%s is deprecated." % self.__class__.__name__,
                      RemovedInSphinx50Warning, stacklevel=2)
        super().__init__(*args, **kwargs)


class AttributeDocumenter(DocstringStripSignatureMixin, ClassLevelDocumenter):  # type: ignore
    """
@@ -2218,6 +2267,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:
    app.add_config_value('autodoc_mock_imports', [], True)
    app.add_config_value('autodoc_typehints', "signature", True,
                         ENUM("signature", "description", "none"))
    app.add_config_value('autodoc_type_aliases', {}, True)
    app.add_config_value('autodoc_warningiserror', True, True)
    app.add_config_value('autodoc_inherit_docstrings', True, True)
    app.add_event('autodoc-before-process-signature')
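The hunks above thread the new autodoc_type_aliases mapping through every place autodoc resolves a signature (regular methods, wrapped methods, overloads, and the first-argument annotation helper). A minimal sketch of how a project might use the option from its conf.py; the module and alias names below are invented for illustration, and the documented modules are assumed to use string annotations (for example via ``from __future__ import annotations``) so the alias name is still visible to autodoc:

    # conf.py (hypothetical project configuration)
    extensions = ['sphinx.ext.autodoc']

    # Map the alias name as it appears in annotations to the target that
    # should be shown and cross-referenced in the generated docs.
    autodoc_type_aliases = {
        'ArrayLike': 'mypackage.typing.ArrayLike',
    }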
@@ -66,6 +66,10 @@ module_sig_re = re.compile(r'''^(?:([\w.]*)\.)? # module names
                           ''', re.VERBOSE)


py_builtins = [obj for obj in vars(builtins).values()
               if inspect.isclass(obj)]


def try_import(objname: str) -> Any:
    """Import a object or module using *name* and *currentmodule*.
    *name* should be a relative name from *currentmodule* or
@@ -178,7 +182,6 @@ class InheritanceGraph:
        traverse to.  Multiple names can be specified separated by comma.
        """
        all_classes = {}
        py_builtins = vars(builtins).values()

        def recurse(cls: Any) -> None:
            if not show_builtins and cls in py_builtins:
@@ -31,12 +31,12 @@ logger = logging.getLogger(__name__)

_directive_regex = re.compile(r'\.\. \S+::')
_google_section_regex = re.compile(r'^(\s|\w)+:\s*$')
_google_typed_arg_regex = re.compile(r'\s*(.+?)\s*\(\s*(.*[^\s]+)\s*\)')
_google_typed_arg_regex = re.compile(r'(.+?)\(\s*(.*[^\s]+)\s*\)')
_numpy_section_regex = re.compile(r'^[=\-`:\'"~^_*+#<>]{2,}\s*$')
_single_colon_regex = re.compile(r'(?<!:):(?!:)')
_xref_or_code_regex = re.compile(
    r'((?::(?:[a-zA-Z0-9]+[\-_+:.])*[a-zA-Z0-9]+:`.+?`)|'
    r'(?:``.+``))')
    r'(?:``.+?``))')
_xref_regex = re.compile(
    r'(?:(?::(?:[a-zA-Z0-9]+[\-_+:.])*[a-zA-Z0-9]+:)?`.+?`)'
)
@@ -254,7 +254,7 @@ class GoogleDocstring:
        if parse_type:
            match = _google_typed_arg_regex.match(before)
            if match:
                _name = match.group(1)
                _name = match.group(1).strip()
                _type = match.group(2)

        _name = self._escape_args_and_kwargs(_name)
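For context, the regex tweaks above change how napoleon splits Google-style ``name (type): description`` lines and how it protects inline literals from further processing. A small, self-contained way to observe the parsing; the docstring content is invented for illustration:

    from sphinx.ext.napoleon import Config
    from sphinx.ext.napoleon.docstring import GoogleDocstring

    docstring = """Example function.

    Args:
        timeout (int): Seconds to wait; use ``0`` to return immediately.
    """

    # GoogleDocstring converts the Args section into reST field lists.
    print(GoogleDocstring(docstring, Config(napoleon_use_param=True)))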
@@ -106,7 +106,7 @@ class _TranslationProxy(UserString):
translators = defaultdict(NullTranslations)  # type: Dict[Tuple[str, str], NullTranslations]


def init(locale_dirs: List[str], language: str,
def init(locale_dirs: List[Optional[str]], language: str,
         catalog: str = 'sphinx', namespace: str = 'general') -> Tuple[NullTranslations, bool]:
    """Look for message catalogs in `locale_dirs` and *ensure* that there is at
    least a NullTranslations catalog set in `translators`.  If called multiple
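The change above only loosens the typing of ``locale_dirs`` (``None`` entries are now accepted); the call pattern stays the same. A rough sketch of how an extension might load its own message catalog, with the directory and catalog names used here as placeholders:

    from sphinx.locale import get_translation, init

    # Looks for ./locales/de/LC_MESSAGES/myextension.mo and always falls back
    # to a NullTranslations catalog if nothing is found.
    translator, has_translation = init(['./locales'], 'de',
                                       catalog='myextension', namespace='myextension')
    _ = get_translation('myextension', 'myextension')
    print(_('Hello world'))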
@@ -58,17 +58,19 @@ def parse(code: str, mode: str = 'exec') -> "ast.AST":
    return ast.parse(code, mode=mode)


def unparse(node: Optional[ast.AST]) -> Optional[str]:
def unparse(node: Optional[ast.AST], code: str = '') -> Optional[str]:
    """Unparse an AST to string."""
    if node is None:
        return None
    elif isinstance(node, str):
        return node
    return _UnparseVisitor().visit(node)
    return _UnparseVisitor(code).visit(node)


# a greatly cut-down version of `ast._Unparser`
class _UnparseVisitor(ast.NodeVisitor):
    def __init__(self, code: str = '') -> None:
        self.code = code

    def _visit_op(self, node: ast.AST) -> str:
        return OPERATORS[node.__class__]
@@ -166,14 +168,28 @@ class _UnparseVisitor(ast.NodeVisitor):
        return "{" + ", ".join(self.visit(e) for e in node.elts) + "}"

    def visit_Subscript(self, node: ast.Subscript) -> str:
        return "%s[%s]" % (self.visit(node.value), self.visit(node.slice))
        def is_simple_tuple(value: ast.AST) -> bool:
            return (
                isinstance(value, ast.Tuple) and
                bool(value.elts) and
                not any(isinstance(elt, ast.Starred) for elt in value.elts)
            )

        if is_simple_tuple(node.slice):
            elts = ", ".join(self.visit(e) for e in node.slice.elts)  # type: ignore
            return "%s[%s]" % (self.visit(node.value), elts)
        elif isinstance(node.slice, ast.Index) and is_simple_tuple(node.slice.value):
            elts = ", ".join(self.visit(e) for e in node.slice.value.elts)  # type: ignore
            return "%s[%s]" % (self.visit(node.value), elts)
        else:
            return "%s[%s]" % (self.visit(node.value), self.visit(node.slice))

    def visit_UnaryOp(self, node: ast.UnaryOp) -> str:
        return "%s %s" % (self.visit(node.op), self.visit(node.operand))

    def visit_Tuple(self, node: ast.Tuple) -> str:
        if node.elts:
            return ", ".join(self.visit(e) for e in node.elts)
            return "(" + ", ".join(self.visit(e) for e in node.elts) + ")"
        else:
            return "()"
@@ -181,6 +197,11 @@ class _UnparseVisitor(ast.NodeVisitor):
    def visit_Constant(self, node: ast.Constant) -> str:
        if node.value is Ellipsis:
            return "..."
        elif isinstance(node.value, (int, float, complex)):
            if self.code and sys.version_info > (3, 8):
                return ast.get_source_segment(self.code, node)
            else:
                return repr(node.value)
        else:
            return repr(node.value)
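Taken together, the changes above make ``unparse()`` reproduce literals from the original source when it is available (so a hexadecimal default such as ``0x01`` is no longer rewritten as ``1``) and wrap tuples in parentheses. A quick sketch of the resulting behaviour, assuming Python 3.8 or newer so that ``ast.get_source_segment()`` can be used:

    from sphinx.pycode import ast

    source = "(0x01, 0x02)"
    module = ast.parse(source)
    node = module.body[0].value  # the ast.Tuple inside the expression statement

    # With the original source passed along, numeric literals keep their
    # spelling; expected output: (0x01, 0x02)
    print(ast.unparse(node, source))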
@@ -259,12 +259,12 @@ class SphinxComponentRegistry:
        else:
            self.source_suffix[suffix] = filetype

    def add_source_parser(self, parser: "Type[Parser]", **kwargs: Any) -> None:
    def add_source_parser(self, parser: "Type[Parser]", override: bool = False) -> None:
        logger.debug('[app] adding search source_parser: %r', parser)

        # create a map from filetype to parser
        for filetype in parser.supported:
            if filetype in self.source_parsers and not kwargs.get('override'):
            if filetype in self.source_parsers and not override:
                raise ExtensionError(__('source_parser for %r is already registered') %
                                     filetype)
            else:
@@ -367,7 +367,14 @@ class SphinxComponentRegistry:
        logger.debug('[app] adding js_file: %r, %r', filename, attributes)
        self.js_files.append((filename, attributes))

    def has_latex_package(self, name: str) -> bool:
        packages = self.latex_packages + self.latex_packages_after_hyperref
        return bool([x for x in packages if x[0] == name])

    def add_latex_package(self, name: str, options: str, after_hyperref: bool = False) -> None:
        if self.has_latex_package(name):
            logger.warn("latex package '%s' already included" % name)

        logger.debug('[app] adding latex package: %r', name)
        if after_hyperref:
            self.latex_packages_after_hyperref.append((name, options))
@@ -394,7 +401,7 @@ class SphinxComponentRegistry:

    def load_extension(self, app: "Sphinx", extname: str) -> None:
        """Load a Sphinx extension."""
        if extname in app.extensions:  # alread loaded
        if extname in app.extensions:  # already loaded
            return
        if extname in EXTENSION_BLACKLIST:
            logger.warning(__('the extension %r was already merged with Sphinx since '
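The registry now tracks which LaTeX packages are already queued (``has_latex_package``), which is what allows extensions to add packages late, right up to when the .tex file is written. A hedged sketch of the extension-facing call; the package name and options here are only placeholders:

    from sphinx.application import Sphinx

    def setup(app: Sphinx) -> dict:
        # Queues \usepackage[someoption]{somepackage}; pass after_hyperref=True
        # for packages that must be loaded after hyperref.
        app.add_latex_package('somepackage', options='someoption', after_hyperref=False)
        return {'version': '0.1', 'parallel_read_safe': True}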
@@ -143,7 +143,7 @@ JSX.resetProfileResults = function () {
	return $__jsx_profiler.resetResults();
};
JSX.DEBUG = false;
var GeneratorFunction$0 =
var GeneratorFunction$0 =
(function () {
	try {
		return Function('import {GeneratorFunction} from "std:iteration"; return GeneratorFunction')();
@@ -151,7 +151,7 @@ var GeneratorFunction$0 =
		return function GeneratorFunction () {};
	}
})();
var __jsx_generator_object$0 =
var __jsx_generator_object$0 =
(function () {
	function __jsx_generator_object() {
		this.__next = 0;
Some files were not shown because too many files have changed in this diff.