Merge branch '3.x'
commit 327c187284
@@ -3,6 +3,8 @@ jobs:
  build:
    docker:
      - image: sphinxdoc/docker-ci
    environment:
      DO_EPUBCHECK: 1
    working_directory: /sphinx
    steps:
      - checkout
CHANGES  (40 changed lines)
@@ -51,36 +51,69 @@ Incompatible changes
Deprecated
----------

* C, parsing of pre-v3 style type directives and roles, along with the options
  :confval:`c_allow_pre_v3` and :confval:`c_warn_on_allowed_pre_v3`.

Features added
--------------

* #2076: autodoc: Allow overriding of exclude-members in skip-member function
* #2024: autosummary: Add :confval:`autosummary_filename_map` to avoid conflict
  of filenames between two object with different case
* #7849: html: Add :confval:`html_codeblock_linenos_style` to change the style
  of line numbers for code-blocks
* #7853: C and C++, support parameterized GNU style attributes.
* #7888: napoleon: Add aliases Warn and Raise.
* C, added :rst:dir:`c:alias` directive for inserting copies
  of existing declarations.
* #7745: html: inventory is broken if the docname contains a space
* #7902: html theme: Add a new option :confval:`globaltoc_maxdepth` to control
  the behavior of globaltoc in sidebar
* #7840: i18n: Optimize the dependencies check on bootstrap
* #5208: linkcheck: Support checks for local links
* #5090: setuptools: Link verbosity to distutils' -v and -q option
* #7052: add ``:noindexentry:`` to the Python, C, C++, and Javascript domains.
  Update the documentation to better reflect the relationship between this option
  and the ``:noindex:`` option.
* #7899: C, add possibility of parsing of some pre-v3 style type directives and
  roles and try to convert them to equivalent v3 directives/roles.
  Set the new option :confval:`c_allow_pre_v3` to ``True`` to enable this.
  The warnings printed from this functionality can be suppressed by setting
  :confval:`c_warn_on_allowed_pre_v3` to ``False``.
  The functionality is immediately deprecated.

Bugs fixed
----------

* #7886: autodoc: TypeError is raised on mocking generic-typed classes
* #7935: autodoc: function signature is not shown when the function has a
  parameter having ``inspect._empty`` as its default value
* #7901: autodoc: type annotations for overloaded functions are not resolved
* #904: autodoc: An instance attribute cause a crash of autofunction directive
* #1362: autodoc: ``private-members`` option does not work for class attributes
* #7983: autodoc: Generator type annotation is wrongly rendered in py36
* #7839: autosummary: cannot handle umlauts in function names
* #7865: autosummary: Failed to extract summary line when abbreviations found
* #7866: autosummary: Failed to extract correct summary line when docstring
  contains a hyperlink target
* #7469: autosummary: "Module attributes" header is not translatable
* #7940: apidoc: An extra newline is generated at the end of the rst file if a
  module has submodules
* #4258: napoleon: decorated special methods are not shown
* #7715: LaTeX: ``numfig_secnum_depth > 1`` leads to wrong figure links
* #7846: html theme: XML-invalid files were generated
* #7894: gettext: Wrong source info is shown when using rst_epilog
* #7691: linkcheck: HEAD requests are not used for checking
* #4888: i18n: Failed to add an explicit title to ``:ref:`` role on translation
* #7928: py domain: failed to resolve a type annotation for the attribute
* #7968: i18n: The content of ``math`` directive is interpreted as reST on
  translation
* #7869: :rst:role:`abbr` role without an explanation will show the explanation
  from the previous abbr role
* C and C++, removed ``noindex`` directive option as it did
  nothing.
* #7619: Duplicated node IDs are generated if node has multiple IDs
* #2050: Symbols sections are appeared twice in the index page

Testing
--------
@@ -103,6 +136,9 @@ Features added
Bugs fixed
----------

* C, don't deepcopy the entire symbol table and make a mess every time an
  enumerator is handled.

Testing
--------
@@ -626,7 +662,7 @@ Release 2.4.1 (released Feb 11, 2020)
Bugs fixed
----------

* #7120: html: crashed when on scaling SVG images which have float dimentions
* #7120: html: crashed when on scaling SVG images which have float dimensions
* #7126: autodoc: TypeError: 'getset_descriptor' object is not iterable

Release 2.4.0 (released Feb 09, 2020)
@@ -772,7 +808,7 @@ Features added
* #6548: html: Use favicon for OpenSearch if available
* #6729: html theme: agogo theme now supports ``rightsidebar`` option
* #6780: Add PEP-561 Support
* #6762: latex: Allow to load additonal LaTeX packages via ``extrapackages`` key
* #6762: latex: Allow to load additional LaTeX packages via ``extrapackages`` key
  of :confval:`latex_elements`
* #1331: Add new config variable: :confval:`user_agent`
* #6000: LaTeX: have backslash also be an inline literal word wrap break
EXAMPLES  (3 changed lines)
@@ -363,7 +363,7 @@ Documentation using a custom theme or integrated in a website
* `Roundup <http://www.roundup-tracker.org/>`__
* `SaltStack <https://docs.saltstack.com/>`__
* `scikit-learn <http://scikit-learn.org/stable/>`__
* `SciPy <https://docs.scipy.org/doc/scipy/refrence/>`__
* `SciPy <https://docs.scipy.org/doc/scipy/reference/>`__
* `Scrapy <https://doc.scrapy.org/>`__
* `Seaborn <https://seaborn.pydata.org/>`__
* `Selenium <https://docs.seleniumhq.org/docs/>`__
@@ -390,6 +390,7 @@ Homepages and other non-documentation sites
* `Pylearn2 <http://www.deeplearning.net/software/pylearn2/>`__ (sphinxdoc, customized)
* `PyXLL <https://www.pyxll.com/>`__ (sphinx_bootstrap_theme, customized)
* `SciPy Cookbook <https://scipy-cookbook.readthedocs.io/>`__ (sphinx_rtd_theme)
* `Tech writer at work blog <https://blog.documatt.com/>`__ (custom theme)
* `The Wine Cellar Book <https://www.thewinecellarbook.com/doc/en/>`__ (sphinxdoc)
* `Thomas Cokelaer's Python, Sphinx and reStructuredText tutorials <https://thomas-cokelaer.info/tutorials/>`__ (standard)
* `UC Berkeley ME233 Advanced Control Systems II course <https://berkeley-me233.github.io/>`__ (sphinxdoc)
@@ -30,6 +30,10 @@
   :target: https://opensource.org/licenses/BSD-3-Clause
   :alt: BSD 3 Clause

.. image:: https://codetriage.com/sphinx-doc/sphinx/badges/users.svg
   :target: https://codetriage.com/sphinx-doc/sphinx
   :alt: Open Source Helpers badge

Sphinx is a tool that makes it easy to create intelligent and beautiful
documentation for Python projects (or other documents consisting of multiple
reStructuredText sources), written by Georg Brandl. It was originally created
@@ -2546,6 +2546,23 @@ Options for the C domain

   .. versionadded:: 3.0

.. confval:: c_allow_pre_v3

   A boolean (default ``False``) controlling whether to parse and try to
   convert pre-v3 style type directives and type roles.

   .. versionadded:: 3.2
   .. deprecated:: 3.2
      Use the directives and roles added in v3.

.. confval:: c_warn_on_allowed_pre_v3

   A boolean (default ``True``) controlling whether to warn when a pre-v3
   style type directive/role is parsed and converted.

   .. versionadded:: 3.2
   .. deprecated:: 3.2
      Use the directives and roles added in v3.

.. _cpp-config:
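For reference, a minimal ``conf.py`` sketch using the two options described above (the values are only an example of the transitional setup, not part of the change itself):

.. code-block:: python

   # conf.py -- a minimal sketch, not a complete configuration
   # Accept pre-v3 style C type directives/roles and convert them to v3 form.
   c_allow_pre_v3 = True
   # Do not warn about each conversion (the default is True, i.e. warn).
   c_warn_on_allowed_pre_v3 = False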
@@ -175,7 +175,7 @@ also use these config values:

.. confval:: autosummary_generate_overwrite

   If true, autosummary already overwrites stub files by generated contents.
   If true, autosummary overwrites existing files by generated stub pages.
   Defaults to true (enabled).

   .. versionadded:: 3.0
@@ -195,6 +195,15 @@ also use these config values:

   .. versionadded:: 2.1

.. confval:: autosummary_filename_map

   A dict mapping object names to filenames. This is necessary to avoid
   filename conflicts where multiple objects have names that are
   indistinguishable when case is ignored, on file systems where filenames
   are case-insensitive.

   .. versionadded:: 3.2


Customizing templates
---------------------
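A sketch of how the new mapping might look in ``conf.py``; the object names below are hypothetical, chosen only to show a case-insensitive collision:

.. code-block:: python

   # conf.py -- a minimal sketch; the object names are hypothetical
   extensions = ['sphinx.ext.autosummary']
   autosummary_generate = True
   # "mypkg.Parser" and "mypkg.parser" would map to the same stub filename on a
   # case-insensitive file system, so give one of them an explicit name.
   autosummary_filename_map = {
       'mypkg.Parser': 'mypkg.Parser-class',
   }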
@@ -172,6 +172,12 @@ These themes are:

   .. versionadded:: 3.1

- **globaltoc_maxdepth** (int): The maximum depth of the toctree in
  ``globaltoc.html`` (see :confval:`html_sidebars`). Set it to -1 to allow
  unlimited depth. Defaults to the max depth selected in the toctree directive.

  .. versionadded:: 3.2

**alabaster**
 `Alabaster theme`_ is a modified "Kr" Sphinx theme from @kennethreitz
 (especially as used in his Requests project), which was itself originally
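A sketch of how the new option would be set in ``conf.py`` for a theme based on the basic layout; the theme choice and depth value here are only examples:

.. code-block:: python

   # conf.py -- a minimal sketch; theme and depth are example choices
   html_theme = 'classic'
   html_theme_options = {
       # limit the sidebar globaltoc to two levels; -1 means unlimited depth
       'globaltoc_maxdepth': 2,
   }
   html_sidebars = {'**': ['globaltoc.html', 'searchbox.html']}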
package-lock.json  (generated, 6 changed lines)
@@ -702,9 +702,9 @@
      }
    },
    "lodash": {
      "version": "4.17.14",
      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.14.tgz",
      "integrity": "sha512-mmKYbW3GLuJeX+iGP+Y7Gp1AiGHGbXHCOh/jZmrawMmsE7MS4znI3RL2FsjbqOyMayHInjOeykW7PEajUk1/xw==",
      "version": "4.17.19",
      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz",
      "integrity": "sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==",
      "dev": true
    },
    "log4js": {
@@ -393,7 +393,7 @@ class EpubBuilder(StandaloneHTMLBuilder):
        return ext in VECTOR_GRAPHICS_EXTENSIONS

    def copy_image_files_pil(self) -> None:
        """Copy images using Pillow, the Python Imaging Libary.
        """Copy images using Pillow, the Python Imaging Library.
        The method tries to read and write the files with Pillow, converting
        the format and resizing the image if necessary/possible.
        """
@@ -49,9 +49,12 @@ class DirectoryHTMLBuilder(StandaloneHTMLBuilder):
# for compatibility
deprecated_alias('sphinx.builders.html',
                 {
                     'DirectoryHTMLBuilder': DirectoryHTMLBuilder,
                     'DirectoryHTMLBuilder': DirectoryHTMLBuilder,
                 },
                 RemovedInSphinx40Warning)
                 RemovedInSphinx40Warning,
                 {
                     'DirectoryHTMLBuilder': 'sphinx.builders.dirhtml.DirectoryHTMLBuilder',
                 })


def setup(app: Sphinx) -> Dict[str, Any]:
@ -15,6 +15,7 @@ import sys
|
||||
from datetime import datetime
|
||||
from os import path
|
||||
from typing import Any, Dict, IO, Iterable, Iterator, List, Set, Tuple, Type
|
||||
from urllib.parse import quote
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.core import publish_parts
|
||||
@ -881,6 +882,8 @@ class StandaloneHTMLBuilder(Builder):
|
||||
def _get_local_toctree(self, docname: str, collapse: bool = True, **kwargs: Any) -> str:
|
||||
if 'includehidden' not in kwargs:
|
||||
kwargs['includehidden'] = False
|
||||
if kwargs.get('maxdepth') == '':
|
||||
kwargs.pop('maxdepth')
|
||||
return self.render_partial(TocTree(self.env).get_toctree_for(
|
||||
docname, self, collapse, **kwargs))['fragment']
|
||||
|
||||
@ -940,7 +943,7 @@ class StandaloneHTMLBuilder(Builder):
|
||||
# --------- these are overwritten by the serialization builder
|
||||
|
||||
def get_target_uri(self, docname: str, typ: str = None) -> str:
|
||||
return docname + self.link_suffix
|
||||
return quote(docname) + self.link_suffix
|
||||
|
||||
def handle_page(self, pagename: str, addctx: Dict, templatename: str = 'page.html',
|
||||
outfilename: str = None, event_arg: Any = None) -> None:
|
||||
|
@ -35,6 +35,8 @@ from sphinx.util.requests import is_ssl_error
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
uri_re = re.compile('[a-z]+://')
|
||||
|
||||
|
||||
DEFAULT_REQUEST_HEADERS = {
|
||||
'Accept': 'text/html,application/xhtml+xml;q=0.9,*/*;q=0.8',
|
||||
@ -210,10 +212,21 @@ class CheckExternalLinksBuilder(Builder):
|
||||
|
||||
def check() -> Tuple[str, str, int]:
|
||||
# check for various conditions without bothering the network
|
||||
if len(uri) == 0 or uri.startswith(('#', 'mailto:', 'ftp:')):
|
||||
if len(uri) == 0 or uri.startswith(('#', 'mailto:')):
|
||||
return 'unchecked', '', 0
|
||||
elif not uri.startswith(('http:', 'https:')):
|
||||
return 'local', '', 0
|
||||
if uri_re.match(uri):
|
||||
# non supported URI schemes (ex. ftp)
|
||||
return 'unchecked', '', 0
|
||||
else:
|
||||
if path.exists(path.join(self.srcdir, uri)):
|
||||
return 'working', '', 0
|
||||
else:
|
||||
for rex in self.to_ignore:
|
||||
if rex.match(uri):
|
||||
return 'ignored', '', 0
|
||||
else:
|
||||
return 'broken', '', 0
|
||||
elif uri in self.good:
|
||||
return 'working', 'old', 0
|
||||
elif uri in self.broken:
|
||||
|
@@ -193,7 +193,11 @@ deprecated_alias('sphinx.builders.html',
                 {
                     'SingleFileHTMLBuilder': SingleFileHTMLBuilder,
                 },
                 RemovedInSphinx40Warning)
                 RemovedInSphinx40Warning,
                 {
                     'SingleFileHTMLBuilder':
                         'sphinx.builders.singlehtml.SingleFileHTMLBuilder',
                 })


def setup(app: Sphinx) -> Dict[str, Any]:
@@ -29,27 +29,39 @@ class RemovedInSphinx60Warning(PendingDeprecationWarning):
RemovedInNextVersionWarning = RemovedInSphinx50Warning


def deprecated_alias(modname: str, objects: Dict, warning: "Type[Warning]") -> None:
def deprecated_alias(modname: str, objects: Dict[str, object],
                     warning: "Type[Warning]", names: Dict[str, str] = None) -> None:
    module = import_module(modname)
    sys.modules[modname] = _ModuleWrapper(module, modname, objects, warning)  # type: ignore
    sys.modules[modname] = _ModuleWrapper(  # type: ignore
        module, modname, objects, warning, names)


class _ModuleWrapper:
    def __init__(self, module: Any, modname: str, objects: Dict, warning: "Type[Warning]"
                 ) -> None:
    def __init__(self, module: Any, modname: str,
                 objects: Dict[str, object],
                 warning: "Type[Warning]",
                 names: Dict[str, str]) -> None:
        self._module = module
        self._modname = modname
        self._objects = objects
        self._warning = warning
        self._names = names

    def __getattr__(self, name: str) -> Any:
        if name in self._objects:
            warnings.warn("%s.%s is deprecated. Check CHANGES for Sphinx "
                          "API modifications." % (self._modname, name),
                          self._warning, stacklevel=3)
            return self._objects[name]
        if name not in self._objects:
            return getattr(self._module, name)

        return getattr(self._module, name)
        canonical_name = self._names.get(name, None)
        if canonical_name is not None:
            warnings.warn(
                "The alias '{}.{}' is deprecated, use '{}' instead. Check CHANGES for "
                "Sphinx API modifications.".format(self._modname, name, canonical_name),
                self._warning, stacklevel=3)
        else:
            warnings.warn("{}.{} is deprecated. Check CHANGES for Sphinx "
                          "API modifications.".format(self._modname, name),
                          self._warning, stacklevel=3)
        return self._objects[name]


class DeprecatedDict(dict):
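For orientation, a minimal sketch of the extended call; the module and class names here are hypothetical, only the ``deprecated_alias()`` signature and warning behaviour come from the code above:

.. code-block:: python

   # Hypothetical module names; only the deprecated_alias() signature above is real.
   from sphinx.deprecation import RemovedInSphinx40Warning, deprecated_alias
   from new_home import NewThing  # hypothetical new location of the class

   deprecated_alias('old_home',                         # old module path to wrap
                    {'NewThing': NewThing},             # objects still reachable there
                    RemovedInSphinx40Warning,
                    {'NewThing': 'new_home.NewThing'})  # canonical names for the warning

Accessing ``old_home.NewThing`` afterwards still works, but now warns and names ``new_home.NewThing`` as the replacement.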
@@ -268,7 +268,10 @@ deprecated_alias('sphinx.directives',
                 {
                     'DescDirective': ObjectDescription,
                 },
                 RemovedInSphinx50Warning)
                 RemovedInSphinx50Warning,
                 {
                     'DescDirective': 'sphinx.directives.ObjectDescription',
                 })


def setup(app: "Sphinx") -> Dict[str, Any]:
@ -22,6 +22,7 @@ from sphinx import addnodes
|
||||
from sphinx.addnodes import pending_xref
|
||||
from sphinx.application import Sphinx
|
||||
from sphinx.builders import Builder
|
||||
from sphinx.deprecation import RemovedInSphinx50Warning
|
||||
from sphinx.directives import ObjectDescription
|
||||
from sphinx.domains import Domain, ObjType
|
||||
from sphinx.environment import BuildEnvironment
|
||||
@ -111,6 +112,9 @@ class ASTIdentifier(ASTBaseBase):
|
||||
assert len(identifier) != 0
|
||||
self.identifier = identifier
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
return type(other) is ASTIdentifier and self.identifier == other.identifier
|
||||
|
||||
def is_anon(self) -> bool:
|
||||
return self.identifier[0] == '@'
|
||||
|
||||
@ -1335,6 +1339,10 @@ class ASTDeclaration(ASTBaseBase):
|
||||
# set by CObject._add_enumerator_to_parent
|
||||
self.enumeratorScopedSymbol = None # type: Symbol
|
||||
|
||||
def clone(self) -> "ASTDeclaration":
|
||||
return ASTDeclaration(self.objectType, self.directiveType,
|
||||
self.declaration.clone(), self.semicolon)
|
||||
|
||||
@property
|
||||
def name(self) -> ASTNestedName:
|
||||
return self.declaration.name
|
||||
@ -1424,6 +1432,16 @@ class Symbol:
|
||||
debug_lookup = False
|
||||
debug_show_tree = False
|
||||
|
||||
def __copy__(self):
|
||||
assert False # shouldn't happen
|
||||
|
||||
def __deepcopy__(self, memo):
|
||||
if self.parent:
|
||||
assert False # shouldn't happen
|
||||
else:
|
||||
# the domain base class makes a copy of the initial data, which is fine
|
||||
return Symbol(None, None, None, None)
|
||||
|
||||
@staticmethod
|
||||
def debug_print(*args: Any) -> None:
|
||||
print(Symbol.debug_indent_string * Symbol.debug_indent, end="")
|
||||
@ -1512,7 +1530,6 @@ class Symbol:
|
||||
self.parent = None
|
||||
|
||||
def clear_doc(self, docname: str) -> None:
|
||||
newChildren = [] # type: List[Symbol]
|
||||
for sChild in self._children:
|
||||
sChild.clear_doc(docname)
|
||||
if sChild.declaration and sChild.docname == docname:
|
||||
@ -1524,8 +1541,6 @@ class Symbol:
|
||||
sChild.siblingBelow.siblingAbove = sChild.siblingAbove
|
||||
sChild.siblingAbove = None
|
||||
sChild.siblingBelow = None
|
||||
newChildren.append(sChild)
|
||||
self._children = newChildren
|
||||
|
||||
def get_all_symbols(self) -> Iterator["Symbol"]:
|
||||
yield self
|
||||
@ -2937,6 +2952,23 @@ class DefinitionParser(BaseParser):
|
||||
init = ASTInitializer(initVal)
|
||||
return ASTEnumerator(name, init)
|
||||
|
||||
def parse_pre_v3_type_definition(self) -> ASTDeclaration:
|
||||
self.skip_ws()
|
||||
declaration = None # type: Any
|
||||
if self.skip_word('struct'):
|
||||
typ = 'struct'
|
||||
declaration = self._parse_struct()
|
||||
elif self.skip_word('union'):
|
||||
typ = 'union'
|
||||
declaration = self._parse_union()
|
||||
elif self.skip_word('enum'):
|
||||
typ = 'enum'
|
||||
declaration = self._parse_enum()
|
||||
else:
|
||||
self.fail("Could not parse pre-v3 type directive."
|
||||
" Must start with 'struct', 'union', or 'enum'.")
|
||||
return ASTDeclaration(typ, typ, declaration, False)
|
||||
|
||||
def parse_declaration(self, objectType: str, directiveType: str) -> ASTDeclaration:
|
||||
if objectType not in ('function', 'member',
|
||||
'macro', 'struct', 'union', 'enum', 'enumerator', 'type'):
|
||||
@ -3114,6 +3146,9 @@ class CObject(ObjectDescription):
|
||||
def parse_definition(self, parser: DefinitionParser) -> ASTDeclaration:
|
||||
return parser.parse_declaration(self.object_type, self.objtype)
|
||||
|
||||
def parse_pre_v3_type_definition(self, parser: DefinitionParser) -> ASTDeclaration:
|
||||
return parser.parse_pre_v3_type_definition()
|
||||
|
||||
def describe_signature(self, signode: TextElement, ast: Any, options: Dict) -> None:
|
||||
ast.describe_signature(signode, 'lastIsName', self.env, options)
|
||||
|
||||
@ -3135,8 +3170,27 @@ class CObject(ObjectDescription):
|
||||
|
||||
parser = DefinitionParser(sig, location=signode, config=self.env.config)
|
||||
try:
|
||||
ast = self.parse_definition(parser)
|
||||
parser.assert_end()
|
||||
try:
|
||||
ast = self.parse_definition(parser)
|
||||
parser.assert_end()
|
||||
except DefinitionError as eOrig:
|
||||
if not self.env.config['c_allow_pre_v3']:
|
||||
raise
|
||||
if self.objtype != 'type':
|
||||
raise
|
||||
try:
|
||||
ast = self.parse_pre_v3_type_definition(parser)
|
||||
parser.assert_end()
|
||||
except DefinitionError:
|
||||
raise eOrig
|
||||
self.object_type = ast.objectType # type: ignore
|
||||
if self.env.config['c_warn_on_allowed_pre_v3']:
|
||||
msg = "{}: Pre-v3 C type directive '.. c:type:: {}' converted to " \
|
||||
"'.. c:{}:: {}'." \
|
||||
"\nThe original parsing error was:\n{}"
|
||||
msg = msg.format(RemovedInSphinx50Warning.__name__,
|
||||
sig, ast.objectType, ast, eOrig)
|
||||
logger.warning(msg, location=signode)
|
||||
except DefinitionError as e:
|
||||
logger.warning(e, location=signode)
|
||||
# It is easier to assume some phony name than handling the error in
|
||||
@ -3445,6 +3499,39 @@ class CXRefRole(XRefRole):
|
||||
title = title[dot + 1:]
|
||||
return title, target
|
||||
|
||||
def run(self) -> Tuple[List[Node], List[system_message]]:
|
||||
if not self.env.config['c_allow_pre_v3']:
|
||||
return super().run()
|
||||
|
||||
text = self.text.replace('\n', ' ')
|
||||
parser = DefinitionParser(text, location=self.get_source_info(),
|
||||
config=self.env.config)
|
||||
try:
|
||||
parser.parse_xref_object()
|
||||
# it succeeded, so let it through
|
||||
return super().run()
|
||||
except DefinitionError as eOrig:
|
||||
# try as if it was an c:expr
|
||||
parser.pos = 0
|
||||
try:
|
||||
ast = parser.parse_expression()
|
||||
except DefinitionError:
|
||||
# that didn't go well, just default back
|
||||
return super().run()
|
||||
classes = ['xref', 'c', 'c-texpr']
|
||||
parentSymbol = self.env.temp_data.get('cpp:parent_symbol', None)
|
||||
if parentSymbol is None:
|
||||
parentSymbol = self.env.domaindata['c']['root_symbol']
|
||||
signode = nodes.inline(classes=classes)
|
||||
ast.describe_signature(signode, 'markType', self.env, parentSymbol)
|
||||
|
||||
if self.env.config['c_warn_on_allowed_pre_v3']:
|
||||
msg = "{}: Pre-v3 C type role ':c:type:`{}`' converted to ':c:expr:`{}`'."
|
||||
msg += "\nThe original parsing error was:\n{}"
|
||||
msg = msg.format(RemovedInSphinx50Warning.__name__, text, text, eOrig)
|
||||
logger.warning(msg, location=self.get_source_info())
|
||||
return [signode], []
|
||||
|
||||
|
||||
class CExprRole(SphinxRole):
|
||||
def __init__(self, asCode: bool) -> None:
|
||||
@ -3646,6 +3733,9 @@ def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
app.add_config_value("c_paren_attributes", [], 'env')
|
||||
app.add_post_transform(AliasTransform)
|
||||
|
||||
app.add_config_value("c_allow_pre_v3", False, 'env')
|
||||
app.add_config_value("c_warn_on_allowed_pre_v3", True, 'env')
|
||||
|
||||
return {
|
||||
'version': 'builtin',
|
||||
'env_version': 2,
|
||||
|
@@ -141,7 +141,7 @@ T = TypeVar('T')
          visibility storage-class-specifier function-specifier "friend"
          "constexpr" "volatile" "const" trailing-type-specifier
          # where trailing-type-specifier can no be cv-qualifier
          # Inside e.g., template paramters a strict subset is used
          # Inside e.g., template parameters a strict subset is used
          # (see type-specifier-seq)
        trailing-type-specifier ->
          simple-type-specifier ->
@ -3782,6 +3782,16 @@ class Symbol:
|
||||
debug_lookup = False # overridden by the corresponding config value
|
||||
debug_show_tree = False # overridden by the corresponding config value
|
||||
|
||||
def __copy__(self):
|
||||
assert False # shouldn't happen
|
||||
|
||||
def __deepcopy__(self, memo):
|
||||
if self.parent:
|
||||
assert False # shouldn't happen
|
||||
else:
|
||||
# the domain base class makes a copy of the initial data, which is fine
|
||||
return Symbol(None, None, None, None, None, None)
|
||||
|
||||
@staticmethod
|
||||
def debug_print(*args: Any) -> None:
|
||||
print(Symbol.debug_indent_string * Symbol.debug_indent, end="")
|
||||
|
@@ -149,7 +149,7 @@ class JSObject(ObjectDescription):

        :py:class:`JSObject` represents JavaScript language constructs. For
        constructs that are nestable, this method will build up a stack of the
        nesting heirarchy so that it can be later de-nested correctly, in
        nesting hierarchy so that it can be later de-nested correctly, in
        :py:meth:`after_content`.

        For constructs that aren't nestable, the stack is bypassed, and instead
@ -79,18 +79,24 @@ class ModuleEntry(NamedTuple):
|
||||
deprecated: bool
|
||||
|
||||
|
||||
def type_to_xref(text: str) -> addnodes.pending_xref:
|
||||
def type_to_xref(text: str, env: BuildEnvironment = None) -> addnodes.pending_xref:
|
||||
"""Convert a type string to a cross reference node."""
|
||||
if text == 'None':
|
||||
reftype = 'obj'
|
||||
else:
|
||||
reftype = 'class'
|
||||
|
||||
if env:
|
||||
kwargs = {'py:module': env.ref_context.get('py:module'),
|
||||
'py:class': env.ref_context.get('py:class')}
|
||||
else:
|
||||
kwargs = {}
|
||||
|
||||
return pending_xref('', nodes.Text(text),
|
||||
refdomain='py', reftype=reftype, reftarget=text)
|
||||
refdomain='py', reftype=reftype, reftarget=text, **kwargs)
|
||||
|
||||
|
||||
def _parse_annotation(annotation: str) -> List[Node]:
|
||||
def _parse_annotation(annotation: str, env: BuildEnvironment = None) -> List[Node]:
|
||||
"""Parse type annotation."""
|
||||
def unparse(node: ast.AST) -> List[Node]:
|
||||
if isinstance(node, ast.Attribute):
|
||||
@ -132,18 +138,22 @@ def _parse_annotation(annotation: str) -> List[Node]:
|
||||
else:
|
||||
raise SyntaxError # unsupported syntax
|
||||
|
||||
if env is None:
|
||||
warnings.warn("The env parameter for _parse_annotation becomes required now.",
|
||||
RemovedInSphinx50Warning, stacklevel=2)
|
||||
|
||||
try:
|
||||
tree = ast_parse(annotation)
|
||||
result = unparse(tree)
|
||||
for i, node in enumerate(result):
|
||||
if isinstance(node, nodes.Text):
|
||||
result[i] = type_to_xref(str(node))
|
||||
result[i] = type_to_xref(str(node), env)
|
||||
return result
|
||||
except SyntaxError:
|
||||
return [type_to_xref(annotation)]
|
||||
return [type_to_xref(annotation, env)]
|
||||
|
||||
|
||||
def _parse_arglist(arglist: str) -> addnodes.desc_parameterlist:
|
||||
def _parse_arglist(arglist: str, env: BuildEnvironment = None) -> addnodes.desc_parameterlist:
|
||||
"""Parse a list of arguments using AST parser"""
|
||||
params = addnodes.desc_parameterlist(arglist)
|
||||
sig = signature_from_str('(%s)' % arglist)
|
||||
@ -169,7 +179,7 @@ def _parse_arglist(arglist: str) -> addnodes.desc_parameterlist:
|
||||
node += addnodes.desc_sig_name('', param.name)
|
||||
|
||||
if param.annotation is not param.empty:
|
||||
children = _parse_annotation(param.annotation)
|
||||
children = _parse_annotation(param.annotation, env)
|
||||
node += addnodes.desc_sig_punctuation('', ':')
|
||||
node += nodes.Text(' ')
|
||||
node += addnodes.desc_sig_name('', '', *children) # type: ignore
|
||||
@ -418,7 +428,7 @@ class PyObject(ObjectDescription):
|
||||
signode += addnodes.desc_name(name, name)
|
||||
if arglist:
|
||||
try:
|
||||
signode += _parse_arglist(arglist)
|
||||
signode += _parse_arglist(arglist, self.env)
|
||||
except SyntaxError:
|
||||
# fallback to parse arglist original parser.
|
||||
# it supports to represent optional arguments (ex. "func(foo [, bar])")
|
||||
@ -433,7 +443,7 @@ class PyObject(ObjectDescription):
|
||||
signode += addnodes.desc_parameterlist()
|
||||
|
||||
if retann:
|
||||
children = _parse_annotation(retann)
|
||||
children = _parse_annotation(retann, self.env)
|
||||
signode += addnodes.desc_returns(retann, '', *children)
|
||||
|
||||
anno = self.options.get('annotation')
|
||||
@@ -478,7 +488,7 @@ class PyObject(ObjectDescription):

        :py:class:`PyObject` represents Python language constructs. For
        constructs that are nestable, such as a Python classes, this method will
        build up a stack of the nesting heirarchy so that it can be later
        build up a stack of the nesting hierarchy so that it can be later
        de-nested correctly, in :py:meth:`after_content`.

        For constructs that aren't nestable, the stack is bypassed, and instead
|
||||
|
||||
typ = self.options.get('type')
|
||||
if typ:
|
||||
annotations = _parse_annotation(typ)
|
||||
annotations = _parse_annotation(typ, self.env)
|
||||
signode += addnodes.desc_annotation(typ, '', nodes.Text(': '), *annotations)
|
||||
|
||||
value = self.options.get('value')
|
||||
@ -761,7 +771,7 @@ class PyAttribute(PyObject):
|
||||
|
||||
typ = self.options.get('type')
|
||||
if typ:
|
||||
annotations = _parse_annotation(typ)
|
||||
annotations = _parse_annotation(typ, self.env)
|
||||
signode += addnodes.desc_annotation(typ, '', nodes.Text(': '), *annotations)
|
||||
|
||||
value = self.options.get('value')
|
||||
|
@ -370,11 +370,11 @@ class BuildEnvironment:
|
||||
# add catalog mo file dependency
|
||||
repo = CatalogRepository(self.srcdir, self.config.locale_dirs,
|
||||
self.config.language, self.config.source_encoding)
|
||||
mo_paths = {c.domain: c.mo_path for c in repo.catalogs}
|
||||
for docname in self.found_docs:
|
||||
domain = docname_to_domain(docname, self.config.gettext_compact)
|
||||
for catalog in repo.catalogs:
|
||||
if catalog.domain == domain:
|
||||
self.dependencies[docname].add(catalog.mo_path)
|
||||
if domain in mo_paths:
|
||||
self.dependencies[docname].add(mo_paths[domain])
|
||||
except OSError as exc:
|
||||
raise DocumentError(__('Failed to scan documents in %s: %r') %
|
||||
(self.srcdir, exc)) from exc
|
||||
|
@ -98,9 +98,8 @@ class IndexEntries:
|
||||
for subentry in indexentry[1].values():
|
||||
subentry[0].sort(key=keyfunc0) # type: ignore
|
||||
|
||||
# sort the index entries; put all symbols at the front, even those
|
||||
# following the letters in ASCII, this is where the chr(127) comes from
|
||||
def keyfunc(entry: Tuple[str, List]) -> Tuple[str, str]:
|
||||
# sort the index entries
|
||||
def keyfunc(entry: Tuple[str, List]) -> Tuple[Tuple[int, str], str]:
|
||||
key, (void, void, category_key) = entry
|
||||
if category_key:
|
||||
# using specified category key to sort
|
||||
@ -108,11 +107,16 @@ class IndexEntries:
|
||||
lckey = unicodedata.normalize('NFD', key.lower())
|
||||
if lckey.startswith('\N{RIGHT-TO-LEFT MARK}'):
|
||||
lckey = lckey[1:]
|
||||
|
||||
if lckey[0:1].isalpha() or lckey.startswith('_'):
|
||||
lckey = chr(127) + lckey
|
||||
# put non-symbol characters at the folloing group (1)
|
||||
sortkey = (1, lckey)
|
||||
else:
|
||||
# put symbols at the front of the index (0)
|
||||
sortkey = (0, lckey)
|
||||
# ensure a determinstic order *within* letters by also sorting on
|
||||
# the entry itself
|
||||
return (lckey, entry[0])
|
||||
return (sortkey, entry[0])
|
||||
newlist = sorted(new.items(), key=keyfunc)
|
||||
|
||||
if group_entries:
|
||||
|
@@ -107,7 +107,7 @@ class EventManager:
                raise
            except Exception as exc:
                raise ExtensionError(__("Handler %r for event %r threw an exception") %
                                     (listener.handler, name)) from exc
                                     (listener.handler, name), exc) from exc
        return results

    def emit_firstresult(self, name: str, *args: Any,
@ -34,7 +34,9 @@ from sphinx.pycode import ModuleAnalyzer, PycodeError
|
||||
from sphinx.util import inspect
|
||||
from sphinx.util import logging
|
||||
from sphinx.util.docstrings import extract_metadata, prepare_docstring
|
||||
from sphinx.util.inspect import getdoc, object_description, safe_getattr, stringify_signature
|
||||
from sphinx.util.inspect import (
|
||||
evaluate_signature, getdoc, object_description, safe_getattr, stringify_signature
|
||||
)
|
||||
from sphinx.util.typing import stringify as stringify_typehint
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@ -333,7 +335,7 @@ class Documenter:
|
||||
('.' + '.'.join(self.objpath) if self.objpath else '')
|
||||
return True
|
||||
|
||||
def import_object(self) -> bool:
|
||||
def import_object(self, raiseerror: bool = False) -> bool:
|
||||
"""Import the object given by *self.modname* and *self.objpath* and set
|
||||
it as *self.object*.
|
||||
|
||||
@ -347,9 +349,12 @@ class Documenter:
|
||||
self.module, self.parent, self.object_name, self.object = ret
|
||||
return True
|
||||
except ImportError as exc:
|
||||
logger.warning(exc.args[0], type='autodoc', subtype='import_object')
|
||||
self.env.note_reread()
|
||||
return False
|
||||
if raiseerror:
|
||||
raise
|
||||
else:
|
||||
logger.warning(exc.args[0], type='autodoc', subtype='import_object')
|
||||
self.env.note_reread()
|
||||
return False
|
||||
|
||||
def get_real_modname(self) -> str:
|
||||
"""Get the real module name of an object to document.
|
||||
@ -619,6 +624,10 @@ class Documenter:
|
||||
if safe_getattr(member, '__sphinx_mock__', False):
|
||||
# mocked module or object
|
||||
pass
|
||||
elif (self.options.exclude_members not in (None, ALL) and
|
||||
membername in self.options.exclude_members):
|
||||
# remove members given by exclude-members
|
||||
keep = False
|
||||
elif want_all and membername.startswith('__') and \
|
||||
membername.endswith('__') and len(membername) > 4:
|
||||
# special __methods__
|
||||
@ -688,16 +697,6 @@ class Documenter:
|
||||
# find out which members are documentable
|
||||
members_check_module, members = self.get_object_members(want_all)
|
||||
|
||||
# remove members given by exclude-members
|
||||
if self.options.exclude_members:
|
||||
members = [
|
||||
(membername, member) for (membername, member) in members
|
||||
if (
|
||||
self.options.exclude_members is ALL or
|
||||
membername not in self.options.exclude_members
|
||||
)
|
||||
]
|
||||
|
||||
# document non-skipped members
|
||||
memberdocumenters = [] # type: List[Tuple[Documenter, bool]]
|
||||
for (mname, member, isattr) in self.filter_members(members, want_all):
|
||||
@ -885,7 +884,7 @@ class ModuleDocumenter(Documenter):
|
||||
type='autodoc')
|
||||
return ret
|
||||
|
||||
def import_object(self) -> Any:
|
||||
def import_object(self, raiseerror: bool = False) -> bool:
|
||||
def is_valid_module_all(__all__: Any) -> bool:
|
||||
"""Check the given *__all__* is valid for a module."""
|
||||
if (isinstance(__all__, (list, tuple)) and
|
||||
@ -894,7 +893,7 @@ class ModuleDocumenter(Documenter):
|
||||
else:
|
||||
return False
|
||||
|
||||
ret = super().import_object()
|
||||
ret = super().import_object(raiseerror)
|
||||
|
||||
if not self.options.ignore_module_all:
|
||||
__all__ = getattr(self.object, '__all__', None)
|
||||
@ -1190,7 +1189,9 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
|
||||
documenter.objpath = [None]
|
||||
sigs.append(documenter.format_signature())
|
||||
if overloaded:
|
||||
__globals__ = safe_getattr(self.object, '__globals__', {})
|
||||
for overload in self.analyzer.overloads.get('.'.join(self.objpath)):
|
||||
overload = evaluate_signature(overload, __globals__)
|
||||
sig = stringify_signature(overload, **kwargs)
|
||||
sigs.append(sig)
|
||||
|
||||
@ -1279,8 +1280,8 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
|
||||
) -> bool:
|
||||
return isinstance(member, type)
|
||||
|
||||
def import_object(self) -> Any:
|
||||
ret = super().import_object()
|
||||
def import_object(self, raiseerror: bool = False) -> bool:
|
||||
ret = super().import_object(raiseerror)
|
||||
# if the class is documented under another name, document it
|
||||
# as data/attribute
|
||||
if ret:
|
||||
@ -1389,7 +1390,11 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
|
||||
sigs = []
|
||||
if overloaded:
|
||||
# Use signatures for overloaded methods instead of the implementation method.
|
||||
method = safe_getattr(self._signature_class, self._signature_method_name, None)
|
||||
__globals__ = safe_getattr(method, '__globals__', {})
|
||||
for overload in self.analyzer.overloads.get(qualname):
|
||||
overload = evaluate_signature(overload, __globals__)
|
||||
|
||||
parameters = list(overload.parameters.values())
|
||||
overload = overload.replace(parameters=parameters[1:],
|
||||
return_annotation=Parameter.empty)
|
||||
@ -1586,7 +1591,7 @@ class DataDeclarationDocumenter(DataDocumenter):
|
||||
isattr and
|
||||
member is INSTANCEATTR)
|
||||
|
||||
def import_object(self) -> bool:
|
||||
def import_object(self, raiseerror: bool = False) -> bool:
|
||||
"""Never import anything."""
|
||||
# disguise as a data
|
||||
self.objtype = 'data'
|
||||
@ -1685,8 +1690,8 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
|
||||
return inspect.isroutine(member) and \
|
||||
not isinstance(parent, ModuleDocumenter)
|
||||
|
||||
def import_object(self) -> Any:
|
||||
ret = super().import_object()
|
||||
def import_object(self, raiseerror: bool = False) -> bool:
|
||||
ret = super().import_object(raiseerror)
|
||||
if not ret:
|
||||
return ret
|
||||
|
||||
@ -1778,7 +1783,9 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
|
||||
documenter.objpath = [None]
|
||||
sigs.append(documenter.format_signature())
|
||||
if overloaded:
|
||||
__globals__ = safe_getattr(self.object, '__globals__', {})
|
||||
for overload in self.analyzer.overloads.get('.'.join(self.objpath)):
|
||||
overload = evaluate_signature(overload, __globals__)
|
||||
if not inspect.isstaticmethod(self.object, cls=self.parent,
|
||||
name=self.object_name):
|
||||
parameters = list(overload.parameters.values())
|
||||
@ -1851,15 +1858,42 @@ class AttributeDocumenter(DocstringStripSignatureMixin, ClassLevelDocumenter):
|
||||
def document_members(self, all_members: bool = False) -> None:
|
||||
pass
|
||||
|
||||
def import_object(self) -> Any:
|
||||
ret = super().import_object()
|
||||
if inspect.isenumattribute(self.object):
|
||||
self.object = self.object.value
|
||||
if inspect.isattributedescriptor(self.object):
|
||||
self._datadescriptor = True
|
||||
else:
|
||||
# if it's not a data descriptor
|
||||
self._datadescriptor = False
|
||||
def isinstanceattribute(self) -> bool:
|
||||
"""Check the subject is an instance attribute."""
|
||||
try:
|
||||
analyzer = ModuleAnalyzer.for_module(self.modname)
|
||||
attr_docs = analyzer.find_attr_docs()
|
||||
if self.objpath:
|
||||
key = ('.'.join(self.objpath[:-1]), self.objpath[-1])
|
||||
if key in attr_docs:
|
||||
return True
|
||||
|
||||
return False
|
||||
except PycodeError:
|
||||
return False
|
||||
|
||||
def import_object(self, raiseerror: bool = False) -> bool:
|
||||
try:
|
||||
ret = super().import_object(raiseerror=True)
|
||||
if inspect.isenumattribute(self.object):
|
||||
self.object = self.object.value
|
||||
if inspect.isattributedescriptor(self.object):
|
||||
self._datadescriptor = True
|
||||
else:
|
||||
# if it's not a data descriptor
|
||||
self._datadescriptor = False
|
||||
except ImportError as exc:
|
||||
if self.isinstanceattribute():
|
||||
self.object = INSTANCEATTR
|
||||
self._datadescriptor = False
|
||||
ret = True
|
||||
elif raiseerror:
|
||||
raise
|
||||
else:
|
||||
logger.warning(exc.args[0], type='autodoc', subtype='import_object')
|
||||
self.env.note_reread()
|
||||
ret = False
|
||||
|
||||
return ret
|
||||
|
||||
def get_real_modname(self) -> str:
|
||||
@ -1966,7 +2000,7 @@ class InstanceAttributeDocumenter(AttributeDocumenter):
|
||||
isattr and
|
||||
member is INSTANCEATTR)
|
||||
|
||||
def import_object(self) -> bool:
|
||||
def import_object(self, raiseerror: bool = False) -> bool:
|
||||
"""Never import anything."""
|
||||
# disguise as an attribute
|
||||
self.objtype = 'attribute'
|
||||
@ -1997,7 +2031,7 @@ class SlotsAttributeDocumenter(AttributeDocumenter):
|
||||
"""This documents only SLOTSATTR members."""
|
||||
return member is SLOTSATTR
|
||||
|
||||
def import_object(self) -> Any:
|
||||
def import_object(self, raiseerror: bool = False) -> bool:
|
||||
"""Never import anything."""
|
||||
# disguise as an attribute
|
||||
self.objtype = 'attribute'
|
||||
@ -2011,9 +2045,12 @@ class SlotsAttributeDocumenter(AttributeDocumenter):
|
||||
self.module, _, _, self.parent = ret
|
||||
return True
|
||||
except ImportError as exc:
|
||||
logger.warning(exc.args[0], type='autodoc', subtype='import_object')
|
||||
self.env.note_reread()
|
||||
return False
|
||||
if raiseerror:
|
||||
raise
|
||||
else:
|
||||
logger.warning(exc.args[0], type='autodoc', subtype='import_object')
|
||||
self.env.note_reread()
|
||||
return False
|
||||
|
||||
def get_doc(self, ignore: int = None) -> List[List[str]]:
|
||||
"""Decode and return lines of the docstring(s) for the object."""
|
||||
|
@ -11,7 +11,7 @@
|
||||
import importlib
|
||||
import traceback
|
||||
import warnings
|
||||
from typing import Any, Callable, Dict, List, Mapping, NamedTuple, Tuple
|
||||
from typing import Any, Callable, Dict, List, Mapping, NamedTuple, Optional, Tuple
|
||||
|
||||
from sphinx.pycode import ModuleAnalyzer
|
||||
from sphinx.util import logging
|
||||
@ -20,6 +20,36 @@ from sphinx.util.inspect import isclass, isenumclass, safe_getattr
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def mangle(subject: Any, name: str) -> str:
|
||||
"""mangle the given name."""
|
||||
try:
|
||||
if isclass(subject) and name.startswith('__') and not name.endswith('__'):
|
||||
return "_%s%s" % (subject.__name__, name)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
return name
|
||||
|
||||
|
||||
def unmangle(subject: Any, name: str) -> Optional[str]:
|
||||
"""unmangle the given name."""
|
||||
try:
|
||||
if isclass(subject) and not name.endswith('__'):
|
||||
prefix = "_%s__" % subject.__name__
|
||||
if name.startswith(prefix):
|
||||
return name.replace(prefix, "__", 1)
|
||||
else:
|
||||
for cls in subject.__mro__:
|
||||
prefix = "_%s__" % cls.__name__
|
||||
if name.startswith(prefix):
|
||||
# mangled attribute defined in parent class
|
||||
return None
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
return name
|
||||
|
||||
|
||||
def import_module(modname: str, warningiserror: bool = False) -> Any:
|
||||
"""
|
||||
Call importlib.import_module(modname), convert exceptions to ImportError
|
||||
@ -67,7 +97,8 @@ def import_object(modname: str, objpath: List[str], objtype: str = '',
|
||||
for attrname in objpath:
|
||||
parent = obj
|
||||
logger.debug('[autodoc] getattr(_, %r)', attrname)
|
||||
obj = attrgetter(obj, attrname)
|
||||
mangled_name = mangle(obj, attrname)
|
||||
obj = attrgetter(obj, mangled_name)
|
||||
logger.debug('[autodoc] => %r', obj)
|
||||
object_name = attrname
|
||||
return [module, parent, object_name, obj]
|
||||
@ -161,7 +192,8 @@ def get_object_members(subject: Any, objpath: List[str], attrgetter: Callable,
|
||||
try:
|
||||
value = attrgetter(subject, name)
|
||||
directly_defined = name in obj_dict
|
||||
if name not in members:
|
||||
name = unmangle(subject, name)
|
||||
if name and name not in members:
|
||||
members[name] = Attribute(name, directly_defined, value)
|
||||
except AttributeError:
|
||||
continue
|
||||
@ -169,7 +201,8 @@ def get_object_members(subject: Any, objpath: List[str], attrgetter: Callable,
|
||||
# annotation only member (ex. attr: int)
|
||||
if hasattr(subject, '__annotations__') and isinstance(subject.__annotations__, Mapping):
|
||||
for name in subject.__annotations__:
|
||||
if name not in members:
|
||||
name = unmangle(subject, name)
|
||||
if name and name not in members:
|
||||
members[name] = Attribute(name, True, INSTANCEATTR)
|
||||
|
||||
if analyzer:
|
||||
|
@ -248,7 +248,9 @@ class Autosummary(SphinxDirective):
|
||||
tree_prefix = self.options['toctree'].strip()
|
||||
docnames = []
|
||||
excluded = Matcher(self.config.exclude_patterns)
|
||||
filename_map = self.config.autosummary_filename_map
|
||||
for name, sig, summary, real_name in items:
|
||||
real_name = filename_map.get(real_name, real_name)
|
||||
docname = posixpath.join(tree_prefix, real_name)
|
||||
docname = posixpath.normpath(posixpath.join(dirname, docname))
|
||||
if docname not in self.env.found_docs:
|
||||
@ -731,6 +733,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
app.add_role('autolink', AutoLink())
|
||||
app.connect('builder-inited', process_generate_options)
|
||||
app.add_config_value('autosummary_context', {}, True)
|
||||
app.add_config_value('autosummary_filename_map', {}, 'html')
|
||||
app.add_config_value('autosummary_generate', [], True, [bool])
|
||||
app.add_config_value('autosummary_generate_overwrite', True, False)
|
||||
app.add_config_value('autosummary_mock_imports',
|
||||
|
@ -70,6 +70,7 @@ class DummyApplication:
|
||||
self.warningiserror = False
|
||||
|
||||
self.config.add('autosummary_context', {}, True, None)
|
||||
self.config.add('autosummary_filename_map', {}, True, None)
|
||||
self.config.init_values()
|
||||
|
||||
def emit_firstresult(self, *args: Any) -> None:
|
||||
@ -375,6 +376,11 @@ def generate_autosummary_docs(sources: List[str], output_dir: str = None,
|
||||
# keep track of new files
|
||||
new_files = []
|
||||
|
||||
if app:
|
||||
filename_map = app.config.autosummary_filename_map
|
||||
else:
|
||||
filename_map = {}
|
||||
|
||||
# write
|
||||
for entry in sorted(set(items), key=str):
|
||||
if entry.path is None:
|
||||
@ -400,7 +406,7 @@ def generate_autosummary_docs(sources: List[str], output_dir: str = None,
|
||||
imported_members, app, entry.recursive, context,
|
||||
modname, qualname)
|
||||
|
||||
filename = os.path.join(path, name + suffix)
|
||||
filename = os.path.join(path, filename_map.get(name, name) + suffix)
|
||||
if os.path.isfile(filename):
|
||||
with open(filename, encoding=encoding) as f:
|
||||
old_content = f.read()
|
||||
|
@@ -4,7 +4,7 @@

{% block attributes %}
{% if attributes %}
   .. rubric:: Module Attributes
   .. rubric:: {{ _('Module Attributes') }}

   .. autosummary::
   {% for item in attributes %}
@@ -62,7 +62,7 @@ def process_ifconfig_nodes(app: Sphinx, doctree: nodes.document, docname: str) -
            # handle exceptions in a clean fashion
            from traceback import format_exception_only
            msg = ''.join(format_exception_only(err.__class__, err))
            newnode = doctree.reporter.error('Exception occured in '
            newnode = doctree.reporter.error('Exception occurred in '
                                             'ifconfig expression: \n%s' %
                                             msg, base_node=node)
            node.replace_self(newnode)
@ -10,6 +10,7 @@
|
||||
|
||||
import re
|
||||
import tokenize
|
||||
from collections import OrderedDict
|
||||
from importlib import import_module
|
||||
from inspect import Signature
|
||||
from io import StringIO
|
||||
@ -145,7 +146,7 @@ class ModuleAnalyzer:
|
||||
parser = Parser(self.code)
|
||||
parser.parse()
|
||||
|
||||
self.attr_docs = {}
|
||||
self.attr_docs = OrderedDict()
|
||||
for (scope, comment) in parser.comments.items():
|
||||
if comment:
|
||||
self.attr_docs[scope] = comment.splitlines() + ['']
|
||||
|
@ -12,6 +12,7 @@ import itertools
|
||||
import re
|
||||
import sys
|
||||
import tokenize
|
||||
from collections import OrderedDict
|
||||
from inspect import Signature
|
||||
from token import NAME, NEWLINE, INDENT, DEDENT, NUMBER, OP, STRING
|
||||
from tokenize import COMMENT, NL
|
||||
@ -231,7 +232,7 @@ class VariableCommentPicker(ast.NodeVisitor):
|
||||
self.context = [] # type: List[str]
|
||||
self.current_classes = [] # type: List[str]
|
||||
self.current_function = None # type: ast.FunctionDef
|
||||
self.comments = {} # type: Dict[Tuple[str, str], str]
|
||||
self.comments = OrderedDict() # type: Dict[Tuple[str, str], str]
|
||||
self.annotations = {} # type: Dict[Tuple[str, str], str]
|
||||
self.previous = None # type: ast.AST
|
||||
self.deforders = {} # type: Dict[str, int]
|
||||
|
@ -105,7 +105,8 @@ class BuildDoc(Command):
|
||||
self.config_dir = None # type: str
|
||||
self.link_index = False
|
||||
self.copyright = ''
|
||||
self.verbosity = 0
|
||||
# Link verbosity to distutils' (which uses 1 by default).
|
||||
self.verbosity = self.distribution.verbose - 1 # type: ignore
|
||||
self.traceback = False
|
||||
self.nitpicky = False
|
||||
self.keep_going = False
|
||||
|
@ -35,7 +35,7 @@ Submodules
|
||||
----------
|
||||
{% if separatemodules %}
|
||||
{{ toctree(submodules) }}
|
||||
{%- else %}
|
||||
{% else %}
|
||||
{%- for submodule in submodules %}
|
||||
{% if show_headings %}
|
||||
{{- [submodule, "module"] | join(" ") | e | heading(2) }}
|
||||
@ -43,7 +43,7 @@ Submodules
|
||||
{{ automodule(submodule, automodule_options) }}
|
||||
{% endfor %}
|
||||
{%- endif %}
|
||||
{% endif %}
|
||||
{%- endif %}
|
||||
|
||||
{%- if not modulefirst and not is_namespace %}
|
||||
Module contents
|
||||
|
@@ -8,4 +8,4 @@
    :license: BSD, see LICENSE for details.
#}
<h3><a href="{{ pathto(master_doc)|e }}">{{ _('Table of Contents') }}</a></h3>
{{ toctree(includehidden=theme_globaltoc_includehidden, collapse=theme_globaltoc_collapse) }}
{{ toctree(includehidden=theme_globaltoc_includehidden, collapse=theme_globaltoc_collapse, maxdepth=theme_globaltoc_maxdepth) }}
@@ -12,3 +12,4 @@ body_max_width = 800
navigation_with_keys = False
globaltoc_collapse = true
globaltoc_includehidden = false
globaltoc_maxdepth =
@@ -1035,7 +1035,7 @@ domReady(function enableCssMediaQueries() {
        var vpw = cssHelper.getViewportWidth();
        var vph = cssHelper.getViewportHeight();
        // check whether vp size has really changed, because IE also triggers resize event when body size changes
        // 20px allowance to accomodate short appearance of scrollbars in IE in some cases
        // 20px allowance to accommodate short appearance of scrollbars in IE in some cases
        if (Math.abs(vpw - cvpw) > scrollbarWidth || Math.abs(vph - cvph) > scrollbarWidth) {
            cvpw = vpw;
            cvph = vph;
@ -36,6 +36,13 @@ if TYPE_CHECKING:
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# The attributes not copied to the translated node
|
||||
#
|
||||
# * refexplict: For allow to give (or not to give) an explicit title
|
||||
# to the pending_xref on translation
|
||||
EXCLUDED_PENDING_XREF_ATTRIBUTES = ('refexplicit',)
|
||||
|
||||
|
||||
N = TypeVar('N', bound=nodes.Node)
|
||||
|
||||
|
||||
@ -429,11 +436,8 @@ class Locale(SphinxTransform):
|
||||
# Copy attributes to keep original node behavior. Especially
|
||||
# copying 'reftarget', 'py:module', 'py:class' are needed.
|
||||
for k, v in xref_reftarget_map.get(key, {}).items():
|
||||
# Note: This implementation overwrite all attributes.
|
||||
# if some attributes `k` should not be overwritten,
|
||||
# you should provide exclude list as:
|
||||
# `if k not in EXCLUDE_LIST: new[k] = v`
|
||||
new[k] = v
|
||||
if k not in EXCLUDED_PENDING_XREF_ATTRIBUTES:
|
||||
new[k] = v
|
||||
|
||||
# update leaves
|
||||
for child in patch.children:
|
||||
|
@@ -258,7 +258,7 @@ def get_full_modname(modname: str, attribute: str) -> str:
        return None
    module = import_module(modname)

    # Allow an attribute to have multiple parts and incidentially allow
    # Allow an attribute to have multiple parts and incidentally allow
    # repeated .s in the attribute.
    value = module
    for attr in attribute.split('.'):
@ -103,7 +103,6 @@ class ASTBaseBase:
|
||||
__hash__ = None # type: Callable[[], int]
|
||||
|
||||
def clone(self) -> Any:
|
||||
"""Clone a definition expression node."""
|
||||
return deepcopy(self)
|
||||
|
||||
def _stringify(self, transform: StringifyTransform) -> str:
|
||||
|
@@ -20,7 +20,7 @@ def register_application_for_autosummary(app: "Sphinx") -> None:
    """Register application object to autosummary module.

    Since Sphinx-1.7, documenters and attrgetters are registered into
    applicaiton object. As a result, the arguments of
    application object. As a result, the arguments of
    ``get_documenter()`` has been changed. To keep compatibility,
    this handler registers application object to the module.
    """
@@ -57,7 +57,7 @@ def prepare_docstring(s: str, ignore: int = None, tabsize: int = 8) -> List[str]
    if ignore is None:
        ignore = 1
    else:
        warnings.warn("The 'ignore' argument to parepare_docstring() is deprecated.",
        warnings.warn("The 'ignore' argument to prepare_docstring() is deprecated.",
                      RemovedInSphinx50Warning, stacklevel=2)

    lines = s.expandtabs(tabsize).splitlines()
@@ -495,7 +495,7 @@ def new_document(source_path: str, settings: Any = None) -> nodes.document:
    """Return a new empty document object. This is an alternative of docutils'.

    This is a simple wrapper for ``docutils.utils.new_document()``. It
    caches the result of docutils' and use it on second call for instanciation.
    caches the result of docutils' and use it on second call for instantiation.
    This makes an instantiation of document nodes much faster.
    """
    global __document_cache__
@ -22,13 +22,14 @@ from inspect import ( # NOQA
|
||||
Parameter, isclass, ismethod, ismethoddescriptor, ismodule
|
||||
)
|
||||
from io import StringIO
|
||||
from typing import Any, Callable
|
||||
from typing import Any, Callable, Dict
|
||||
from typing import cast
|
||||
|
||||
from sphinx.deprecation import RemovedInSphinx50Warning
|
||||
from sphinx.pycode.ast import ast # for py36-37
|
||||
from sphinx.pycode.ast import unparse as ast_unparse
|
||||
from sphinx.util import logging
|
||||
from sphinx.util.typing import ForwardRef
|
||||
from sphinx.util.typing import stringify as stringify_annotation
|
||||
|
||||
if sys.version_info > (3, 7):
|
||||
@ -467,7 +468,53 @@ def signature(subject: Callable, bound_method: bool = False, follow_wrapped: boo
            if len(parameters) > 0:
                parameters.pop(0)

    return inspect.Signature(parameters, return_annotation=return_annotation)
    # To allow to create signature object correctly for pure python functions,
    # pass an internal parameter __validate_parameters__=False to Signature
    #
    # For example, this helps a function having a default value `inspect._empty`.
    # refs: https://github.com/sphinx-doc/sphinx/issues/7935
    return inspect.Signature(parameters, return_annotation=return_annotation,  # type: ignore
                             __validate_parameters__=False)


def evaluate_signature(sig: inspect.Signature, globalns: Dict = None, localns: Dict = None
                       ) -> inspect.Signature:
    """Evaluate unresolved type annotations in a signature object."""
    def evaluate(annotation: Any, globalns: Dict, localns: Dict) -> Any:
        """Evaluate unresolved type annotation."""
        try:
            if isinstance(annotation, str):
                ref = ForwardRef(annotation, True)
                annotation = ref._evaluate(globalns, localns)

                if isinstance(annotation, ForwardRef):
                    annotation = annotation._evaluate(globalns, localns)
                elif isinstance(annotation, str):
                    # might be a ForwardRef'ed annotation in overloaded functions
                    ref = ForwardRef(annotation, True)
                    annotation = ref._evaluate(globalns, localns)
        except (NameError, TypeError):
            # failed to evaluate type. skipped.
            pass

        return annotation

    if globalns is None:
        globalns = {}
    if localns is None:
        localns = globalns

    parameters = list(sig.parameters.values())
    for i, param in enumerate(parameters):
        if param.annotation:
            annotation = evaluate(param.annotation, globalns, localns)
            parameters[i] = param.replace(annotation=annotation)

    return_annotation = sig.return_annotation
    if return_annotation:
        return_annotation = evaluate(return_annotation, globalns, localns)

    return sig.replace(parameters=parameters, return_annotation=return_annotation)


def stringify_signature(sig: inspect.Signature, show_annotation: bool = True,
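A quick, self-contained way to see what the new ``evaluate_signature()`` does, assuming this hunk lands in ``sphinx.util.inspect`` as the import hunk above suggests (the sample function ``add`` is made up for illustration; on interpreters whose ``ForwardRef._evaluate`` takes additional arguments the lookup is skipped by the ``except (NameError, TypeError)`` clause and the strings are left as-is)::

    import inspect

    from sphinx.util.inspect import evaluate_signature

    def add(x: "int", y: "int") -> "int":
        return x + y

    sig = inspect.signature(add)
    print(sig)                                          # (x: 'int', y: 'int') -> 'int'
    print(evaluate_signature(sig, globalns=globals()))  # (x: int, y: int) -> int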
@ -239,6 +239,7 @@ def is_translatable(node: Node) -> bool:
LITERAL_TYPE_NODES = (
    nodes.literal_block,
    nodes.doctest_block,
    nodes.math_block,
    nodes.raw,
)
IMAGE_TYPE_NODES = (
@ -124,4 +124,4 @@ def head(url: str, **kwargs: Any) -> requests.Response:
    headers.setdefault('User-Agent', useragent_header[0][1])

    with ignore_insecure_warning(**kwargs):
        return requests.get(url, **kwargs)
        return requests.head(url, **kwargs)
@ -10,12 +10,27 @@
import sys
import typing
from typing import Any, Callable, Dict, List, Tuple, TypeVar, Union
from typing import Any, Callable, Dict, Generator, List, Tuple, TypeVar, Union

from docutils import nodes
from docutils.parsers.rst.states import Inliner


if sys.version_info > (3, 7):
    from typing import ForwardRef
else:
    from typing import _ForwardRef  # type: ignore

    class ForwardRef:
        """A pseudo ForwardRef class for py35 and py36."""
        def __init__(self, arg: Any, is_argument: bool = True) -> None:
            self.arg = arg

        def _evaluate(self, globalns: Dict, localns: Dict) -> Any:
            ref = _ForwardRef(self.arg)
            return ref._eval_type(globalns, localns)


# An entry of Directive.option_spec
DirectiveOption = Callable[[str], Any]
@ -147,6 +162,8 @@ def _stringify_py36(annotation: Any) -> str:
        params = None
        if annotation.__args__ is None or len(annotation.__args__) <= 2:  # type: ignore  # NOQA
            params = annotation.__args__  # type: ignore
        elif annotation.__origin__ == Generator:  # type: ignore
            params = annotation.__args__  # type: ignore
        else:  # typing.Callable
            args = ', '.join(stringify(arg) for arg
                             in annotation.__args__[:-1])  # type: ignore
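The effect of the new ``Generator`` branch (the fix for #7983, where py36 fell through to the ``typing.Callable`` handling) can be checked with the public ``stringify()`` helper; this mirrors the assertion added to the typing tests later in this commit::

    from typing import Generator

    from sphinx.util.typing import stringify

    # With the branch above, py36 renders Generator the same way newer interpreters do.
    assert stringify(Generator[None, None, None]) == "Generator[None, None, None]"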
@ -116,7 +116,7 @@ def merge_doctrees(old: Node, new: Node, condition: Any) -> Iterator[Node]:


def get_ratio(old: str, new: str) -> float:
    """Return a "similiarity ratio" (in percent) representing the similarity
    """Return a "similarity ratio" (in percent) representing the similarity
    between the two strings where 0 is equal and anything above less than equal.
    """
    if not all([old, new]):
@ -73,7 +73,7 @@ class Table:

    Cell spanning on multiple rows or multiple columns (having a
    colspan or rowspan greater than one) are automatically referenced
    by all the table cells they covers. This is a usefull
    by all the table cells they covers. This is a useful
    representation as we can simply check ``if self[x, y] is self[x,
    y+1]`` to recognize a rowspan.
tests/roots/test-ext-autodoc/target/name_mangling.py (new file, 11 lines)
@ -0,0 +1,11 @@
class Foo:
    #: name of Foo
    __name = None
    __age = None


class Bar(Foo):
    __address = None

    #: a member having mangled-like name
    _Baz__email = None
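For context on what this fixture exercises: Python mangles double-underscore class attributes at compile time, while names that already look mangled are stored untouched, and that is what autodoc's member matching has to account for. A rough standalone illustration (reusing the names from the fixture above)::

    class Foo:
        __name = None        # stored on the class as _Foo__name

    class Bar(Foo):
        __address = None     # stored as _Bar__address
        _Baz__email = None   # already mangled-looking, so Python leaves it as-is

    print('_Foo__name' in vars(Foo))   # True
    print(sorted(k for k in vars(Bar) if 'addr' in k or 'mail' in k))
    # ['_Bar__address', '_Baz__email']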
@ -7,7 +7,7 @@ def sum(x: int, y: int) -> int:
|
||||
|
||||
|
||||
@overload
|
||||
def sum(x: float, y: float) -> float:
|
||||
def sum(x: "float", y: "float") -> "float":
|
||||
...
|
||||
|
||||
|
||||
@ -29,7 +29,7 @@ class Math:
|
||||
...
|
||||
|
||||
@overload
|
||||
def sum(self, x: float, y: float) -> float:
|
||||
def sum(self, x: "float", y: "float") -> "float":
|
||||
...
|
||||
|
||||
@overload
|
||||
@ -49,7 +49,7 @@ class Foo:
|
||||
...
|
||||
|
||||
@overload
|
||||
def __new__(cls, x: str, y: str) -> "Foo":
|
||||
def __new__(cls, x: "str", y: "str") -> "Foo":
|
||||
...
|
||||
|
||||
def __new__(cls, x, y):
|
||||
@ -64,7 +64,7 @@ class Bar:
|
||||
...
|
||||
|
||||
@overload
|
||||
def __init__(cls, x: str, y: str) -> None:
|
||||
def __init__(cls, x: "str", y: "str") -> "None":
|
||||
...
|
||||
|
||||
def __init__(cls, x, y):
|
||||
@ -77,7 +77,7 @@ class Meta(type):
|
||||
...
|
||||
|
||||
@overload
|
||||
def __call__(cls, x: str, y: str) -> Any:
|
||||
def __call__(cls, x: "str", y: "str") -> "Any":
|
||||
...
|
||||
|
||||
def __call__(cls, x, y):
|
||||
|
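The fixture changes above switch the overload variants to string ("forward reference") annotations so that annotation resolution for overloaded callables (#7901) is exercised. The underlying behaviour can be reproduced with the standard library alone; ``sum2`` here is an ad-hoc example, not part of the fixture::

    from typing import get_type_hints

    def sum2(x: "float", y: "float") -> "float":
        return x + y

    # The raw annotations stay as strings until something evaluates them:
    print(sum2.__annotations__)   # {'x': 'float', 'y': 'float', 'return': 'float'}
    print(get_type_hints(sum2))   # {'x': <class 'float'>, 'y': <class 'float'>, 'return': <class 'float'>}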
@ -0,0 +1,21 @@
|
||||
from os import path # NOQA
|
||||
from typing import Union
|
||||
|
||||
|
||||
class Foo:
|
||||
class Bar:
|
||||
pass
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def bar(self):
|
||||
pass
|
||||
|
||||
@property
|
||||
def baz(self):
|
||||
pass
|
||||
|
||||
|
||||
def bar(x: Union[int, str], y: int = 1) -> None:
|
||||
pass
|
tests/roots/test-ext-autosummary-filename-map/conf.py (new file, 11 lines)
@ -0,0 +1,11 @@
import os
import sys

sys.path.insert(0, os.path.abspath('.'))

extensions = ['sphinx.ext.autosummary']
autosummary_generate = True
autosummary_filename_map = {
    "autosummary_dummy_module": "module_mangled",
    "autosummary_dummy_module.bar": "bar"
}
tests/roots/test-ext-autosummary-filename-map/index.rst (new file, 9 lines)
@ -0,0 +1,9 @@

.. autosummary::
   :toctree: generated
   :caption: An autosummary

   autosummary_dummy_module
   autosummary_dummy_module.Foo
   autosummary_dummy_module.Foo.bar
   autosummary_dummy_module.bar
@ -2,7 +2,16 @@ from os import path # NOQA
|
||||
from typing import Union
|
||||
|
||||
|
||||
#: module variable
|
||||
CONSTANT1 = None
|
||||
CONSTANT2 = None
|
||||
|
||||
|
||||
class Foo:
|
||||
#: class variable
|
||||
CONSTANT3 = None
|
||||
CONSTANT4 = None
|
||||
|
||||
class Bar:
|
||||
pass
|
||||
|
||||
|
@ -14,7 +14,7 @@ same type links
|
||||
|
||||
link to :term:`Some term` and :term:`Some other term`.
|
||||
|
||||
link to :ref:`i18n-role-xref` and :ref:`same-type-links`.
|
||||
link to :ref:`i18n-role-xref`, :ref:`same-type-links` and :ref:`label <same-type-links>`.
|
||||
|
||||
link to :doc:`index` and :doc:`glossary_terms`.
|
||||
|
||||
|
@ -28,8 +28,8 @@ msgstr "SAME TYPE LINKS"
|
||||
msgid "link to :term:`Some term` and :term:`Some other term`."
|
||||
msgstr "LINK TO :term:`SOME OTHER NEW TERM` AND :term:`SOME NEW TERM`."
|
||||
|
||||
msgid "link to :ref:`i18n-role-xref` and :ref:`same-type-links`."
|
||||
msgstr "LINK TO :ref:`same-type-links` AND :ref:`i18n-role-xref`."
|
||||
msgid "link to :ref:`i18n-role-xref`, :ref:`same-type-links` and :ref:`label <same-type-links>`."
|
||||
msgstr "LINK TO :ref:`LABEL <i18n-role-xref>` AND :ref:`same-type-links` AND :ref:`same-type-links`."
|
||||
|
||||
msgid "link to :doc:`index` and :doc:`glossary_terms`."
|
||||
msgstr "LINK TO :doc:`glossary_terms` AND :doc:`index`."
|
||||
|
@ -69,4 +69,4 @@ subsubsection
|
||||
|
||||
otherdoc
|
||||
|
||||
* Embeded standalone hyperlink reference(refs: #5948): `subsection <section1_>`_.
|
||||
* Embedded standalone hyperlink reference(refs: #5948): `subsection <section1_>`_.
|
||||
|
@ -11,6 +11,8 @@ Some additional anchors to exercise ignore code
|
||||
* `Example Bar invalid <https://www.google.com/#top>`_
|
||||
* `Example anchor invalid <http://www.sphinx-doc.org/en/1.7/intro.html#does-not-exist>`_
|
||||
* `Complete nonsense <https://localhost:7777/doesnotexist>`_
|
||||
* `Example valid local file <conf.py>`_
|
||||
* `Example invalid local file <path/to/notfound>`_
|
||||
|
||||
.. image:: https://www.google.com/image.png
|
||||
.. figure:: https://www.google.com/image2.png
|
||||
|
@ -32,14 +32,11 @@ Contents:
|
||||
Latest reference <http://sphinx-doc.org/latest/>
|
||||
Python <http://python.org/>
|
||||
|
||||
self
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
|
||||
References
|
||||
==========
|
||||
|
@ -16,6 +16,7 @@ Contents:
|
||||
foo
|
||||
bar
|
||||
http://sphinx-doc.org/
|
||||
self
|
||||
|
||||
.. only:: html
|
||||
|
||||
|
@ -387,6 +387,6 @@ def test_run_epubcheck(app):
|
||||
subprocess.run(['java', '-jar', epubcheck, app.outdir / 'SphinxTests.epub'],
|
||||
stdout=PIPE, stderr=PIPE, check=True)
|
||||
except CalledProcessError as exc:
|
||||
print(exc.stdout)
|
||||
print(exc.stderr)
|
||||
print(exc.stdout.decode('utf-8'))
|
||||
print(exc.stderr.decode('utf-8'))
|
||||
assert False, 'epubcheck exited with return code %s' % exc.returncode
|
||||
|
@ -357,7 +357,6 @@ def test_html4_output(app, status, warning):
|
||||
"[@class='reference external']", ''),
|
||||
(".//li/p/a[@href='genindex.html']/span", 'Index'),
|
||||
(".//li/p/a[@href='py-modindex.html']/span", 'Module Index'),
|
||||
(".//li/p/a[@href='search.html']/span", 'Search Page'),
|
||||
# custom sidebar only for contents
|
||||
(".//h4", 'Contents sidebar'),
|
||||
# custom JavaScript
|
||||
|
@ -1470,7 +1470,7 @@ def test_latex_labels(app, status, warning):
|
||||
r'\label{\detokenize{otherdoc:otherdoc}}'
|
||||
r'\label{\detokenize{otherdoc::doc}}' in result)
|
||||
|
||||
# Embeded standalone hyperlink reference (refs: #5948)
|
||||
# Embedded standalone hyperlink reference (refs: #5948)
|
||||
assert result.count(r'\label{\detokenize{index:section1}}') == 1
|
||||
|
||||
|
||||
|
@ -30,7 +30,9 @@ def test_defaults(app, status, warning):
|
||||
# images should fail
|
||||
assert "Not Found for url: https://www.google.com/image.png" in content
|
||||
assert "Not Found for url: https://www.google.com/image2.png" in content
|
||||
assert len(content.splitlines()) == 5
|
||||
# looking for local file should fail
|
||||
assert "[broken] path/to/notfound" in content
|
||||
assert len(content.splitlines()) == 6
|
||||
|
||||
|
||||
@pytest.mark.sphinx('linkcheck', testroot='linkcheck', freshenv=True)
|
||||
@ -47,8 +49,8 @@ def test_defaults_json(app, status, warning):
|
||||
"info"]:
|
||||
assert attr in row
|
||||
|
||||
assert len(content.splitlines()) == 8
|
||||
assert len(rows) == 8
|
||||
assert len(content.splitlines()) == 10
|
||||
assert len(rows) == 10
|
||||
# the output order of the rows is not stable
|
||||
# due to possible variance in network latency
|
||||
rowsby = {row["uri"]:row for row in rows}
|
||||
@ -69,7 +71,7 @@ def test_defaults_json(app, status, warning):
|
||||
assert dnerow['uri'] == 'https://localhost:7777/doesnotexist'
|
||||
assert rowsby['https://www.google.com/image2.png'] == {
|
||||
'filename': 'links.txt',
|
||||
'lineno': 16,
|
||||
'lineno': 18,
|
||||
'status': 'broken',
|
||||
'code': 0,
|
||||
'uri': 'https://www.google.com/image2.png',
|
||||
@ -92,7 +94,8 @@ def test_defaults_json(app, status, warning):
|
||||
'https://localhost:7777/doesnotexist',
|
||||
'http://www.sphinx-doc.org/en/1.7/intro.html#',
|
||||
'https://www.google.com/image.png',
|
||||
'https://www.google.com/image2.png']
|
||||
'https://www.google.com/image2.png',
|
||||
'path/to/notfound']
|
||||
})
|
||||
def test_anchors_ignored(app, status, warning):
|
||||
app.builder.build_all()
|
||||
|
@ -238,18 +238,18 @@ def test_get_full_qualified_name():
|
||||
assert domain.get_full_qualified_name(node) == 'module1.Class.func'
|
||||
|
||||
|
||||
def test_parse_annotation():
|
||||
doctree = _parse_annotation("int")
|
||||
def test_parse_annotation(app):
|
||||
doctree = _parse_annotation("int", app.env)
|
||||
assert_node(doctree, ([pending_xref, "int"],))
|
||||
assert_node(doctree[0], pending_xref, refdomain="py", reftype="class", reftarget="int")
|
||||
|
||||
doctree = _parse_annotation("List[int]")
|
||||
doctree = _parse_annotation("List[int]", app.env)
|
||||
assert_node(doctree, ([pending_xref, "List"],
|
||||
[desc_sig_punctuation, "["],
|
||||
[pending_xref, "int"],
|
||||
[desc_sig_punctuation, "]"]))
|
||||
|
||||
doctree = _parse_annotation("Tuple[int, int]")
|
||||
doctree = _parse_annotation("Tuple[int, int]", app.env)
|
||||
assert_node(doctree, ([pending_xref, "Tuple"],
|
||||
[desc_sig_punctuation, "["],
|
||||
[pending_xref, "int"],
|
||||
@ -257,14 +257,14 @@ def test_parse_annotation():
|
||||
[pending_xref, "int"],
|
||||
[desc_sig_punctuation, "]"]))
|
||||
|
||||
doctree = _parse_annotation("Tuple[()]")
|
||||
doctree = _parse_annotation("Tuple[()]", app.env)
|
||||
assert_node(doctree, ([pending_xref, "Tuple"],
|
||||
[desc_sig_punctuation, "["],
|
||||
[desc_sig_punctuation, "("],
|
||||
[desc_sig_punctuation, ")"],
|
||||
[desc_sig_punctuation, "]"]))
|
||||
|
||||
doctree = _parse_annotation("Callable[[int, int], int]")
|
||||
doctree = _parse_annotation("Callable[[int, int], int]", app.env)
|
||||
assert_node(doctree, ([pending_xref, "Callable"],
|
||||
[desc_sig_punctuation, "["],
|
||||
[desc_sig_punctuation, "["],
|
||||
@ -277,12 +277,11 @@ def test_parse_annotation():
|
||||
[desc_sig_punctuation, "]"]))
|
||||
|
||||
# None type makes an object-reference (not a class reference)
|
||||
doctree = _parse_annotation("None")
|
||||
doctree = _parse_annotation("None", app.env)
|
||||
assert_node(doctree, ([pending_xref, "None"],))
|
||||
assert_node(doctree[0], pending_xref, refdomain="py", reftype="obj", reftarget="None")
|
||||
|
||||
|
||||
|
||||
def test_pyfunction_signature(app):
|
||||
text = ".. py:function:: hello(name: str) -> str"
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
@ -460,14 +459,22 @@ def test_pyobject_prefix(app):
|
||||
|
||||
|
||||
def test_pydata(app):
|
||||
text = ".. py:data:: var\n"
|
||||
text = (".. py:module:: example\n"
|
||||
".. py:data:: var\n"
|
||||
" :type: int\n")
|
||||
domain = app.env.get_domain('py')
|
||||
doctree = restructuredtext.parse(app, text)
|
||||
assert_node(doctree, (addnodes.index,
|
||||
[desc, ([desc_signature, desc_name, "var"],
|
||||
assert_node(doctree, (nodes.target,
|
||||
addnodes.index,
|
||||
addnodes.index,
|
||||
[desc, ([desc_signature, ([desc_addname, "example."],
|
||||
[desc_name, "var"],
|
||||
[desc_annotation, (": ",
|
||||
[pending_xref, "int"])])],
|
||||
[desc_content, ()])]))
|
||||
assert 'var' in domain.objects
|
||||
assert domain.objects['var'] == ('index', 'var', 'data', False)
|
||||
assert_node(doctree[3][0][2][1], pending_xref, **{"py:module": "example"})
|
||||
assert 'example.var' in domain.objects
|
||||
assert domain.objects['example.var'] == ('index', 'example.var', 'data', False)
|
||||
|
||||
|
||||
def test_pyfunction(app):
|
||||
@ -700,6 +707,8 @@ def test_pyattribute(app):
|
||||
[desc_sig_punctuation, "]"])],
|
||||
[desc_annotation, " = ''"])],
|
||||
[desc_content, ()]))
|
||||
assert_node(doctree[1][1][1][0][1][1], pending_xref, **{"py:class": "Class"})
|
||||
assert_node(doctree[1][1][1][0][1][3], pending_xref, **{"py:class": "Class"})
|
||||
assert 'Class.attr' in domain.objects
|
||||
assert domain.objects['Class.attr'] == ('index', 'Class.attr', 'attribute', False)
|
||||
|
||||
|
@ -25,12 +25,14 @@ def test_create_single_index(app):
|
||||
".. index:: ёлка\n"
|
||||
".. index:: תירבע\n"
|
||||
".. index:: 9-symbol\n"
|
||||
".. index:: &-symbol\n")
|
||||
".. index:: &-symbol\n"
|
||||
".. index:: £100\n")
|
||||
restructuredtext.parse(app, text)
|
||||
index = IndexEntries(app.env).create_index(app.builder)
|
||||
assert len(index) == 6
|
||||
assert index[0] == ('Symbols', [('&-symbol', [[('', '#index-9')], [], None]),
|
||||
('9-symbol', [[('', '#index-8')], [], None])])
|
||||
('9-symbol', [[('', '#index-8')], [], None]),
|
||||
('£100', [[('', '#index-10')], [], None])])
|
||||
assert index[1] == ('D', [('docutils', [[('', '#index-0')], [], None])])
|
||||
assert index[2] == ('P', [('pip', [[], [('install', [('', '#index-2')]),
|
||||
('upgrade', [('', '#index-3')])], None]),
|
||||
|
@ -41,7 +41,8 @@ def test_process_doc(app):
|
||||
assert_node(toctree[0][1][0], addnodes.toctree,
|
||||
caption="Table of Contents", glob=False, hidden=False,
|
||||
titlesonly=False, maxdepth=2, numbered=999,
|
||||
entries=[(None, 'foo'), (None, 'bar'), (None, 'http://sphinx-doc.org/')],
|
||||
entries=[(None, 'foo'), (None, 'bar'), (None, 'http://sphinx-doc.org/'),
|
||||
(None, 'self')],
|
||||
includefiles=['foo', 'bar'])
|
||||
|
||||
# only branch
|
||||
@ -219,7 +220,9 @@ def test_get_toctree_for(app):
|
||||
([list_item, ([compact_paragraph, reference, "foo"],
|
||||
bullet_list)],
|
||||
[list_item, compact_paragraph, reference, "bar"],
|
||||
[list_item, compact_paragraph, reference, "http://sphinx-doc.org/"]))
|
||||
[list_item, compact_paragraph, reference, "http://sphinx-doc.org/"],
|
||||
[list_item, compact_paragraph, reference,
|
||||
"Welcome to Sphinx Tests’s documentation!"]))
|
||||
assert_node(toctree[1][0][1],
|
||||
([list_item, compact_paragraph, reference, "quux"],
|
||||
[list_item, compact_paragraph, reference, "foo.1"],
|
||||
@ -231,6 +234,7 @@ def test_get_toctree_for(app):
|
||||
assert_node(toctree[1][0][1][2][0][0], reference, refuri="foo#foo-2", secnumber=[1, 3])
|
||||
assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=[2])
|
||||
assert_node(toctree[1][2][0][0], reference, refuri="http://sphinx-doc.org/")
|
||||
assert_node(toctree[1][3][0][0], reference, refuri="")
|
||||
|
||||
assert_node(toctree[2],
|
||||
[bullet_list, list_item, compact_paragraph, reference, "baz"])
|
||||
@ -255,10 +259,13 @@ def test_get_toctree_for_collapse(app):
|
||||
assert_node(toctree[1],
|
||||
([list_item, compact_paragraph, reference, "foo"],
|
||||
[list_item, compact_paragraph, reference, "bar"],
|
||||
[list_item, compact_paragraph, reference, "http://sphinx-doc.org/"]))
|
||||
[list_item, compact_paragraph, reference, "http://sphinx-doc.org/"],
|
||||
[list_item, compact_paragraph, reference,
|
||||
"Welcome to Sphinx Tests’s documentation!"]))
|
||||
assert_node(toctree[1][0][0][0], reference, refuri="foo", secnumber=[1])
|
||||
assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=[2])
|
||||
assert_node(toctree[1][2][0][0], reference, refuri="http://sphinx-doc.org/")
|
||||
assert_node(toctree[1][3][0][0], reference, refuri="")
|
||||
|
||||
assert_node(toctree[2],
|
||||
[bullet_list, list_item, compact_paragraph, reference, "baz"])
|
||||
@ -285,7 +292,9 @@ def test_get_toctree_for_maxdepth(app):
|
||||
([list_item, ([compact_paragraph, reference, "foo"],
|
||||
bullet_list)],
|
||||
[list_item, compact_paragraph, reference, "bar"],
|
||||
[list_item, compact_paragraph, reference, "http://sphinx-doc.org/"]))
|
||||
[list_item, compact_paragraph, reference, "http://sphinx-doc.org/"],
|
||||
[list_item, compact_paragraph, reference,
|
||||
"Welcome to Sphinx Tests’s documentation!"]))
|
||||
assert_node(toctree[1][0][1],
|
||||
([list_item, compact_paragraph, reference, "quux"],
|
||||
[list_item, ([compact_paragraph, reference, "foo.1"],
|
||||
@ -302,6 +311,7 @@ def test_get_toctree_for_maxdepth(app):
|
||||
assert_node(toctree[1][0][1][2][0][0], reference, refuri="foo#foo-2", secnumber=[1, 3])
|
||||
assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=[2])
|
||||
assert_node(toctree[1][2][0][0], reference, refuri="http://sphinx-doc.org/")
|
||||
assert_node(toctree[1][3][0][0], reference, refuri="")
|
||||
|
||||
assert_node(toctree[2],
|
||||
[bullet_list, list_item, compact_paragraph, reference, "baz"])
|
||||
@ -327,7 +337,9 @@ def test_get_toctree_for_includehidden(app):
|
||||
([list_item, ([compact_paragraph, reference, "foo"],
|
||||
bullet_list)],
|
||||
[list_item, compact_paragraph, reference, "bar"],
|
||||
[list_item, compact_paragraph, reference, "http://sphinx-doc.org/"]))
|
||||
[list_item, compact_paragraph, reference, "http://sphinx-doc.org/"],
|
||||
[list_item, compact_paragraph, reference,
|
||||
"Welcome to Sphinx Tests’s documentation!"]))
|
||||
assert_node(toctree[1][0][1],
|
||||
([list_item, compact_paragraph, reference, "quux"],
|
||||
[list_item, compact_paragraph, reference, "foo.1"],
|
||||
|
@ -121,7 +121,6 @@ def test_pep_0420_enabled_separate(make_app, apidoc):
|
||||
|
||||
with open(outdir / 'a.b.c.rst') as f:
|
||||
rst = f.read()
|
||||
|
||||
assert ".. toctree::\n :maxdepth: 4\n\n a.b.c.d\n" in rst
|
||||
|
||||
with open(outdir / 'a.b.e.rst') as f:
|
||||
@ -509,7 +508,6 @@ def test_package_file(tempdir):
|
||||
" :undoc-members:\n"
|
||||
" :show-inheritance:\n"
|
||||
"\n"
|
||||
"\n"
|
||||
"Module contents\n"
|
||||
"---------------\n"
|
||||
"\n"
|
||||
@ -595,8 +593,7 @@ def test_package_file_module_first(tempdir):
|
||||
".. automodule:: testpkg.example\n"
|
||||
" :members:\n"
|
||||
" :undoc-members:\n"
|
||||
" :show-inheritance:\n"
|
||||
"\n")
|
||||
" :show-inheritance:\n")
|
||||
|
||||
|
||||
def test_package_file_without_submodules(tempdir):
|
||||
@ -639,5 +636,4 @@ def test_namespace_package_file(tempdir):
|
||||
".. automodule:: testpkg.example\n"
|
||||
" :members:\n"
|
||||
" :undoc-members:\n"
|
||||
" :show-inheritance:\n"
|
||||
"\n")
|
||||
" :show-inheritance:\n")
|
||||
|
@ -1047,7 +1047,7 @@ def test_class_attributes(app):
|
||||
|
||||
|
||||
@pytest.mark.sphinx('html', testroot='ext-autodoc')
|
||||
def test_instance_attributes(app):
|
||||
def test_autoclass_instance_attributes(app):
|
||||
options = {"members": None}
|
||||
actual = do_autodoc(app, 'class', 'target.InstAttCls', options)
|
||||
assert list(actual) == [
|
||||
@ -1120,6 +1120,19 @@ def test_instance_attributes(app):
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.sphinx('html', testroot='ext-autodoc')
|
||||
def test_autoattribute_instance_attributes(app):
|
||||
actual = do_autodoc(app, 'attribute', 'target.InstAttCls.ia1')
|
||||
assert list(actual) == [
|
||||
'',
|
||||
'.. py:attribute:: InstAttCls.ia1',
|
||||
' :module: target',
|
||||
'',
|
||||
' Doc comment for instance attribute InstAttCls.ia1',
|
||||
''
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.sphinx('html', testroot='ext-autodoc')
|
||||
def test_slots(app):
|
||||
options = {"members": None,
|
||||
@ -1960,3 +1973,48 @@ def test_name_conflict(app):
|
||||
' docstring of target.name_conflict.foo::bar.',
|
||||
'',
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.sphinx('html', testroot='ext-autodoc')
|
||||
def test_name_mangling(app):
|
||||
options = {"members": None,
|
||||
"undoc-members": None,
|
||||
"private-members": None}
|
||||
actual = do_autodoc(app, 'module', 'target.name_mangling', options)
|
||||
assert list(actual) == [
|
||||
'',
|
||||
'.. py:module:: target.name_mangling',
|
||||
'',
|
||||
'',
|
||||
'.. py:class:: Bar()',
|
||||
' :module: target.name_mangling',
|
||||
'',
|
||||
'',
|
||||
' .. py:attribute:: Bar._Baz__email',
|
||||
' :module: target.name_mangling',
|
||||
' :value: None',
|
||||
'',
|
||||
' a member having mangled-like name',
|
||||
'',
|
||||
'',
|
||||
' .. py:attribute:: Bar.__address',
|
||||
' :module: target.name_mangling',
|
||||
' :value: None',
|
||||
'',
|
||||
'',
|
||||
'.. py:class:: Foo()',
|
||||
' :module: target.name_mangling',
|
||||
'',
|
||||
'',
|
||||
' .. py:attribute:: Foo.__age',
|
||||
' :module: target.name_mangling',
|
||||
' :value: None',
|
||||
'',
|
||||
'',
|
||||
' .. py:attribute:: Foo.__name',
|
||||
' :module: target.name_mangling',
|
||||
' :value: None',
|
||||
'',
|
||||
' name of Foo',
|
||||
'',
|
||||
]
|
||||
|
@ -208,17 +208,17 @@ def test_autosummary_generate_content_for_module(app):
|
||||
assert template.render.call_args[0][0] == 'module'
|
||||
|
||||
context = template.render.call_args[0][1]
|
||||
assert context['members'] == ['Exc', 'Foo', '_Baz', '_Exc', '__builtins__',
|
||||
'__cached__', '__doc__', '__file__', '__name__',
|
||||
'__package__', '_quux', 'bar', 'qux']
|
||||
assert context['members'] == ['CONSTANT1', 'CONSTANT2', 'Exc', 'Foo', '_Baz', '_Exc',
|
||||
'__builtins__', '__cached__', '__doc__', '__file__',
|
||||
'__name__', '__package__', '_quux', 'bar', 'qux']
|
||||
assert context['functions'] == ['bar']
|
||||
assert context['all_functions'] == ['_quux', 'bar']
|
||||
assert context['classes'] == ['Foo']
|
||||
assert context['all_classes'] == ['Foo', '_Baz']
|
||||
assert context['exceptions'] == ['Exc']
|
||||
assert context['all_exceptions'] == ['Exc', '_Exc']
|
||||
assert context['attributes'] == ['qux']
|
||||
assert context['all_attributes'] == ['qux']
|
||||
assert context['attributes'] == ['CONSTANT1', 'qux']
|
||||
assert context['all_attributes'] == ['CONSTANT1', 'qux']
|
||||
assert context['fullname'] == 'autosummary_dummy_module'
|
||||
assert context['module'] == 'autosummary_dummy_module'
|
||||
assert context['objname'] == ''
|
||||
@ -239,8 +239,9 @@ def test_autosummary_generate_content_for_module_skipped(app):
|
||||
generate_autosummary_content('autosummary_dummy_module', autosummary_dummy_module, None,
|
||||
template, None, False, app, False, {})
|
||||
context = template.render.call_args[0][1]
|
||||
assert context['members'] == ['_Baz', '_Exc', '__builtins__', '__cached__', '__doc__',
|
||||
'__file__', '__name__', '__package__', '_quux', 'qux']
|
||||
assert context['members'] == ['CONSTANT1', 'CONSTANT2', '_Baz', '_Exc', '__builtins__',
|
||||
'__cached__', '__doc__', '__file__', '__name__',
|
||||
'__package__', '_quux', 'qux']
|
||||
assert context['functions'] == []
|
||||
assert context['classes'] == []
|
||||
assert context['exceptions'] == []
|
||||
@ -256,18 +257,18 @@ def test_autosummary_generate_content_for_module_imported_members(app):
|
||||
assert template.render.call_args[0][0] == 'module'
|
||||
|
||||
context = template.render.call_args[0][1]
|
||||
assert context['members'] == ['Exc', 'Foo', 'Union', '_Baz', '_Exc', '__builtins__',
|
||||
'__cached__', '__doc__', '__file__', '__loader__',
|
||||
'__name__', '__package__', '__spec__', '_quux',
|
||||
'bar', 'path', 'qux']
|
||||
assert context['members'] == ['CONSTANT1', 'CONSTANT2', 'Exc', 'Foo', 'Union', '_Baz',
|
||||
'_Exc', '__builtins__', '__cached__', '__doc__',
|
||||
'__file__', '__loader__', '__name__', '__package__',
|
||||
'__spec__', '_quux', 'bar', 'path', 'qux']
|
||||
assert context['functions'] == ['bar']
|
||||
assert context['all_functions'] == ['_quux', 'bar']
|
||||
assert context['classes'] == ['Foo']
|
||||
assert context['all_classes'] == ['Foo', '_Baz']
|
||||
assert context['exceptions'] == ['Exc']
|
||||
assert context['all_exceptions'] == ['Exc', '_Exc']
|
||||
assert context['attributes'] == ['qux']
|
||||
assert context['all_attributes'] == ['qux']
|
||||
assert context['attributes'] == ['CONSTANT1', 'qux']
|
||||
assert context['all_attributes'] == ['CONSTANT1', 'qux']
|
||||
assert context['fullname'] == 'autosummary_dummy_module'
|
||||
assert context['module'] == 'autosummary_dummy_module'
|
||||
assert context['objname'] == ''
|
||||
@ -307,6 +308,11 @@ def test_autosummary_generate(app, status, warning):
|
||||
' \n'
|
||||
' Foo\n'
|
||||
' \n' in module)
|
||||
assert (' .. autosummary::\n'
|
||||
' \n'
|
||||
' CONSTANT1\n'
|
||||
' qux\n'
|
||||
' \n' in module)
|
||||
|
||||
Foo = (app.srcdir / 'generated' / 'autosummary_dummy_module.Foo.rst').read_text()
|
||||
assert '.. automethod:: __init__' in Foo
|
||||
@ -317,6 +323,8 @@ def test_autosummary_generate(app, status, warning):
|
||||
' \n' in Foo)
|
||||
assert (' .. autosummary::\n'
|
||||
' \n'
|
||||
' ~Foo.CONSTANT3\n'
|
||||
' ~Foo.CONSTANT4\n'
|
||||
' ~Foo.baz\n'
|
||||
' \n' in Foo)
|
||||
|
||||
@ -386,6 +394,20 @@ def test_autosummary_recursive(app, status, warning):
|
||||
assert 'package.package.module' in content
|
||||
|
||||
|
||||
@pytest.mark.sphinx('dummy', testroot='ext-autosummary-filename-map')
|
||||
def test_autosummary_filename_map(app, status, warning):
|
||||
app.build()
|
||||
|
||||
assert (app.srcdir / 'generated' / 'module_mangled.rst').exists()
|
||||
assert not (app.srcdir / 'generated' / 'autosummary_dummy_module.rst').exists()
|
||||
assert (app.srcdir / 'generated' / 'bar.rst').exists()
|
||||
assert not (app.srcdir / 'generated' / 'autosummary_dummy_module.bar.rst').exists()
|
||||
assert (app.srcdir / 'generated' / 'autosummary_dummy_module.Foo.rst').exists()
|
||||
|
||||
html_warnings = app._warning.getvalue()
|
||||
assert html_warnings == ''
|
||||
|
||||
|
||||
@pytest.mark.sphinx('latex', **default_kw)
|
||||
def test_autosummary_latex_table_colspec(app, status, warning):
|
||||
app.builder.build_all()
|
||||
|
@ -109,7 +109,7 @@ def test_inheritance_diagram(app, status, warning):
|
||||
('dummy.test.B', 'dummy.test.B', [], None)
|
||||
]
|
||||
|
||||
# inheritance diagram with 2 top classes and specifiying the entire module
|
||||
# inheritance diagram with 2 top classes and specifying the entire module
|
||||
# rendering should be
|
||||
#
|
||||
# A
|
||||
|
@ -956,9 +956,9 @@ def test_xml_role_xref(app):
|
||||
'glossary_terms#term-Some-term'])
|
||||
assert_elem(
|
||||
para2[1],
|
||||
['LINK TO', 'SAME TYPE LINKS', 'AND',
|
||||
"I18N ROCK'N ROLE XREF", '.'],
|
||||
['same-type-links', 'i18n-role-xref'])
|
||||
['LINK TO', 'LABEL', 'AND',
|
||||
'SAME TYPE LINKS', 'AND', 'SAME TYPE LINKS', '.'],
|
||||
['i18n-role-xref', 'same-type-links', 'same-type-links'])
|
||||
assert_elem(
|
||||
para2[2],
|
||||
['LINK TO', 'I18N WITH GLOSSARY TERMS', 'AND', 'CONTENTS', '.'],
|
||||
|
@ -130,7 +130,7 @@ def test_signature_partialmethod():
|
||||
|
||||
def test_signature_annotations():
|
||||
from typing_test_data import (f0, f1, f2, f3, f4, f5, f6, f7, f8, f9, f10,
|
||||
f11, f12, f13, f14, f15, f16, f17, f18, f19, f20, Node)
|
||||
f11, f12, f13, f14, f15, f16, f17, f18, f19, f20, f21, Node)
|
||||
|
||||
# Class annotations
|
||||
sig = inspect.signature(f0)
|
||||
@ -214,6 +214,10 @@ def test_signature_annotations():
|
||||
sig = inspect.signature(f19)
|
||||
assert stringify_signature(sig) == '(*args: int, **kwargs: str)'
|
||||
|
||||
# default value is inspect.Signature.empty
|
||||
sig = inspect.signature(f21)
|
||||
assert stringify_signature(sig) == "(arg1='whatever', arg2)"
|
||||
|
||||
# type hints by string
|
||||
sig = inspect.signature(Node.children)
|
||||
if (3, 5, 0) <= sys.version_info < (3, 5, 3):
|
||||
|
@ -10,7 +10,9 @@
|
||||
|
||||
import sys
|
||||
from numbers import Integral
|
||||
from typing import Any, Dict, List, TypeVar, Union, Callable, Tuple, Optional, Generic
|
||||
from typing import (
|
||||
Any, Dict, Generator, List, TypeVar, Union, Callable, Tuple, Optional, Generic
|
||||
)
|
||||
|
||||
import pytest
|
||||
|
||||
@ -48,6 +50,7 @@ def test_stringify_type_hints_containers():
|
||||
assert stringify(Tuple[str, ...]) == "Tuple[str, ...]"
|
||||
assert stringify(List[Dict[str, Tuple]]) == "List[Dict[str, Tuple]]"
|
||||
assert stringify(MyList[Tuple[int, int]]) == "test_util_typing.MyList[Tuple[int, int]]"
|
||||
assert stringify(Generator[None, None, None]) == "Generator[None, None, None]"
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.version_info < (3, 9), reason='python 3.9+ is required.')
|
||||
|
@ -1,3 +1,4 @@
|
||||
from inspect import Signature
|
||||
from numbers import Integral
|
||||
from typing import Any, Dict, List, TypeVar, Union, Callable, Tuple, Optional
|
||||
|
||||
@ -100,6 +101,9 @@ def f20() -> Optional[Union[int, str]]:
|
||||
pass
|
||||
|
||||
|
||||
def f21(arg1='whatever', arg2=Signature.empty):
|
||||
pass
|
||||
|
||||
|
||||
class Node:
|
||||
def __init__(self, parent: Optional['Node']) -> None:
|
||||
|
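The new ``f21`` fixture is the case behind #7935: the default *value* of ``arg2`` is the ``empty`` sentinel, which a validating ``inspect.Signature`` rejects because the parameter then looks default-less after one with a default. A small standalone reproduction (``__validate_parameters__`` is the same internal ``inspect.Signature`` parameter the inspect hunk earlier in this commit relies on)::

    import inspect

    def f21(arg1='whatever', arg2=inspect.Signature.empty):
        pass

    params = [
        inspect.Parameter('arg1', inspect.Parameter.POSITIONAL_OR_KEYWORD, default='whatever'),
        inspect.Parameter('arg2', inspect.Parameter.POSITIONAL_OR_KEYWORD,
                          default=f21.__defaults__[1]),
    ]
    try:
        inspect.Signature(params)
    except ValueError as exc:
        # arg2's default is the empty sentinel, so validation treats it as missing
        print(exc)

    # Skipping validation keeps both parameters, which is what the fixed helper needs:
    print(inspect.Signature(params, __validate_parameters__=False))   # (arg1='whatever', arg2)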
@ -50,6 +50,9 @@ def lint(path: str) -> int:
|
||||
if re.match(r'^\s*\.\. ', line):
|
||||
# ignore directives and hyperlink targets
|
||||
pass
|
||||
elif re.match(r'^\s*``[^`]+``$', line):
|
||||
# ignore a very long literal string
|
||||
pass
|
||||
else:
|
||||
print('%s:%d: the line is too long (%d > %d).' %
|
||||
(path, i + 1, len(line), MAX_LINE_LENGTH))
|
||||
|
@ -1,6 +1,6 @@
|
||||
# lint Python modules using external checkers.
|
||||
#
|
||||
# This is the main checker controling the other ones and the reports
|
||||
# This is the main checker controlling the other ones and the reports
|
||||
# generation. It is itself both a raw checker and an astng checker in order
|
||||
# to:
|
||||
# * handle message activation / deactivation at the module level
|
||||
@ -71,7 +71,7 @@ reports=yes
|
||||
|
||||
# Python expression which should return a note less than 10 (10 is the highest
|
||||
# note).You have access to the variables errors warning, statement which
|
||||
# respectivly contain the number of errors / warnings messages and the total
|
||||
# respectively contain the number of errors / warnings messages and the total
|
||||
# number of statements analyzed. This is used by the global evaluation report
|
||||
# (R0004).
|
||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||
|