Merge branch '3.x'
Commit 445b9515c1
@@ -24,9 +24,6 @@ jobs:
  - python: '3.9-dev'
    env:
    - TOXENV=py39
  - python: 'nightly'
    env:
    - TOXENV=py310

  - language: node_js
    node_js: '10.7'
CHANGES
@@ -87,6 +87,8 @@ Bugs fixed
  typing.Generic
* #8157: autodoc: TypeError is raised when annotation has invalid __args__
* #7964: autodoc: Tuple in default value is wrongly rendered
* #8200: autodoc: type aliases break type formatting of autoattribute
* #7786: autodoc: can't detect overloaded methods defined in other file
* #8192: napoleon: description is disappeared when it contains inline literals
* #8142: napoleon: Potential of regex denial of service in google style docs
* #8169: LaTeX: pxjahyper loaded even when latex_engine is not platex
@@ -94,6 +96,8 @@ Bugs fixed
* #8277: sphinx-build: missing and redundant spacing (and etc) for console
  output on building
* #7973: imgconverter: Check availability of imagemagick many times
* #8255: py domain: number in default argument value is changed from hexadecimal
  to decimal
* #8093: The highlight warning has wrong location in some builders (LaTeX,
  singlehtml and so on)
* #8239: Failed to refer a token in productionlist if it is indented
@@ -126,6 +130,11 @@ Bugs fixed

* #8188: C, add missing items to internal object types dictionary,
  e.g., preventing intersphinx from resolving them.
* C, fix anon objects in intersphinx.
* #8270, C++, properly reject functions as duplicate declarations if a
  non-function declaration of the same name already exists.
* C, fix references to function parameters.
  Link to the function instead of a non-existing anchor.


Testing
EXAMPLES
@@ -236,6 +236,7 @@ Documentation using sphinx_rtd_theme
* `MyHDL <http://docs.myhdl.org/>`__
* `Nextflow <https://www.nextflow.io/docs/latest/index.html>`__
* `NICOS <https://forge.frm2.tum.de/nicos/doc/nicos-master/>`__ (customized)
* `OpenFAST <https://openfast.readthedocs.io/>`__
* `Pelican <http://docs.getpelican.com/>`__
* `picamera <https://picamera.readthedocs.io/>`__
* `Pillow <https://pillow.readthedocs.io/>`__
@@ -110,8 +110,6 @@ texinfo_documents = [
     1),
]

# We're not using intersphinx right now, but if we did, this would be part of
# the mapping:
intersphinx_mapping = {'python': ('https://docs.python.org/3/', None)}

# Sphinx document translation with sphinx gettext feature uses these settings:
setup.py
@@ -44,7 +44,7 @@ extras_require = {
    'lint': [
        'flake8>=3.5.0',
        'flake8-import-order',
        'mypy>=0.780',
        'mypy>=0.790',
        'docutils-stubs',
    ],
    'test': [
@@ -22,6 +22,7 @@ from typing import TYPE_CHECKING

from docutils import nodes
from docutils.nodes import Element, TextElement
from docutils.parsers import Parser
from docutils.parsers.rst import Directive, roles
from docutils.transforms import Transform
from pygments.lexer import Lexer
@@ -467,8 +468,10 @@ class Sphinx:
    def add_builder(self, builder: "Type[Builder]", override: bool = False) -> None:
        """Register a new builder.

        *builder* must be a class that inherits from
        :class:`~sphinx.builders.Builder`.
        *builder* must be a class that inherits from :class:`~sphinx.builders.Builder`.

        If *override* is True, the given *builder* is forcedly installed even if
        a builder having the same name is already installed.

        .. versionchanged:: 1.8
           Add *override* keyword.
@@ -525,6 +528,9 @@ class Sphinx:
        builtin translator. This allows extensions to use custom translator
        and define custom nodes for the translator (see :meth:`add_node`).

        If *override* is True, the given *translator_class* is forcedly installed even if
        a translator for *name* is already installed.

        .. versionadded:: 1.3
        .. versionchanged:: 1.8
           Add *override* keyword.
@@ -559,6 +565,9 @@ class Sphinx:
        Obviously, translators for which you don't specify visitor methods will
        choke on the node when encountered in a document to translate.

        If *override* is True, the given *node* is forcedly installed even if
        a node having the same name is already installed.

        .. versionchanged:: 0.5
           Added the support for keyword arguments giving visit functions.
        """
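All of the docstring additions in this file follow the same *override* pattern. The sketch below shows how an extension would exercise it for a custom node; the node class and visitor functions are invented for illustration, and re-registering the same node without override=True is expected to be rejected:

    from docutils import nodes


    class velocity(nodes.Element):
        """Hypothetical custom node, used only for illustration."""


    def visit_velocity_html(self, node):
        # *self* is the HTML translator instance
        self.body.append(self.starttag(node, 'span'))


    def depart_velocity_html(self, node):
        self.body.append('</span>')


    def setup(app):
        app.add_node(velocity, html=(visit_velocity_html, depart_velocity_html))
        # Registering the same node a second time is only allowed when forced:
        app.add_node(velocity, override=True,
                     html=(visit_velocity_html, depart_velocity_html))
        return {'version': '0.1', 'parallel_read_safe': True}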
@@ -594,6 +603,9 @@ class Sphinx:
        Other keyword arguments are used for node visitor functions. See the
        :meth:`.Sphinx.add_node` for details.

        If *override* is True, the given *node* is forcedly installed even if
        a node having the same name is already installed.

        .. versionadded:: 1.4
        """
        self.registry.add_enumerable_node(node, figtype, title_getter, override=override)
@@ -607,14 +619,14 @@ class Sphinx:
        details, see `the Docutils docs
        <http://docutils.sourceforge.net/docs/howto/rst-directives.html>`_ .

        For example, the (already existing) :rst:dir:`literalinclude` directive
        would be added like this:
        For example, a custom directive named ``my-directive`` would be added
        like this:

        .. code-block:: python

           from docutils.parsers.rst import Directive, directives

           class LiteralIncludeDirective(Directive):
           class MyDirective(Directive):
               has_content = True
               required_arguments = 1
               optional_arguments = 0
@@ -627,7 +639,11 @@ class Sphinx:
               def run(self):
                   ...

           add_directive('literalinclude', LiteralIncludeDirective)
           def setup(app):
               add_directive('my-directive', MyDirective)

        If *override* is True, the given *cls* is forcedly installed even if
        a directive named as *name* is already installed.

        .. versionchanged:: 0.6
           Docutils 0.5-style directive classes are now supported.
@@ -651,6 +667,9 @@ class Sphinx:
        <http://docutils.sourceforge.net/docs/howto/rst-roles.html>`_ for
        more information.

        If *override* is True, the given *role* is forcedly installed even if
        a role named as *name* is already installed.

        .. versionchanged:: 1.8
           Add *override* keyword.
        """
@@ -666,6 +685,9 @@ class Sphinx:
        Register a Docutils role that does nothing but wrap its contents in the
        node given by *nodeclass*.

        If *override* is True, the given *nodeclass* is forcedly installed even if
        a role named as *name* is already installed.

        .. versionadded:: 0.6
        .. versionchanged:: 1.8
           Add *override* keyword.
@@ -685,6 +707,9 @@ class Sphinx:
        Make the given *domain* (which must be a class; more precisely, a
        subclass of :class:`~sphinx.domains.Domain`) known to Sphinx.

        If *override* is True, the given *domain* is forcedly installed even if
        a domain having the same name is already installed.

        .. versionadded:: 1.0
        .. versionchanged:: 1.8
           Add *override* keyword.
@@ -698,6 +723,9 @@ class Sphinx:
        Like :meth:`add_directive`, but the directive is added to the domain
        named *domain*.

        If *override* is True, the given *directive* is forcedly installed even if
        a directive named as *name* is already installed.

        .. versionadded:: 1.0
        .. versionchanged:: 1.8
           Add *override* keyword.
@@ -711,6 +739,9 @@ class Sphinx:
        Like :meth:`add_role`, but the role is added to the domain named
        *domain*.

        If *override* is True, the given *role* is forcedly installed even if
        a role named as *name* is already installed.

        .. versionadded:: 1.0
        .. versionchanged:: 1.8
           Add *override* keyword.
@@ -724,6 +755,9 @@ class Sphinx:
        Add a custom *index* class to the domain named *domain*. *index* must
        be a subclass of :class:`~sphinx.domains.Index`.

        If *override* is True, the given *index* is forcedly installed even if
        an index having the same name is already installed.

        .. versionadded:: 1.0
        .. versionchanged:: 1.8
           Add *override* keyword.
@@ -787,6 +821,9 @@ class Sphinx:
        For the role content, you have the same syntactical possibilities as
        for standard Sphinx roles (see :ref:`xref-syntax`).

        If *override* is True, the given object_type is forcedly installed even if
        an object_type having the same name is already installed.

        .. versionchanged:: 1.8
           Add *override* keyword.
        """
@@ -823,6 +860,9 @@ class Sphinx:
        (Of course, the element following the ``topic`` directive needn't be a
        section.)

        If *override* is True, the given crossref_type is forcedly installed even if
        a crossref_type having the same name is already installed.

        .. versionchanged:: 1.8
           Add *override* keyword.
        """
@@ -987,6 +1027,9 @@ class Sphinx:
        new types of objects. See the source of the autodoc module for
        examples on how to subclass :class:`Documenter`.

        If *override* is True, the given *cls* is forcedly installed even if
        a documenter having the same name is already installed.

        .. todo:: Add real docs for Documenter and subclassing

        .. versionadded:: 0.6
@@ -1035,13 +1078,19 @@ class Sphinx:
        Same as :confval:`source_suffix`. The users can override this
        using the setting.

        If *override* is True, the given *suffix* is forcedly installed even if
        a same suffix is already installed.

        .. versionadded:: 1.8
        """
        self.registry.add_source_suffix(suffix, filetype, override=override)

    def add_source_parser(self, *args: Any, **kwargs: Any) -> None:
    def add_source_parser(self, parser: "Type[Parser]", override: bool = False) -> None:
        """Register a parser class.

        If *override* is True, the given *parser* is forcedly installed even if
        a parser for the same suffix is already installed.

        .. versionadded:: 1.4
        .. versionchanged:: 1.8
           *suffix* argument is deprecated. It only accepts *parser* argument.
@@ -1049,7 +1098,7 @@ class Sphinx:
        .. versionchanged:: 1.8
           Add *override* keyword.
        """
        self.registry.add_source_parser(*args, **kwargs)
        self.registry.add_source_parser(parser, override=override)

    def add_env_collector(self, collector: "Type[EnvironmentCollector]") -> None:
        """Register an environment collector class.
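The add_source_parser() hunks above (and the matching registry change further down) drop the old *suffix*-based call: only a parser class plus an explicit *override* flag is accepted now. A hedged sketch of an extension using the new signature; MarkdownParser and its one-paragraph behaviour are invented for illustration:

    from docutils import nodes
    from docutils.parsers import Parser


    class MarkdownParser(Parser):
        """Hypothetical parser; real Markdown support would do far more."""
        supported = ('markdown',)   # filetype names this parser handles

        def parse(self, inputstring, document):
            # toy behaviour: dump the whole source into a single paragraph
            document += nodes.paragraph(text=inputstring)


    def setup(app):
        app.add_source_suffix('.md', 'markdown')
        app.add_source_parser(MarkdownParser)
        # Replacing an already-registered parser for the same filetype is
        # expected to require the explicit flag:
        # app.add_source_parser(MarkdownParser, override=True)
        return {'version': '0.1'}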
@@ -10,9 +10,8 @@

import re
from typing import (
    Any, Callable, Dict, Generator, Iterator, List, Type, TypeVar, Tuple, Union
    Any, Callable, cast, Dict, Generator, Iterator, List, Type, TypeVar, Tuple, Union
)
from typing import cast

from docutils import nodes
from docutils.nodes import Element, Node, TextElement, system_message
@@ -47,6 +46,11 @@ from sphinx.util.nodes import make_refnode
logger = logging.getLogger(__name__)
T = TypeVar('T')

DeclarationType = Union[
    "ASTStruct", "ASTUnion", "ASTEnum", "ASTEnumerator",
    "ASTType", "ASTTypeWithInit", "ASTMacro",
]

# https://en.cppreference.com/w/c/keyword
_keywords = [
    'auto', 'break', 'case', 'char', 'const', 'continue', 'default', 'do', 'double',
@@ -636,6 +640,10 @@ class ASTFunctionParameter(ASTBase):
        self.arg = arg
        self.ellipsis = ellipsis

    def get_id(self, version: int, objectType: str, symbol: "Symbol") -> str:
        # the anchor will be our parent
        return symbol.parent.declaration.get_id(version, prefixed=False)

    def _stringify(self, transform: StringifyTransform) -> str:
        if self.ellipsis:
            return '...'
@@ -1149,6 +1157,9 @@ class ASTType(ASTBase):
    def name(self) -> ASTNestedName:
        return self.decl.name

    def get_id(self, version: int, objectType: str, symbol: "Symbol") -> str:
        return symbol.get_full_nested_name().get_id(version)

    @property
    def function_params(self) -> List[ASTFunctionParameter]:
        return self.decl.function_params
@@ -1191,6 +1202,9 @@ class ASTTypeWithInit(ASTBase):
    def name(self) -> ASTNestedName:
        return self.type.name

    def get_id(self, version: int, objectType: str, symbol: "Symbol") -> str:
        return self.type.get_id(version, objectType, symbol)

    def _stringify(self, transform: StringifyTransform) -> str:
        res = []
        res.append(transform(self.type))
@@ -1242,6 +1256,9 @@ class ASTMacro(ASTBase):
    def name(self) -> ASTNestedName:
        return self.ident

    def get_id(self, version: int, objectType: str, symbol: "Symbol") -> str:
        return symbol.get_full_nested_name().get_id(version)

    def _stringify(self, transform: StringifyTransform) -> str:
        res = []
        res.append(transform(self.ident))
@@ -1342,7 +1359,8 @@ class ASTEnumerator(ASTBase):


class ASTDeclaration(ASTBaseBase):
    def __init__(self, objectType: str, directiveType: str, declaration: Any,
    def __init__(self, objectType: str, directiveType: str,
                 declaration: Union[DeclarationType, ASTFunctionParameter],
                 semicolon: bool = False) -> None:
        self.objectType = objectType
        self.directiveType = directiveType
@@ -1359,18 +1377,20 @@ class ASTDeclaration(ASTBaseBase):

    @property
    def name(self) -> ASTNestedName:
        return self.declaration.name
        decl = cast(DeclarationType, self.declaration)
        return decl.name

    @property
    def function_params(self) -> List[ASTFunctionParameter]:
        if self.objectType != 'function':
            return None
        return self.declaration.function_params
        decl = cast(ASTType, self.declaration)
        return decl.function_params

    def get_id(self, version: int, prefixed: bool = True) -> str:
        if self.objectType == 'enumerator' and self.enumeratorScopedSymbol:
            return self.enumeratorScopedSymbol.declaration.get_id(version, prefixed)
        id_ = self.symbol.get_full_nested_name().get_id(version)
        id_ = self.declaration.get_id(version, self.objectType, self.symbol)
        if prefixed:
            return _id_prefix[version] + id_
        else:
@@ -1413,7 +1433,8 @@ class ASTDeclaration(ASTBaseBase):
        elif self.objectType == 'enumerator':
            mainDeclNode += addnodes.desc_annotation('enumerator ', 'enumerator ')
        elif self.objectType == 'type':
            prefix = self.declaration.get_type_declaration_prefix()
            decl = cast(ASTType, self.declaration)
            prefix = decl.get_type_declaration_prefix()
            prefix += ' '
            mainDeclNode += addnodes.desc_annotation(prefix, prefix)
        else:
@@ -2988,7 +3009,7 @@ class DefinitionParser(BaseParser):

    def parse_pre_v3_type_definition(self) -> ASTDeclaration:
        self.skip_ws()
        declaration = None  # type: Any
        declaration = None  # type: DeclarationType
        if self.skip_word('struct'):
            typ = 'struct'
            declaration = self._parse_struct()
@@ -3011,7 +3032,7 @@ class DefinitionParser(BaseParser):
                                'macro', 'struct', 'union', 'enum', 'enumerator', 'type'):
            raise Exception('Internal error, unknown directiveType "%s".' % directiveType)

        declaration = None  # type: Any
        declaration = None  # type: DeclarationType
        if objectType == 'member':
            declaration = self._parse_type_with_init(named=True, outer='member')
        elif objectType == 'function':
@@ -3158,10 +3179,6 @@ class CObject(ObjectDescription):

        self.state.document.note_explicit_target(signode)

        domain = cast(CDomain, self.env.get_domain('c'))
        if name not in domain.objects:
            domain.objects[name] = (domain.env.docname, newestId, self.objtype)

        if 'noindexentry' not in self.options:
            indexText = self.get_index_text(name)
            self.indexnode['entries'].append(('single', indexText, newestId, '', None))
@@ -3681,10 +3698,6 @@ class CDomain(Domain):
        'objects': {},  # fullname -> docname, node_id, objtype
    }  # type: Dict[str, Union[Symbol, Dict[str, Tuple[str, str, str]]]]

    @property
    def objects(self) -> Dict[str, Tuple[str, str, str]]:
        return self.data.setdefault('objects', {})  # fullname -> docname, node_id, objtype

    def clear_doc(self, docname: str) -> None:
        if Symbol.debug_show_tree:
            print("clear_doc:", docname)
@@ -3700,9 +3713,6 @@ class CDomain(Domain):
            print(self.data['root_symbol'].dump(1))
            print("\tafter end")
            print("clear_doc end:", docname)
        for fullname, (fn, _id, _l) in list(self.objects.items()):
            if fn == docname:
                del self.objects[fullname]

    def process_doc(self, env: BuildEnvironment, docname: str,
                    document: nodes.document) -> None:
@@ -3788,8 +3798,18 @@ class CDomain(Domain):
            return []

    def get_objects(self) -> Iterator[Tuple[str, str, str, str, str, int]]:
        for refname, (docname, node_id, objtype) in list(self.objects.items()):
            yield (refname, refname, objtype, docname, node_id, 1)
        rootSymbol = self.data['root_symbol']
        for symbol in rootSymbol.get_all_symbols():
            if symbol.declaration is None:
                continue
            assert symbol.docname
            fullNestedName = symbol.get_full_nested_name()
            name = str(fullNestedName).lstrip('.')
            dispname = fullNestedName.get_display_string().lstrip('.')
            objectType = symbol.declaration.objectType
            docname = symbol.docname
            newestId = symbol.declaration.get_newest_id()
            yield (name, dispname, objectType, docname, newestId, 1)


def setup(app: Sphinx) -> Dict[str, Any]:
@@ -1836,7 +1836,7 @@ class ASTFunctionParameter(ASTBase):
        # this is not part of the normal name mangling in C++
        if symbol:
            # the anchor will be our parent
            return symbol.parent.declaration.get_id(version, prefixed=None)
            return symbol.parent.declaration.get_id(version, prefixed=False)
        # else, do the usual
        if self.ellipsis:
            return 'z'
@@ -4107,7 +4107,7 @@ class Symbol:
            Symbol.debug_print("self:")
            print(self.to_string(Symbol.debug_indent + 1), end="")
            Symbol.debug_print("nestedName: ", nestedName)
            Symbol.debug_print("templateDecls: ", templateDecls)
            Symbol.debug_print("templateDecls: ", ",".join(str(t) for t in templateDecls))
            Symbol.debug_print("strictTemplateParamArgLists:", strictTemplateParamArgLists)
            Symbol.debug_print("ancestorLookupType:", ancestorLookupType)
            Symbol.debug_print("templateShorthand: ", templateShorthand)
@@ -4231,7 +4231,7 @@ class Symbol:
            Symbol.debug_indent += 1
            Symbol.debug_print("_add_symbols:")
            Symbol.debug_indent += 1
            Symbol.debug_print("tdecls:", templateDecls)
            Symbol.debug_print("tdecls:", ",".join(str(t) for t in templateDecls))
            Symbol.debug_print("nn: ", nestedName)
            Symbol.debug_print("decl: ", declaration)
            Symbol.debug_print("doc: ", docname)
@@ -4360,6 +4360,11 @@ class Symbol:
        if Symbol.debug_lookup:
            Symbol.debug_print("candId:", candId)
        for symbol in withDecl:
            # but all existing must be functions as well,
            # otherwise we declare it to be a duplicate
            if symbol.declaration.objectType != 'function':
                handleDuplicateDeclaration(symbol, candSymbol)
                # (not reachable)
            oldId = symbol.declaration.get_newest_id()
            if Symbol.debug_lookup:
                Symbol.debug_print("oldId: ", oldId)
@@ -4370,7 +4375,11 @@ class Symbol:
        # if there is an empty symbol, fill that one
        if len(noDecl) == 0:
            if Symbol.debug_lookup:
                Symbol.debug_print("no match, no empty, candSybmol is not None?:", candSymbol is not None)  # NOQA
                Symbol.debug_print("no match, no empty")
                if candSymbol is not None:
                    Symbol.debug_print("result is already created candSymbol")
                else:
                    Symbol.debug_print("result is makeCandSymbol()")
            Symbol.debug_indent -= 2
            if candSymbol is not None:
                return candSymbol
@@ -6814,10 +6823,12 @@ class CPPObject(ObjectDescription):
        parentSymbol = env.temp_data['cpp:parent_symbol']
        parentDecl = parentSymbol.declaration
        if parentDecl is not None and parentDecl.objectType == 'function':
            logger.warning("C++ declarations inside functions are not supported." +
                           " Parent function is " +
                           str(parentSymbol.get_full_nested_name()),
                           location=self.get_source_info())
            msg = "C++ declarations inside functions are not supported." \
                  " Parent function: {}\nDirective name: {}\nDirective arg: {}"
            logger.warning(msg.format(
                str(parentSymbol.get_full_nested_name()),
                self.name, self.arguments[0]
            ), location=self.get_source_info())
            name = _make_phony_error_name()
            symbol = parentSymbol.add_name(name)
            env.temp_data['cpp:last_symbol'] = symbol
@@ -1456,22 +1456,14 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # type:
            return ''

        sig = super().format_signature()

        overloaded = False
        qualname = None
        # TODO: recreate analyzer for the module of class (To be clear, owner of the method)
        if self._signature_class and self._signature_method_name and self.analyzer:
            qualname = '.'.join([self._signature_class.__qualname__,
                                 self._signature_method_name])
            if qualname in self.analyzer.overloads:
                overloaded = True

        sigs = []
        if overloaded:
        overloads = self.get_overloaded_signatures()
        if overloads:
            # Use signatures for overloaded methods instead of the implementation method.
            method = safe_getattr(self._signature_class, self._signature_method_name, None)
            __globals__ = safe_getattr(method, '__globals__', {})
            for overload in self.analyzer.overloads.get(qualname):
            for overload in overloads:
                overload = evaluate_signature(overload, __globals__,
                                              self.env.config.autodoc_type_aliases)

@@ -1485,6 +1477,20 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # type:

        return "\n".join(sigs)

    def get_overloaded_signatures(self) -> List[Signature]:
        if self._signature_class and self._signature_method_name:
            for cls in self._signature_class.__mro__:
                try:
                    analyzer = ModuleAnalyzer.for_module(cls.__module__)
                    analyzer.parse()
                    qualname = '.'.join([cls.__qualname__, self._signature_method_name])
                    if qualname in analyzer.overloads:
                        return analyzer.overloads.get(qualname)
                except PycodeError:
                    pass

        return []

    def add_directive_header(self, sig: str) -> None:
        sourcename = self.get_sourcename()
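The new get_overloaded_signatures() walks the class MRO and asks a ModuleAnalyzer of each base class for @overload stubs, which is what lets autodoc document a subclass with its parent's overloads (see target/overload2.py and test_overload2 below). A hypothetical layout of such a base class; the bodies are invented and the real target.overload fixture may differ:

    from typing import overload


    class Bar:
        @overload
        def __init__(self, x: int, y: int) -> None:
            ...

        @overload
        def __init__(self, x: str, y: str) -> None:
            ...

        def __init__(self, x, y):
            pass


    class Baz(Bar):
        # autodoc is now expected to render this class as
        #   Baz(x: int, y: int)
        #   Baz(x: str, y: str)
        pass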
@@ -1714,7 +1720,8 @@ class GenericAliasDocumenter(DataDocumenter):
        return inspect.isgenericalias(member)

    def add_directive_header(self, sig: str) -> None:
        self.options.annotation = SUPPRESS  # type: ignore
        self.options = Options(self.options)
        self.options['annotation'] = SUPPRESS
        super().add_directive_header(sig)

    def add_content(self, more_content: Any, no_docstring: bool = False) -> None:
@@ -1735,10 +1742,11 @@ class TypeVarDocumenter(DataDocumenter):
    @classmethod
    def can_document_member(cls, member: Any, membername: str, isattr: bool, parent: Any
                            ) -> bool:
        return isinstance(member, TypeVar) and isattr  # type: ignore
        return isinstance(member, TypeVar) and isattr

    def add_directive_header(self, sig: str) -> None:
        self.options.annotation = SUPPRESS  # type: ignore
        self.options = Options(self.options)
        self.options['annotation'] = SUPPRESS
        super().add_directive_header(sig)

    def get_doc(self, ignore: int = None) -> List[List[str]]:
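Both documenters now copy their options into a fresh Options mapping before suppressing the annotation, rather than setting an attribute on the shared options object. A small sketch of the copy semantics this relies on; Options and SUPPRESS are real names from the codebase, while the option values here are arbitrary:

    from sphinx.ext.autodoc import SUPPRESS
    from sphinx.ext.autodoc.directive import Options

    shared = Options(annotation=None)
    local = Options(shared)          # shallow copy; *shared* stays untouched
    local['annotation'] = SUPPRESS

    assert shared['annotation'] is None
    assert local['annotation'] is SUPPRESS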
@@ -60,26 +60,28 @@ def parse(code: str, mode: str = 'exec') -> "ast.AST":


@overload
def unparse(node: None) -> None:
def unparse(node: None, code: str = '') -> None:
    ...


@overload
def unparse(node: ast.AST) -> str:
def unparse(node: ast.AST, code: str = '') -> str:
    ...


def unparse(node: Optional[ast.AST]) -> Optional[str]:
def unparse(node: Optional[ast.AST], code: str = '') -> Optional[str]:
    """Unparse an AST to string."""
    if node is None:
        return None
    elif isinstance(node, str):
        return node
    return _UnparseVisitor().visit(node)
    return _UnparseVisitor(code).visit(node)


# a greatly cut-down version of `ast._Unparser`
class _UnparseVisitor(ast.NodeVisitor):
    def __init__(self, code: str = '') -> None:
        self.code = code

    def _visit_op(self, node: ast.AST) -> str:
        return OPERATORS[node.__class__]
@@ -155,9 +157,14 @@ class _UnparseVisitor(ast.NodeVisitor):
                    ["%s=%s" % (k.arg, self.visit(k.value)) for k in node.keywords])
        return "%s(%s)" % (self.visit(node.func), ", ".join(args))

    def visit_Constant(self, node: ast.Constant) -> str:  # type: ignore
    def visit_Constant(self, node: ast.Constant) -> str:
        if node.value is Ellipsis:
            return "..."
        elif isinstance(node.value, (int, float, complex)):
            if self.code and sys.version_info > (3, 8):
                return ast.get_source_segment(self.code, node)
            else:
                return repr(node.value)
        else:
            return repr(node.value)
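The *code* parameter added throughout this module lets visit_Constant() hand numeric literals back exactly as they were written, via ast.get_source_segment() on Python 3.8+, instead of going through repr(). A standard-library-only sketch of that mechanism (Python 3.8+ assumed):

    import ast

    source = "def hello(age=0x10, height=1_6_0): ..."
    defaults = ast.parse(source).body[0].args.defaults

    print(repr(defaults[0].value))                      # 16 -- hex spelling lost
    print(ast.get_source_segment(source, defaults[0]))  # 0x10 -- as written
    print(ast.get_source_segment(source, defaults[1]))  # 1_6_0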
@@ -258,12 +258,12 @@ class SphinxComponentRegistry:
        else:
            self.source_suffix[suffix] = filetype

    def add_source_parser(self, parser: "Type[Parser]", **kwargs: Any) -> None:
    def add_source_parser(self, parser: "Type[Parser]", override: bool = False) -> None:
        logger.debug('[app] adding search source_parser: %r', parser)

        # create a map from filetype to parser
        for filetype in parser.supported:
            if filetype in self.source_parsers and not kwargs.get('override'):
            if filetype in self.source_parsers and not override:
                raise ExtensionError(__('source_parser for %r is already registered') %
                                     filetype)
            else:
@@ -288,8 +288,8 @@ class IndexBuilder:
                frozen.get('envversion') != self.env.version:
            raise ValueError('old format')
        index2fn = frozen['docnames']
        self._filenames = dict(zip(index2fn, frozen['filenames']))  # type: ignore
        self._titles = dict(zip(index2fn, frozen['titles']))  # type: ignore
        self._filenames = dict(zip(index2fn, frozen['filenames']))
        self._titles = dict(zip(index2fn, frozen['titles']))

        def load_terms(mapping: Dict[str, Any]) -> Dict[str, Set[str]]:
            rv = {}
@@ -350,13 +350,13 @@ class IndexBuilder:
    def get_terms(self, fn2index: Dict) -> Tuple[Dict[str, List[str]], Dict[str, List[str]]]:
        rvs = {}, {}  # type: Tuple[Dict[str, List[str]], Dict[str, List[str]]]
        for rv, mapping in zip(rvs, (self._mapping, self._title_mapping)):
            for k, v in mapping.items():  # type: ignore
            for k, v in mapping.items():
                if len(v) == 1:
                    fn, = v
                    if fn in fn2index:
                        rv[k] = fn2index[fn]  # type: ignore
                        rv[k] = fn2index[fn]
                else:
                    rv[k] = sorted([fn2index[fn] for fn in v if fn in fn2index])  # type: ignore  # NOQA
                    rv[k] = sorted([fn2index[fn] for fn in v if fn in fn2index])
        return rvs

    def freeze(self) -> Dict[str, Any]:
@@ -583,13 +583,14 @@ def stringify_signature(sig: inspect.Signature, show_annotation: bool = True,

def signature_from_str(signature: str) -> inspect.Signature:
    """Create a Signature object from string."""
    module = ast.parse('def func' + signature + ': pass')
    code = 'def func' + signature + ': pass'
    module = ast.parse(code)
    function = cast(ast.FunctionDef, module.body[0])  # type: ignore

    return signature_from_ast(function)
    return signature_from_ast(function, code)


def signature_from_ast(node: ast.FunctionDef) -> inspect.Signature:
def signature_from_ast(node: ast.FunctionDef, code: str = '') -> inspect.Signature:
    """Create a Signature object from AST *node*."""
    args = node.args
    defaults = list(args.defaults)
@@ -609,9 +610,9 @@ def signature_from_ast(node: ast.FunctionDef) -> inspect.Signature:
        if defaults[i] is Parameter.empty:
            default = Parameter.empty
        else:
            default = ast_unparse(defaults[i])
            default = ast_unparse(defaults[i], code)

        annotation = ast_unparse(arg.annotation) or Parameter.empty
        annotation = ast_unparse(arg.annotation, code) or Parameter.empty
        params.append(Parameter(arg.arg, Parameter.POSITIONAL_ONLY,
                                default=default, annotation=annotation))

@@ -619,29 +620,29 @@ def signature_from_ast(node: ast.FunctionDef) -> inspect.Signature:
        if defaults[i + posonlyargs] is Parameter.empty:
            default = Parameter.empty
        else:
            default = ast_unparse(defaults[i + posonlyargs])
            default = ast_unparse(defaults[i + posonlyargs], code)

        annotation = ast_unparse(arg.annotation) or Parameter.empty
        annotation = ast_unparse(arg.annotation, code) or Parameter.empty
        params.append(Parameter(arg.arg, Parameter.POSITIONAL_OR_KEYWORD,
                                default=default, annotation=annotation))

    if args.vararg:
        annotation = ast_unparse(args.vararg.annotation) or Parameter.empty
        annotation = ast_unparse(args.vararg.annotation, code) or Parameter.empty
        params.append(Parameter(args.vararg.arg, Parameter.VAR_POSITIONAL,
                                annotation=annotation))

    for i, arg in enumerate(args.kwonlyargs):
        default = ast_unparse(args.kw_defaults[i]) or Parameter.empty
        annotation = ast_unparse(arg.annotation) or Parameter.empty
        default = ast_unparse(args.kw_defaults[i], code) or Parameter.empty
        annotation = ast_unparse(arg.annotation, code) or Parameter.empty
        params.append(Parameter(arg.arg, Parameter.KEYWORD_ONLY, default=default,
                                annotation=annotation))

    if args.kwarg:
        annotation = ast_unparse(args.kwarg.annotation) or Parameter.empty
        annotation = ast_unparse(args.kwarg.annotation, code) or Parameter.empty
        params.append(Parameter(args.kwarg.arg, Parameter.VAR_KEYWORD,
                                annotation=annotation))

    return_annotation = ast_unparse(node.returns) or Parameter.empty
    return_annotation = ast_unparse(node.returns, code) or Parameter.empty

    return inspect.Signature(params, return_annotation=return_annotation)
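Threading *code* through signature_from_str() and signature_from_ast() is what fixes #8255: default values keep their source spelling instead of being re-rendered from repr(). A hedged usage sketch; the exact output depends on the running Python version, and on 3.8+ the literals below are expected to survive as written:

    from sphinx.util.inspect import signature_from_str

    sig = signature_from_str('(age=0x10, height=1_6_0)')
    print(sig.parameters['age'].default)     # expected on 3.8+: 0x10
    print(sig.parameters['height'].default)  # expected on 3.8+: 1_6_0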
@@ -57,7 +57,7 @@ Inventory = Dict[str, Dict[str, Tuple[str, str, str, str]]]
def is_system_TypeVar(typ: Any) -> bool:
    """Check *typ* is system defined TypeVar."""
    modname = getattr(typ, '__module__', '')
    return modname == 'typing' and isinstance(typ, TypeVar)  # type: ignore
    return modname == 'typing' and isinstance(typ, TypeVar)


def stringify(annotation: Any) -> str:
@@ -68,7 +68,7 @@ def stringify(annotation: Any) -> str:
            return annotation[1:-2]
        else:
            return annotation
    elif isinstance(annotation, TypeVar):  # type: ignore
    elif isinstance(annotation, TypeVar):
        return annotation.__name__
    elif not annotation:
        return repr(annotation)
@@ -368,7 +368,7 @@ class TexinfoTranslator(SphinxTranslator):
        """Return an escaped string suitable for use as an argument
        to a Texinfo command."""
        s = self.escape(s)
        # commas are the argument delimeters
        # commas are the argument delimiters
        s = s.replace(',', '@comma{}')
        # normalize white space
        s = ' '.join(s.split()).strip()
tests/roots/test-domain-c/function_param_target.rst (new file)
@@ -0,0 +1,5 @@
.. c:function:: void f(int i)

- :c:var:`i`

- :c:var:`f.i`
tests/roots/test-ext-autodoc/target/overload2.py (new file)
@@ -0,0 +1,5 @@
from target.overload import Bar


class Baz(Bar):
    pass
@@ -9,6 +9,8 @@
"""
import pytest

from xml.etree import ElementTree

from sphinx import addnodes
from sphinx.addnodes import desc
from sphinx.domains.c import DefinitionParser, DefinitionError
@@ -529,6 +531,25 @@ def filter_warnings(warning, file):
    return res


def extract_role_links(app, filename):
    t = (app.outdir / filename).read_text()
    lis = [l for l in t.split('\n') if l.startswith("<li")]
    entries = []
    for l in lis:
        li = ElementTree.fromstring(l)
        aList = list(li.iter('a'))
        assert len(aList) == 1
        a = aList[0]
        target = a.attrib['href'].lstrip('#')
        title = a.attrib['title']
        assert len(a) == 1
        code = a[0]
        assert code.tag == 'code'
        text = ''.join(code.itertext())
        entries.append((target, title, text))
    return entries


@pytest.mark.sphinx(testroot='domain-c', confoverrides={'nitpicky': True})
def test_build_domain_c(app, status, warning):
    app.builder.build_all()
@@ -562,6 +583,26 @@ def test_build_domain_c_semicolon(app, status, warning):
    assert len(ws) == 0


@pytest.mark.sphinx(testroot='domain-c', confoverrides={'nitpicky': True})
def test_build_function_param_target(app, warning):
    # the anchor for function parameters should be the function
    app.builder.build_all()
    ws = filter_warnings(warning, "function_param_target")
    assert len(ws) == 0
    entries = extract_role_links(app, "function_param_target.html")
    assert entries == [
        ('c.f', 'i', 'i'),
        ('c.f', 'f.i', 'f.i'),
    ]


def _get_obj(app, queryName):
    domain = app.env.get_domain('c')
    for name, dispname, objectType, docname, anchor, prio in domain.get_objects():
        if name == queryName:
            return (docname, anchor, objectType)
    return (queryName, "not", "found")


def test_cfunction(app):
    text = (".. c:function:: PyObject* "
            "PyType_GenericAlloc(PyTypeObject *type, Py_ssize_t nitems)")
@@ -569,8 +610,7 @@ def test_cfunction(app):
    assert_node(doctree[1], addnodes.desc, desctype="function",
                domain="c", objtype="function", noindex=False)

    domain = app.env.get_domain('c')
    entry = domain.objects.get('PyType_GenericAlloc')
    entry = _get_obj(app, 'PyType_GenericAlloc')
    assert entry == ('index', 'c.PyType_GenericAlloc', 'function')


@@ -580,8 +620,7 @@ def test_cmember(app):
    assert_node(doctree[1], addnodes.desc, desctype="member",
                domain="c", objtype="member", noindex=False)

    domain = app.env.get_domain('c')
    entry = domain.objects.get('PyTypeObject.tp_bases')
    entry = _get_obj(app, 'PyTypeObject.tp_bases')
    assert entry == ('index', 'c.PyTypeObject.tp_bases', 'member')


@@ -591,9 +630,8 @@ def test_cvar(app):
    assert_node(doctree[1], addnodes.desc, desctype="var",
                domain="c", objtype="var", noindex=False)

    domain = app.env.get_domain('c')
    entry = domain.objects.get('PyClass_Type')
    assert entry == ('index', 'c.PyClass_Type', 'var')
    entry = _get_obj(app, 'PyClass_Type')
    assert entry == ('index', 'c.PyClass_Type', 'member')


def test_noindexentry(app):
@@ -1231,3 +1231,18 @@ def test_noindexentry(app):
    assert_node(doctree, (addnodes.index, desc, addnodes.index, desc))
    assert_node(doctree[0], addnodes.index, entries=[('single', 'f (C++ function)', '_CPPv41fv', '', None)])
    assert_node(doctree[2], addnodes.index, entries=[])


def test_mix_decl_duplicate(app, warning):
    # Issue 8270
    text = (".. cpp:struct:: A\n"
            ".. cpp:function:: void A()\n"
            ".. cpp:struct:: A\n")
    restructuredtext.parse(app, text)
    ws = warning.getvalue().split("\n")
    assert len(ws) == 5
    assert "index.rst:2: WARNING: Duplicate C++ declaration, also defined in 'index'." in ws[0]
    assert "Declaration is 'void A()'." in ws[1]
    assert "index.rst:3: WARNING: Duplicate C++ declaration, also defined in 'index'." in ws[2]
    assert "Declaration is 'A'." in ws[3]
    assert ws[4] == ""
@@ -315,7 +315,7 @@ def test_pyfunction_signature(app):

def test_pyfunction_signature_full(app):
    text = (".. py:function:: hello(a: str, b = 1, *args: str, "
            "c: bool = True, **kwargs: str) -> str")
            "c: bool = True, d: tuple = (1, 2), **kwargs: str) -> str")
    doctree = restructuredtext.parse(app, text)
    assert_node(doctree, (addnodes.index,
                          [desc, ([desc_signature, ([desc_name, "hello"],
@@ -345,6 +345,14 @@ def test_pyfunction_signature_full(app):
                                               [desc_sig_operator, "="],
                                               " ",
                                               [nodes.inline, "True"])],
                             [desc_parameter, ([desc_sig_name, "d"],
                                               [desc_sig_punctuation, ":"],
                                               " ",
                                               [desc_sig_name, pending_xref, "tuple"],
                                               " ",
                                               [desc_sig_operator, "="],
                                               " ",
                                               [nodes.inline, "(1, 2)"])],
                             [desc_parameter, ([desc_sig_operator, "**"],
                                               [desc_sig_name, "kwargs"],
                                               [desc_sig_punctuation, ":"],
@@ -388,6 +396,19 @@ def test_pyfunction_signature_full_py38(app):
                             [desc_parameter, desc_sig_operator, "/"])])


@pytest.mark.skipif(sys.version_info < (3, 8), reason='python 3.8+ is required.')
def test_pyfunction_with_number_literals(app):
    text = ".. py:function:: hello(age=0x10, height=1_6_0)"
    doctree = restructuredtext.parse(app, text)
    assert_node(doctree[1][0][1],
                [desc_parameterlist, ([desc_parameter, ([desc_sig_name, "age"],
                                                        [desc_sig_operator, "="],
                                                        [nodes.inline, "0x10"])],
                                      [desc_parameter, ([desc_sig_name, "height"],
                                                        [desc_sig_operator, "="],
                                                        [nodes.inline, "1_6_0"])])])


def test_optional_pyfunction_signature(app):
    text = ".. py:function:: compile(source [, filename [, symbol]]) -> ast object"
    doctree = restructuredtext.parse(app, text)
@@ -2002,6 +2002,22 @@ def test_overload(app):
    ]


@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_overload2(app):
    options = {"members": None}
    actual = do_autodoc(app, 'module', 'target.overload2', options)
    assert list(actual) == [
        '',
        '.. py:module:: target.overload2',
        '',
        '',
        '.. py:class:: Baz(x: int, y: int)',
        '              Baz(x: str, y: str)',
        '   :module: target.overload2',
        '',
    ]


@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_pymodule_for_ModuleLevelDocumenter(app):
    app.env.ref_context['py:module'] = 'target.classes'
@@ -58,7 +58,7 @@ from sphinx.pycode import ast
])
def test_unparse(source, expected):
    module = ast.parse(source)
    assert ast.unparse(module.body[0].value) == expected
    assert ast.unparse(module.body[0].value, source) == expected


def test_unparse_None():
@@ -66,8 +66,12 @@ def test_unparse_None():


@pytest.mark.skipif(sys.version_info < (3, 8), reason='python 3.8+ is required.')
def test_unparse_py38():
    source = "lambda x=0, /, y=1, *args, z, **kwargs: x + y + z"
    expected = "lambda x=0, /, y=1, *args, z, **kwargs: ..."
@pytest.mark.parametrize('source,expected', [
    ("lambda x=0, /, y=1, *args, z, **kwargs: x + y + z",
     "lambda x=0, /, y=1, *args, z, **kwargs: ..."),  # posonlyargs
    ("0x1234", "0x1234"),  # Constant
    ("1_000_000", "1_000_000"),  # Constant
])
def test_unparse_py38(source, expected):
    module = ast.parse(source)
    assert ast.unparse(module.body[0].value) == expected
    assert ast.unparse(module.body[0].value, source) == expected