mirror of https://github.com/sphinx-doc/sphinx.git
synced 2025-02-25 18:55:22 -06:00

Split the Python domain into multiple modules

``sphinx.domains.python`` was previously over 1,700 lines long.

parent 1327ec71f2
commit e9dcfebcf9
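
For code that imported helpers from the pre-split module, only the import path changes, as the first hunk below shows for ``_pseudo_parse_arglist``. A minimal sketch of a version-tolerant import; the try/except fallback is an illustration, not part of this commit:

    # Hypothetical compatibility import for the private helper:
    # prefer the new submodule, fall back to the pre-split location.
    try:
        from sphinx.domains.python._annotations import _pseudo_parse_arglist
    except ImportError:  # older Sphinx: the domain is still a single module
        from sphinx.domains.python import _pseudo_parse_arglist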
@@ -11,7 +11,7 @@ from docutils.parsers.rst import directives
 from sphinx import addnodes
 from sphinx.directives import ObjectDescription
 from sphinx.domains import Domain, ObjType
-from sphinx.domains.python import _pseudo_parse_arglist
+from sphinx.domains.python._annotations import _pseudo_parse_arglist
 from sphinx.locale import _, __
 from sphinx.roles import XRefRole
 from sphinx.util import logging
@@ -2,33 +2,22 @@
 
 from __future__ import annotations
 
-import ast
 import builtins
-import contextlib
-import functools
 import inspect
-import operator
-import re
-import token
 import typing
-from collections import deque
-from inspect import Parameter
 from typing import TYPE_CHECKING, Any, NamedTuple, cast
 
 from docutils import nodes
 from docutils.parsers.rst import directives
 
 from sphinx import addnodes
-from sphinx.addnodes import desc_signature, pending_xref, pending_xref_condition
-from sphinx.directives import ObjectDescription
 from sphinx.domains import Domain, Index, IndexEntry, ObjType
+from sphinx.domains.python._annotations import _parse_annotation
+from sphinx.domains.python._object import PyObject
 from sphinx.locale import _, __
-from sphinx.pycode.parser import Token, TokenProcessor
 from sphinx.roles import XRefRole
 from sphinx.util import logging
-from sphinx.util.docfields import Field, GroupedField, TypedField
 from sphinx.util.docutils import SphinxDirective
-from sphinx.util.inspect import signature_from_str
 from sphinx.util.nodes import (
     find_pending_xref_condition,
     make_id,
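
Assuming the hunk above edits the package's ``__init__`` module (a hedged assumption; the file header is not shown in this view), the re-imports of ``_parse_annotation`` and ``PyObject`` keep those names reachable from their old location:

    # Hedged sketch: these imports should still resolve after the split,
    # because the package module re-imports the moved names.
    from sphinx.domains.python import PyObject, _parse_annotation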
@@ -40,27 +29,15 @@ if TYPE_CHECKING:
     from collections.abc import Iterable, Iterator
 
     from docutils.nodes import Element, Node
-    from docutils.parsers.rst.states import Inliner
 
+    from sphinx.addnodes import desc_signature, pending_xref
     from sphinx.application import Sphinx
     from sphinx.builders import Builder
     from sphinx.environment import BuildEnvironment
-    from sphinx.util.typing import OptionSpec, TextlikeNode
+    from sphinx.util.typing import OptionSpec
 
 logger = logging.getLogger(__name__)
 
-
-# REs for Python signatures
-py_sig_re = re.compile(
-    r'''^ ([\w.]*\.)?            # class name(s)
-          (\w+)  \s*             # thing name
-          (?: \[\s*(.*)\s*])?    # optional: type parameters list
-          (?: \(\s*(.*)\s*\)     # optional: arguments
-           (?:\s* -> \s* (.*))?  #  return annotation
-          )? $                   # and nothing more
-          ''', re.VERBOSE)
-
-
 pairindextypes = {
     'module': 'module',
     'keyword': 'keyword',
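
As a reading aid for the relocated ``py_sig_re`` above, a minimal sketch of the groups it captures; the sample signature is hypothetical:

    import re

    # Same pattern as in the hunk above: optional class prefix, object name,
    # optional PEP 695 type-parameter list, optional arguments, optional
    # return annotation.
    py_sig_re = re.compile(
        r'''^ ([\w.]*\.)?            # class name(s)
              (\w+)  \s*             # thing name
              (?: \[\s*(.*)\s*])?    # optional: type parameters list
              (?: \(\s*(.*)\s*\)     # optional: arguments
               (?:\s* -> \s* (.*))?  #  return annotation
              )? $                   # and nothing more
              ''', re.VERBOSE)

    m = py_sig_re.match('Spam.eggs[T](x: int, *, flag: bool = False) -> str')
    assert m is not None
    print(m.groups())
    # ('Spam.', 'eggs', 'T', 'x: int, *, flag: bool = False', 'str')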
@@ -87,869 +64,6 @@ class ModuleEntry(NamedTuple):
     deprecated: bool
 
 
-def parse_reftarget(reftarget: str, suppress_prefix: bool = False,
-                    ) -> tuple[str, str, str, bool]:
-    """Parse a type string and return (reftype, reftarget, title, refspecific flag)"""
-    refspecific = False
-    if reftarget.startswith('.'):
-        reftarget = reftarget[1:]
-        title = reftarget
-        refspecific = True
-    elif reftarget.startswith('~'):
-        reftarget = reftarget[1:]
-        title = reftarget.split('.')[-1]
-    elif suppress_prefix:
-        title = reftarget.split('.')[-1]
-    elif reftarget.startswith('typing.'):
-        title = reftarget[7:]
-    else:
-        title = reftarget
-
-    if reftarget == 'None' or reftarget.startswith('typing.'):
-        # typing module provides non-class types.  Obj reference is good to refer them.
-        reftype = 'obj'
-    else:
-        reftype = 'class'
-
-    return reftype, reftarget, title, refspecific
-
-
-def type_to_xref(target: str, env: BuildEnvironment, *,
-                 suppress_prefix: bool = False) -> addnodes.pending_xref:
-    """Convert a type string to a cross reference node."""
-    if env:
-        kwargs = {'py:module': env.ref_context.get('py:module'),
-                  'py:class': env.ref_context.get('py:class')}
-    else:
-        kwargs = {}
-
-    reftype, target, title, refspecific = parse_reftarget(target, suppress_prefix)
-
-    if env.config.python_use_unqualified_type_names:
-        # Note: It would be better to use qualname to describe the object to support support
-        # nested classes.  But python domain can't access the real python object because this
-        # module should work not-dynamically.
-        shortname = title.split('.')[-1]
-        contnodes: list[Node] = [pending_xref_condition('', shortname, condition='resolved'),
-                                 pending_xref_condition('', title, condition='*')]
-    else:
-        contnodes = [nodes.Text(title)]
-
-    return pending_xref('', *contnodes,
-                        refdomain='py', reftype=reftype, reftarget=target,
-                        refspecific=refspecific, **kwargs)
-
-
-def _parse_annotation(annotation: str, env: BuildEnvironment) -> list[Node]:
-    """Parse type annotation."""
-    short_literals = env.config.python_display_short_literal_types
-
-    def unparse(node: ast.AST) -> list[Node]:
-        if isinstance(node, ast.Attribute):
-            return [nodes.Text(f"{unparse(node.value)[0]}.{node.attr}")]
-        if isinstance(node, ast.BinOp):
-            result: list[Node] = unparse(node.left)
-            result.extend(unparse(node.op))
-            result.extend(unparse(node.right))
-            return result
-        if isinstance(node, ast.BitOr):
-            return [addnodes.desc_sig_space(),
-                    addnodes.desc_sig_punctuation('', '|'),
-                    addnodes.desc_sig_space()]
-        if isinstance(node, ast.Constant):
-            if node.value is Ellipsis:
-                return [addnodes.desc_sig_punctuation('', "...")]
-            if isinstance(node.value, bool):
-                return [addnodes.desc_sig_keyword('', repr(node.value))]
-            if isinstance(node.value, int):
-                return [addnodes.desc_sig_literal_number('', repr(node.value))]
-            if isinstance(node.value, str):
-                return [addnodes.desc_sig_literal_string('', repr(node.value))]
-            else:
-                # handles None, which is further handled by type_to_xref later
-                # and fallback for other types that should be converted
-                return [nodes.Text(repr(node.value))]
-        if isinstance(node, ast.Expr):
-            return unparse(node.value)
-        if isinstance(node, ast.Invert):
-            return [addnodes.desc_sig_punctuation('', '~')]
-        if isinstance(node, ast.List):
-            result = [addnodes.desc_sig_punctuation('', '[')]
-            if node.elts:
-                # check if there are elements in node.elts to only pop the
-                # last element of result if the for-loop was run at least
-                # once
-                for elem in node.elts:
-                    result.extend(unparse(elem))
-                    result.append(addnodes.desc_sig_punctuation('', ','))
-                    result.append(addnodes.desc_sig_space())
-                result.pop()
-                result.pop()
-            result.append(addnodes.desc_sig_punctuation('', ']'))
-            return result
-        if isinstance(node, ast.Module):
-            return functools.reduce(operator.iadd, (unparse(e) for e in node.body), [])
-        if isinstance(node, ast.Name):
-            return [nodes.Text(node.id)]
-        if isinstance(node, ast.Subscript):
-            if getattr(node.value, 'id', '') in {'Optional', 'Union'}:
-                return _unparse_pep_604_annotation(node)
-            if short_literals and getattr(node.value, 'id', '') == 'Literal':
-                return _unparse_pep_604_annotation(node)
-            result = unparse(node.value)
-            result.append(addnodes.desc_sig_punctuation('', '['))
-            result.extend(unparse(node.slice))
-            result.append(addnodes.desc_sig_punctuation('', ']'))
-
-            # Wrap the Text nodes inside brackets by literal node if the subscript is a Literal
-            if result[0] in ('Literal', 'typing.Literal'):
-                for i, subnode in enumerate(result[1:], start=1):
-                    if isinstance(subnode, nodes.Text):
-                        result[i] = nodes.literal('', '', subnode)
-            return result
-        if isinstance(node, ast.UnaryOp):
-            return unparse(node.op) + unparse(node.operand)
-        if isinstance(node, ast.Tuple):
-            if node.elts:
-                result = []
-                for elem in node.elts:
-                    result.extend(unparse(elem))
-                    result.append(addnodes.desc_sig_punctuation('', ','))
-                    result.append(addnodes.desc_sig_space())
-                result.pop()
-                result.pop()
-            else:
-                result = [addnodes.desc_sig_punctuation('', '('),
-                          addnodes.desc_sig_punctuation('', ')')]
-
-            return result
-        raise SyntaxError  # unsupported syntax
-
-    def _unparse_pep_604_annotation(node: ast.Subscript) -> list[Node]:
-        subscript = node.slice
-
-        flattened: list[Node] = []
-        if isinstance(subscript, ast.Tuple):
-            flattened.extend(unparse(subscript.elts[0]))
-            for elt in subscript.elts[1:]:
-                flattened.extend(unparse(ast.BitOr()))
-                flattened.extend(unparse(elt))
-        else:
-            # e.g. a Union[] inside an Optional[]
-            flattened.extend(unparse(subscript))
-
-        if getattr(node.value, 'id', '') == 'Optional':
-            flattened.extend(unparse(ast.BitOr()))
-            flattened.append(nodes.Text('None'))
-
-        return flattened
-
-    try:
-        tree = ast.parse(annotation, type_comments=True)
-        result: list[Node] = []
-        for node in unparse(tree):
-            if isinstance(node, nodes.literal):
-                result.append(node[0])
-            elif isinstance(node, nodes.Text) and node.strip():
-                if (result and isinstance(result[-1], addnodes.desc_sig_punctuation) and
-                        result[-1].astext() == '~'):
-                    result.pop()
-                    result.append(type_to_xref(str(node), env, suppress_prefix=True))
-                else:
-                    result.append(type_to_xref(str(node), env))
-            else:
-                result.append(node)
-        return result
-    except SyntaxError:
-        return [type_to_xref(annotation, env)]
-
-
-class _TypeParameterListParser(TokenProcessor):
-    def __init__(self, sig: str) -> None:
-        signature = sig.replace('\n', '').strip()
-        super().__init__([signature])
-        # Each item is a tuple (name, kind, default, annotation) mimicking
-        # ``inspect.Parameter`` to allow default values on VAR_POSITIONAL
-        # or VAR_KEYWORD parameters.
-        self.type_params: list[tuple[str, int, Any, Any]] = []
-
-    def fetch_type_param_spec(self) -> list[Token]:
-        tokens = []
-        while current := self.fetch_token():
-            tokens.append(current)
-            for ldelim, rdelim in ('(', ')'), ('{', '}'), ('[', ']'):
-                if current == [token.OP, ldelim]:
-                    tokens += self.fetch_until([token.OP, rdelim])
-                    break
-            else:
-                if current == token.INDENT:
-                    tokens += self.fetch_until(token.DEDENT)
-                elif current.match(
-                        [token.OP, ':'], [token.OP, '='], [token.OP, ',']):
-                    tokens.pop()
-                    break
-        return tokens
-
-    def parse(self) -> None:
-        while current := self.fetch_token():
-            if current == token.NAME:
-                tp_name = current.value.strip()
-                if self.previous and self.previous.match([token.OP, '*'], [token.OP, '**']):
-                    if self.previous == [token.OP, '*']:
-                        tp_kind = Parameter.VAR_POSITIONAL
-                    else:
-                        tp_kind = Parameter.VAR_KEYWORD  # type: ignore[assignment]
-                else:
-                    tp_kind = Parameter.POSITIONAL_OR_KEYWORD  # type: ignore[assignment]
-
-                tp_ann: Any = Parameter.empty
-                tp_default: Any = Parameter.empty
-
-                current = self.fetch_token()
-                if current and current.match([token.OP, ':'], [token.OP, '=']):
-                    if current == [token.OP, ':']:
-                        tokens = self.fetch_type_param_spec()
-                        tp_ann = self._build_identifier(tokens)
-
-                    if self.current and self.current == [token.OP, '=']:
-                        tokens = self.fetch_type_param_spec()
-                        tp_default = self._build_identifier(tokens)
-
-                if tp_kind != Parameter.POSITIONAL_OR_KEYWORD and tp_ann != Parameter.empty:
-                    msg = ('type parameter bound or constraint is not allowed '
-                           f'for {tp_kind.description} parameters')
-                    raise SyntaxError(msg)
-
-                type_param = (tp_name, tp_kind, tp_default, tp_ann)
-                self.type_params.append(type_param)
-
-    def _build_identifier(self, tokens: list[Token]) -> str:
-        from itertools import chain, islice
-
-        def triplewise(iterable: Iterable[Token]) -> Iterator[tuple[Token, ...]]:
-            # sliding_window('ABCDEFG', 4) --> ABCD BCDE CDEF DEFG
-            it = iter(iterable)
-            window = deque(islice(it, 3), maxlen=3)
-            if len(window) == 3:
-                yield tuple(window)
-            for x in it:
-                window.append(x)
-                yield tuple(window)
-
-        idents: list[str] = []
-        tokens: Iterable[Token] = iter(tokens)  # type: ignore[no-redef]
-        # do not format opening brackets
-        for tok in tokens:
-            if not tok.match([token.OP, '('], [token.OP, '['], [token.OP, '{']):
-                # check if the first non-delimiter character is an unpack operator
-                is_unpack_operator = tok.match([token.OP, '*'], [token.OP, ['**']])
-                idents.append(self._pformat_token(tok, native=is_unpack_operator))
-                break
-            idents.append(tok.value)
-
-        # check the remaining tokens
-        stop = Token(token.ENDMARKER, '', (-1, -1), (-1, -1), '<sentinel>')
-        is_unpack_operator = False
-        for tok, op, after in triplewise(chain(tokens, [stop, stop])):
-            ident = self._pformat_token(tok, native=is_unpack_operator)
-            idents.append(ident)
-            # determine if the next token is an unpack operator depending
-            # on the left and right hand side of the operator symbol
-            is_unpack_operator = (
-                op.match([token.OP, '*'], [token.OP, '**']) and not (
-                    tok.match(token.NAME, token.NUMBER, token.STRING,
-                              [token.OP, ')'], [token.OP, ']'], [token.OP, '}'])
-                    and after.match(token.NAME, token.NUMBER, token.STRING,
-                                    [token.OP, '('], [token.OP, '['], [token.OP, '{'])
-                )
-            )
-
-        return ''.join(idents).strip()
-
-    def _pformat_token(self, tok: Token, native: bool = False) -> str:
-        if native:
-            return tok.value
-
-        if tok.match(token.NEWLINE, token.ENDMARKER):
-            return ''
-
-        if tok.match([token.OP, ':'], [token.OP, ','], [token.OP, '#']):
-            return f'{tok.value} '
-
-        # Arithmetic operators are allowed because PEP 695 specifies the
-        # default type parameter to be *any* expression (so "T1 << T2" is
-        # allowed if it makes sense). The caller is responsible to ensure
-        # that a multiplication operator ("*") is not to be confused with
-        # an unpack operator (which will not be surrounded by spaces).
-        #
-        # The operators are ordered according to how likely they are to
-        # be used and for (possible) future implementations (e.g., "&" for
-        # an intersection type).
-        if tok.match(
-                # Most likely operators to appear
-                [token.OP, '='], [token.OP, '|'],
-                # Type composition (future compatibility)
-                [token.OP, '&'], [token.OP, '^'], [token.OP, '<'], [token.OP, '>'],
-                # Unlikely type composition
-                [token.OP, '+'], [token.OP, '-'], [token.OP, '*'], [token.OP, '**'],
-                # Unlikely operators but included for completeness
-                [token.OP, '@'], [token.OP, '/'], [token.OP, '//'], [token.OP, '%'],
-                [token.OP, '<<'], [token.OP, '>>'], [token.OP, '>>>'],
-                [token.OP, '<='], [token.OP, '>='], [token.OP, '=='], [token.OP, '!='],
-        ):
-            return f' {tok.value} '
-
-        return tok.value
-
-
-def _parse_type_list(
-    tp_list: str, env: BuildEnvironment,
-    multi_line_parameter_list: bool = False,
-) -> addnodes.desc_type_parameter_list:
-    """Parse a list of type parameters according to PEP 695."""
-    type_params = addnodes.desc_type_parameter_list(tp_list)
-    type_params['multi_line_parameter_list'] = multi_line_parameter_list
-    # formal parameter names are interpreted as type parameter names and
-    # type annotations are interpreted as type parameter bound or constraints
-    parser = _TypeParameterListParser(tp_list)
-    parser.parse()
-    for (tp_name, tp_kind, tp_default, tp_ann) in parser.type_params:
-        # no positional-only or keyword-only allowed in a type parameters list
-        if tp_kind in {Parameter.POSITIONAL_ONLY, Parameter.KEYWORD_ONLY}:
-            msg = ('positional-only or keyword-only parameters '
-                   'are prohibited in type parameter lists')
-            raise SyntaxError(msg)
-
-        node = addnodes.desc_type_parameter()
-        if tp_kind == Parameter.VAR_POSITIONAL:
-            node += addnodes.desc_sig_operator('', '*')
-        elif tp_kind == Parameter.VAR_KEYWORD:
-            node += addnodes.desc_sig_operator('', '**')
-        node += addnodes.desc_sig_name('', tp_name)
-
-        if tp_ann is not Parameter.empty:
-            annotation = _parse_annotation(tp_ann, env)
-            if not annotation:
-                continue
-
-            node += addnodes.desc_sig_punctuation('', ':')
-            node += addnodes.desc_sig_space()
-
-            type_ann_expr = addnodes.desc_sig_name('', '',
-                                                   *annotation)  # type: ignore[arg-type]
-            # a type bound is ``T: U`` whereas type constraints
-            # must be enclosed with parentheses. ``T: (U, V)``
-            if tp_ann.startswith('(') and tp_ann.endswith(')'):
-                type_ann_text = type_ann_expr.astext()
-                if type_ann_text.startswith('(') and type_ann_text.endswith(')'):
-                    node += type_ann_expr
-                else:
-                    # surrounding braces are lost when using _parse_annotation()
-                    node += addnodes.desc_sig_punctuation('', '(')
-                    node += type_ann_expr  # type constraint
-                    node += addnodes.desc_sig_punctuation('', ')')
-            else:
-                node += type_ann_expr  # type bound
-
-        if tp_default is not Parameter.empty:
-            # Always surround '=' with spaces, even if there is no annotation
-            node += addnodes.desc_sig_space()
-            node += addnodes.desc_sig_operator('', '=')
-            node += addnodes.desc_sig_space()
-            node += nodes.inline('', tp_default,
-                                 classes=['default_value'],
-                                 support_smartquotes=False)
-
-        type_params += node
-    return type_params
-
-
-def _parse_arglist(
-    arglist: str, env: BuildEnvironment, multi_line_parameter_list: bool = False,
-) -> addnodes.desc_parameterlist:
-    """Parse a list of arguments using AST parser"""
-    params = addnodes.desc_parameterlist(arglist)
-    params['multi_line_parameter_list'] = multi_line_parameter_list
-    sig = signature_from_str('(%s)' % arglist)
-    last_kind = None
-    for param in sig.parameters.values():
-        if param.kind != param.POSITIONAL_ONLY and last_kind == param.POSITIONAL_ONLY:
-            # PEP-570: Separator for Positional Only Parameter: /
-            params += addnodes.desc_parameter('', '', addnodes.desc_sig_operator('', '/'))
-        if param.kind == param.KEYWORD_ONLY and last_kind in (param.POSITIONAL_OR_KEYWORD,
-                                                              param.POSITIONAL_ONLY,
-                                                              None):
-            # PEP-3102: Separator for Keyword Only Parameter: *
-            params += addnodes.desc_parameter('', '', addnodes.desc_sig_operator('', '*'))
-
-        node = addnodes.desc_parameter()
-        if param.kind == param.VAR_POSITIONAL:
-            node += addnodes.desc_sig_operator('', '*')
-            node += addnodes.desc_sig_name('', param.name)
-        elif param.kind == param.VAR_KEYWORD:
-            node += addnodes.desc_sig_operator('', '**')
-            node += addnodes.desc_sig_name('', param.name)
-        else:
-            node += addnodes.desc_sig_name('', param.name)
-
-        if param.annotation is not param.empty:
-            children = _parse_annotation(param.annotation, env)
-            node += addnodes.desc_sig_punctuation('', ':')
-            node += addnodes.desc_sig_space()
-            node += addnodes.desc_sig_name('', '', *children)  # type: ignore[arg-type]
-        if param.default is not param.empty:
-            if param.annotation is not param.empty:
-                node += addnodes.desc_sig_space()
-                node += addnodes.desc_sig_operator('', '=')
-                node += addnodes.desc_sig_space()
-            else:
-                node += addnodes.desc_sig_operator('', '=')
-            node += nodes.inline('', param.default, classes=['default_value'],
-                                 support_smartquotes=False)
-
-        params += node
-        last_kind = param.kind
-
-    if last_kind == Parameter.POSITIONAL_ONLY:
-        # PEP-570: Separator for Positional Only Parameter: /
-        params += addnodes.desc_parameter('', '', addnodes.desc_sig_operator('', '/'))
-
-    return params
-
-
-def _pseudo_parse_arglist(
-    signode: desc_signature, arglist: str, multi_line_parameter_list: bool = False,
-) -> None:
-    """"Parse" a list of arguments separated by commas.
-
-    Arguments can have "optional" annotations given by enclosing them in
-    brackets.  Currently, this will split at any comma, even if it's inside a
-    string literal (e.g. default argument value).
-    """
-    paramlist = addnodes.desc_parameterlist()
-    paramlist['multi_line_parameter_list'] = multi_line_parameter_list
-    stack: list[Element] = [paramlist]
-    try:
-        for argument in arglist.split(','):
-            argument = argument.strip()
-            ends_open = ends_close = 0
-            while argument.startswith('['):
-                stack.append(addnodes.desc_optional())
-                stack[-2] += stack[-1]
-                argument = argument[1:].strip()
-            while argument.startswith(']'):
-                stack.pop()
-                argument = argument[1:].strip()
-            while argument.endswith(']') and not argument.endswith('[]'):
-                ends_close += 1
-                argument = argument[:-1].strip()
-            while argument.endswith('['):
-                ends_open += 1
-                argument = argument[:-1].strip()
-            if argument:
-                stack[-1] += addnodes.desc_parameter(
-                    '', '', addnodes.desc_sig_name(argument, argument))
-            while ends_open:
-                stack.append(addnodes.desc_optional())
-                stack[-2] += stack[-1]
-                ends_open -= 1
-            while ends_close:
-                stack.pop()
-                ends_close -= 1
-        if len(stack) != 1:
-            raise IndexError
-    except IndexError:
-        # if there are too few or too many elements on the stack, just give up
-        # and treat the whole argument list as one argument, discarding the
-        # already partially populated paramlist node
-        paramlist = addnodes.desc_parameterlist()
-        paramlist += addnodes.desc_parameter(arglist, arglist)
-        signode += paramlist
-    else:
-        signode += paramlist
-
-
-# This override allows our inline type specifiers to behave like :class: link
-# when it comes to handling "." and "~" prefixes.
-class PyXrefMixin:
-    def make_xref(
-        self,
-        rolename: str,
-        domain: str,
-        target: str,
-        innernode: type[TextlikeNode] = nodes.emphasis,
-        contnode: Node | None = None,
-        env: BuildEnvironment | None = None,
-        inliner: Inliner | None = None,
-        location: Node | None = None,
-    ) -> Node:
-        # we use inliner=None to make sure we get the old behaviour with a single
-        # pending_xref node
-        result = super().make_xref(rolename, domain, target,  # type: ignore[misc]
-                                   innernode, contnode,
-                                   env, inliner=None, location=None)
-        if isinstance(result, pending_xref):
-            assert env is not None
-            result['refspecific'] = True
-            result['py:module'] = env.ref_context.get('py:module')
-            result['py:class'] = env.ref_context.get('py:class')
-
-            reftype, reftarget, reftitle, _ = parse_reftarget(target)
-            if reftarget != reftitle:
-                result['reftype'] = reftype
-                result['reftarget'] = reftarget
-
-                result.clear()
-                result += innernode(reftitle, reftitle)
-            elif env.config.python_use_unqualified_type_names:
-                children = result.children
-                result.clear()
-
-                shortname = target.split('.')[-1]
-                textnode = innernode('', shortname)
-                contnodes = [pending_xref_condition('', '', textnode, condition='resolved'),
-                             pending_xref_condition('', '', *children, condition='*')]
-                result.extend(contnodes)
-
-        return result
-
-    def make_xrefs(
-        self,
-        rolename: str,
-        domain: str,
-        target: str,
-        innernode: type[TextlikeNode] = nodes.emphasis,
-        contnode: Node | None = None,
-        env: BuildEnvironment | None = None,
-        inliner: Inliner | None = None,
-        location: Node | None = None,
-    ) -> list[Node]:
-        delims = r'(\s*[\[\]\(\),](?:\s*o[rf]\s)?\s*|\s+o[rf]\s+|\s*\|\s*|\.\.\.)'
-        delims_re = re.compile(delims)
-        sub_targets = re.split(delims, target)
-
-        split_contnode = bool(contnode and contnode.astext() == target)
-
-        in_literal = False
-        results = []
-        for sub_target in filter(None, sub_targets):
-            if split_contnode:
-                contnode = nodes.Text(sub_target)
-
-            if in_literal or delims_re.match(sub_target):
-                results.append(contnode or innernode(sub_target, sub_target))
-            else:
-                results.append(self.make_xref(rolename, domain, sub_target,
-                                              innernode, contnode, env, inliner, location))
-
-            if sub_target in ('Literal', 'typing.Literal', '~typing.Literal'):
-                in_literal = True
-
-        return results
-
-
-class PyField(PyXrefMixin, Field):
-    pass
-
-
-class PyGroupedField(PyXrefMixin, GroupedField):
-    pass
-
-
-class PyTypedField(PyXrefMixin, TypedField):
-    pass
-
-
-class PyObject(ObjectDescription[tuple[str, str]]):
-    """
-    Description of a general Python object.
-
-    :cvar allow_nesting: Class is an object that allows for nested namespaces
-    :vartype allow_nesting: bool
-    """
-
-    option_spec: OptionSpec = {
-        'no-index': directives.flag,
-        'no-index-entry': directives.flag,
-        'no-contents-entry': directives.flag,
-        'no-typesetting': directives.flag,
-        'noindex': directives.flag,
-        'noindexentry': directives.flag,
-        'nocontentsentry': directives.flag,
-        'single-line-parameter-list': directives.flag,
-        'single-line-type-parameter-list': directives.flag,
-        'module': directives.unchanged,
-        'canonical': directives.unchanged,
-        'annotation': directives.unchanged,
-    }
-
-    doc_field_types = [
-        PyTypedField('parameter', label=_('Parameters'),
-                     names=('param', 'parameter', 'arg', 'argument',
-                            'keyword', 'kwarg', 'kwparam'),
-                     typerolename='class', typenames=('paramtype', 'type'),
-                     can_collapse=True),
-        PyTypedField('variable', label=_('Variables'),
-                     names=('var', 'ivar', 'cvar'),
-                     typerolename='class', typenames=('vartype',),
-                     can_collapse=True),
-        PyGroupedField('exceptions', label=_('Raises'), rolename='exc',
-                       names=('raises', 'raise', 'exception', 'except'),
-                       can_collapse=True),
-        Field('returnvalue', label=_('Returns'), has_arg=False,
-              names=('returns', 'return')),
-        PyField('returntype', label=_('Return type'), has_arg=False,
-                names=('rtype',), bodyrolename='class'),
-    ]
-
-    allow_nesting = False
-
-    def get_signature_prefix(self, sig: str) -> list[nodes.Node]:
-        """May return a prefix to put before the object name in the
-        signature.
-        """
-        return []
-
-    def needs_arglist(self) -> bool:
-        """May return true if an empty argument list is to be generated even if
-        the document contains none.
-        """
-        return False
-
-    def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]:
-        """Transform a Python signature into RST nodes.
-
-        Return (fully qualified name of the thing, classname if any).
-
-        If inside a class, the current class name is handled intelligently:
-        * it is stripped from the displayed name if present
-        * it is added to the full name (return value) if not present
-        """
-        m = py_sig_re.match(sig)
-        if m is None:
-            raise ValueError
-        prefix, name, tp_list, arglist, retann = m.groups()
-
-        # determine module and class name (if applicable), as well as full name
-        modname = self.options.get('module', self.env.ref_context.get('py:module'))
-        classname = self.env.ref_context.get('py:class')
-        if classname:
-            add_module = False
-            if prefix and (prefix == classname or
-                           prefix.startswith(classname + ".")):
-                fullname = prefix + name
-                # class name is given again in the signature
-                prefix = prefix[len(classname):].lstrip('.')
-            elif prefix:
-                # class name is given in the signature, but different
-                # (shouldn't happen)
-                fullname = classname + '.' + prefix + name
-            else:
-                # class name is not given in the signature
-                fullname = classname + '.' + name
-        else:
-            add_module = True
-            if prefix:
-                classname = prefix.rstrip('.')
-                fullname = prefix + name
-            else:
-                classname = ''
-                fullname = name
-
-        signode['module'] = modname
-        signode['class'] = classname
-        signode['fullname'] = fullname
-
-        max_len = (self.env.config.python_maximum_signature_line_length
-                   or self.env.config.maximum_signature_line_length
-                   or 0)
-
-        # determine if the function arguments (without its type parameters)
-        # should be formatted on a multiline or not by removing the width of
-        # the type parameters list (if any)
-        sig_len = len(sig)
-        tp_list_span = m.span(3)
-        multi_line_parameter_list = (
-            'single-line-parameter-list' not in self.options
-            and (sig_len - (tp_list_span[1] - tp_list_span[0])) > max_len > 0
-        )
-
-        # determine whether the type parameter list must be wrapped or not
-        arglist_span = m.span(4)
-        multi_line_type_parameter_list = (
-            'single-line-type-parameter-list' not in self.options
-            and (sig_len - (arglist_span[1] - arglist_span[0])) > max_len > 0
-        )
-
-        sig_prefix = self.get_signature_prefix(sig)
-        if sig_prefix:
-            if type(sig_prefix) is str:
-                msg = ("Python directive method get_signature_prefix()"
-                       " must return a list of nodes."
-                       f" Return value was '{sig_prefix}'.")
-                raise TypeError(msg)
-            signode += addnodes.desc_annotation(str(sig_prefix), '', *sig_prefix)
-
-        if prefix:
-            signode += addnodes.desc_addname(prefix, prefix)
-        elif modname and add_module and self.env.config.add_module_names:
-            nodetext = modname + '.'
-            signode += addnodes.desc_addname(nodetext, nodetext)
-
-        signode += addnodes.desc_name(name, name)
-
-        if tp_list:
-            try:
-                signode += _parse_type_list(tp_list, self.env, multi_line_type_parameter_list)
-            except Exception as exc:
-                logger.warning("could not parse tp_list (%r): %s", tp_list, exc,
-                               location=signode)
-
-        if arglist:
-            try:
-                signode += _parse_arglist(arglist, self.env, multi_line_parameter_list)
-            except SyntaxError:
-                # fallback to parse arglist original parser
-                # (this may happen if the argument list is incorrectly used
-                # as a list of bases when documenting a class)
-                # it supports to represent optional arguments (ex. "func(foo [, bar])")
-                _pseudo_parse_arglist(signode, arglist, multi_line_parameter_list)
-            except (NotImplementedError, ValueError) as exc:
-                # duplicated parameter names raise ValueError and not a SyntaxError
-                logger.warning("could not parse arglist (%r): %s", arglist, exc,
-                               location=signode)
-                _pseudo_parse_arglist(signode, arglist, multi_line_parameter_list)
-        else:
-            if self.needs_arglist():
-                # for callables, add an empty parameter list
-                signode += addnodes.desc_parameterlist()
-
-        if retann:
-            children = _parse_annotation(retann, self.env)
-            signode += addnodes.desc_returns(retann, '', *children)
-
-        anno = self.options.get('annotation')
-        if anno:
-            signode += addnodes.desc_annotation(' ' + anno, '',
-                                                addnodes.desc_sig_space(),
-                                                nodes.Text(anno))
-
-        return fullname, prefix
-
-    def _object_hierarchy_parts(self, sig_node: desc_signature) -> tuple[str, ...]:
-        if 'fullname' not in sig_node:
-            return ()
-        modname = sig_node.get('module')
-        fullname = sig_node['fullname']
-
-        if modname:
-            return (modname, *fullname.split('.'))
-        else:
-            return tuple(fullname.split('.'))
-
-    def get_index_text(self, modname: str, name: tuple[str, str]) -> str:
-        """Return the text for the index entry of the object."""
-        msg = 'must be implemented in subclasses'
-        raise NotImplementedError(msg)
-
-    def add_target_and_index(self, name_cls: tuple[str, str], sig: str,
-                             signode: desc_signature) -> None:
-        modname = self.options.get('module', self.env.ref_context.get('py:module'))
-        fullname = (modname + '.' if modname else '') + name_cls[0]
-        node_id = make_id(self.env, self.state.document, '', fullname)
-        signode['ids'].append(node_id)
-        self.state.document.note_explicit_target(signode)
-
-        domain = cast(PythonDomain, self.env.get_domain('py'))
-        domain.note_object(fullname, self.objtype, node_id, location=signode)
-
-        canonical_name = self.options.get('canonical')
-        if canonical_name:
-            domain.note_object(canonical_name, self.objtype, node_id, aliased=True,
-                               location=signode)
-
-        if 'no-index-entry' not in self.options:
-            indextext = self.get_index_text(modname, name_cls)
-            if indextext:
-                self.indexnode['entries'].append(('single', indextext, node_id, '', None))
-
-    def before_content(self) -> None:
-        """Handle object nesting before content
-
-        :py:class:`PyObject` represents Python language constructs. For
-        constructs that are nestable, such as a Python classes, this method will
-        build up a stack of the nesting hierarchy so that it can be later
-        de-nested correctly, in :py:meth:`after_content`.
-
-        For constructs that aren't nestable, the stack is bypassed, and instead
-        only the most recent object is tracked. This object prefix name will be
-        removed with :py:meth:`after_content`.
-        """
-        prefix = None
-        if self.names:
-            # fullname and name_prefix come from the `handle_signature` method.
-            # fullname represents the full object name that is constructed using
-            # object nesting and explicit prefixes. `name_prefix` is the
-            # explicit prefix given in a signature
-            (fullname, name_prefix) = self.names[-1]
-            if self.allow_nesting:
-                prefix = fullname
-            elif name_prefix:
-                prefix = name_prefix.strip('.')
-        if prefix:
-            self.env.ref_context['py:class'] = prefix
-            if self.allow_nesting:
-                classes = self.env.ref_context.setdefault('py:classes', [])
-                classes.append(prefix)
-        if 'module' in self.options:
-            modules = self.env.ref_context.setdefault('py:modules', [])
-            modules.append(self.env.ref_context.get('py:module'))
-            self.env.ref_context['py:module'] = self.options['module']
-
-    def after_content(self) -> None:
-        """Handle object de-nesting after content
-
-        If this class is a nestable object, removing the last nested class prefix
-        ends further nesting in the object.
-
-        If this class is not a nestable object, the list of classes should not
-        be altered as we didn't affect the nesting levels in
-        :py:meth:`before_content`.
-        """
-        classes = self.env.ref_context.setdefault('py:classes', [])
-        if self.allow_nesting:
-            with contextlib.suppress(IndexError):
-                classes.pop()
-
-        self.env.ref_context['py:class'] = (classes[-1] if len(classes) > 0
-                                            else None)
-        if 'module' in self.options:
-            modules = self.env.ref_context.setdefault('py:modules', [])
-            if modules:
-                self.env.ref_context['py:module'] = modules.pop()
-            else:
-                self.env.ref_context.pop('py:module')
-
-    def _toc_entry_name(self, sig_node: desc_signature) -> str:
-        if not sig_node.get('_toc_parts'):
-            return ''
-
-        config = self.env.app.config
-        objtype = sig_node.parent.get('objtype')
-        if config.add_function_parentheses and objtype in {'function', 'method'}:
-            parens = '()'
-        else:
-            parens = ''
-        *parents, name = sig_node['_toc_parts']
-        if config.toc_object_entries_show_parents == 'domain':
-            return sig_node.get('fullname', name) + parens
-        if config.toc_object_entries_show_parents == 'hide':
-            return name + parens
-        if config.toc_object_entries_show_parents == 'all':
-            return '.'.join(parents + [name + parens])
-        return ''
-
-
 class PyFunction(PyObject):
     """Description of a function."""
 
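
The ``_pseudo_parse_arglist`` fallback removed above (and re-imported from the new submodule in the first hunk) treats square brackets as "optional argument" markers rather than parsing real Python syntax. A minimal sketch of the node structure it builds; the import path follows this commit and the printed text is approximate:

    from sphinx import addnodes
    from sphinx.domains.python._annotations import _pseudo_parse_arglist

    signode = addnodes.desc_signature('', '')
    # Bracketed arguments become nested desc_optional nodes inside the
    # desc_parameterlist that is appended to the signature node.
    _pseudo_parse_arglist(signode, 'foo[, bar[, baz]]')
    print(signode.astext())  # roughly: (foo, [bar, [baz]])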
505  sphinx/domains/python/_annotations.py  Normal file
@@ -0,0 +1,505 @@
+from __future__ import annotations
+
+import ast
+import functools
+import operator
+import token
+from collections import deque
+from inspect import Parameter
+from typing import TYPE_CHECKING, Any
+
+from docutils import nodes
+
+from sphinx import addnodes
+from sphinx.addnodes import desc_signature, pending_xref, pending_xref_condition
+from sphinx.pycode.parser import Token, TokenProcessor
+from sphinx.util.inspect import signature_from_str
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable, Iterator
+
+    from docutils.nodes import Element, Node
+
+    from sphinx.environment import BuildEnvironment
+
+
def parse_reftarget(reftarget: str, suppress_prefix: bool = False,
|
||||||
|
) -> tuple[str, str, str, bool]:
|
||||||
|
"""Parse a type string and return (reftype, reftarget, title, refspecific flag)"""
|
||||||
|
refspecific = False
|
||||||
|
if reftarget.startswith('.'):
|
||||||
|
reftarget = reftarget[1:]
|
||||||
|
title = reftarget
|
||||||
|
refspecific = True
|
||||||
|
elif reftarget.startswith('~'):
|
||||||
|
reftarget = reftarget[1:]
|
||||||
|
title = reftarget.split('.')[-1]
|
||||||
|
elif suppress_prefix:
|
||||||
|
title = reftarget.split('.')[-1]
|
||||||
|
elif reftarget.startswith('typing.'):
|
||||||
|
title = reftarget[7:]
|
||||||
|
else:
|
||||||
|
title = reftarget
|
||||||
|
|
||||||
|
if reftarget == 'None' or reftarget.startswith('typing.'):
|
||||||
|
# typing module provides non-class types. Obj reference is good to refer them.
|
||||||
|
reftype = 'obj'
|
||||||
|
else:
|
||||||
|
reftype = 'class'
|
||||||
|
|
||||||
|
return reftype, reftarget, title, refspecific
|
||||||
|
|
||||||
|
|
||||||
|
def type_to_xref(target: str, env: BuildEnvironment, *,
|
||||||
|
suppress_prefix: bool = False) -> addnodes.pending_xref:
|
||||||
|
"""Convert a type string to a cross reference node."""
|
||||||
|
if env:
|
||||||
|
kwargs = {'py:module': env.ref_context.get('py:module'),
|
||||||
|
'py:class': env.ref_context.get('py:class')}
|
||||||
|
else:
|
||||||
|
kwargs = {}
|
||||||
|
|
||||||
|
reftype, target, title, refspecific = parse_reftarget(target, suppress_prefix)
|
||||||
|
|
||||||
|
if env.config.python_use_unqualified_type_names:
|
||||||
|
# Note: It would be better to use qualname to describe the object to support support
|
||||||
|
# nested classes. But python domain can't access the real python object because this
|
||||||
|
# module should work not-dynamically.
|
||||||
|
shortname = title.split('.')[-1]
|
||||||
|
contnodes: list[Node] = [pending_xref_condition('', shortname, condition='resolved'),
|
||||||
|
pending_xref_condition('', title, condition='*')]
|
||||||
|
else:
|
||||||
|
contnodes = [nodes.Text(title)]
|
||||||
|
|
||||||
|
return pending_xref('', *contnodes,
|
||||||
|
refdomain='py', reftype=reftype, reftarget=target,
|
||||||
|
refspecific=refspecific, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_annotation(annotation: str, env: BuildEnvironment) -> list[Node]:
|
||||||
|
"""Parse type annotation."""
|
||||||
|
short_literals = env.config.python_display_short_literal_types
|
||||||
|
|
||||||
|
def unparse(node: ast.AST) -> list[Node]:
|
||||||
|
if isinstance(node, ast.Attribute):
|
||||||
|
return [nodes.Text(f"{unparse(node.value)[0]}.{node.attr}")]
|
||||||
|
if isinstance(node, ast.BinOp):
|
||||||
|
result: list[Node] = unparse(node.left)
|
||||||
|
result.extend(unparse(node.op))
|
||||||
|
result.extend(unparse(node.right))
|
||||||
|
return result
|
||||||
|
if isinstance(node, ast.BitOr):
|
||||||
|
return [addnodes.desc_sig_space(),
|
||||||
|
addnodes.desc_sig_punctuation('', '|'),
|
||||||
|
addnodes.desc_sig_space()]
|
||||||
|
if isinstance(node, ast.Constant):
|
||||||
|
if node.value is Ellipsis:
|
||||||
|
return [addnodes.desc_sig_punctuation('', "...")]
|
||||||
|
if isinstance(node.value, bool):
|
||||||
|
return [addnodes.desc_sig_keyword('', repr(node.value))]
|
||||||
|
if isinstance(node.value, int):
|
||||||
|
return [addnodes.desc_sig_literal_number('', repr(node.value))]
|
||||||
|
if isinstance(node.value, str):
|
||||||
|
return [addnodes.desc_sig_literal_string('', repr(node.value))]
|
||||||
|
else:
|
||||||
|
# handles None, which is further handled by type_to_xref later
|
||||||
|
# and fallback for other types that should be converted
|
||||||
|
return [nodes.Text(repr(node.value))]
|
||||||
|
if isinstance(node, ast.Expr):
|
||||||
|
return unparse(node.value)
|
||||||
|
if isinstance(node, ast.Invert):
|
||||||
|
return [addnodes.desc_sig_punctuation('', '~')]
|
||||||
|
if isinstance(node, ast.List):
|
||||||
|
result = [addnodes.desc_sig_punctuation('', '[')]
|
||||||
|
if node.elts:
|
||||||
|
# check if there are elements in node.elts to only pop the
|
||||||
|
# last element of result if the for-loop was run at least
|
||||||
|
# once
|
||||||
|
for elem in node.elts:
|
||||||
|
result.extend(unparse(elem))
|
||||||
|
result.append(addnodes.desc_sig_punctuation('', ','))
|
||||||
|
result.append(addnodes.desc_sig_space())
|
||||||
|
result.pop()
|
||||||
|
result.pop()
|
||||||
|
result.append(addnodes.desc_sig_punctuation('', ']'))
|
||||||
|
return result
|
||||||
|
if isinstance(node, ast.Module):
|
||||||
|
return functools.reduce(operator.iadd, (unparse(e) for e in node.body), [])
|
||||||
|
if isinstance(node, ast.Name):
|
||||||
|
return [nodes.Text(node.id)]
|
||||||
|
if isinstance(node, ast.Subscript):
|
||||||
|
if getattr(node.value, 'id', '') in {'Optional', 'Union'}:
|
||||||
|
return _unparse_pep_604_annotation(node)
|
||||||
|
if short_literals and getattr(node.value, 'id', '') == 'Literal':
|
||||||
|
return _unparse_pep_604_annotation(node)
|
||||||
|
result = unparse(node.value)
|
||||||
|
result.append(addnodes.desc_sig_punctuation('', '['))
|
||||||
|
result.extend(unparse(node.slice))
|
||||||
|
result.append(addnodes.desc_sig_punctuation('', ']'))
|
||||||
|
|
||||||
|
# Wrap the Text nodes inside brackets by literal node if the subscript is a Literal
|
||||||
|
if result[0] in ('Literal', 'typing.Literal'):
|
||||||
|
for i, subnode in enumerate(result[1:], start=1):
|
||||||
|
if isinstance(subnode, nodes.Text):
|
||||||
|
result[i] = nodes.literal('', '', subnode)
|
||||||
|
return result
|
||||||
|
if isinstance(node, ast.UnaryOp):
|
||||||
|
return unparse(node.op) + unparse(node.operand)
|
||||||
|
if isinstance(node, ast.Tuple):
|
||||||
|
if node.elts:
|
||||||
|
result = []
|
||||||
|
for elem in node.elts:
|
||||||
|
result.extend(unparse(elem))
|
||||||
|
result.append(addnodes.desc_sig_punctuation('', ','))
|
||||||
|
result.append(addnodes.desc_sig_space())
|
||||||
|
result.pop()
|
||||||
|
result.pop()
|
||||||
|
else:
|
||||||
|
result = [addnodes.desc_sig_punctuation('', '('),
|
||||||
|
addnodes.desc_sig_punctuation('', ')')]
|
||||||
|
|
||||||
|
return result
|
||||||
|
raise SyntaxError # unsupported syntax
|
||||||
|
|
||||||
|
def _unparse_pep_604_annotation(node: ast.Subscript) -> list[Node]:
|
||||||
|
subscript = node.slice
|
||||||
|
|
||||||
|
flattened: list[Node] = []
|
||||||
|
if isinstance(subscript, ast.Tuple):
|
||||||
|
flattened.extend(unparse(subscript.elts[0]))
|
||||||
|
for elt in subscript.elts[1:]:
|
||||||
|
flattened.extend(unparse(ast.BitOr()))
|
||||||
|
flattened.extend(unparse(elt))
|
||||||
|
else:
|
||||||
|
# e.g. a Union[] inside an Optional[]
|
||||||
|
flattened.extend(unparse(subscript))
|
||||||
|
|
||||||
|
if getattr(node.value, 'id', '') == 'Optional':
|
||||||
|
flattened.extend(unparse(ast.BitOr()))
|
||||||
|
flattened.append(nodes.Text('None'))
|
||||||
|
|
||||||
|
return flattened
|
||||||
|
|
||||||
|
try:
|
||||||
|
tree = ast.parse(annotation, type_comments=True)
|
||||||
|
result: list[Node] = []
|
||||||
|
for node in unparse(tree):
|
||||||
|
if isinstance(node, nodes.literal):
|
||||||
|
result.append(node[0])
|
||||||
|
elif isinstance(node, nodes.Text) and node.strip():
|
||||||
|
if (result and isinstance(result[-1], addnodes.desc_sig_punctuation) and
|
||||||
|
result[-1].astext() == '~'):
|
||||||
|
result.pop()
|
||||||
|
result.append(type_to_xref(str(node), env, suppress_prefix=True))
|
||||||
|
else:
|
||||||
|
result.append(type_to_xref(str(node), env))
|
||||||
|
else:
|
||||||
|
result.append(node)
|
||||||
|
return result
|
||||||
|
except SyntaxError:
|
||||||
|
return [type_to_xref(annotation, env)]
|
||||||
|
|
||||||
|
|
||||||
|
class _TypeParameterListParser(TokenProcessor):
|
||||||
|
def __init__(self, sig: str) -> None:
|
||||||
|
signature = sig.replace('\n', '').strip()
|
||||||
|
super().__init__([signature])
|
||||||
|
# Each item is a tuple (name, kind, default, annotation) mimicking
|
||||||
|
# ``inspect.Parameter`` to allow default values on VAR_POSITIONAL
|
||||||
|
# or VAR_KEYWORD parameters.
|
||||||
|
self.type_params: list[tuple[str, int, Any, Any]] = []
|
||||||
|
|
||||||
|
def fetch_type_param_spec(self) -> list[Token]:
|
||||||
|
tokens = []
|
||||||
|
while current := self.fetch_token():
|
||||||
|
tokens.append(current)
|
||||||
|
for ldelim, rdelim in ('(', ')'), ('{', '}'), ('[', ']'):
|
||||||
|
if current == [token.OP, ldelim]:
|
||||||
|
tokens += self.fetch_until([token.OP, rdelim])
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
if current == token.INDENT:
|
||||||
|
tokens += self.fetch_until(token.DEDENT)
|
||||||
|
elif current.match(
|
||||||
|
[token.OP, ':'], [token.OP, '='], [token.OP, ',']):
|
||||||
|
tokens.pop()
|
||||||
|
break
|
||||||
|
return tokens
|
||||||
|
|
||||||
|
def parse(self) -> None:
|
||||||
|
while current := self.fetch_token():
|
||||||
|
if current == token.NAME:
|
||||||
|
tp_name = current.value.strip()
|
||||||
|
if self.previous and self.previous.match([token.OP, '*'], [token.OP, '**']):
|
||||||
|
if self.previous == [token.OP, '*']:
|
||||||
|
tp_kind = Parameter.VAR_POSITIONAL
|
||||||
|
else:
|
||||||
|
tp_kind = Parameter.VAR_KEYWORD # type: ignore[assignment]
|
||||||
|
else:
|
||||||
|
tp_kind = Parameter.POSITIONAL_OR_KEYWORD # type: ignore[assignment]
|
||||||
|
|
||||||
|
tp_ann: Any = Parameter.empty
|
||||||
|
tp_default: Any = Parameter.empty
|
||||||
|
|
||||||
|
current = self.fetch_token()
|
||||||
|
if current and current.match([token.OP, ':'], [token.OP, '=']):
|
||||||
|
if current == [token.OP, ':']:
|
||||||
|
tokens = self.fetch_type_param_spec()
|
||||||
|
                    tp_ann = self._build_identifier(tokens)

                if self.current and self.current == [token.OP, '=']:
                    tokens = self.fetch_type_param_spec()
                    tp_default = self._build_identifier(tokens)

            if tp_kind != Parameter.POSITIONAL_OR_KEYWORD and tp_ann != Parameter.empty:
                msg = ('type parameter bound or constraint is not allowed '
                       f'for {tp_kind.description} parameters')
                raise SyntaxError(msg)

            type_param = (tp_name, tp_kind, tp_default, tp_ann)
            self.type_params.append(type_param)

    def _build_identifier(self, tokens: list[Token]) -> str:
        from itertools import chain, islice

        def triplewise(iterable: Iterable[Token]) -> Iterator[tuple[Token, ...]]:
            # sliding_window('ABCDEFG', 4) --> ABCD BCDE CDEF DEFG
            it = iter(iterable)
            window = deque(islice(it, 3), maxlen=3)
            if len(window) == 3:
                yield tuple(window)
            for x in it:
                window.append(x)
                yield tuple(window)

        idents: list[str] = []
        tokens: Iterable[Token] = iter(tokens)  # type: ignore[no-redef]
        # do not format opening brackets
        for tok in tokens:
            if not tok.match([token.OP, '('], [token.OP, '['], [token.OP, '{']):
                # check if the first non-delimiter character is an unpack operator
                is_unpack_operator = tok.match([token.OP, '*'], [token.OP, ['**']])
                idents.append(self._pformat_token(tok, native=is_unpack_operator))
                break
            idents.append(tok.value)

        # check the remaining tokens
        stop = Token(token.ENDMARKER, '', (-1, -1), (-1, -1), '<sentinel>')
        is_unpack_operator = False
        for tok, op, after in triplewise(chain(tokens, [stop, stop])):
            ident = self._pformat_token(tok, native=is_unpack_operator)
            idents.append(ident)
            # determine if the next token is an unpack operator depending
            # on the left and right hand side of the operator symbol
            is_unpack_operator = (
                op.match([token.OP, '*'], [token.OP, '**']) and not (
                    tok.match(token.NAME, token.NUMBER, token.STRING,
                              [token.OP, ')'], [token.OP, ']'], [token.OP, '}'])
                    and after.match(token.NAME, token.NUMBER, token.STRING,
                                    [token.OP, '('], [token.OP, '['], [token.OP, '{'])
                )
            )

        return ''.join(idents).strip()

    def _pformat_token(self, tok: Token, native: bool = False) -> str:
        if native:
            return tok.value

        if tok.match(token.NEWLINE, token.ENDMARKER):
            return ''

        if tok.match([token.OP, ':'], [token.OP, ','], [token.OP, '#']):
            return f'{tok.value} '

        # Arithmetic operators are allowed because PEP 695 specifies the
        # default type parameter to be *any* expression (so "T1 << T2" is
        # allowed if it makes sense). The caller is responsible to ensure
        # that a multiplication operator ("*") is not to be confused with
        # an unpack operator (which will not be surrounded by spaces).
        #
        # The operators are ordered according to how likely they are to
        # be used and for (possible) future implementations (e.g., "&" for
        # an intersection type).
        if tok.match(
            # Most likely operators to appear
            [token.OP, '='], [token.OP, '|'],
            # Type composition (future compatibility)
            [token.OP, '&'], [token.OP, '^'], [token.OP, '<'], [token.OP, '>'],
            # Unlikely type composition
            [token.OP, '+'], [token.OP, '-'], [token.OP, '*'], [token.OP, '**'],
            # Unlikely operators but included for completeness
            [token.OP, '@'], [token.OP, '/'], [token.OP, '//'], [token.OP, '%'],
            [token.OP, '<<'], [token.OP, '>>'], [token.OP, '>>>'],
            [token.OP, '<='], [token.OP, '>='], [token.OP, '=='], [token.OP, '!='],
        ):
            return f' {tok.value} '

        return tok.value
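# Illustrative sketch (not part of the module): how the parser above is meant
# to be driven -- feed it a PEP 695 type parameter list and read back the
# (name, kind, default, bound/constraint) tuples it models on inspect.Parameter.
_demo_parser = _TypeParameterListParser('T: (int, str), *Ts, **P')
_demo_parser.parse()
for _tp_name, _tp_kind, _tp_default, _tp_ann in _demo_parser.type_params:
    print(_tp_name, _tp_kind, _tp_default, _tp_ann)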


def _parse_type_list(
    tp_list: str, env: BuildEnvironment,
    multi_line_parameter_list: bool = False,
) -> addnodes.desc_type_parameter_list:
    """Parse a list of type parameters according to PEP 695."""
    type_params = addnodes.desc_type_parameter_list(tp_list)
    type_params['multi_line_parameter_list'] = multi_line_parameter_list
    # formal parameter names are interpreted as type parameter names and
    # type annotations are interpreted as type parameter bound or constraints
    parser = _TypeParameterListParser(tp_list)
    parser.parse()
    for (tp_name, tp_kind, tp_default, tp_ann) in parser.type_params:
        # no positional-only or keyword-only allowed in a type parameters list
        if tp_kind in {Parameter.POSITIONAL_ONLY, Parameter.KEYWORD_ONLY}:
            msg = ('positional-only or keyword-only parameters '
                   'are prohibited in type parameter lists')
            raise SyntaxError(msg)

        node = addnodes.desc_type_parameter()
        if tp_kind == Parameter.VAR_POSITIONAL:
            node += addnodes.desc_sig_operator('', '*')
        elif tp_kind == Parameter.VAR_KEYWORD:
            node += addnodes.desc_sig_operator('', '**')
        node += addnodes.desc_sig_name('', tp_name)

        if tp_ann is not Parameter.empty:
            annotation = _parse_annotation(tp_ann, env)
            if not annotation:
                continue

            node += addnodes.desc_sig_punctuation('', ':')
            node += addnodes.desc_sig_space()

            type_ann_expr = addnodes.desc_sig_name('', '',
                                                   *annotation)  # type: ignore[arg-type]
            # a type bound is ``T: U`` whereas type constraints
            # must be enclosed with parentheses. ``T: (U, V)``
            if tp_ann.startswith('(') and tp_ann.endswith(')'):
                type_ann_text = type_ann_expr.astext()
                if type_ann_text.startswith('(') and type_ann_text.endswith(')'):
                    node += type_ann_expr
                else:
                    # surrounding braces are lost when using _parse_annotation()
                    node += addnodes.desc_sig_punctuation('', '(')
                    node += type_ann_expr  # type constraint
                    node += addnodes.desc_sig_punctuation('', ')')
            else:
                node += type_ann_expr  # type bound

        if tp_default is not Parameter.empty:
            # Always surround '=' with spaces, even if there is no annotation
            node += addnodes.desc_sig_space()
            node += addnodes.desc_sig_operator('', '=')
            node += addnodes.desc_sig_space()
            node += nodes.inline('', tp_default,
                                 classes=['default_value'],
                                 support_smartquotes=False)

        type_params += node
    return type_params


def _parse_arglist(
    arglist: str, env: BuildEnvironment, multi_line_parameter_list: bool = False,
) -> addnodes.desc_parameterlist:
    """Parse a list of arguments using AST parser"""
    params = addnodes.desc_parameterlist(arglist)
    params['multi_line_parameter_list'] = multi_line_parameter_list
    sig = signature_from_str('(%s)' % arglist)
    last_kind = None
    for param in sig.parameters.values():
        if param.kind != param.POSITIONAL_ONLY and last_kind == param.POSITIONAL_ONLY:
            # PEP-570: Separator for Positional Only Parameter: /
            params += addnodes.desc_parameter('', '', addnodes.desc_sig_operator('', '/'))
        if param.kind == param.KEYWORD_ONLY and last_kind in (param.POSITIONAL_OR_KEYWORD,
                                                              param.POSITIONAL_ONLY,
                                                              None):
            # PEP-3102: Separator for Keyword Only Parameter: *
            params += addnodes.desc_parameter('', '', addnodes.desc_sig_operator('', '*'))

        node = addnodes.desc_parameter()
        if param.kind == param.VAR_POSITIONAL:
            node += addnodes.desc_sig_operator('', '*')
            node += addnodes.desc_sig_name('', param.name)
        elif param.kind == param.VAR_KEYWORD:
            node += addnodes.desc_sig_operator('', '**')
            node += addnodes.desc_sig_name('', param.name)
        else:
            node += addnodes.desc_sig_name('', param.name)

        if param.annotation is not param.empty:
            children = _parse_annotation(param.annotation, env)
            node += addnodes.desc_sig_punctuation('', ':')
            node += addnodes.desc_sig_space()
            node += addnodes.desc_sig_name('', '', *children)  # type: ignore[arg-type]
        if param.default is not param.empty:
            if param.annotation is not param.empty:
                node += addnodes.desc_sig_space()
                node += addnodes.desc_sig_operator('', '=')
                node += addnodes.desc_sig_space()
            else:
                node += addnodes.desc_sig_operator('', '=')
            node += nodes.inline('', param.default, classes=['default_value'],
                                 support_smartquotes=False)

        params += node
        last_kind = param.kind

    if last_kind == Parameter.POSITIONAL_ONLY:
        # PEP-570: Separator for Positional Only Parameter: /
        params += addnodes.desc_parameter('', '', addnodes.desc_sig_operator('', '/'))

    return params
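# Illustrative sketch (not part of the module): _parse_arglist() leans entirely
# on signature_from_str() to recover parameter kinds, defaults and annotations
# from the raw argument string; the node building above merely renders that
# Signature.
from sphinx.util.inspect import signature_from_str

_demo_sig = signature_from_str('(a, /, b: int = 0, *args, c, **kwargs)')
for _param in _demo_sig.parameters.values():
    print(_param.name, _param.kind, _param.default, _param.annotation)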


def _pseudo_parse_arglist(
    signode: desc_signature, arglist: str, multi_line_parameter_list: bool = False,
) -> None:
    """"Parse" a list of arguments separated by commas.

    Arguments can have "optional" annotations given by enclosing them in
    brackets. Currently, this will split at any comma, even if it's inside a
    string literal (e.g. default argument value).
    """
    paramlist = addnodes.desc_parameterlist()
    paramlist['multi_line_parameter_list'] = multi_line_parameter_list
    stack: list[Element] = [paramlist]
    try:
        for argument in arglist.split(','):
            argument = argument.strip()
            ends_open = ends_close = 0
            while argument.startswith('['):
                stack.append(addnodes.desc_optional())
                stack[-2] += stack[-1]
                argument = argument[1:].strip()
            while argument.startswith(']'):
                stack.pop()
                argument = argument[1:].strip()
            while argument.endswith(']') and not argument.endswith('[]'):
                ends_close += 1
                argument = argument[:-1].strip()
            while argument.endswith('['):
                ends_open += 1
                argument = argument[:-1].strip()
            if argument:
                stack[-1] += addnodes.desc_parameter(
                    '', '', addnodes.desc_sig_name(argument, argument))
            while ends_open:
                stack.append(addnodes.desc_optional())
                stack[-2] += stack[-1]
                ends_open -= 1
            while ends_close:
                stack.pop()
                ends_close -= 1
        if len(stack) != 1:
            raise IndexError
    except IndexError:
        # if there are too few or too many elements on the stack, just give up
        # and treat the whole argument list as one argument, discarding the
        # already partially populated paramlist node
        paramlist = addnodes.desc_parameterlist()
        paramlist += addnodes.desc_parameter(arglist, arglist)
        signode += paramlist
    else:
        signode += paramlist
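# Illustrative sketch (not part of the module): the fallback parser nests
# bracketed "optional" arguments into desc_optional nodes instead of treating
# the string as Python syntax, so signatures such as "func(foo[, bar])" and
# class base lists still render.
_demo_signode = addnodes.desc_signature('', '')
_pseudo_parse_arglist(_demo_signode, 'foo[, bar[, baz]]')
print(_demo_signode.pformat())  # desc_parameter nodes nested inside desc_optional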

sphinx/domains/python/_object.py  (new file, 426 lines)
@ -0,0 +1,426 @@
from __future__ import annotations

import contextlib
import re
from typing import TYPE_CHECKING

from docutils import nodes
from docutils.parsers.rst import directives

from sphinx import addnodes
from sphinx.addnodes import desc_signature, pending_xref, pending_xref_condition
from sphinx.directives import ObjectDescription
from sphinx.domains.python._annotations import (
    _parse_annotation,
    _parse_arglist,
    _parse_type_list,
    _pseudo_parse_arglist,
    parse_reftarget,
)
from sphinx.locale import _
from sphinx.util import logging
from sphinx.util.docfields import Field, GroupedField, TypedField
from sphinx.util.nodes import (
    make_id,
)

if TYPE_CHECKING:
    from docutils.nodes import Node
    from docutils.parsers.rst.states import Inliner

    from sphinx.environment import BuildEnvironment
    from sphinx.util.typing import OptionSpec, TextlikeNode

logger = logging.getLogger(__name__)

# REs for Python signatures
py_sig_re = re.compile(
    r'''^ ([\w.]*\.)?            # class name(s)
          (\w+)  \s*             # thing name
          (?: \[\s*(.*)\s*])?    # optional: type parameters list
          (?: \(\s*(.*)\s*\)     # optional: arguments
           (?:\s* -> \s* (.*))?  # return annotation
          )? $                   # and nothing more
          ''', re.VERBOSE)
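# Illustrative sketch (not part of the module): the groups py_sig_re recovers
# from a typical declaration -- class prefix, name, type parameter list,
# argument list and return annotation.
_demo_match = py_sig_re.match('Foo.bar[T](x: int, y) -> str')
print(_demo_match.groups())  # ('Foo.', 'bar', 'T', 'x: int, y', 'str')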


# This override allows our inline type specifiers to behave like :class: link
# when it comes to handling "." and "~" prefixes.
class PyXrefMixin:
    def make_xref(
        self,
        rolename: str,
        domain: str,
        target: str,
        innernode: type[TextlikeNode] = nodes.emphasis,
        contnode: Node | None = None,
        env: BuildEnvironment | None = None,
        inliner: Inliner | None = None,
        location: Node | None = None,
    ) -> Node:
        # we use inliner=None to make sure we get the old behaviour with a single
        # pending_xref node
        result = super().make_xref(rolename, domain, target,  # type: ignore[misc]
                                   innernode, contnode,
                                   env, inliner=None, location=None)
        if isinstance(result, pending_xref):
            assert env is not None
            result['refspecific'] = True
            result['py:module'] = env.ref_context.get('py:module')
            result['py:class'] = env.ref_context.get('py:class')

            reftype, reftarget, reftitle, _ = parse_reftarget(target)
            if reftarget != reftitle:
                result['reftype'] = reftype
                result['reftarget'] = reftarget

                result.clear()
                result += innernode(reftitle, reftitle)
            elif env.config.python_use_unqualified_type_names:
                children = result.children
                result.clear()

                shortname = target.split('.')[-1]
                textnode = innernode('', shortname)
                contnodes = [pending_xref_condition('', '', textnode, condition='resolved'),
                             pending_xref_condition('', '', *children, condition='*')]
                result.extend(contnodes)

        return result

    def make_xrefs(
        self,
        rolename: str,
        domain: str,
        target: str,
        innernode: type[TextlikeNode] = nodes.emphasis,
        contnode: Node | None = None,
        env: BuildEnvironment | None = None,
        inliner: Inliner | None = None,
        location: Node | None = None,
    ) -> list[Node]:
        delims = r'(\s*[\[\]\(\),](?:\s*o[rf]\s)?\s*|\s+o[rf]\s+|\s*\|\s*|\.\.\.)'
        delims_re = re.compile(delims)
        sub_targets = re.split(delims, target)

        split_contnode = bool(contnode and contnode.astext() == target)

        in_literal = False
        results = []
        for sub_target in filter(None, sub_targets):
            if split_contnode:
                contnode = nodes.Text(sub_target)

            if in_literal or delims_re.match(sub_target):
                results.append(contnode or innernode(sub_target, sub_target))
            else:
                results.append(self.make_xref(rolename, domain, sub_target,
                                              innernode, contnode, env, inliner, location))

            if sub_target in ('Literal', 'typing.Literal', '~typing.Literal'):
                in_literal = True

        return results
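# Illustrative sketch (not part of the module): make_xrefs() first splits a
# compound annotation on the delimiter pattern above (keeping the delimiters),
# and only the non-delimiter pieces become individual cross-references.
_demo_delims = r'(\s*[\[\]\(\),](?:\s*o[rf]\s)?\s*|\s+o[rf]\s+|\s*\|\s*|\.\.\.)'
print([s for s in re.split(_demo_delims, 'int | None') if s])  # ['int', ' | ', 'None']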


class PyField(PyXrefMixin, Field):
    pass


class PyGroupedField(PyXrefMixin, GroupedField):
    pass


class PyTypedField(PyXrefMixin, TypedField):
    pass


class PyObject(ObjectDescription[tuple[str, str]]):
    """
    Description of a general Python object.

    :cvar allow_nesting: Class is an object that allows for nested namespaces
    :vartype allow_nesting: bool
    """

    option_spec: OptionSpec = {
        'no-index': directives.flag,
        'no-index-entry': directives.flag,
        'no-contents-entry': directives.flag,
        'no-typesetting': directives.flag,
        'noindex': directives.flag,
        'noindexentry': directives.flag,
        'nocontentsentry': directives.flag,
        'single-line-parameter-list': directives.flag,
        'single-line-type-parameter-list': directives.flag,
        'module': directives.unchanged,
        'canonical': directives.unchanged,
        'annotation': directives.unchanged,
    }

    doc_field_types = [
        PyTypedField('parameter', label=_('Parameters'),
                     names=('param', 'parameter', 'arg', 'argument',
                            'keyword', 'kwarg', 'kwparam'),
                     typerolename='class', typenames=('paramtype', 'type'),
                     can_collapse=True),
        PyTypedField('variable', label=_('Variables'),
                     names=('var', 'ivar', 'cvar'),
                     typerolename='class', typenames=('vartype',),
                     can_collapse=True),
        PyGroupedField('exceptions', label=_('Raises'), rolename='exc',
                       names=('raises', 'raise', 'exception', 'except'),
                       can_collapse=True),
        Field('returnvalue', label=_('Returns'), has_arg=False,
              names=('returns', 'return')),
        PyField('returntype', label=_('Return type'), has_arg=False,
                names=('rtype',), bodyrolename='class'),
    ]

    allow_nesting = False

    def get_signature_prefix(self, sig: str) -> list[nodes.Node]:
        """May return a prefix to put before the object name in the
        signature.
        """
        return []

    def needs_arglist(self) -> bool:
        """May return true if an empty argument list is to be generated even if
        the document contains none.
        """
        return False

    def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]:
        """Transform a Python signature into RST nodes.

        Return (fully qualified name of the thing, classname if any).

        If inside a class, the current class name is handled intelligently:
        * it is stripped from the displayed name if present
        * it is added to the full name (return value) if not present
        """
        m = py_sig_re.match(sig)
        if m is None:
            raise ValueError
        prefix, name, tp_list, arglist, retann = m.groups()

        # determine module and class name (if applicable), as well as full name
        modname = self.options.get('module', self.env.ref_context.get('py:module'))
        classname = self.env.ref_context.get('py:class')
        if classname:
            add_module = False
            if prefix and (prefix == classname or
                           prefix.startswith(classname + ".")):
                fullname = prefix + name
                # class name is given again in the signature
                prefix = prefix[len(classname):].lstrip('.')
            elif prefix:
                # class name is given in the signature, but different
                # (shouldn't happen)
                fullname = classname + '.' + prefix + name
            else:
                # class name is not given in the signature
                fullname = classname + '.' + name
        else:
            add_module = True
            if prefix:
                classname = prefix.rstrip('.')
                fullname = prefix + name
            else:
                classname = ''
                fullname = name

        signode['module'] = modname
        signode['class'] = classname
        signode['fullname'] = fullname

        max_len = (self.env.config.python_maximum_signature_line_length
                   or self.env.config.maximum_signature_line_length
                   or 0)

        # determine if the function arguments (without its type parameters)
        # should be formatted on a multiline or not by removing the width of
        # the type parameters list (if any)
        sig_len = len(sig)
        tp_list_span = m.span(3)
        multi_line_parameter_list = (
            'single-line-parameter-list' not in self.options
            and (sig_len - (tp_list_span[1] - tp_list_span[0])) > max_len > 0
        )

        # determine whether the type parameter list must be wrapped or not
        arglist_span = m.span(4)
        multi_line_type_parameter_list = (
            'single-line-type-parameter-list' not in self.options
            and (sig_len - (arglist_span[1] - arglist_span[0])) > max_len > 0
        )

        sig_prefix = self.get_signature_prefix(sig)
        if sig_prefix:
            if type(sig_prefix) is str:
                msg = ("Python directive method get_signature_prefix()"
                       " must return a list of nodes."
                       f" Return value was '{sig_prefix}'.")
                raise TypeError(msg)
            signode += addnodes.desc_annotation(str(sig_prefix), '', *sig_prefix)

        if prefix:
            signode += addnodes.desc_addname(prefix, prefix)
        elif modname and add_module and self.env.config.add_module_names:
            nodetext = modname + '.'
            signode += addnodes.desc_addname(nodetext, nodetext)

        signode += addnodes.desc_name(name, name)

        if tp_list:
            try:
                signode += _parse_type_list(tp_list, self.env, multi_line_type_parameter_list)
            except Exception as exc:
                logger.warning("could not parse tp_list (%r): %s", tp_list, exc,
                               location=signode)

        if arglist:
            try:
                signode += _parse_arglist(arglist, self.env, multi_line_parameter_list)
            except SyntaxError:
                # fallback to parse arglist original parser
                # (this may happen if the argument list is incorrectly used
                # as a list of bases when documenting a class)
                # it supports to represent optional arguments (ex. "func(foo [, bar])")
                _pseudo_parse_arglist(signode, arglist, multi_line_parameter_list)
            except (NotImplementedError, ValueError) as exc:
                # duplicated parameter names raise ValueError and not a SyntaxError
                logger.warning("could not parse arglist (%r): %s", arglist, exc,
                               location=signode)
                _pseudo_parse_arglist(signode, arglist, multi_line_parameter_list)
        else:
            if self.needs_arglist():
                # for callables, add an empty parameter list
                signode += addnodes.desc_parameterlist()

        if retann:
            children = _parse_annotation(retann, self.env)
            signode += addnodes.desc_returns(retann, '', *children)

        anno = self.options.get('annotation')
        if anno:
            signode += addnodes.desc_annotation(' ' + anno, '',
                                                addnodes.desc_sig_space(),
                                                nodes.Text(anno))

        return fullname, prefix

    def _object_hierarchy_parts(self, sig_node: desc_signature) -> tuple[str, ...]:
        if 'fullname' not in sig_node:
            return ()
        modname = sig_node.get('module')
        fullname = sig_node['fullname']

        if modname:
            return (modname, *fullname.split('.'))
        else:
            return tuple(fullname.split('.'))

    def get_index_text(self, modname: str, name: tuple[str, str]) -> str:
        """Return the text for the index entry of the object."""
        msg = 'must be implemented in subclasses'
        raise NotImplementedError(msg)

    def add_target_and_index(self, name_cls: tuple[str, str], sig: str,
                             signode: desc_signature) -> None:
        modname = self.options.get('module', self.env.ref_context.get('py:module'))
        fullname = (modname + '.' if modname else '') + name_cls[0]
        node_id = make_id(self.env, self.state.document, '', fullname)
        signode['ids'].append(node_id)
        self.state.document.note_explicit_target(signode)

        domain = self.env.domains['py']
        domain.note_object(fullname, self.objtype, node_id, location=signode)

        canonical_name = self.options.get('canonical')
        if canonical_name:
            domain.note_object(canonical_name, self.objtype, node_id, aliased=True,
                               location=signode)

        if 'no-index-entry' not in self.options:
            indextext = self.get_index_text(modname, name_cls)
            if indextext:
                self.indexnode['entries'].append(('single', indextext, node_id, '', None))

    def before_content(self) -> None:
        """Handle object nesting before content

        :py:class:`PyObject` represents Python language constructs. For
        constructs that are nestable, such as a Python classes, this method will
        build up a stack of the nesting hierarchy so that it can be later
        de-nested correctly, in :py:meth:`after_content`.

        For constructs that aren't nestable, the stack is bypassed, and instead
        only the most recent object is tracked. This object prefix name will be
        removed with :py:meth:`after_content`.
        """
        prefix = None
        if self.names:
            # fullname and name_prefix come from the `handle_signature` method.
            # fullname represents the full object name that is constructed using
            # object nesting and explicit prefixes. `name_prefix` is the
            # explicit prefix given in a signature
            (fullname, name_prefix) = self.names[-1]
            if self.allow_nesting:
                prefix = fullname
            elif name_prefix:
                prefix = name_prefix.strip('.')
        if prefix:
            self.env.ref_context['py:class'] = prefix
            if self.allow_nesting:
                classes = self.env.ref_context.setdefault('py:classes', [])
                classes.append(prefix)
        if 'module' in self.options:
            modules = self.env.ref_context.setdefault('py:modules', [])
            modules.append(self.env.ref_context.get('py:module'))
            self.env.ref_context['py:module'] = self.options['module']

    def after_content(self) -> None:
        """Handle object de-nesting after content

        If this class is a nestable object, removing the last nested class prefix
        ends further nesting in the object.

        If this class is not a nestable object, the list of classes should not
        be altered as we didn't affect the nesting levels in
        :py:meth:`before_content`.
        """
        classes = self.env.ref_context.setdefault('py:classes', [])
        if self.allow_nesting:
            with contextlib.suppress(IndexError):
                classes.pop()

        self.env.ref_context['py:class'] = (classes[-1] if len(classes) > 0
                                            else None)
        if 'module' in self.options:
            modules = self.env.ref_context.setdefault('py:modules', [])
            if modules:
                self.env.ref_context['py:module'] = modules.pop()
            else:
                self.env.ref_context.pop('py:module')

    def _toc_entry_name(self, sig_node: desc_signature) -> str:
        if not sig_node.get('_toc_parts'):
            return ''

        config = self.env.app.config
        objtype = sig_node.parent.get('objtype')
        if config.add_function_parentheses and objtype in {'function', 'method'}:
            parens = '()'
        else:
            parens = ''
        *parents, name = sig_node['_toc_parts']
        if config.toc_object_entries_show_parents == 'domain':
            return sig_node.get('fullname', name) + parens
        if config.toc_object_entries_show_parents == 'hide':
            return name + parens
        if config.toc_object_entries_show_parents == 'all':
            return '.'.join(parents + [name + parens])
        return ''
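# Illustrative sketch (not part of the module): _toc_entry_name() is driven by
# two documented conf.py settings; a project opting into object entries in the
# local table of contents might set, for example:
#
#     add_function_parentheses = True            # append "()" to functions/methods
#     toc_object_entries_show_parents = 'hide'   # or 'domain' (default) or 'all'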

@ -330,7 +330,7 @@ def setup(app: Sphinx) -> dict[str, Any]:

 def _patch_python_domain() -> None:
-    from sphinx.domains.python import PyObject, PyTypedField
+    from sphinx.domains.python._object import PyObject, PyTypedField
     from sphinx.locale import _
     for doc_field in PyObject.doc_field_types:
         if doc_field.name == 'parameter':
@ -31,13 +31,9 @@ from sphinx.addnodes import (
     pending_xref,
 )
 from sphinx.domains import IndexEntry
-from sphinx.domains.python import (
-    PythonDomain,
-    PythonModuleIndex,
-    _parse_annotation,
-    _pseudo_parse_arglist,
-    py_sig_re,
-)
+from sphinx.domains.python import PythonDomain, PythonModuleIndex
+from sphinx.domains.python._annotations import _parse_annotation, _pseudo_parse_arglist
+from sphinx.domains.python._object import py_sig_re
 from sphinx.testing import restructuredtext
 from sphinx.testing.util import assert_node
 from sphinx.writers.text import STDINDENT