Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
De-glob mypy whitelist for 'sphinx.domains.*' (#11064)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>

@@ -220,7 +220,12 @@ module = [
"sphinx.builders.html",
"sphinx.builders.latex",
"sphinx.builders.linkcheck",
"sphinx.domains.*",
"sphinx.domains",
"sphinx.domains.c",
"sphinx.domains.cpp",
"sphinx.domains.javascript",
"sphinx.domains.python",
"sphinx.domains.std",
"sphinx.environment",
"sphinx.environment.adapters.toctree",
"sphinx.environment.adapters.indexentries",

@@ -266,7 +271,9 @@ module = [
"sphinx.builders.linkcheck",
"sphinx.cmd.quickstart",
"sphinx.config",
"sphinx.domains.*",
"sphinx.domains",
"sphinx.domains.c",
"sphinx.domains.cpp",
"sphinx.environment.*",
"sphinx.events",
"sphinx.ext.*",

@@ -28,7 +28,7 @@ strip_backslash_re = re.compile(r'\\(.)')
T = TypeVar('T')


def optional_int(argument: str) -> int:
def optional_int(argument: str) -> int | None:
"""
Check for an integer argument or None value; raise ``ValueError`` if not.
"""

@@ -61,8 +61,8 @@ class ObjectDescription(SphinxDirective, Generic[T]):
# types of doc fields that this directive handles, see sphinx.util.docfields
doc_field_types: list[Field] = []
domain: str | None = None
objtype: str | None = None
indexnode: addnodes.index | None = None
objtype: str # set when `run` method is called
indexnode: addnodes.index

# Warning: this might be removed in future version. Don't touch this from extensions.
_doc_field_type_map: dict[str, tuple[Field, bool]] = {}

@@ -8,7 +8,7 @@ from __future__ import annotations

import copy
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Any, Callable, Iterable, NamedTuple, cast
from typing import TYPE_CHECKING, Any, Callable, Iterable, NamedTuple, Optional, cast

from docutils import nodes
from docutils.nodes import Element, Node, system_message

@@ -21,6 +21,8 @@ from sphinx.roles import XRefRole
from sphinx.util.typing import RoleFunction

if TYPE_CHECKING:
from docutils.parsers.rst import Directive

from sphinx.builders import Builder
from sphinx.environment import BuildEnvironment

@@ -84,9 +86,9 @@ class Index(ABC):
:rst:role:`ref` role.
"""

name: str = None
localname: str = None
shortname: str = None
name: str
localname: str
shortname: str | None = None

def __init__(self, domain: Domain) -> None:
if self.name is None or self.localname is None:

@@ -95,7 +97,7 @@ class Index(ABC):
self.domain = domain

@abstractmethod
def generate(self, docnames: Iterable[str] = None
def generate(self, docnames: Iterable[str] | None = None
) -> tuple[list[tuple[str, list[IndexEntry]]], bool]:
"""Get entries for the index.
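The `Index` attributes above (`name`, `localname`, `shortname`) and `generate` are the interface a concrete index implements; the tightened annotations say `name` and `localname` must be set, while `shortname` may stay `None`. A small hypothetical subclass, as a sketch of how the annotations are used (class name and entries are made up):

    from collections.abc import Iterable

    from sphinx.domains import Index, IndexEntry


    class RecipeIndex(Index):
        name = 'recipes'
        localname = 'Recipe Index'
        shortname = 'recipes'

        def generate(self, docnames: Iterable[str] | None = None
                     ) -> tuple[list[tuple[str, list[IndexEntry]]], bool]:
            # IndexEntry fields: name, subtype, docname, anchor, extra, qualifier, descr
            entries = [IndexEntry('boiled-egg', 0, 'recipes/eggs', 'recipe-boiled-egg', '', '', '')]
            return [('B', entries)], False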
@@ -149,6 +151,9 @@ class Index(ABC):
raise NotImplementedError


TitleGetter = Callable[[Node], Optional[str]]


class Domain:
"""
A Domain is meant to be a group of "object" description directives for

@@ -179,7 +184,7 @@ class Domain:
#: type (usually directive) name -> ObjType instance
object_types: dict[str, ObjType] = {}
#: directive name -> directive class
directives: dict[str, Any] = {}
directives: dict[str, type[Directive]] = {}
#: role name -> role callable
roles: dict[str, RoleFunction | XRefRole] = {}
#: a list of Index subclasses

@@ -187,8 +192,7 @@ class Domain:
#: role name -> a warning message if reference is missing
dangling_warnings: dict[str, str] = {}
#: node_class -> (enum_node_type, title_getter)
enumerable_nodes: dict[type[Node], tuple[str, Callable]] = {}

enumerable_nodes: dict[type[Node], tuple[str, TitleGetter | None]] = {}
#: data value for a fresh environment
initial_data: dict = {}
#: data value
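The new `TitleGetter` alias names the callables stored as the second element of each `enumerable_nodes` value. A rough illustration of a function satisfying `Callable[[Node], Optional[str]]` (hypothetical, not part of this commit):

    from docutils import nodes
    from docutils.nodes import Node


    def caption_title_getter(node: Node) -> str | None:
        # Return the text of the first caption found under the node, if any.
        for caption in node.findall(nodes.caption):
            return caption.astext()
        return None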

@@ -276,7 +280,7 @@ class Domain:
fullname = f'{self.name}:{name}'
BaseDirective = self.directives[name]

class DirectiveAdapter(BaseDirective): # type: ignore
class DirectiveAdapter(BaseDirective): # type: ignore[valid-type,misc]
def run(self) -> list[Node]:
self.name = fullname
return super().run()
@@ -1259,7 +1259,7 @@ class ASTTypeWithInit(ASTBase):


class ASTMacroParameter(ASTBase):
def __init__(self, arg: ASTNestedName, ellipsis: bool = False,
def __init__(self, arg: ASTNestedName | None, ellipsis: bool = False,
variadic: bool = False) -> None:
self.arg = arg
self.ellipsis = ellipsis

@@ -1286,7 +1286,7 @@ class ASTMacroParameter(ASTBase):


class ASTMacro(ASTBase):
def __init__(self, ident: ASTNestedName, args: list[ASTMacroParameter]) -> None:
def __init__(self, ident: ASTNestedName, args: list[ASTMacroParameter] | None) -> None:
self.ident = ident
self.args = args

@@ -1405,7 +1405,7 @@ class ASTEnumerator(ASTBase):


class ASTDeclaration(ASTBaseBase):
def __init__(self, objectType: str, directiveType: str,
def __init__(self, objectType: str, directiveType: str | None,
declaration: DeclarationType | ASTFunctionParameter,
semicolon: bool = False) -> None:
self.objectType = objectType

@@ -1427,7 +1427,7 @@ class ASTDeclaration(ASTBaseBase):
return decl.name

@property
def function_params(self) -> list[ASTFunctionParameter]:
def function_params(self) -> list[ASTFunctionParameter] | None:
if self.objectType != 'function':
return None
decl = cast(ASTType, self.declaration)
@@ -1547,8 +1547,14 @@ class Symbol:
else:
return super().__setattr__(key, value)

def __init__(self, parent: Symbol, ident: ASTIdentifier,
declaration: ASTDeclaration, docname: str, line: int) -> None:
def __init__(
self,
parent: Symbol,
ident: ASTIdentifier,
declaration: ASTDeclaration | None,
docname: str | None,
line: int | None,
) -> None:
self.parent = parent
# declarations in a single directive are linked together
self.siblingAbove: Symbol = None

@@ -1682,7 +1688,7 @@ class Symbol:
return ASTNestedName(names, rooted=False)

def _find_first_named_symbol(self, ident: ASTIdentifier,
matchSelf: bool, recurseInAnon: bool) -> Symbol:
matchSelf: bool, recurseInAnon: bool) -> Symbol | None:
# TODO: further simplification from C++ to C
if Symbol.debug_lookup:
Symbol.debug_print("_find_first_named_symbol ->")

@@ -1743,10 +1749,15 @@ class Symbol:
if Symbol.debug_lookup:
Symbol.debug_indent -= 2

def _symbol_lookup(self, nestedName: ASTNestedName,
onMissingQualifiedSymbol: Callable[[Symbol, ASTIdentifier], Symbol],
ancestorLookupType: str, matchSelf: bool,
recurseInAnon: bool, searchInSiblings: bool) -> SymbolLookupResult:
def _symbol_lookup(
self,
nestedName: ASTNestedName,
onMissingQualifiedSymbol: Callable[[Symbol, ASTIdentifier], Symbol | None],
ancestorLookupType: str | None,
matchSelf: bool,
recurseInAnon: bool,
searchInSiblings: bool
) -> SymbolLookupResult | None:
# TODO: further simplification from C++ to C
# ancestorLookupType: if not None, specifies the target type of the lookup
if Symbol.debug_lookup:

@@ -1815,8 +1826,13 @@ class Symbol:
Symbol.debug_indent -= 2
return SymbolLookupResult(symbols, parentSymbol, ident)

def _add_symbols(self, nestedName: ASTNestedName,
declaration: ASTDeclaration, docname: str, line: int) -> Symbol:
def _add_symbols(
self,
nestedName: ASTNestedName,
declaration: ASTDeclaration | None,
docname: str | None,
line: int | None,
) -> Symbol:
# TODO: further simplification from C++ to C
# Used for adding a whole path of symbols, where the last may or may not
# be an actual declaration.

@@ -2038,7 +2054,7 @@ class Symbol:

def find_identifier(self, ident: ASTIdentifier,
matchSelf: bool, recurseInAnon: bool, searchInSiblings: bool
) -> Symbol:
) -> Symbol | None:
if Symbol.debug_lookup:
Symbol.debug_indent += 1
Symbol.debug_print("find_identifier:")

@@ -2067,7 +2083,7 @@ class Symbol:
current = current.siblingAbove
return None

def direct_lookup(self, key: LookupKey) -> Symbol:
def direct_lookup(self, key: LookupKey) -> Symbol | None:
if Symbol.debug_lookup:
Symbol.debug_indent += 1
Symbol.debug_print("direct_lookup:")

@@ -2096,14 +2112,16 @@ class Symbol:
return s

def find_declaration(self, nestedName: ASTNestedName, typ: str,
matchSelf: bool, recurseInAnon: bool) -> Symbol:
matchSelf: bool, recurseInAnon: bool) -> Symbol | None:
# templateShorthand: missing template parameter lists for templates is ok
if Symbol.debug_lookup:
Symbol.debug_indent += 1
Symbol.debug_print("find_declaration:")

def onMissingQualifiedSymbol(parentSymbol: Symbol,
ident: ASTIdentifier) -> Symbol:
def onMissingQualifiedSymbol(
parentSymbol: Symbol,
ident: ASTIdentifier,
) -> Symbol | None:
return None

lookupResult = self._symbol_lookup(nestedName,
@@ -2163,7 +2181,7 @@ class DefinitionParser(BaseParser):
def paren_attributes(self):
return self.config.c_paren_attributes

def _parse_string(self) -> str:
def _parse_string(self) -> str | None:
if self.current_char != '"':
return None
startPos = self.pos

@@ -2182,7 +2200,7 @@ class DefinitionParser(BaseParser):
self.pos += 1
return self.definition[startPos:self.pos]

def _parse_literal(self) -> ASTLiteral:
def _parse_literal(self) -> ASTLiteral | None:
# -> integer-literal
# | character-literal
# | floating-literal

@@ -2220,7 +2238,7 @@ class DefinitionParser(BaseParser):
" resulting in multiple decoded characters.")
return None

def _parse_paren_expression(self) -> ASTExpression:
def _parse_paren_expression(self) -> ASTExpression | None:
# "(" expression ")"
if self.current_char != '(':
return None

@@ -2231,12 +2249,12 @@ class DefinitionParser(BaseParser):
self.fail("Expected ')' in end of parenthesized expression.")
return ASTParenExpr(res)

def _parse_primary_expression(self) -> ASTExpression:
def _parse_primary_expression(self) -> ASTExpression | None:
# literal
# "(" expression ")"
# id-expression -> we parse this with _parse_nested_name
self.skip_ws()
res: ASTExpression = self._parse_literal()
res: ASTExpression | None = self._parse_literal()
if res is not None:
return res
res = self._parse_paren_expression()

@@ -2277,7 +2295,7 @@ class DefinitionParser(BaseParser):
break
return exprs, trailingComma

def _parse_paren_expression_list(self) -> ASTParenExprList:
def _parse_paren_expression_list(self) -> ASTParenExprList | None:
# -> '(' expression-list ')'
# though, we relax it to also allow empty parens
# as it's needed in some cases

@@ -2290,7 +2308,7 @@ class DefinitionParser(BaseParser):
return None
return ASTParenExprList(exprs)

def _parse_braced_init_list(self) -> ASTBracedInitList:
def _parse_braced_init_list(self) -> ASTBracedInitList | None:
# -> '{' initializer-list ','[opt] '}'
# | '{' '}'
exprs, trailingComma = self._parse_initializer_list("braced-init-list", '{', '}')

@@ -2455,7 +2473,7 @@ class DefinitionParser(BaseParser):
return ASTBinOpExpr(exprs, ops)
return _parse_bin_op_expr(self, 0)

def _parse_conditional_expression_tail(self, orExprHead: Any) -> ASTExpression:
def _parse_conditional_expression_tail(self, orExprHead: Any) -> ASTExpression | None:
# -> "?" expression ":" assignment-expression
return None

@@ -2583,7 +2601,7 @@ class DefinitionParser(BaseParser):
return t
return None

def _parse_simple_type_specifiers(self) -> ASTTrailingTypeSpecFundamental:
def _parse_simple_type_specifiers(self) -> ASTTrailingTypeSpecFundamental | None:
names: list[str] = []

self.skip_ws()

@@ -2654,7 +2672,9 @@ class DefinitionParser(BaseParser):
attrs = self._parse_attribute_list()
return ASTParameters(args, attrs)

def _parse_decl_specs_simple(self, outer: str, typed: bool) -> ASTDeclSpecsSimple:
def _parse_decl_specs_simple(
self, outer: str | None, typed: bool
) -> ASTDeclSpecsSimple:
"""Just parse the simple ones."""
storage = None
threadLocal = None

@@ -2712,7 +2732,7 @@ class DefinitionParser(BaseParser):
return ASTDeclSpecsSimple(storage, threadLocal, inline,
restrict, volatile, const, ASTAttributeList(attrs))

def _parse_decl_specs(self, outer: str, typed: bool = True) -> ASTDeclSpecs:
def _parse_decl_specs(self, outer: str | None, typed: bool = True) -> ASTDeclSpecs:
if outer:
if outer not in ('type', 'member', 'function'):
raise Exception('Internal error, unknown outer "%s".' % outer)

@@ -2888,8 +2908,8 @@ class DefinitionParser(BaseParser):
header = "Error in declarator or parameters"
raise self._make_multi_error(prevErrors, header) from e

def _parse_initializer(self, outer: str = None, allowFallback: bool = True
) -> ASTInitializer:
def _parse_initializer(self, outer: str | None = None, allowFallback: bool = True
) -> ASTInitializer | None:
self.skip_ws()
if outer == 'member' and False: # TODO
bracedInit = self._parse_braced_init_list()

@@ -2976,7 +2996,7 @@ class DefinitionParser(BaseParser):
decl = self._parse_declarator(named=named, paramMode=paramMode)
return ASTType(declSpecs, decl)

def _parse_type_with_init(self, named: bool | str, outer: str) -> ASTTypeWithInit:
def _parse_type_with_init(self, named: bool | str, outer: str | None) -> ASTTypeWithInit:
if outer:
assert outer in ('type', 'member', 'function')
type = self._parse_type(outer=outer, named=named)

@@ -3445,9 +3465,14 @@ class CNamespacePopObject(SphinxDirective):


class AliasNode(nodes.Element):
def __init__(self, sig: str, aliasOptions: dict,
document: Any, env: BuildEnvironment = None,
parentKey: LookupKey = None) -> None:
def __init__(
self,
sig: str,
aliasOptions: dict,
document: Any,
env: BuildEnvironment | None = None,
parentKey: LookupKey | None = None,
) -> None:
super().__init__()
self.sig = sig
self.aliasOptions = aliasOptions
@@ -35,8 +35,8 @@ class ChangeSet(NamedTuple):
type: str
docname: str
lineno: int
module: str
descname: str
module: str | None
descname: str | None
content: str

@@ -107,7 +107,7 @@ class ChangeSetDomain(Domain):
name = 'changeset'
label = 'changeset'

initial_data: dict = {
initial_data: dict[str, Any] = {
'changes': {}, # version -> list of ChangeSet
}

@@ -129,7 +129,7 @@ class ChangeSetDomain(Domain):
if changeset.docname == docname:
changes.remove(changeset)

def merge_domaindata(self, docnames: list[str], otherdata: dict) -> None:
def merge_domaindata(self, docnames: list[str], otherdata: dict[str, Any]) -> None:
# XXX duplicates?
for version, otherchanges in otherdata['changes'].items():
changes = self.changesets.setdefault(version, [])
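The recurring `otherdata: dict` -> `otherdata: dict[str, Any]` change here and in the domains below only tightens the annotation; the merge behaviour is untouched. As a rough standalone sketch of the merge pattern (assumed data shapes, not the actual method body):

    from typing import Any


    def merge_changes(changesets: dict[str, list[Any]], docnames: list[str],
                      otherdata: dict[str, Any]) -> None:
        # Copy over only the entries that belong to one of the merged documents.
        for version, otherchanges in otherdata['changes'].items():
            changes = changesets.setdefault(version, [])
            changes.extend(c for c in otherchanges if c.docname in docnames)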

@@ -51,7 +51,7 @@ class CitationDomain(Domain):
elif docname in docnames:
docnames.remove(docname)

def merge_domaindata(self, docnames: list[str], otherdata: dict) -> None:
def merge_domaindata(self, docnames: list[str], otherdata: dict[str, Any]) -> None:
# XXX duplicates?
for key, data in otherdata['citations'].items():
if data[0] in docnames:

@@ -35,7 +35,7 @@ class IndexDomain(Domain):
def clear_doc(self, docname: str) -> None:
self.entries.pop(docname, None)

def merge_domaindata(self, docnames: Iterable[str], otherdata: dict) -> None:
def merge_domaindata(self, docnames: Iterable[str], otherdata: dict[str, Any]) -> None:
for docname in docnames:
self.entries[docname] = otherdata['entries'][docname]

@@ -412,7 +412,7 @@ class JavaScriptDomain(Domain):
if pkg_docname == docname:
del self.modules[modname]

def merge_domaindata(self, docnames: list[str], otherdata: dict) -> None:
def merge_domaindata(self, docnames: list[str], otherdata: dict[str, Any]) -> None:
# XXX check duplicates
for fullname, (fn, node_id, objtype) in otherdata['objects'].items():
if fn in docnames:

@@ -421,8 +421,15 @@ class JavaScriptDomain(Domain):
if pkg_docname in docnames:
self.modules[mod_name] = (pkg_docname, node_id)

def find_obj(self, env: BuildEnvironment, mod_name: str, prefix: str, name: str,
typ: str, searchorder: int = 0) -> tuple[str, tuple[str, str, str]]:
def find_obj(
self,
env: BuildEnvironment,
mod_name: str,
prefix: str,
name: str,
typ: str | None,
searchorder: int = 0
) -> tuple[str | None, tuple[str, str, str] | None]:
if name[-2:] == '()':
name = name[:-2]

@@ -471,7 +478,7 @@ class JavaScriptDomain(Domain):
for refname, (docname, node_id, typ) in list(self.objects.items()):
yield refname, refname, typ, docname, node_id, 1

def get_full_qualified_name(self, node: Element) -> str:
def get_full_qualified_name(self, node: Element) -> str | None:
modname = node.get('js:module')
prefix = node.get('js:object')
target = node.get('reftarget')

@@ -35,7 +35,7 @@ class MathDomain(Domain):
name = 'math'
label = 'mathematics'

initial_data: dict = {
initial_data: dict[str, Any] = {
'objects': {}, # labelid -> (docname, eqno)
'has_equations': {}, # docname -> bool
}

@@ -61,7 +61,7 @@ class MathDomain(Domain):

self.equations[labelid] = (docname, self.env.new_serialno('eqno') + 1)

def get_equation_number_for(self, labelid: str) -> int:
def get_equation_number_for(self, labelid: str) -> int | None:
if labelid in self.equations:
return self.equations[labelid][1]
else:

@@ -81,7 +81,7 @@ class MathDomain(Domain):

self.data['has_equations'].pop(docname, None)

def merge_domaindata(self, docnames: Iterable[str], otherdata: dict) -> None:
def merge_domaindata(self, docnames: Iterable[str], otherdata: dict[str, Any]) -> None:
for labelid, (doc, eqno) in otherdata['objects'].items():
if doc in docnames:
self.equations[labelid] = (doc, eqno)

@@ -93,8 +93,9 @@ class MathDomain(Domain):
typ: str, target: str, node: pending_xref, contnode: Element
) -> Element | None:
assert typ in ('eq', 'numref')
docname, number = self.equations.get(target, (None, None))
if docname:
result = self.equations.get(target)
if result:
docname, number = result
# TODO: perhaps use rather a sphinx-core provided prefix here?
node_id = make_id('equation-%s' % target)
if env.config.math_numfig and env.config.numfig:
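The change from unpacking `self.equations.get(target, (None, None))` to fetching the entry and testing it first is what lets mypy narrow the optional value. A small self-contained illustration of the difference (assumed example data, not Sphinx code):

    equations: dict[str, tuple[str, int]] = {'euler': ('pages/maths', 1)}

    # Before: unpacking a (None, None) default leaves both names typed as
    # "str | None" and "int | None", even inside the truthiness check.
    docname, number = equations.get('euler', (None, None))

    # After: fetch the entry, test it, then unpack, so the tuple is narrowed
    # to (str, int) inside the branch.
    result = equations.get('euler')
    if result:
        docname, number = result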

@@ -127,10 +128,10 @@ class MathDomain(Domain):
else:
return [('eq', refnode)]

def get_objects(self) -> list:
def get_objects(self) -> Iterable[tuple[str, str, str, str, str, int]]:
return []

def has_equations(self, docname: str = None) -> bool:
def has_equations(self, docname: str | None = None) -> bool:
if docname:
return self.data['has_equations'].get(docname, False)
else:

@@ -124,7 +124,7 @@ def type_to_xref(target: str, env: BuildEnvironment | None = None,
refspecific=refspecific, **kwargs)


def _parse_annotation(annotation: str, env: BuildEnvironment) -> list[Node]:
def _parse_annotation(annotation: str, env: BuildEnvironment | None) -> list[Node]:
"""Parse type annotation."""
def unparse(node: ast.AST) -> list[Node]:
if isinstance(node, ast.Attribute):

@@ -354,10 +354,17 @@ def _pseudo_parse_arglist(signode: desc_signature, arglist: str) -> None:
# This override allows our inline type specifiers to behave like :class: link
# when it comes to handling "." and "~" prefixes.
class PyXrefMixin:
def make_xref(self, rolename: str, domain: str, target: str,
innernode: type[TextlikeNode] = nodes.emphasis,
contnode: Node = None, env: BuildEnvironment = None,
inliner: Inliner = None, location: Node = None) -> Node:
def make_xref(
self,
rolename: str,
domain: str,
target: str,
innernode: type[TextlikeNode] = nodes.emphasis,
contnode: Node | None = None,
env: BuildEnvironment | None = None,
inliner: Inliner | None = None,
location: Node | None = None
) -> Node:
# we use inliner=None to make sure we get the old behaviour with a single
# pending_xref node
result = super().make_xref(rolename, domain, target, # type: ignore

@@ -387,10 +394,17 @@ class PyXrefMixin:

return result

def make_xrefs(self, rolename: str, domain: str, target: str,
innernode: type[TextlikeNode] = nodes.emphasis,
contnode: Node = None, env: BuildEnvironment = None,
inliner: Inliner = None, location: Node = None) -> list[Node]:
def make_xrefs(
self,
rolename: str,
domain: str,
target: str,
innernode: type[TextlikeNode] = nodes.emphasis,
contnode: Node | None = None,
env: BuildEnvironment | None = None,
inliner: Inliner | None = None,
location: Node | None = None,
) -> list[Node]:
delims = r'(\s*[\[\]\(\),](?:\s*o[rf]\s)?\s*|\s+o[rf]\s+|\s*\|\s*|\.\.\.)'
delims_re = re.compile(delims)
sub_targets = re.split(delims, target)

@@ -712,7 +726,7 @@ class PyFunction(PyObject):
text = f'{pairindextypes["builtin"]}; {name}()'
self.indexnode['entries'].append(('pair', text, node_id, '', None))

def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str:
def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str | None:
# add index in own add_target_and_index() instead.
return None

@@ -1124,7 +1138,7 @@ class PythonModuleIndex(Index):
localname = _('Python Module Index')
shortname = _('modules')

def generate(self, docnames: Iterable[str] = None
def generate(self, docnames: Iterable[str] | None = None
) -> tuple[list[tuple[str, list[IndexEntry]]], bool]:
content: dict[str, list[IndexEntry]] = {}
# list of prefixes to ignore

@@ -1284,7 +1298,7 @@ class PythonDomain(Domain):
if mod.docname == docname:
del self.modules[modname]

def merge_domaindata(self, docnames: list[str], otherdata: dict) -> None:
def merge_domaindata(self, docnames: list[str], otherdata: dict[str, Any]) -> None:
# XXX check duplicates?
for fullname, obj in otherdata['objects'].items():
if obj.docname in docnames:

@@ -1294,7 +1308,7 @@ class PythonDomain(Domain):
self.modules[modname] = mod

def find_obj(self, env: BuildEnvironment, modname: str, classname: str,
name: str, type: str, searchmode: int = 0
name: str, type: str | None, searchmode: int = 0
) -> list[tuple[str, ObjectEntry]]:
"""Find a Python object for "name", perhaps using the given module
and/or classname. Returns a list of (name, object entry) tuples.

@@ -1467,7 +1481,7 @@ class PythonDomain(Domain):


def builtin_resolver(app: Sphinx, env: BuildEnvironment,
node: pending_xref, contnode: Element) -> Element:
node: pending_xref, contnode: Element) -> Element | None:
"""Do not emit nitpicky warnings for built-in types."""
def istyping(s: str) -> bool:
if s.startswith('typing.'):
@@ -256,7 +256,7 @@ class ReSTDomain(Domain):
if doc == docname:
del self.objects[typ, name]

def merge_domaindata(self, docnames: list[str], otherdata: dict) -> None:
def merge_domaindata(self, docnames: list[str], otherdata: dict[str, Any]) -> None:
# XXX check duplicates
for (typ, name), (doc, node_id) in otherdata['objects'].items():
if doc in docnames:

@@ -267,8 +267,9 @@ class ReSTDomain(Domain):
) -> Element | None:
objtypes = self.objtypes_for_role(typ)
for objtype in objtypes:
todocname, node_id = self.objects.get((objtype, target), (None, None))
if todocname:
result = self.objects.get((objtype, target))
if result:
todocname, node_id = result
return make_refnode(builder, fromdocname, todocname, node_id,
contnode, target + ' ' + objtype)
return None

@@ -278,8 +279,9 @@ class ReSTDomain(Domain):
) -> list[tuple[str, Element]]:
results: list[tuple[str, Element]] = []
for objtype in self.object_types:
todocname, node_id = self.objects.get((objtype, target), (None, None))
if todocname:
result = self.objects.get((objtype, target))
if result:
todocname, node_id = result
results.append(('rst:' + self.role_for_objtype(objtype),
make_refnode(builder, fromdocname, todocname, node_id,
contnode, target + ' ' + objtype)))

@@ -14,7 +14,7 @@ from docutils.statemachine import StringList
from sphinx import addnodes
from sphinx.addnodes import desc_signature, pending_xref
from sphinx.directives import ObjectDescription
from sphinx.domains import Domain, ObjType
from sphinx.domains import Domain, ObjType, TitleGetter
from sphinx.locale import _, __
from sphinx.roles import EmphasizedLiteral, XRefRole
from sphinx.util import docname_join, logging, ws_re

@@ -42,7 +42,7 @@ class GenericObject(ObjectDescription[str]):
A generic x-ref directive registered with Sphinx.add_object_type().
"""
indextemplate: str = ''
parse_node: Callable[[BuildEnvironment, str, desc_signature], str] = None
parse_node: Callable[[BuildEnvironment, str, desc_signature], str] | None = None

def handle_signature(self, sig: str, signode: desc_signature) -> str:
if self.parse_node:

@@ -292,7 +292,7 @@ def split_term_classifiers(line: str) -> list[str | None]:


def make_glossary_term(env: BuildEnvironment, textnodes: Iterable[Node], index_key: str,
source: str, lineno: int, node_id: str, document: nodes.document
source: str, lineno: int, node_id: str | None, document: nodes.document
) -> nodes.term:
# get a text-only representation of the term and register it
# as a cross-reference target

@@ -614,7 +614,7 @@ class StandardDomain(Domain):
}

# node_class -> (figtype, title_getter)
enumerable_nodes: dict[type[Node], tuple[str, Callable | None]] = {
enumerable_nodes: dict[type[Node], tuple[str, TitleGetter | None]] = {
nodes.figure: ('figure', None),
nodes.table: ('table', None),
nodes.container: ('code-block', None),

@@ -712,7 +712,7 @@ class StandardDomain(Domain):
if fn == docname:
del self.anonlabels[key]

def merge_domaindata(self, docnames: list[str], otherdata: dict) -> None:
def merge_domaindata(self, docnames: list[str], otherdata: dict[str, Any]) -> None:
# XXX duplicates?
for key, data in otherdata['progoptions'].items():
if data[0] in docnames:

@@ -757,7 +757,7 @@ class StandardDomain(Domain):
self.anonlabels[name] = docname, labelid
if node.tagname == 'section':
title = cast(nodes.title, node[0])
sectname = clean_astext(title)
sectname: str | None = clean_astext(title)
elif node.tagname == 'rubric':
sectname = clean_astext(node)
elif self.is_enumerable_node(node):

@@ -975,7 +975,7 @@ class StandardDomain(Domain):

def _resolve_term_xref(self, env: BuildEnvironment, fromdocname: str,
builder: Builder, typ: str, target: str,
node: pending_xref, contnode: Element) -> Element:
node: pending_xref, contnode: Element) -> Element | None:
result = self._resolve_obj_xref(env, fromdocname, builder, typ,
target, node, contnode)
if result:

@@ -1084,8 +1084,14 @@ class StandardDomain(Domain):
figtype, _ = self.enumerable_nodes.get(node.__class__, (None, None))
return figtype

def get_fignumber(self, env: BuildEnvironment, builder: Builder,
figtype: str, docname: str, target_node: Element) -> tuple[int, ...]:
def get_fignumber(
self,
env: BuildEnvironment,
builder: Builder,
figtype: str,
docname: str,
target_node: Element
) -> tuple[int, ...] | None:
if figtype == 'section':
if builder.name == 'latex':
return ()
@@ -68,7 +68,7 @@ class SphinxComponentRegistry:

#: additional directives for domains
#: a dict of domain name -> dict of directive name -> directive
self.domain_directives: dict[str, dict[str, Any]] = {}
self.domain_directives: dict[str, dict[str, type[Directive]]] = {}

#: additional indices for domains
#: a dict of domain name -> list of index class

@@ -196,7 +196,7 @@ class SphinxComponentRegistry:
if domain not in self.domains:
raise ExtensionError(__('domain %s not yet registered') % domain)

directives = self.domain_directives.setdefault(domain, {})
directives: dict[str, type[Directive]] = self.domain_directives.setdefault(domain, {})
if name in directives and not override:
raise ExtensionError(__('The %r directive is already registered to domain %s') %
(name, domain))

@@ -458,5 +458,5 @@ class BaseParser:
res.append(attr)
return ASTAttributeList(res)

def _parse_paren_expression_list(self) -> ASTBaseParenExprList:
def _parse_paren_expression_list(self) -> ASTBaseParenExprList | None:
raise NotImplementedError

@@ -530,7 +530,7 @@ def find_pending_xref_condition(node: addnodes.pending_xref, condition: str
return None


def make_refnode(builder: Builder, fromdocname: str, todocname: str, targetid: str,
def make_refnode(builder: Builder, fromdocname: str, todocname: str, targetid: str | None,
child: Node | list[Node], title: str | None = None
) -> nodes.reference:
"""Shortcut to create a reference node."""