mirror of https://github.com/sphinx-doc/sphinx.git, synced 2025-02-25 18:55:22 -06:00
Remove `sphinx.util.docfields` from the no-any-generics whitelist
This commit is contained in:
parent 14db5ffba1
commit 705cf7fcf5
@@ -234,7 +234,6 @@ module = [
     "sphinx.domains",
     "sphinx.domains.c",
     "sphinx.domains.cpp",
-    "sphinx.util.docfields",
     "sphinx.util.docutils",
     "sphinx.util.inspect",
     "sphinx.util.logging",
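Note: dropping the module from this override list means that mypy's disallow_any_generics check (which the list presumably relaxes for the named modules) now applies to sphinx.util.docfields as well; the rest of the commit adds the explicit type parameters the check demands. A minimal toy illustration of what the check rejects (not taken from the Sphinx sources):

# Toy example, not from the Sphinx sources.
def first(entries: list) -> str:
    # mypy with disallow_any_generics reports something like:
    # Missing type parameters for generic type "list"
    return entries[0]


def first_typed(entries: list[str]) -> str:
    # accepted: the type parameter is spelled out
    return entries[0]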
@@ -17,7 +17,7 @@ from sphinx.util import logging
 from sphinx.util.nodes import get_node_line
 
 if TYPE_CHECKING:
-    from typing import Any
+    from typing import TypeAlias, TypeVar
 
     from docutils.nodes import Element, Node
     from docutils.parsers.rst.states import Inliner
@@ -26,6 +26,11 @@ if TYPE_CHECKING:
     from sphinx.environment import BuildEnvironment
     from sphinx.util.typing import TextlikeNode
 
+    ObjDescT = TypeVar('ObjDescT')
+    _FieldEntry: TypeAlias = tuple[str, list[Node]]
+    _FieldTypes: TypeAlias = dict[str, list[Node]]
+    _EntriesTriple: TypeAlias = tuple['Field', _FieldEntry | list[_FieldEntry], Element]
+
 logger = logging.getLogger(__name__)
 
 
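The new aliases are declared inside the `if TYPE_CHECKING:` block, so they exist only for the type checker and are never evaluated at runtime; that is safe as long as annotations themselves are not evaluated either (for example via `from __future__ import annotations`). A standalone sketch of the pattern, with made-up names:

# Standalone sketch of the TYPE_CHECKING-only alias pattern (made-up names,
# not part of Sphinx).
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import TypeAlias

    _Entry: TypeAlias = tuple[str, list[str]]  # visible to the type checker only


def make_entry(name: str, content: list[str]) -> _Entry:
    # The annotation is never evaluated at runtime, so _Entry need not exist here.
    return name, content


print(make_entry('param', ['spam']))  # ('param', ['spam'])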
@@ -133,14 +138,14 @@ class Field:
             )
         ]
 
-    def make_entry(self, fieldarg: str, content: list[Node]) -> tuple[str, list[Node]]:
+    def make_entry(self, fieldarg: str, content: list[Node]) -> _FieldEntry:
         return fieldarg, content
 
     def make_field(
         self,
-        types: dict[str, list[Node]],
+        types: _FieldTypes,
         domain: str,
-        item: tuple,
+        item: _FieldEntry,
         env: BuildEnvironment | None = None,
         inliner: Inliner | None = None,
         location: Element | None = None,
@@ -211,9 +216,9 @@ class GroupedField(Field):
 
     def make_field(
         self,
-        types: dict[str, list[Node]],
+        types: _FieldTypes,
         domain: str,
-        items: tuple,
+        items: list[_FieldEntry],  # type: ignore[override]
         env: BuildEnvironment | None = None,
         inliner: Inliner | None = None,
         location: Element | None = None,
@@ -283,9 +288,9 @@ class TypedField(GroupedField):
 
     def make_field(
         self,
-        types: dict[str, list[Node]],
+        types: _FieldTypes,
         domain: str,
-        items: tuple,
+        items: list[_FieldEntry],  # type: ignore[override]
         env: BuildEnvironment | None = None,
         inliner: Inliner | None = None,
         location: Element | None = None,
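Both GroupedField and TypedField deliberately narrow the `items` parameter relative to `Field.make_field()`, which mypy flags as an unsafe override, hence the `# type: ignore[override]` comments. A toy illustration of the rule (not Sphinx code), where mypy reports something like 'Argument 1 of "render" is incompatible with supertype "Base"':

# Toy illustration, not Sphinx code: changing a parameter's type in an
# override breaks substitutability, so mypy emits [override] unless silenced.
class Base:
    def render(self, item: tuple[str, int]) -> str:
        return f'{item[0]}={item[1]}'


class Grouped(Base):
    def render(self, item: list[tuple[str, int]]) -> str:  # type: ignore[override]
        return ', '.join(f'{name}={value}' for name, value in item)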
@@ -344,7 +349,7 @@ class DocFieldTransformer:
 
     typemap: dict[str, tuple[Field, bool]]
 
-    def __init__(self, directive: ObjectDescription) -> None:
+    def __init__(self, directive: ObjectDescription[ObjDescT]) -> None:
         self.directive = directive
 
         self.typemap = directive.get_field_type_map()
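`ObjectDescription` is a generic class, so under disallow_any_generics the bare name is no longer accepted in the annotation; parameterising it with a TypeVar spells out the type parameter even though the transformer does not depend on the concrete object type. A hedged sketch with stand-in classes (not the Sphinx API):

# Stand-in classes, not the Sphinx API, showing the bare-generic fix.
from typing import Generic, TypeVar

T = TypeVar('T')


class Directive(Generic[T]):
    domain = 'py'


class Transformer:
    # A bare `directive: Directive` would fail disallow_any_generics;
    # `Directive[T]` makes the type parameter explicit.
    def __init__(self, directive: Directive[T]) -> None:
        self.directive = directive


Transformer(Directive[str]())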
@@ -358,115 +363,129 @@ class DocFieldTransformer:
 
     def transform(self, node: nodes.field_list) -> None:
         """Transform a single field list *node*."""
-        typemap = self.typemap
-
-        entries: list[nodes.field | tuple[Field, Any, Element]] = []
+        entries: list[nodes.field | _EntriesTriple] = []
         groupindices: dict[str, int] = {}
-        types: dict[str, dict] = {}
+        types: dict[str, _FieldTypes] = {}
 
         # step 1: traverse all fields and collect field types and content
         for field in cast('list[nodes.field]', node):
-            assert len(field) == 2
-            field_name = cast('nodes.field_name', field[0])
-            field_body = cast('nodes.field_body', field[1])
-            try:
-                # split into field type and argument
-                fieldtype_name, fieldarg = field_name.astext().split(None, 1)
-            except ValueError:
-                # maybe an argument-less field type?
-                fieldtype_name, fieldarg = field_name.astext(), ''
-            typedesc, is_typefield = typemap.get(fieldtype_name, (None, None))
-
-            # collect the content, trying not to keep unnecessary paragraphs
-            if _is_single_paragraph(field_body):
-                paragraph = cast('nodes.paragraph', field_body[0])
-                content = paragraph.children
-            else:
-                content = field_body.children
-
-            # sort out unknown fields
-            if typedesc is None or typedesc.has_arg != bool(fieldarg):
-                # either the field name is unknown, or the argument doesn't
-                # match the spec; capitalize field name and be done with it
-                new_fieldname = fieldtype_name[0:1].upper() + fieldtype_name[1:]
-                if fieldarg:
-                    new_fieldname += ' ' + fieldarg
-                field_name[0] = nodes.Text(new_fieldname)
-                entries.append(field)
-
-                # but if this has a type then we can at least link it
-                if (
-                    typedesc
-                    and is_typefield
-                    and content
-                    and len(content) == 1
-                    and isinstance(content[0], nodes.Text)
-                ):
-                    typed_field = cast('TypedField', typedesc)
-                    target = content[0].astext()
-                    xrefs = typed_field.make_xrefs(
-                        typed_field.typerolename,
-                        self.directive.domain or '',
-                        target,
-                        contnode=content[0],
-                        env=self.directive.env,
-                    )
-                    if _is_single_paragraph(field_body):
-                        paragraph = cast('nodes.paragraph', field_body[0])
-                        paragraph.clear()
-                        paragraph.extend(xrefs)
-                    else:
-                        field_body.clear()
-                        field_body += nodes.paragraph('', '', *xrefs)
-
-                continue
-
-            typename = typedesc.name
-
-            # if the field specifies a type, put it in the types collection
-            if is_typefield:
-                # filter out only inline nodes; others will result in invalid
-                # markup being written out
-                content = [
-                    n for n in content if isinstance(n, nodes.Inline | nodes.Text)
-                ]
-                if content:
-                    types.setdefault(typename, {})[fieldarg] = content
-                continue
-
-            # also support syntax like ``:param type name:``
-            if typedesc.is_typed:
-                try:
-                    argtype, argname = fieldarg.rsplit(None, 1)
-                except ValueError:
-                    pass
-                else:
-                    types.setdefault(typename, {})[argname] = [nodes.Text(argtype)]
-                    fieldarg = argname
-
-            translatable_content = nodes.inline(field_body.rawsource, translatable=True)
-            translatable_content.document = field_body.parent.document
-            translatable_content.source = field_body.parent.source
-            translatable_content.line = field_body.parent.line
-            translatable_content += content
-
-            # grouped entries need to be collected in one entry, while others
-            # get one entry per field
-            if typedesc.is_grouped:
-                if typename in groupindices:
-                    group = cast(
-                        'tuple[Field, list, Node]', entries[groupindices[typename]]
-                    )
-                else:
-                    groupindices[typename] = len(entries)
-                    group = (typedesc, [], field)
-                    entries.append(group)
-                new_entry = typedesc.make_entry(fieldarg, [translatable_content])
-                group[1].append(new_entry)
-            else:
-                new_entry = typedesc.make_entry(fieldarg, [translatable_content])
-                entries.append((typedesc, new_entry, field))
+            self._transform_step_1(field, entries, types, groupindices)
+
+        new_list = self._transform_step_2(entries, types)
+        node.replace_self(new_list)
+
+    def _transform_step_1(
+        self,
+        field: nodes.field,
+        entries: list[nodes.field | _EntriesTriple],
+        types: dict[str, _FieldTypes],
+        group_indices: dict[str, int],
+    ) -> None:
+        assert len(field) == 2
+        field_name = cast('nodes.field_name', field[0])
+        field_body = cast('nodes.field_body', field[1])
+        try:
+            # split into field type and argument
+            fieldtype_name, fieldarg = field_name.astext().split(None, 1)
+        except ValueError:
+            # maybe an argument-less field type?
+            fieldtype_name, fieldarg = field_name.astext(), ''
+        typedesc, is_typefield = self.typemap.get(fieldtype_name, (None, None))
+
+        # collect the content, trying not to keep unnecessary paragraphs
+        if _is_single_paragraph(field_body):
+            paragraph = cast('nodes.paragraph', field_body[0])
+            content = paragraph.children
+        else:
+            content = field_body.children
+
+        # sort out unknown fields
+        if typedesc is None or typedesc.has_arg != bool(fieldarg):
+            # either the field name is unknown, or the argument doesn't
+            # match the spec; capitalize field name and be done with it
+            new_fieldname = fieldtype_name[0:1].upper() + fieldtype_name[1:]
+            if fieldarg:
+                new_fieldname += ' ' + fieldarg
+            field_name[0] = nodes.Text(new_fieldname)
+            entries.append(field)
+
+            # but if this has a type then we can at least link it
+            if (
+                typedesc
+                and is_typefield
+                and content
+                and len(content) == 1
+                and isinstance(content[0], nodes.Text)
+            ):
+                typed_field = cast('TypedField', typedesc)
+                target = content[0].astext()
+                xrefs = typed_field.make_xrefs(
+                    typed_field.typerolename,
+                    self.directive.domain or '',
+                    target,
+                    contnode=content[0],
+                    env=self.directive.env,
+                )
+                if _is_single_paragraph(field_body):
+                    paragraph = cast('nodes.paragraph', field_body[0])
+                    paragraph.clear()
+                    paragraph.extend(xrefs)
+                else:
+                    field_body.clear()
+                    field_body += nodes.paragraph('', '', *xrefs)
+
+            return
+
+        typename = typedesc.name
+
+        # if the field specifies a type, put it in the types collection
+        if is_typefield:
+            # filter out only inline nodes; others will result in invalid
+            # markup being written out
+            content = [n for n in content if isinstance(n, nodes.Inline | nodes.Text)]
+            if content:
+                types.setdefault(typename, {})[fieldarg] = content
+            return
+
+        # also support syntax like ``:param type name:``
+        if typedesc.is_typed:
+            try:
+                argtype, argname = fieldarg.rsplit(None, 1)
+            except ValueError:
+                pass
+            else:
+                types.setdefault(typename, {})[argname] = [nodes.Text(argtype)]
+                fieldarg = argname
+
+        translatable_content = nodes.inline(field_body.rawsource, translatable=True)
+        translatable_content.document = field_body.parent.document
+        translatable_content.source = field_body.parent.source
+        translatable_content.line = field_body.parent.line
+        translatable_content += content
+
+        # grouped entries need to be collected in one entry, while others
+        # get one entry per field
+        if typedesc.is_grouped:
+            if typename in group_indices:
+                group = cast(
+                    'tuple[Field, list[_FieldEntry], Node]',
+                    entries[group_indices[typename]],
+                )
+            else:
+                group_indices[typename] = len(entries)
+                group = (typedesc, [], field)
+                entries.append(group)
+            new_entry = typedesc.make_entry(fieldarg, [translatable_content])
+            group[1].append(new_entry)
+        else:
+            new_entry = typedesc.make_entry(fieldarg, [translatable_content])
+            entries.append((typedesc, new_entry, field))
 
+    def _transform_step_2(
+        self,
+        entries: list[nodes.field | _EntriesTriple],
+        types: dict[str, _FieldTypes],
+    ) -> nodes.field_list:
         # step 2: all entries are collected, construct the new field list
         new_list = nodes.field_list()
         for entry in entries:
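The hunk above splits the old monolithic transform() into _transform_step_1() (called once per field, mutating shared accumulators) and _transform_step_2() (building the replacement field list), which keeps each piece small enough to annotate precisely. A minimal toy of the same accumulator-passing shape (illustrative names only, not Sphinx code):

# Minimal toy of the accumulator-passing split; illustrative names only.
def _step_1(word: str, entries: list[tuple[str, int]], counts: dict[str, int]) -> None:
    counts[word] = counts.get(word, 0) + 1
    entries.append((word, counts[word]))


def _step_2(entries: list[tuple[str, int]]) -> list[str]:
    return [f'{word}#{n}' for word, n in entries]


def transform(words: list[str]) -> list[str]:
    entries: list[tuple[str, int]] = []
    counts: dict[str, int] = {}
    for word in words:
        _step_1(word, entries, counts)
    return _step_2(entries)


print(transform(['spam', 'ham', 'spam']))  # ['spam#1', 'ham#1', 'spam#2']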
@@ -482,10 +501,10 @@ class DocFieldTransformer:
                 new_list += fieldtype.make_field(
                     fieldtypes,
                     domain,
-                    items,
+                    items,  # type: ignore[arg-type]
                     env=env,
                     inliner=inliner,
                     location=location,
                 )
 
-        node.replace_self(new_list)
+        return new_list
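The `# type: ignore[arg-type]` at this call site follows from _EntriesTriple: the middle element may be a single _FieldEntry or a list of them, and only the concrete Field subclass determines which one is expected, so the shared call passes a union that the narrowed signatures cannot accept without an ignore. A toy illustration (not Sphinx code):

# Toy illustration, not Sphinx code: a union value cannot be passed where
# only one arm of the union is accepted, giving an [arg-type] error.
from __future__ import annotations


def show_one(item: tuple[str, int]) -> str:
    return f'{item[0]}={item[1]}'


def show(entry: tuple[str, int] | list[tuple[str, int]]) -> str:
    # mypy reports an incompatible argument type here unless silenced.
    return show_one(entry)  # type: ignore[arg-type]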