Fix annotations for util

parent 69065682f4
commit a0bb4c8c8f
@@ -36,6 +36,7 @@ if False:
     from sphinx.application import Sphinx  # NOQA
     from sphinx.config import Config  # NOQA
     from sphinx.environment import BuildEnvironment  # NOQA
+    from sphinx.util.docfields import Field  # NOQA
     from sphinx.util.typing import N_co, unicode  # NOQA

@@ -60,7 +61,7 @@ class ObjectDescription(SphinxDirective):
     }

     # types of doc fields that this directive handles, see sphinx.util.docfields
-    doc_field_types = []  # type: List[Any]
+    doc_field_types = []  # type: List[Field]
     domain = None  # type: unicode
     objtype = None  # type: unicode
     indexnode = None  # type: addnodes.index
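
Note: narrowing doc_field_types from List[Any] to List[Field] lets mypy
check the field declarations of ObjectDescription subclasses. A minimal
sketch of what now typechecks (the directive and its field names are
hypothetical, not part of this commit):

    from sphinx.directives import ObjectDescription
    from sphinx.util.docfields import Field, TypedField

    class RecipeDirective(ObjectDescription):  # hypothetical subclass
        # checked against List[Field]; a stray string here is now flagged
        doc_field_types = [
            TypedField('ingredient', label='Ingredients',
                       names=('ingredient',), typenames=('ingtype',)),
            Field('cooktime', label='Cooking time',
                  names=('cooktime',), has_arg=False),
        ]
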
@@ -352,11 +352,11 @@ _coding_re = re.compile(r'coding[:=]\s*([-\w.]+)')


 def detect_encoding(readline):
-    # type: (Callable) -> unicode
+    # type: (Callable[[], bytes]) -> unicode
     """Like tokenize.detect_encoding() from Py3k, but a bit simplified."""

     def read_or_stop():
-        # type: () -> unicode
+        # type: () -> bytes
         try:
             return readline()
         except StopIteration:
@@ -375,7 +375,7 @@ def detect_encoding(readline):
         return orig_enc

     def find_cookie(line):
-        # type: (unicode) -> unicode
+        # type: (bytes) -> unicode
         try:
             line_string = line.decode('ascii')
         except UnicodeDecodeError:
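
Note: Callable[[], bytes] matches how detect_encoding() is actually driven;
readline must yield raw bytes, since find_cookie() decodes them with
line.decode('ascii'). A usage sketch (the file name is hypothetical):

    with open('conf.py', 'rb') as f:  # binary mode: f.readline returns bytes
        source_encoding = detect_encoding(f.readline)
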
@@ -11,25 +11,27 @@
 """
 from __future__ import absolute_import

+from typing import List, cast
+
 from docutils import nodes

 from sphinx import addnodes

 if False:
     # For type annotation
-    from typing import Any, Dict, List, Tuple, Type  # NOQA
+    from typing import Any, Dict, Tuple, Type  # NOQA
     from sphinx.domains import Domain  # NOQA
     from sphinx.environment import BuildEnvironment  # NOQA
     from sphinx.util.typing import unicode  # NOQA


 def _is_single_paragraph(node):
-    # type: (nodes.Node) -> bool
+    # type: (nodes.field_body) -> bool
     """True if the node only contains one paragraph (and system messages)."""
     if len(node) == 0:
         return False
     elif len(node) > 1:
-        for subnode in node[1:]:
+        for subnode in node[1:]:  # type: nodes.Node
             if not isinstance(subnode, nodes.system_message):
                 return False
     if isinstance(node[0], nodes.paragraph):
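
Note: imports under "if False:" are read by the type checker but never
executed, which suffices for names that appear only in type comments.
cast(), however, is called at runtime, so List and cast move to an
unconditional import. A reduced sketch of the pattern:

    from typing import cast  # needed at runtime

    if False:
        # For type annotation only: mypy reads this, the interpreter never does
        from typing import Dict  # NOQA
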
@@ -93,7 +95,7 @@ class Field:
         return [self.make_xref(rolename, domain, target, innernode, contnode, env)]

     def make_entry(self, fieldarg, content):
-        # type: (List, unicode) -> Tuple[List, unicode]
+        # type: (unicode, List[nodes.Node]) -> Tuple[unicode, List[nodes.Node]]
         return (fieldarg, content)

     def make_field(self,
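
Note: the old comment had the two parameter types swapped; at runtime
fieldarg is the field argument string and content is the list of body
nodes. A sketch of the actual call shape (values hypothetical):

    field = Field('parameter', names=('param',))
    entry = field.make_entry('lines', [nodes.Text('the lines argument')])
    # entry == ('lines', [nodes.Text('the lines argument')])
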
@@ -160,7 +162,8 @@ class GroupedField(Field):
             listnode += nodes.list_item('', par)

         if len(items) == 1 and self.can_collapse:
-            fieldbody = nodes.field_body('', listnode[0][0])
+            list_item = cast(nodes.list_item, listnode[0])
+            fieldbody = nodes.field_body('', list_item[0])
             return nodes.field('', fieldname, fieldbody)

         fieldbody = nodes.field_body('', listnode)
@@ -227,7 +230,7 @@ class TypedField(GroupedField):
         fieldname = nodes.field_name('', self.label)
         if len(items) == 1 and self.can_collapse:
             fieldarg, content = items[0]
-            bodynode = handle_item(fieldarg, content)
+            bodynode = handle_item(fieldarg, content)  # type: nodes.Node
         else:
             bodynode = self.list_type()
             for fieldarg, content in items:
@@ -241,6 +244,7 @@ class DocFieldTransformer:
     Transforms field lists in "doc field" syntax into better-looking
     equivalents, using the field type definitions given on a domain.
     """
+    typemap = None  # type: Dict[unicode, Tuple[Field, bool]]

     def __init__(self, directive):
         # type: (Any) -> None
@@ -251,18 +255,19 @@ class DocFieldTransformer:
         self.typemap = directive._doc_field_type_map

     def preprocess_fieldtypes(self, types):
-        # type: (List) -> Dict[unicode, Tuple[Any, bool]]
+        # type: (List[Field]) -> Dict[unicode, Tuple[Field, bool]]
         typemap = {}
         for fieldtype in types:
             for name in fieldtype.names:
                 typemap[name] = fieldtype, False
             if fieldtype.is_typed:
-                for name in fieldtype.typenames:
-                    typemap[name] = fieldtype, True
+                typed_field = cast(TypedField, fieldtype)
+                for name in typed_field.typenames:
+                    typemap[name] = typed_field, True
         return typemap

     def transform_all(self, node):
-        # type: (nodes.Node) -> None
+        # type: (addnodes.desc_content) -> None
         """Transform all field list children of a node."""
         # don't traverse, only handle field lists that are immediate children
         for child in node:
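
Note: typing.cast() is a runtime no-op that merely narrows the static
type; it is safe here because is_typed is True exactly for TypedField,
the only Field subclass that defines typenames. A minimal demonstration:

    from typing import cast

    typed_field = cast(TypedField, fieldtype)  # returns fieldtype unchanged
    assert typed_field is fieldtype
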
@@ -270,30 +275,33 @@ class DocFieldTransformer:
             self.transform(child)

     def transform(self, node):
-        # type: (nodes.Node) -> None
+        # type: (nodes.field_list) -> None
         """Transform a single field list *node*."""
         typemap = self.typemap

-        entries = []
-        groupindices = {}  # type: Dict[unicode, int]
-        types = {}  # type: Dict[unicode, Dict]
+        entries = []  # type: List
+        groupindices = {}  # type: Dict[unicode, int]
+        types = {}  # type: Dict[unicode, Dict]

         # step 1: traverse all fields and collect field types and content
-        for field in node:
-            fieldname, fieldbody = field
+        for field in cast(List[nodes.field], node):
+            assert len(field) == 2
+            field_name = cast(nodes.field_name, field[0])
+            field_body = cast(nodes.field_body, field[1])
             try:
                 # split into field type and argument
-                fieldtype, fieldarg = fieldname.astext().split(None, 1)
+                fieldtype, fieldarg = field_name.astext().split(None, 1)
             except ValueError:
                 # maybe an argument-less field type?
-                fieldtype, fieldarg = fieldname.astext(), ''
+                fieldtype, fieldarg = field_name.astext(), ''
             typedesc, is_typefield = typemap.get(fieldtype, (None, None))

             # collect the content, trying not to keep unnecessary paragraphs
-            if _is_single_paragraph(fieldbody):
-                content = fieldbody.children[0].children
+            if _is_single_paragraph(field_body):
+                paragraph = cast(nodes.paragraph, field_body[0])
+                content = paragraph.children
             else:
-                content = fieldbody.children
+                content = field_body.children

             # sort out unknown fields
             if typedesc is None or typedesc.has_arg != bool(fieldarg):
@@ -302,26 +310,27 @@ class DocFieldTransformer:
                 new_fieldname = fieldtype[0:1].upper() + fieldtype[1:]
                 if fieldarg:
                     new_fieldname += ' ' + fieldarg
-                fieldname[0] = nodes.Text(new_fieldname)
+                field_name[0] = nodes.Text(new_fieldname)
                 entries.append(field)

                 # but if this has a type then we can at least link it
                 if (typedesc and is_typefield and content and
                         len(content) == 1 and isinstance(content[0], nodes.Text)):
+                    typed_field = cast(TypedField, typedesc)
                     target = content[0].astext()
-                    xrefs = typedesc.make_xrefs(
-                        typedesc.typerolename,
+                    xrefs = typed_field.make_xrefs(
+                        typed_field.typerolename,
                         self.directive.domain,
                         target,
                         contnode=content[0],
                     )
-                    if _is_single_paragraph(fieldbody):
-                        fieldbody.children[0].clear()
-                        fieldbody.children[0].extend(xrefs)
+                    if _is_single_paragraph(field_body):
+                        paragraph = cast(nodes.paragraph, field_body[0])
+                        paragraph.clear()
+                        paragraph.extend(xrefs)
                     else:
-                        fieldbody.clear()
-                        fieldbody += nodes.paragraph()
-                        fieldbody[0].extend(xrefs)
+                        field_body.clear()
+                        field_body += nodes.paragraph('', '', *xrefs)

                 continue

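
Note: docutils text elements take (rawsource, text, *children) in their
constructor, so the old build-then-fill sequence collapses into one call.
The two forms are equivalent:

    para = nodes.paragraph()                # old: build, then fill
    para.extend(xrefs)

    para = nodes.paragraph('', '', *xrefs)  # new: children at construction
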
@@ -348,11 +357,11 @@ class DocFieldTransformer:
                     [nodes.Text(argtype)]
                 fieldarg = argname

-            translatable_content = nodes.inline(fieldbody.rawsource,
+            translatable_content = nodes.inline(field_body.rawsource,
                                                 translatable=True)
-            translatable_content.document = fieldbody.parent.document
-            translatable_content.source = fieldbody.parent.source
-            translatable_content.line = fieldbody.parent.line
+            translatable_content.document = field_body.parent.document
+            translatable_content.source = field_body.parent.source
+            translatable_content.line = field_body.parent.line
             translatable_content += content

             # grouped entries need to be collected in one entry, while others
@@ -36,6 +36,7 @@ report_re = re.compile('^(.+?:(?:\\d+)?): \\((DEBUG|INFO|WARNING|ERROR|SEVERE)/(

 if False:
     # For type annotation
+    from types import ModuleType  # NOQA
     from typing import Any, Callable, Generator, List, Set, Tuple, Type  # NOQA
     from docutils.statemachine import State, ViewList  # NOQA
     from sphinx.config import Config  # NOQA
@@ -45,7 +46,7 @@ if False:


 __version_info__ = tuple(LooseVersion(docutils.__version__).version)
-additional_nodes = set()  # type: Set[nodes.Node]
+additional_nodes = set()  # type: Set[Type[nodes.Element]]


 @contextmanager
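
Note: additional_nodes holds node *classes* registered for the docutils
namespace, not node instances, hence Set[Type[nodes.Element]]. A sketch
with a hypothetical custom node:

    from docutils import nodes

    class video(nodes.General, nodes.Element):  # hypothetical custom node
        pass

    additional_nodes.add(video)  # the class itself is stored
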
@@ -209,14 +210,14 @@ class sphinx_domains:
             raise ElementLookupError

     def lookup_directive(self, name, lang_module, document):
-        # type: (unicode, unicode, nodes.document) -> Tuple[Any, List]
+        # type: (unicode, ModuleType, nodes.document) -> Tuple[Any, List]
         try:
             return self.lookup_domain_element('directive', name)
         except ElementLookupError:
             return self.directive_func(name, lang_module, document)

     def lookup_role(self, name, lang_module, lineno, reporter):
-        # type: (unicode, unicode, int, Any) -> Tuple[Any, List]
+        # type: (unicode, ModuleType, int, Any) -> Tuple[Any, List]
         try:
             return self.lookup_domain_element('role', name)
         except ElementLookupError:
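
Note: lang_module is a docutils language module object rather than its
dotted name, so ModuleType is the accurate annotation. For instance:

    from types import ModuleType
    from docutils.parsers.rst.languages import en  # a language module

    assert isinstance(en, ModuleType)  # a module, not a unicode string
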
@@ -18,7 +18,7 @@ if False:


 def get_node_equation_number(writer, node):
-    # type: (HTMLTranslator, nodes.Node) -> unicode
+    # type: (HTMLTranslator, nodes.math_block) -> unicode
     if writer.builder.config.math_numfig and writer.builder.config.numfig:
         figtype = 'displaymath'
         if writer.builder.name == 'singlehtml':
@@ -298,8 +298,7 @@ def traverse_parent(node, cls=None):
 def traverse_translatable_index(doctree):
     # type: (nodes.Element) -> Iterable[Tuple[nodes.Element, List[unicode]]]
     """Traverse translatable index node from a document tree."""
-    node = None  # type: nodes.Element
-    for node in doctree.traverse(NodeMatcher(addnodes.index, inline=False)):
+    for node in doctree.traverse(NodeMatcher(addnodes.index, inline=False)):  # type: addnodes.index  # NOQA
         if 'raw_entries' in node:
             entries = node['raw_entries']
         else:
@@ -34,7 +34,7 @@ from docutils.utils import smartquotes
 from sphinx.util.docutils import __version_info__ as docutils_version

 if False:  # For type annotation
-    from typing import Iterable, Iterator, Tuple  # NOQA
+    from typing import Generator, Iterable, Tuple  # NOQA
     from sphinx.util.typing import unicode  # NOQA

@@ -244,7 +244,7 @@ def educateQuotes(text, language='en'):


 def educate_tokens(text_tokens, attr=smartquotes.default_smartypants_attr, language='en'):
-    # type: (Iterable[Tuple[str, unicode]], unicode, unicode) -> Iterator
+    # type: (Iterable[Tuple[str, unicode]], unicode, unicode) -> Generator[unicode, None, None]
     """Return iterator that "educates" the items of `text_tokens`.

     This is modified to intercept the ``attr='2'`` as it was used by the
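
Note: educate_tokens is a generator function, and Generator[unicode, None,
None] records its yield, send, and return types, where the bare Iterator
said nothing about what is produced. A reduced sketch of the shape (not
the real smartypants logic):

    from typing import Generator, Iterable, Tuple

    def shout_text_tokens(tokens):
        # type: (Iterable[Tuple[str, str]]) -> Generator[str, None, None]
        for kind, text in tokens:
            yield text.upper() if kind == 'text' else text
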