mirror of
https://github.com/sphinx-doc/sphinx.git
synced 2025-02-25 18:55:22 -06:00
Merge branch '2.0'
This commit is contained in:
commit
fe06eebfb5
@ -8,24 +8,22 @@
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
from typing import Any, Dict
|
||||
from typing import cast
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Node
|
||||
|
||||
from sphinx.application import Sphinx
|
||||
from sphinx.locale import __
|
||||
from sphinx.util import logging
|
||||
from sphinx.util.nodes import clean_astext
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
from typing import Any, Dict # NOQA
|
||||
from sphinx.application import Sphinx # NOQA
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_node_depth(node):
|
||||
def get_node_depth(node: Node) -> int:
|
||||
i = 0
|
||||
cur_node = node
|
||||
while cur_node.parent != node.document:
|
||||
@ -34,8 +32,7 @@ def get_node_depth(node):
|
||||
return i
|
||||
|
||||
|
||||
def register_sections_as_label(app, document):
|
||||
# type: (Sphinx, nodes.Node) -> None
|
||||
def register_sections_as_label(app: Sphinx, document: Node) -> None:
|
||||
labels = app.env.domaindata['std']['labels']
|
||||
anonlabels = app.env.domaindata['std']['anonlabels']
|
||||
for node in document.traverse(nodes.section):
|
||||
@ -61,8 +58,7 @@ def register_sections_as_label(app, document):
|
||||
labels[name] = docname, labelid, sectname
|
||||
|
||||
|
||||
def setup(app):
|
||||
# type: (Sphinx) -> Dict[str, Any]
|
||||
def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
app.add_config_value('autosectionlabel_prefix_document', False, 'env')
|
||||
app.add_config_value('autosectionlabel_maxdepth', None, 'env')
|
||||
app.connect('doctree-read', register_sections_as_label)
|
||||
|
@ -60,18 +60,22 @@ import sys
|
||||
import warnings
|
||||
from os import path
|
||||
from types import ModuleType
|
||||
from typing import List, cast
|
||||
from typing import Any, Dict, List, Tuple, Type
|
||||
from typing import cast
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Element, Node, system_message
|
||||
from docutils.parsers.rst import directives
|
||||
from docutils.parsers.rst.states import RSTStateMachine, Struct, state_classes
|
||||
from docutils.parsers.rst.states import Inliner, RSTStateMachine, Struct, state_classes
|
||||
from docutils.statemachine import StringList
|
||||
|
||||
import sphinx
|
||||
from sphinx import addnodes
|
||||
from sphinx.application import Sphinx
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning
|
||||
from sphinx.environment import BuildEnvironment
|
||||
from sphinx.environment.adapters.toctree import TocTree
|
||||
from sphinx.ext.autodoc import get_documenters
|
||||
from sphinx.ext.autodoc import Documenter, get_documenters
|
||||
from sphinx.ext.autodoc.directive import DocumenterBridge, Options
|
||||
from sphinx.ext.autodoc.importer import import_module
|
||||
from sphinx.ext.autodoc.mock import mock
|
||||
@ -82,15 +86,8 @@ from sphinx.util.docutils import (
|
||||
NullReporter, SphinxDirective, SphinxRole, new_document, switch_source_input
|
||||
)
|
||||
from sphinx.util.matching import Matcher
|
||||
from sphinx.writers.html import HTMLTranslator
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
from typing import Any, Dict, Tuple, Type # NOQA
|
||||
from docutils.parsers.rst.states import Inliner # NOQA
|
||||
from sphinx.application import Sphinx # NOQA
|
||||
from sphinx.environment import BuildEnvironment # NOQA
|
||||
from sphinx.ext.autodoc import Documenter # NOQA
|
||||
from sphinx.writers.html import HTMLTranslator # NOQA
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -105,16 +102,14 @@ class autosummary_toc(nodes.comment):
|
||||
pass
|
||||
|
||||
|
||||
def process_autosummary_toc(app, doctree):
|
||||
# type: (Sphinx, nodes.document) -> None
|
||||
def process_autosummary_toc(app: Sphinx, doctree: nodes.document) -> None:
|
||||
"""Insert items described in autosummary:: to the TOC tree, but do
|
||||
not generate the toctree:: list.
|
||||
"""
|
||||
env = app.builder.env
|
||||
crawled = {}
|
||||
|
||||
def crawl_toc(node, depth=1):
|
||||
# type: (nodes.Element, int) -> None
|
||||
def crawl_toc(node: Element, depth: int = 1) -> None:
|
||||
crawled[node] = True
|
||||
for j, subnode in enumerate(node):
|
||||
try:
|
||||
@ -131,14 +126,12 @@ def process_autosummary_toc(app, doctree):
|
||||
crawl_toc(doctree)
|
||||
|
||||
|
||||
def autosummary_toc_visit_html(self, node):
|
||||
# type: (nodes.NodeVisitor, autosummary_toc) -> None
|
||||
def autosummary_toc_visit_html(self: nodes.NodeVisitor, node: autosummary_toc) -> None:
|
||||
"""Hide autosummary toctree list in HTML output."""
|
||||
raise nodes.SkipNode
|
||||
|
||||
|
||||
def autosummary_noop(self, node):
|
||||
# type: (nodes.NodeVisitor, nodes.Node) -> None
|
||||
def autosummary_noop(self: nodes.NodeVisitor, node: Node) -> None:
|
||||
pass
|
||||
|
||||
|
||||
@ -148,8 +141,7 @@ class autosummary_table(nodes.comment):
|
||||
pass
|
||||
|
||||
|
||||
def autosummary_table_visit_html(self, node):
|
||||
# type: (HTMLTranslator, autosummary_table) -> None
|
||||
def autosummary_table_visit_html(self: HTMLTranslator, node: autosummary_table) -> None:
|
||||
"""Make the first column of the table non-breaking."""
|
||||
try:
|
||||
table = cast(nodes.table, node[0])
|
||||
@ -174,16 +166,14 @@ _app = None # type: Sphinx
|
||||
|
||||
|
||||
class FakeDirective(DocumenterBridge):
|
||||
def __init__(self):
|
||||
# type: () -> None
|
||||
def __init__(self) -> None:
|
||||
settings = Struct(tab_width=8)
|
||||
document = Struct(settings=settings)
|
||||
state = Struct(document=document)
|
||||
super().__init__({}, None, Options(), 0, state) # type: ignore
|
||||
|
||||
|
||||
def get_documenter(app, obj, parent):
|
||||
# type: (Sphinx, Any, Any) -> Type[Documenter]
|
||||
def get_documenter(app: Sphinx, obj: Any, parent: Any) -> Type[Documenter]:
|
||||
"""Get an autodoc.Documenter class suitable for documenting the given
|
||||
object.
|
||||
|
||||
@ -237,8 +227,7 @@ class Autosummary(SphinxDirective):
|
||||
'template': directives.unchanged,
|
||||
}
|
||||
|
||||
def run(self):
|
||||
# type: () -> List[nodes.Node]
|
||||
def run(self) -> List[Node]:
|
||||
self.bridge = DocumenterBridge(self.env, self.state.document.reporter,
|
||||
Options(), self.lineno, self.state)
|
||||
|
||||
@ -280,8 +269,7 @@ class Autosummary(SphinxDirective):
|
||||
|
||||
return nodes
|
||||
|
||||
def get_items(self, names):
|
||||
# type: (List[str]) -> List[Tuple[str, str, str, str]]
|
||||
def get_items(self, names: List[str]) -> List[Tuple[str, str, str, str]]:
|
||||
"""Try to import the given names, and return a list of
|
||||
``[(name, signature, summary_string, real_name), ...]``.
|
||||
"""
|
||||
@ -361,8 +349,7 @@ class Autosummary(SphinxDirective):
|
||||
|
||||
return items
|
||||
|
||||
def get_table(self, items):
|
||||
# type: (List[Tuple[str, str, str, str]]) -> List[nodes.Node]
|
||||
def get_table(self, items: List[Tuple[str, str, str, str]]) -> List[Node]:
|
||||
"""Generate a proper list of table nodes for autosummary:: directive.
|
||||
|
||||
*items* is a list produced by :meth:`get_items`.
|
||||
@ -380,8 +367,7 @@ class Autosummary(SphinxDirective):
|
||||
body = nodes.tbody('')
|
||||
group.append(body)
|
||||
|
||||
def append_row(*column_texts):
|
||||
# type: (str) -> None
|
||||
def append_row(*column_texts: str) -> None:
|
||||
row = nodes.row('')
|
||||
source, line = self.state_machine.get_source_and_line()
|
||||
for text in column_texts:
|
||||
@ -409,42 +395,36 @@ class Autosummary(SphinxDirective):
|
||||
|
||||
return [table_spec, table]
|
||||
|
||||
def warn(self, msg):
|
||||
# type: (str) -> None
|
||||
def warn(self, msg: str) -> None:
|
||||
warnings.warn('Autosummary.warn() is deprecated',
|
||||
RemovedInSphinx40Warning, stacklevel=2)
|
||||
logger.warning(msg)
|
||||
|
||||
@property
|
||||
def genopt(self):
|
||||
# type: () -> Options
|
||||
def genopt(self) -> Options:
|
||||
warnings.warn('Autosummary.genopt is deprecated',
|
||||
RemovedInSphinx40Warning, stacklevel=2)
|
||||
return self.bridge.genopt
|
||||
|
||||
@property
|
||||
def warnings(self):
|
||||
# type: () -> List[nodes.Node]
|
||||
def warnings(self) -> List[Node]:
|
||||
warnings.warn('Autosummary.warnings is deprecated',
|
||||
RemovedInSphinx40Warning, stacklevel=2)
|
||||
return []
|
||||
|
||||
@property
|
||||
def result(self):
|
||||
# type: () -> StringList
|
||||
def result(self) -> StringList:
|
||||
warnings.warn('Autosummary.result is deprecated',
|
||||
RemovedInSphinx40Warning, stacklevel=2)
|
||||
return self.bridge.result
|
||||
|
||||
|
||||
def strip_arg_typehint(s):
|
||||
# type: (str) -> str
|
||||
def strip_arg_typehint(s: str) -> str:
|
||||
"""Strip a type hint from argument definition."""
|
||||
return s.split(':')[0].strip()
|
||||
|
||||
|
||||
def mangle_signature(sig, max_chars=30):
|
||||
# type: (str, int) -> str
|
||||
def mangle_signature(sig: str, max_chars: int = 30) -> str:
|
||||
"""Reformat a function signature to a more compact form."""
|
||||
# Strip return type annotation
|
||||
s = re.sub(r"\)\s*->\s.*$", ")", sig)
|
||||
@ -501,8 +481,7 @@ def mangle_signature(sig, max_chars=30):
|
||||
return "(%s)" % sig
|
||||
|
||||
|
||||
def extract_summary(doc, document):
|
||||
# type: (List[str], Any) -> str
|
||||
def extract_summary(doc: List[str], document: Any) -> str:
|
||||
"""Extract summary from docstring."""
|
||||
|
||||
# Skip a blank lines at the top
|
||||
@ -550,8 +529,8 @@ def extract_summary(doc, document):
|
||||
return summary
|
||||
|
||||
|
||||
def limited_join(sep, items, max_chars=30, overflow_marker="..."):
|
||||
# type: (str, List[str], int, str) -> str
|
||||
def limited_join(sep: str, items: List[str], max_chars: int = 30,
|
||||
overflow_marker: str = "...") -> str:
|
||||
"""Join a number of strings to one, limiting the length to *max_chars*.
|
||||
|
||||
If the string overflows this limit, replace the last fitting item by
|
||||
@ -577,8 +556,7 @@ def limited_join(sep, items, max_chars=30, overflow_marker="..."):
|
||||
|
||||
# -- Importing items -----------------------------------------------------------
|
||||
|
||||
def get_import_prefixes_from_env(env):
|
||||
# type: (BuildEnvironment) -> List[str]
|
||||
def get_import_prefixes_from_env(env: BuildEnvironment) -> List[str]:
|
||||
"""
|
||||
Obtain current Python import prefixes (for `import_by_name`)
|
||||
from ``document.env``
|
||||
@ -599,8 +577,7 @@ def get_import_prefixes_from_env(env):
|
||||
return prefixes
|
||||
|
||||
|
||||
def import_by_name(name, prefixes=[None]):
|
||||
# type: (str, List[str]) -> Tuple[str, Any, Any, str]
|
||||
def import_by_name(name: str, prefixes: List[str] = [None]) -> Tuple[str, Any, Any, str]:
|
||||
"""Import a Python object that has the given *name*, under one of the
|
||||
*prefixes*. The first name that succeeds is used.
|
||||
"""
|
||||
@ -618,8 +595,7 @@ def import_by_name(name, prefixes=[None]):
|
||||
raise ImportError('no module named %s' % ' or '.join(tried))
|
||||
|
||||
|
||||
def _import_by_name(name):
|
||||
# type: (str) -> Tuple[Any, Any, str]
|
||||
def _import_by_name(name: str) -> Tuple[Any, Any, str]:
|
||||
"""Import a Python object given its full name."""
|
||||
try:
|
||||
name_parts = name.split('.')
|
||||
@ -662,8 +638,9 @@ def _import_by_name(name):
|
||||
|
||||
# -- :autolink: (smart default role) -------------------------------------------
|
||||
|
||||
def autolink_role(typ, rawtext, etext, lineno, inliner, options={}, content=[]):
|
||||
# type: (str, str, str, int, Inliner, Dict, List[str]) -> Tuple[List[nodes.Node], List[nodes.system_message]] # NOQA
|
||||
def autolink_role(typ: str, rawtext: str, etext: str, lineno: int, inliner: Inliner,
|
||||
options: Dict = {}, content: List[str] = []
|
||||
) -> Tuple[List[Node], List[system_message]]:
|
||||
"""Smart linking role.
|
||||
|
||||
Expands to ':obj:`text`' if `text` is an object that can be imported;
|
||||
@ -694,8 +671,7 @@ class AutoLink(SphinxRole):
|
||||
Expands to ':obj:`text`' if `text` is an object that can be imported;
|
||||
otherwise expands to '*text*'.
|
||||
"""
|
||||
def run(self):
|
||||
# type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
|
||||
def run(self) -> Tuple[List[Node], List[system_message]]:
|
||||
pyobj_role = self.env.get_domain('py').role('obj')
|
||||
objects, errors = pyobj_role('obj', self.rawtext, self.text, self.lineno,
|
||||
self.inliner, self.options, self.content)
|
||||
@ -716,10 +692,8 @@ class AutoLink(SphinxRole):
|
||||
return objects, errors
|
||||
|
||||
|
||||
def get_rst_suffix(app):
|
||||
# type: (Sphinx) -> str
|
||||
def get_supported_format(suffix):
|
||||
# type: (str) -> Tuple[str, ...]
|
||||
def get_rst_suffix(app: Sphinx) -> str:
|
||||
def get_supported_format(suffix: str) -> Tuple[str, ...]:
|
||||
parser_class = app.registry.get_source_parsers().get(suffix)
|
||||
if parser_class is None:
|
||||
return ('restructuredtext',)
|
||||
@ -735,8 +709,7 @@ def get_rst_suffix(app):
|
||||
return None
|
||||
|
||||
|
||||
def process_generate_options(app):
|
||||
# type: (Sphinx) -> None
|
||||
def process_generate_options(app: Sphinx) -> None:
|
||||
genfiles = app.config.autosummary_generate
|
||||
|
||||
if genfiles is True:
|
||||
@ -771,8 +744,7 @@ def process_generate_options(app):
|
||||
app=app, imported_members=imported_members)
|
||||
|
||||
|
||||
def setup(app):
|
||||
# type: (Sphinx) -> Dict[str, Any]
|
||||
def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
# I need autodoc
|
||||
app.setup_extension('sphinx.ext.autodoc')
|
||||
app.add_node(autosummary_toc,
|
||||
|
@ -24,6 +24,7 @@ import pydoc
|
||||
import re
|
||||
import sys
|
||||
import warnings
|
||||
from typing import Any, Callable, Dict, List, Set, Tuple, Type
|
||||
|
||||
from jinja2 import BaseLoader, FileSystemLoader, TemplateNotFound
|
||||
from jinja2.sandbox import SandboxedEnvironment
|
||||
@ -31,7 +32,9 @@ from jinja2.sandbox import SandboxedEnvironment
|
||||
import sphinx.locale
|
||||
from sphinx import __display_version__
|
||||
from sphinx import package_dir
|
||||
from sphinx.builders import Builder
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning
|
||||
from sphinx.ext.autodoc import Documenter
|
||||
from sphinx.ext.autosummary import import_by_name, get_documenter
|
||||
from sphinx.jinja2glue import BuiltinTemplateLoader
|
||||
from sphinx.locale import __
|
||||
@ -41,12 +44,6 @@ from sphinx.util import rst
|
||||
from sphinx.util.inspect import safe_getattr
|
||||
from sphinx.util.osutil import ensuredir
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
from typing import Any, Callable, Dict, List, Set, Tuple, Type, Union # NOQA
|
||||
from sphinx.builders import Builder # NOQA
|
||||
from sphinx.ext.autodoc import Documenter # NOQA
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -54,15 +51,13 @@ logger = logging.getLogger(__name__)
|
||||
class DummyApplication:
|
||||
"""Dummy Application class for sphinx-autogen command."""
|
||||
|
||||
def __init__(self):
|
||||
# type: () -> None
|
||||
def __init__(self) -> None:
|
||||
self.registry = SphinxComponentRegistry()
|
||||
self.messagelog = [] # type: List[str]
|
||||
self.verbosity = 0
|
||||
|
||||
|
||||
def setup_documenters(app):
|
||||
# type: (Any) -> None
|
||||
def setup_documenters(app: Any) -> None:
|
||||
from sphinx.ext.autodoc import (
|
||||
ModuleDocumenter, ClassDocumenter, ExceptionDocumenter, DataDocumenter,
|
||||
FunctionDocumenter, MethodDocumenter, AttributeDocumenter,
|
||||
@ -79,18 +74,15 @@ def setup_documenters(app):
|
||||
app.registry.add_documenter(documenter.objtype, documenter)
|
||||
|
||||
|
||||
def _simple_info(msg):
|
||||
# type: (str) -> None
|
||||
def _simple_info(msg: str) -> None:
|
||||
print(msg)
|
||||
|
||||
|
||||
def _simple_warn(msg):
|
||||
# type: (str) -> None
|
||||
def _simple_warn(msg: str) -> None:
|
||||
print('WARNING: ' + msg, file=sys.stderr)
|
||||
|
||||
|
||||
def _underline(title, line='='):
|
||||
# type: (str, str) -> str
|
||||
def _underline(title: str, line: str = '=') -> str:
|
||||
if '\n' in title:
|
||||
raise ValueError('Can only underline single lines')
|
||||
return title + '\n' + line * len(title)
|
||||
@ -99,8 +91,7 @@ def _underline(title, line='='):
|
||||
class AutosummaryRenderer:
|
||||
"""A helper class for rendering."""
|
||||
|
||||
def __init__(self, builder, template_dir):
|
||||
# type: (Builder, str) -> None
|
||||
def __init__(self, builder: Builder, template_dir: str) -> None:
|
||||
loader = None # type: BaseLoader
|
||||
template_dirs = [os.path.join(package_dir, 'ext', 'autosummary', 'templates')]
|
||||
if builder is None:
|
||||
@ -117,8 +108,7 @@ class AutosummaryRenderer:
|
||||
self.env.filters['e'] = rst.escape
|
||||
self.env.filters['underline'] = _underline
|
||||
|
||||
def exists(self, template_name):
|
||||
# type: (str) -> bool
|
||||
def exists(self, template_name: str) -> bool:
|
||||
"""Check if template file exists."""
|
||||
try:
|
||||
self.env.get_template(template_name)
|
||||
@ -126,8 +116,7 @@ class AutosummaryRenderer:
|
||||
except TemplateNotFound:
|
||||
return False
|
||||
|
||||
def render(self, template_name, context):
|
||||
# type: (str, Dict) -> str
|
||||
def render(self, template_name: str, context: Dict) -> str:
|
||||
"""Render a template file."""
|
||||
return self.env.get_template(template_name).render(context)
|
||||
|
||||
@ -135,9 +124,9 @@ class AutosummaryRenderer:
|
||||
# -- Generating output ---------------------------------------------------------
|
||||
|
||||
|
||||
def generate_autosummary_content(name, obj, parent, template, template_name,
|
||||
imported_members, app):
|
||||
# type: (str, Any, Any, AutosummaryRenderer, str, bool, Any) -> str
|
||||
def generate_autosummary_content(name: str, obj: Any, parent: Any,
|
||||
template: AutosummaryRenderer, template_name: str,
|
||||
imported_members: bool, app: Any) -> str:
|
||||
doc = get_documenter(app, obj, parent)
|
||||
|
||||
if template_name is None:
|
||||
@ -145,8 +134,8 @@ def generate_autosummary_content(name, obj, parent, template, template_name,
|
||||
if not template.exists(template_name):
|
||||
template_name = 'autosummary/base.rst'
|
||||
|
||||
def get_members(obj, types, include_public=[], imported=True):
|
||||
# type: (Any, Set[str], List[str], bool) -> Tuple[List[str], List[str]] # NOQA
|
||||
def get_members(obj: Any, types: Set[str], include_public: List[str] = [],
|
||||
imported: bool = True) -> Tuple[List[str], List[str]]:
|
||||
items = [] # type: List[str]
|
||||
for name in dir(obj):
|
||||
try:
|
||||
@ -201,10 +190,11 @@ def generate_autosummary_content(name, obj, parent, template, template_name,
|
||||
return template.render(template_name, ns)
|
||||
|
||||
|
||||
def generate_autosummary_docs(sources, output_dir=None, suffix='.rst',
|
||||
warn=None, info=None, base_path=None, builder=None,
|
||||
template_dir=None, imported_members=False, app=None):
|
||||
# type: (List[str], str, str, Callable, Callable, str, Builder, str, bool, Any) -> None
|
||||
def generate_autosummary_docs(sources: List[str], output_dir: str = None,
|
||||
suffix: str = '.rst', warn: Callable = None,
|
||||
info: Callable = None, base_path: str = None,
|
||||
builder: Builder = None, template_dir: str = None,
|
||||
imported_members: bool = False, app: Any = None) -> None:
|
||||
if info:
|
||||
warnings.warn('info argument for generate_autosummary_docs() is deprecated.',
|
||||
RemovedInSphinx40Warning)
|
||||
@ -279,8 +269,7 @@ def generate_autosummary_docs(sources, output_dir=None, suffix='.rst',
|
||||
|
||||
# -- Finding documented entries in files ---------------------------------------
|
||||
|
||||
def find_autosummary_in_files(filenames):
|
||||
# type: (List[str]) -> List[Tuple[str, str, str]]
|
||||
def find_autosummary_in_files(filenames: List[str]) -> List[Tuple[str, str, str]]:
|
||||
"""Find out what items are documented in source/*.rst.
|
||||
|
||||
See `find_autosummary_in_lines`.
|
||||
@ -293,8 +282,8 @@ def find_autosummary_in_files(filenames):
|
||||
return documented
|
||||
|
||||
|
||||
def find_autosummary_in_docstring(name, module=None, filename=None):
|
||||
# type: (str, Any, str) -> List[Tuple[str, str, str]]
|
||||
def find_autosummary_in_docstring(name: str, module: Any = None, filename: str = None
|
||||
) -> List[Tuple[str, str, str]]:
|
||||
"""Find out what items are documented in the given object's docstring.
|
||||
|
||||
See `find_autosummary_in_lines`.
|
||||
@ -313,8 +302,8 @@ def find_autosummary_in_docstring(name, module=None, filename=None):
|
||||
return []
|
||||
|
||||
|
||||
def find_autosummary_in_lines(lines, module=None, filename=None):
|
||||
# type: (List[str], Any, str) -> List[Tuple[str, str, str]]
|
||||
def find_autosummary_in_lines(lines: List[str], module: Any = None, filename: str = None
|
||||
) -> List[Tuple[str, str, str]]:
|
||||
"""Find out what items appear in autosummary:: directives in the
|
||||
given lines.
|
||||
|
||||
@ -400,8 +389,7 @@ def find_autosummary_in_lines(lines, module=None, filename=None):
|
||||
return documented
|
||||
|
||||
|
||||
def get_parser():
|
||||
# type: () -> argparse.ArgumentParser
|
||||
def get_parser() -> argparse.ArgumentParser:
|
||||
parser = argparse.ArgumentParser(
|
||||
usage='%(prog)s [OPTIONS] <SOURCE_FILE>...',
|
||||
epilog=__('For more information, visit <http://sphinx-doc.org/>.'),
|
||||
@ -443,8 +431,7 @@ The format of the autosummary directive is documented in the
|
||||
return parser
|
||||
|
||||
|
||||
def main(argv=sys.argv[1:]):
|
||||
# type: (List[str]) -> None
|
||||
def main(argv: List[str] = sys.argv[1:]) -> None:
|
||||
sphinx.locale.setlocale(locale.LC_ALL, '')
|
||||
sphinx.locale.init_console(os.path.join(package_dir, 'locale'), 'sphinx')
|
||||
|
||||
|
@ -16,8 +16,10 @@ import time
|
||||
import warnings
|
||||
from io import StringIO
|
||||
from os import path
|
||||
from typing import Any, Callable, Dict, Iterable, List, Sequence, Set, Tuple, Type
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Element, Node, TextElement
|
||||
from docutils.parsers.rst import directives
|
||||
from packaging.specifiers import SpecifierSet, InvalidSpecifier
|
||||
from packaging.version import Version
|
||||
@ -33,8 +35,8 @@ from sphinx.util.osutil import relpath
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Type # NOQA
|
||||
from sphinx.application import Sphinx # NOQA
|
||||
from sphinx.application import Sphinx
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -42,15 +44,13 @@ blankline_re = re.compile(r'^\s*<BLANKLINE>', re.MULTILINE)
|
||||
doctestopt_re = re.compile(r'#\s*doctest:.+$', re.MULTILINE)
|
||||
|
||||
|
||||
def doctest_encode(text, encoding):
|
||||
# type: (str, str) -> str
|
||||
def doctest_encode(text: str, encoding: str) -> str:
|
||||
warnings.warn('doctest_encode() is deprecated.',
|
||||
RemovedInSphinx40Warning)
|
||||
return text
|
||||
|
||||
|
||||
def is_allowed_version(spec, version):
|
||||
# type: (str, str) -> bool
|
||||
def is_allowed_version(spec: str, version: str) -> bool:
|
||||
"""Check `spec` satisfies `version` or not.
|
||||
|
||||
This obeys PEP-440 specifiers:
|
||||
@ -80,8 +80,7 @@ class TestDirective(SphinxDirective):
|
||||
optional_arguments = 1
|
||||
final_argument_whitespace = True
|
||||
|
||||
def run(self):
|
||||
# type: () -> List[nodes.Node]
|
||||
def run(self) -> List[Node]:
|
||||
# use ordinary docutils nodes for test code: they get special attributes
|
||||
# so that our builder recognizes them, and the other builders are happy.
|
||||
code = '\n'.join(self.content)
|
||||
@ -95,7 +94,7 @@ class TestDirective(SphinxDirective):
|
||||
if not test:
|
||||
test = code
|
||||
code = doctestopt_re.sub('', code)
|
||||
nodetype = nodes.literal_block # type: Type[nodes.TextElement]
|
||||
nodetype = nodes.literal_block # type: Type[TextElement]
|
||||
if self.name in ('testsetup', 'testcleanup') or 'hide' in self.options:
|
||||
nodetype = nodes.comment
|
||||
if self.arguments:
|
||||
@ -194,15 +193,13 @@ parser = doctest.DocTestParser()
|
||||
# helper classes
|
||||
|
||||
class TestGroup:
|
||||
def __init__(self, name):
|
||||
# type: (str) -> None
|
||||
def __init__(self, name: str) -> None:
|
||||
self.name = name
|
||||
self.setup = [] # type: List[TestCode]
|
||||
self.tests = [] # type: List[List[TestCode]]
|
||||
self.cleanup = [] # type: List[TestCode]
|
||||
|
||||
def add_code(self, code, prepend=False):
|
||||
# type: (TestCode, bool) -> None
|
||||
def add_code(self, code: "TestCode", prepend: bool = False) -> None:
|
||||
if code.type == 'testsetup':
|
||||
if prepend:
|
||||
self.setup.insert(0, code)
|
||||
@ -220,30 +217,28 @@ class TestGroup:
|
||||
else:
|
||||
raise RuntimeError(__('invalid TestCode type'))
|
||||
|
||||
def __repr__(self):
|
||||
# type: () -> str
|
||||
def __repr__(self) -> str:
|
||||
return 'TestGroup(name=%r, setup=%r, cleanup=%r, tests=%r)' % (
|
||||
self.name, self.setup, self.cleanup, self.tests)
|
||||
|
||||
|
||||
class TestCode:
|
||||
def __init__(self, code, type, filename, lineno, options=None):
|
||||
# type: (str, str, Optional[str], int, Optional[Dict]) -> None
|
||||
def __init__(self, code: str, type: str, filename: str,
|
||||
lineno: int, options: Dict = None) -> None:
|
||||
self.code = code
|
||||
self.type = type
|
||||
self.filename = filename
|
||||
self.lineno = lineno
|
||||
self.options = options or {}
|
||||
|
||||
def __repr__(self):
|
||||
# type: () -> str
|
||||
def __repr__(self) -> str:
|
||||
return 'TestCode(%r, %r, filename=%r, lineno=%r, options=%r)' % (
|
||||
self.code, self.type, self.filename, self.lineno, self.options)
|
||||
|
||||
|
||||
class SphinxDocTestRunner(doctest.DocTestRunner):
|
||||
def summarize(self, out, verbose=None): # type: ignore
|
||||
# type: (Callable, bool) -> Tuple[int, int]
|
||||
def summarize(self, out: Callable, verbose: bool = None # type: ignore
|
||||
) -> Tuple[int, int]:
|
||||
string_io = StringIO()
|
||||
old_stdout = sys.stdout
|
||||
sys.stdout = string_io
|
||||
@ -254,9 +249,8 @@ class SphinxDocTestRunner(doctest.DocTestRunner):
|
||||
out(string_io.getvalue())
|
||||
return res
|
||||
|
||||
def _DocTestRunner__patched_linecache_getlines(self, filename,
|
||||
module_globals=None):
|
||||
# type: (str, Any) -> Any
|
||||
def _DocTestRunner__patched_linecache_getlines(self, filename: str,
|
||||
module_globals: Any = None) -> Any:
|
||||
# this is overridden from DocTestRunner adding the try-except below
|
||||
m = self._DocTestRunner__LINECACHE_FILENAME_RE.match(filename) # type: ignore
|
||||
if m and m.group('name') == self.test.name:
|
||||
@ -282,8 +276,7 @@ class DocTestBuilder(Builder):
|
||||
epilog = __('Testing of doctests in the sources finished, look at the '
|
||||
'results in %(outdir)s/output.txt.')
|
||||
|
||||
def init(self):
|
||||
# type: () -> None
|
||||
def init(self) -> None:
|
||||
# default options
|
||||
self.opt = self.config.doctest_default_flags
|
||||
|
||||
@ -312,32 +305,26 @@ class DocTestBuilder(Builder):
|
||||
'==================================%s\n') %
|
||||
(date, '=' * len(date)))
|
||||
|
||||
def _out(self, text):
|
||||
# type: (str) -> None
|
||||
def _out(self, text: str) -> None:
|
||||
logger.info(text, nonl=True)
|
||||
self.outfile.write(text)
|
||||
|
||||
def _warn_out(self, text):
|
||||
# type: (str) -> None
|
||||
def _warn_out(self, text: str) -> None:
|
||||
if self.app.quiet or self.app.warningiserror:
|
||||
logger.warning(text)
|
||||
else:
|
||||
logger.info(text, nonl=True)
|
||||
self.outfile.write(text)
|
||||
|
||||
def get_target_uri(self, docname, typ=None):
|
||||
# type: (str, str) -> str
|
||||
def get_target_uri(self, docname: str, typ: str = None) -> str:
|
||||
return ''
|
||||
|
||||
def get_outdated_docs(self):
|
||||
# type: () -> Set[str]
|
||||
def get_outdated_docs(self) -> Set[str]:
|
||||
return self.env.found_docs
|
||||
|
||||
def finish(self):
|
||||
# type: () -> None
|
||||
def finish(self) -> None:
|
||||
# write executive summary
|
||||
def s(v):
|
||||
# type: (int) -> str
|
||||
def s(v: int) -> str:
|
||||
return v != 1 and 's' or ''
|
||||
repl = (self.total_tries, s(self.total_tries),
|
||||
self.total_failures, s(self.total_failures),
|
||||
@ -356,8 +343,8 @@ Doctest summary
|
||||
if self.total_failures or self.setup_failures or self.cleanup_failures:
|
||||
self.app.statuscode = 1
|
||||
|
||||
def write(self, build_docnames, updated_docnames, method='update'):
|
||||
# type: (Iterable[str], Sequence[str], str) -> None
|
||||
def write(self, build_docnames: Iterable[str], updated_docnames: Sequence[str],
|
||||
method: str = 'update') -> None:
|
||||
if build_docnames is None:
|
||||
build_docnames = sorted(self.env.all_docs)
|
||||
|
||||
@ -367,8 +354,7 @@ Doctest summary
|
||||
doctree = self.env.get_doctree(docname)
|
||||
self.test_doc(docname, doctree)
|
||||
|
||||
def get_filename_for_node(self, node, docname):
|
||||
# type: (nodes.Node, str) -> str
|
||||
def get_filename_for_node(self, node: Node, docname: str) -> str:
|
||||
"""Try to get the file which actually contains the doctest, not the
|
||||
filename of the document it's included in."""
|
||||
try:
|
||||
@ -379,8 +365,7 @@ Doctest summary
|
||||
return filename
|
||||
|
||||
@staticmethod
|
||||
def get_line_number(node):
|
||||
# type: (nodes.Node) -> Optional[int]
|
||||
def get_line_number(node: Node) -> int:
|
||||
"""Get the real line number or admit we don't know."""
|
||||
# TODO: Work out how to store or calculate real (file-relative)
|
||||
# line numbers for doctest blocks in docstrings.
|
||||
@ -395,8 +380,7 @@ Doctest summary
|
||||
return node.line - 1
|
||||
return None
|
||||
|
||||
def skipped(self, node):
|
||||
# type: (nodes.Element) -> bool
|
||||
def skipped(self, node: Element) -> bool:
|
||||
if 'skipif' not in node:
|
||||
return False
|
||||
else:
|
||||
@ -409,8 +393,7 @@ Doctest summary
|
||||
exec(self.config.doctest_global_cleanup, context)
|
||||
return should_skip
|
||||
|
||||
def test_doc(self, docname, doctree):
|
||||
# type: (str, nodes.Node) -> None
|
||||
def test_doc(self, docname: str, doctree: Node) -> None:
|
||||
groups = {} # type: Dict[str, TestGroup]
|
||||
add_to_all_groups = []
|
||||
self.setup_runner = SphinxDocTestRunner(verbose=False,
|
||||
@ -424,17 +407,15 @@ Doctest summary
|
||||
self.cleanup_runner._fakeout = self.setup_runner._fakeout # type: ignore
|
||||
|
||||
if self.config.doctest_test_doctest_blocks:
|
||||
def condition(node):
|
||||
# type: (nodes.Node) -> bool
|
||||
def condition(node: Node) -> bool:
|
||||
return (isinstance(node, (nodes.literal_block, nodes.comment)) and
|
||||
'testnodetype' in node) or \
|
||||
isinstance(node, nodes.doctest_block)
|
||||
else:
|
||||
def condition(node):
|
||||
# type: (nodes.Node) -> bool
|
||||
def condition(node: Node) -> bool:
|
||||
return isinstance(node, (nodes.literal_block, nodes.comment)) \
|
||||
and 'testnodetype' in node
|
||||
for node in doctree.traverse(condition): # type: nodes.Element
|
||||
for node in doctree.traverse(condition): # type: Element
|
||||
if self.skipped(node):
|
||||
continue
|
||||
|
||||
@ -490,16 +471,13 @@ Doctest summary
|
||||
self.cleanup_failures += res_f
|
||||
self.cleanup_tries += res_t
|
||||
|
||||
def compile(self, code, name, type, flags, dont_inherit):
|
||||
# type: (str, str, str, Any, bool) -> Any
|
||||
def compile(self, code: str, name: str, type: str, flags: Any, dont_inherit: bool) -> Any:
|
||||
return compile(code, name, self.type, flags, dont_inherit)
|
||||
|
||||
def test_group(self, group):
|
||||
# type: (TestGroup) -> None
|
||||
def test_group(self, group: TestGroup) -> None:
|
||||
ns = {} # type: Dict
|
||||
|
||||
def run_setup_cleanup(runner, testcodes, what):
|
||||
# type: (Any, List[TestCode], Any) -> bool
|
||||
def run_setup_cleanup(runner: Any, testcodes: List[TestCode], what: Any) -> bool:
|
||||
examples = []
|
||||
for testcode in testcodes:
|
||||
example = doctest.Example(testcode.code, '', lineno=testcode.lineno)
|
||||
@ -568,8 +546,7 @@ Doctest summary
|
||||
run_setup_cleanup(self.cleanup_runner, group.cleanup, 'cleanup')
|
||||
|
||||
|
||||
def setup(app):
|
||||
# type: (Sphinx) -> Dict[str, Any]
|
||||
def setup(app: "Sphinx") -> Dict[str, Any]:
|
||||
app.add_directive('testsetup', TestsetupDirective)
|
||||
app.add_directive('testcleanup', TestcleanupDirective)
|
||||
app.add_directive('doctest', DoctestDirective)
|
||||
|
@ -19,17 +19,16 @@
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Node
|
||||
|
||||
import sphinx
|
||||
from sphinx.application import Sphinx
|
||||
from sphinx.util.docutils import SphinxDirective
|
||||
from sphinx.util.nodes import nested_parse_with_titles
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
from typing import Any, Dict, List # NOQA
|
||||
from sphinx.application import Sphinx # NOQA
|
||||
|
||||
|
||||
class ifconfig(nodes.Element):
|
||||
pass
|
||||
@ -43,8 +42,7 @@ class IfConfig(SphinxDirective):
|
||||
final_argument_whitespace = True
|
||||
option_spec = {} # type: Dict
|
||||
|
||||
def run(self):
|
||||
# type: () -> List[nodes.Node]
|
||||
def run(self) -> List[Node]:
|
||||
node = ifconfig()
|
||||
node.document = self.state.document
|
||||
self.set_source_info(node)
|
||||
@ -53,8 +51,7 @@ class IfConfig(SphinxDirective):
|
||||
return [node]
|
||||
|
||||
|
||||
def process_ifconfig_nodes(app, doctree, docname):
|
||||
# type: (Sphinx, nodes.document, str) -> None
|
||||
def process_ifconfig_nodes(app: Sphinx, doctree: nodes.document, docname: str) -> None:
|
||||
ns = {confval.name: confval.value for confval in app.config}
|
||||
ns.update(app.config.__dict__.copy())
|
||||
ns['builder'] = app.builder.name
|
||||
@ -76,8 +73,7 @@ def process_ifconfig_nodes(app, doctree, docname):
|
||||
node.replace_self(node.children)
|
||||
|
||||
|
||||
def setup(app):
|
||||
# type: (Sphinx) -> Dict[str, Any]
|
||||
def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
app.add_node(ifconfig)
|
||||
app.add_directive('ifconfig', IfConfig)
|
||||
app.connect('doctree-resolved', process_ifconfig_nodes)
|
||||
|
@ -17,11 +17,16 @@ import tempfile
|
||||
from hashlib import sha1
|
||||
from os import path
|
||||
from subprocess import CalledProcessError, PIPE
|
||||
from typing import Any, Dict, List, Tuple
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Element
|
||||
|
||||
import sphinx
|
||||
from sphinx import package_dir
|
||||
from sphinx.application import Sphinx
|
||||
from sphinx.builders import Builder
|
||||
from sphinx.config import Config
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning, deprecated_alias
|
||||
from sphinx.errors import SphinxError
|
||||
from sphinx.locale import _, __
|
||||
@ -30,14 +35,7 @@ from sphinx.util.math import get_node_equation_number, wrap_displaymath
|
||||
from sphinx.util.osutil import ensuredir
|
||||
from sphinx.util.png import read_png_depth, write_png_depth
|
||||
from sphinx.util.template import LaTeXRenderer
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
from typing import Any, Dict, List, Tuple, Union # NOQA
|
||||
from sphinx.application import Sphinx # NOQA
|
||||
from sphinx.builders import Builder # NOQA
|
||||
from sphinx.config import Config # NOQA
|
||||
from sphinx.writers.html import HTMLTranslator # NOQA
|
||||
from sphinx.writers.html import HTMLTranslator
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -47,8 +45,7 @@ templates_path = path.join(package_dir, 'templates', 'imgmath')
|
||||
class MathExtError(SphinxError):
|
||||
category = 'Math extension error'
|
||||
|
||||
def __init__(self, msg, stderr=None, stdout=None):
|
||||
# type: (str, bytes, bytes) -> None
|
||||
def __init__(self, msg: str, stderr: bytes = None, stdout: bytes = None) -> None:
|
||||
if stderr:
|
||||
msg += '\n[stderr]\n' + stderr.decode(sys.getdefaultencoding(), 'replace')
|
||||
if stdout:
|
||||
@ -116,8 +113,8 @@ def write_svg_depth(filename, depth):
|
||||
f.write('\n<!-- DEPTH=%s -->' % depth)
|
||||
|
||||
|
||||
def generate_latex_macro(image_format, math, config, confdir=''):
|
||||
# type: (str, str, Config, str) -> str
|
||||
def generate_latex_macro(image_format: str, math: str,
|
||||
config: Config, confdir: str = '') -> str:
|
||||
"""Generate LaTeX macro."""
|
||||
variables = {
|
||||
'fontsize': config.imgmath_font_size,
|
||||
@ -140,8 +137,7 @@ def generate_latex_macro(image_format, math, config, confdir=''):
|
||||
return LaTeXRenderer(templates_path).render(template_name, variables)
|
||||
|
||||
|
||||
def ensure_tempdir(builder):
|
||||
# type: (Builder) -> str
|
||||
def ensure_tempdir(builder: Builder) -> str:
|
||||
"""Create temporary directory.
|
||||
|
||||
use only one tempdir per build -- the use of a directory is cleaner
|
||||
@ -154,8 +150,7 @@ def ensure_tempdir(builder):
|
||||
return builder._imgmath_tempdir # type: ignore
|
||||
|
||||
|
||||
def compile_math(latex, builder):
|
||||
# type: (str, Builder) -> str
|
||||
def compile_math(latex: str, builder: Builder) -> str:
|
||||
"""Compile LaTeX macros for math to DVI."""
|
||||
tempdir = ensure_tempdir(builder)
|
||||
filename = path.join(tempdir, 'math.tex')
|
||||
@ -182,8 +177,7 @@ def compile_math(latex, builder):
|
||||
raise MathExtError('latex exited with error', exc.stderr, exc.stdout)
|
||||
|
||||
|
||||
def convert_dvi_to_image(command, name):
|
||||
# type: (List[str], str) -> Tuple[bytes, bytes]
|
||||
def convert_dvi_to_image(command: List[str], name: str) -> Tuple[bytes, bytes]:
|
||||
"""Convert DVI file to specific image format."""
|
||||
try:
|
||||
ret = subprocess.run(command, stdout=PIPE, stderr=PIPE, check=True)
|
||||
@ -197,8 +191,7 @@ def convert_dvi_to_image(command, name):
|
||||
raise MathExtError('%s exited with error' % name, exc.stderr, exc.stdout)
|
||||
|
||||
|
||||
def convert_dvi_to_png(dvipath, builder):
|
||||
# type: (str, Builder) -> Tuple[str, int]
|
||||
def convert_dvi_to_png(dvipath: str, builder: Builder) -> Tuple[str, int]:
|
||||
"""Convert DVI file to PNG image."""
|
||||
tempdir = ensure_tempdir(builder)
|
||||
filename = path.join(tempdir, 'math.png')
|
||||
@ -224,8 +217,7 @@ def convert_dvi_to_png(dvipath, builder):
|
||||
return filename, depth
|
||||
|
||||
|
||||
def convert_dvi_to_svg(dvipath, builder):
|
||||
# type: (str, Builder) -> Tuple[str, int]
|
||||
def convert_dvi_to_svg(dvipath: str, builder: Builder) -> Tuple[str, int]:
|
||||
"""Convert DVI file to SVG image."""
|
||||
tempdir = ensure_tempdir(builder)
|
||||
filename = path.join(tempdir, 'math.svg')
|
||||
@ -249,8 +241,7 @@ def convert_dvi_to_svg(dvipath, builder):
|
||||
return filename, depth
|
||||
|
||||
|
||||
def render_math(self, math):
|
||||
# type: (HTMLTranslator, str) -> Tuple[str, int]
|
||||
def render_math(self: HTMLTranslator, math: str) -> Tuple[str, int]:
|
||||
"""Render the LaTeX math expression *math* using latex and dvipng or
|
||||
dvisvgm.
|
||||
|
||||
@ -312,8 +303,7 @@ def render_math(self, math):
|
||||
return relfn, depth
|
||||
|
||||
|
||||
def cleanup_tempdir(app, exc):
|
||||
# type: (Sphinx, Exception) -> None
|
||||
def cleanup_tempdir(app: Sphinx, exc: Exception) -> None:
|
||||
if exc:
|
||||
return
|
||||
if not hasattr(app.builder, '_imgmath_tempdir'):
|
||||
@ -324,15 +314,13 @@ def cleanup_tempdir(app, exc):
|
||||
pass
|
||||
|
||||
|
||||
def get_tooltip(self, node):
|
||||
# type: (HTMLTranslator, Union[nodes.math, nodes.math_block]) -> str
|
||||
def get_tooltip(self: HTMLTranslator, node: Element) -> str:
|
||||
if self.builder.config.imgmath_add_tooltips:
|
||||
return ' alt="%s"' % self.encode(node.astext()).strip()
|
||||
return ''
|
||||
|
||||
|
||||
def html_visit_math(self, node):
|
||||
# type: (HTMLTranslator, nodes.math) -> None
|
||||
def html_visit_math(self: HTMLTranslator, node: nodes.math) -> None:
|
||||
try:
|
||||
fname, depth = render_math(self, '$' + node.astext() + '$')
|
||||
except MathExtError as exc:
|
||||
@ -354,8 +342,7 @@ def html_visit_math(self, node):
|
||||
raise nodes.SkipNode
|
||||
|
||||
|
||||
def html_visit_displaymath(self, node):
|
||||
# type: (HTMLTranslator, nodes.math_block) -> None
|
||||
def html_visit_displaymath(self: HTMLTranslator, node: nodes.math_block) -> None:
|
||||
if node['nowrap']:
|
||||
latex = node.astext()
|
||||
else:
|
||||
@ -395,8 +382,7 @@ deprecated_alias('sphinx.ext.imgmath',
|
||||
RemovedInSphinx40Warning)
|
||||
|
||||
|
||||
def setup(app):
|
||||
# type: (Sphinx) -> Dict[str, Any]
|
||||
def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
app.add_html_math_renderer('imgmath',
|
||||
(html_visit_math, None),
|
||||
(html_visit_displaymath, None))
|
||||
|
@ -40,27 +40,25 @@ import inspect
|
||||
import re
|
||||
import sys
|
||||
from hashlib import md5
|
||||
from typing import Iterable, cast
|
||||
from typing import Any, Dict, Iterable, List, Tuple
|
||||
from typing import cast
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.nodes import Node
|
||||
from docutils.parsers.rst import directives
|
||||
|
||||
import sphinx
|
||||
from sphinx import addnodes
|
||||
from sphinx.application import Sphinx
|
||||
from sphinx.environment import BuildEnvironment
|
||||
from sphinx.ext.graphviz import (
|
||||
graphviz, figure_wrapper,
|
||||
render_dot_html, render_dot_latex, render_dot_texinfo
|
||||
)
|
||||
from sphinx.util.docutils import SphinxDirective
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
from typing import Any, Dict, List, Optional, Tuple # NOQA
|
||||
from sphinx.application import Sphinx # NOQA
|
||||
from sphinx.environment import BuildEnvironment # NOQA
|
||||
from sphinx.writers.html import HTMLTranslator # NOQA
|
||||
from sphinx.writers.latex import LaTeXTranslator # NOQA
|
||||
from sphinx.writers.texinfo import TexinfoTranslator # NOQA
|
||||
from sphinx.writers.html import HTMLTranslator
|
||||
from sphinx.writers.latex import LaTeXTranslator
|
||||
from sphinx.writers.texinfo import TexinfoTranslator
|
||||
|
||||
|
||||
module_sig_re = re.compile(r'''^(?:([\w.]*)\.)? # module names
|
||||
@ -68,8 +66,7 @@ module_sig_re = re.compile(r'''^(?:([\w.]*)\.)? # module names
|
||||
''', re.VERBOSE)
|
||||
|
||||
|
||||
def try_import(objname):
|
||||
# type: (str) -> Any
|
||||
def try_import(objname: str) -> Any:
|
||||
"""Import a object or module using *name* and *currentmodule*.
|
||||
*name* should be a relative name from *currentmodule* or
|
||||
a fully-qualified name.
|
||||
@ -96,8 +93,7 @@ def try_import(objname):
|
||||
return None
|
||||
|
||||
|
||||
def import_classes(name, currmodule):
|
||||
# type: (str, str) -> Any
|
||||
def import_classes(name: str, currmodule: str) -> Any:
|
||||
"""Import a class using its fully-qualified *name*."""
|
||||
target = None
|
||||
|
||||
@ -138,9 +134,9 @@ class InheritanceGraph:
|
||||
from all the way to the root "object", and then is able to generate a
|
||||
graphviz dot graph from them.
|
||||
"""
|
||||
def __init__(self, class_names, currmodule, show_builtins=False,
|
||||
private_bases=False, parts=0, aliases=None, top_classes=[]):
|
||||
# type: (List[str], str, bool, bool, int, Optional[Dict[str, str]], List[Any]) -> None
|
||||
def __init__(self, class_names: List[str], currmodule: str, show_builtins: bool = False,
|
||||
private_bases: bool = False, parts: int = 0, aliases: Dict[str, str] = None,
|
||||
top_classes: List[Any] = []) -> None:
|
||||
"""*class_names* is a list of child classes to show bases from.
|
||||
|
||||
If *show_builtins* is True, then Python builtins will be shown
|
||||
@ -154,16 +150,16 @@ class InheritanceGraph:
|
||||
raise InheritanceException('No classes found for '
|
||||
'inheritance diagram')
|
||||
|
||||
def _import_classes(self, class_names, currmodule):
|
||||
# type: (List[str], str) -> List[Any]
|
||||
def _import_classes(self, class_names: List[str], currmodule: str) -> List[Any]:
|
||||
"""Import a list of classes."""
|
||||
classes = [] # type: List[Any]
|
||||
for name in class_names:
|
||||
classes.extend(import_classes(name, currmodule))
|
||||
return classes
|
||||
|
||||
def _class_info(self, classes, show_builtins, private_bases, parts, aliases, top_classes):
|
||||
# type: (List[Any], bool, bool, int, Optional[Dict[str, str]], List[Any]) -> List[Tuple[str, str, List[str], str]] # NOQA
|
||||
def _class_info(self, classes: List[Any], show_builtins: bool, private_bases: bool,
|
||||
parts: int, aliases: Dict[str, str], top_classes: List[Any]
|
||||
) -> List[Tuple[str, str, List[str], str]]:
|
||||
"""Return name and bases for all classes that are ancestors of
|
||||
*classes*.
|
||||
|
||||
@ -182,8 +178,7 @@ class InheritanceGraph:
|
||||
all_classes = {}
|
||||
py_builtins = vars(builtins).values()
|
||||
|
||||
def recurse(cls):
|
||||
# type: (Any) -> None
|
||||
def recurse(cls: Any) -> None:
|
||||
if not show_builtins and cls in py_builtins:
|
||||
return
|
||||
if not private_bases and cls.__name__.startswith('_'):
|
||||
@ -222,8 +217,7 @@ class InheritanceGraph:
|
||||
|
||||
return list(all_classes.values())
|
||||
|
||||
def class_name(self, cls, parts=0, aliases=None):
|
||||
# type: (Any, int, Optional[Dict[str, str]]) -> str
|
||||
def class_name(self, cls: Any, parts: int = 0, aliases: Dict[str, str] = None) -> str:
|
||||
"""Given a class object, return a fully-qualified name.
|
||||
|
||||
This works for things I've tested in matplotlib so far, but may not be
|
||||
@ -243,8 +237,7 @@ class InheritanceGraph:
|
||||
return aliases[result]
|
||||
return result
|
||||
|
||||
def get_all_class_names(self):
|
||||
# type: () -> List[str]
|
||||
def get_all_class_names(self) -> List[str]:
|
||||
"""Get all of the class names involved in the graph."""
|
||||
return [fullname for (_, fullname, _, _) in self.class_info]
|
||||
|
||||
@ -266,17 +259,15 @@ class InheritanceGraph:
|
||||
'style': '"setlinewidth(0.5)"',
|
||||
}
|
||||
|
||||
def _format_node_attrs(self, attrs):
|
||||
# type: (Dict) -> str
|
||||
def _format_node_attrs(self, attrs: Dict) -> str:
|
||||
return ','.join(['%s=%s' % x for x in sorted(attrs.items())])
|
||||
|
||||
def _format_graph_attrs(self, attrs):
|
||||
# type: (Dict) -> str
|
||||
def _format_graph_attrs(self, attrs: Dict) -> str:
|
||||
return ''.join(['%s=%s;\n' % x for x in sorted(attrs.items())])
|
||||
|
||||
def generate_dot(self, name, urls={}, env=None,
|
||||
graph_attrs={}, node_attrs={}, edge_attrs={}):
|
||||
# type: (str, Dict, BuildEnvironment, Dict, Dict, Dict) -> str
|
||||
def generate_dot(self, name: str, urls: Dict = {}, env: BuildEnvironment = None,
|
||||
graph_attrs: Dict = {}, node_attrs: Dict = {}, edge_attrs: Dict = {}
|
||||
) -> str:
|
||||
"""Generate a graphviz dot graph from the classes that were passed in
|
||||
to __init__.
|
||||
|
||||
@ -344,8 +335,7 @@ class InheritanceDiagram(SphinxDirective):
|
||||
'top-classes': directives.unchanged_required,
|
||||
}
|
||||
|
||||
def run(self):
|
||||
# type: () -> List[nodes.Node]
|
||||
def run(self) -> List[Node]:
|
||||
node = inheritance_diagram()
|
||||
node.document = self.state.document
|
||||
class_names = self.arguments[0].split()
|
||||
@ -391,14 +381,12 @@ class InheritanceDiagram(SphinxDirective):
|
||||
return [figure]
|
||||
|
||||
|
||||
def get_graph_hash(node):
|
||||
# type: (inheritance_diagram) -> str
|
||||
def get_graph_hash(node: inheritance_diagram) -> str:
|
||||
encoded = (node['content'] + str(node['parts'])).encode()
|
||||
return md5(encoded).hexdigest()[-10:]
|
||||
|
||||
|
||||
def html_visit_inheritance_diagram(self, node):
|
||||
# type: (HTMLTranslator, inheritance_diagram) -> None
|
||||
def html_visit_inheritance_diagram(self: HTMLTranslator, node: inheritance_diagram) -> None:
|
||||
"""
|
||||
Output the graph for HTML. This will insert a PNG with clickable
|
||||
image map.
|
||||
@ -431,8 +419,7 @@ def html_visit_inheritance_diagram(self, node):
|
||||
raise nodes.SkipNode
|
||||
|
||||
|
||||
def latex_visit_inheritance_diagram(self, node):
|
||||
# type: (LaTeXTranslator, inheritance_diagram) -> None
|
||||
def latex_visit_inheritance_diagram(self: LaTeXTranslator, node: inheritance_diagram) -> None:
|
||||
"""
|
||||
Output the graph for LaTeX. This will insert a PDF.
|
||||
"""
|
||||
@ -447,8 +434,8 @@ def latex_visit_inheritance_diagram(self, node):
|
||||
raise nodes.SkipNode
|
||||
|
||||
|
||||
def texinfo_visit_inheritance_diagram(self, node):
|
||||
# type: (TexinfoTranslator, inheritance_diagram) -> None
|
||||
def texinfo_visit_inheritance_diagram(self: TexinfoTranslator, node: inheritance_diagram
|
||||
) -> None:
|
||||
"""
|
||||
Output the graph for Texinfo. This will insert a PNG.
|
||||
"""
|
||||
@ -463,13 +450,11 @@ def texinfo_visit_inheritance_diagram(self, node):
|
||||
raise nodes.SkipNode
|
||||
|
||||
|
||||
def skip(self, node):
|
||||
# type: (nodes.NodeVisitor, inheritance_diagram) -> None
|
||||
def skip(self: nodes.NodeVisitor, node: inheritance_diagram) -> None:
|
||||
raise nodes.SkipNode
|
||||
|
||||
|
||||
def setup(app):
|
||||
# type: (Sphinx) -> Dict[str, Any]
|
||||
def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
app.setup_extension('sphinx.ext.graphviz')
|
||||
app.add_node(
|
||||
inheritance_diagram,
|
||||
|
@ -10,6 +10,7 @@
|
||||
"""
|
||||
|
||||
import warnings
|
||||
from typing import Any, Dict
|
||||
|
||||
from sphinxcontrib.jsmath import ( # NOQA
|
||||
html_visit_math,
|
||||
@ -18,16 +19,11 @@ from sphinxcontrib.jsmath import ( # NOQA
|
||||
)
|
||||
|
||||
import sphinx
|
||||
from sphinx.application import Sphinx
|
||||
from sphinx.deprecation import RemovedInSphinx40Warning
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
from typing import Any, Dict # NOQA
|
||||
from sphinx.application import Sphinx # NOQA
|
||||
|
||||
|
||||
def setup(app):
|
||||
# type: (Sphinx) -> Dict[str, Any]
|
||||
def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
warnings.warn('sphinx.ext.jsmath has been moved to sphinxcontrib-jsmath.',
|
||||
RemovedInSphinx40Warning)
|
||||
|
||||
|
@ -8,14 +8,12 @@
|
||||
:license: BSD, see LICENSE for details.
|
||||
"""
|
||||
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from sphinx import __display_version__ as __version__
|
||||
from sphinx.application import Sphinx
|
||||
from sphinx.ext.napoleon.docstring import GoogleDocstring, NumpyDocstring
|
||||
|
||||
if False:
|
||||
# For type annotation
|
||||
from typing import Any, Dict, List # NOQA
|
||||
|
||||
|
||||
class Config:
|
||||
"""Sphinx napoleon extension settings in `conf.py`.
|
||||
@ -267,16 +265,14 @@ class Config:
|
||||
'napoleon_custom_sections': (None, 'env')
|
||||
}
|
||||
|
||||
def __init__(self, **settings):
|
||||
# type: (Any) -> None
|
||||
def __init__(self, **settings) -> None:
|
||||
for name, (default, rebuild) in self._config_values.items():
|
||||
setattr(self, name, default)
|
||||
for name, value in settings.items():
|
||||
setattr(self, name, value)
|
||||
|
||||
|
||||
def setup(app):
|
||||
# type: (Sphinx) -> Dict[str, Any]
|
||||
def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
"""Sphinx extension setup function.
|
||||
|
||||
When the extension is loaded, Sphinx imports this module and executes
|
||||
@ -313,8 +309,7 @@ def setup(app):
|
||||
return {'version': __version__, 'parallel_read_safe': True}
|
||||
|
||||
|
||||
def _patch_python_domain():
|
||||
# type: () -> None
|
||||
def _patch_python_domain() -> None:
|
||||
try:
|
||||
from sphinx.domains.python import PyTypedField
|
||||
except ImportError:
|
||||
@ -333,8 +328,8 @@ def _patch_python_domain():
|
||||
can_collapse=True))
|
||||
|
||||
|
||||
def _process_docstring(app, what, name, obj, options, lines):
|
||||
# type: (Sphinx, str, str, Any, Any, List[str]) -> None
|
||||
def _process_docstring(app: Sphinx, what: str, name: str, obj: Any,
|
||||
options: Any, lines: List[str]) -> None:
|
||||
"""Process the docstring for a given python object.
|
||||
|
||||
Called when autodoc has read and processed a docstring. `lines` is a list
|
||||
@ -383,8 +378,8 @@ def _process_docstring(app, what, name, obj, options, lines):
|
||||
lines[:] = result_lines[:]
|
||||
|
||||
|
||||
def _skip_member(app, what, name, obj, skip, options):
|
||||
# type: (Sphinx, str, str, Any, bool, Any) -> bool
|
||||
def _skip_member(app: Sphinx, what: str, name: str, obj: Any,
|
||||
skip: bool, options: Any) -> bool:
|
||||
"""Determine if private and special class members are included in docs.
|
||||
|
||||
The following settings in conf.py determine if private and special class
|
||||
|
@ -13,16 +13,13 @@
|
||||
import inspect
import re
from functools import partial
from typing import Any, Callable, Dict, List, Tuple, Type, Union

from sphinx.application import Sphinx
from sphinx.config import Config as SphinxConfig
from sphinx.ext.napoleon.iterators import modify_iter
from sphinx.locale import _

if False:
    # For type annotation
    from typing import Any, Callable, Dict, List, Tuple, Type, Union  # NOQA
    from sphinx.application import Sphinx  # NOQA
    from sphinx.config import Config as SphinxConfig  # NOQA


_directive_regex = re.compile(r'\.\. \S+::')
_google_section_regex = re.compile(r'^(\s|\w)+:\s*$')
@ -103,9 +100,9 @@ class GoogleDocstring:
    _name_rgx = re.compile(r"^\s*((?::(?P<role>\S+):)?`(?P<name>[a-zA-Z0-9_.-]+)`|"
                           r" (?P<name2>[a-zA-Z0-9_.-]+))\s*", re.X)

    def __init__(self, docstring, config=None, app=None, what='', name='',
                 obj=None, options=None):
        # type: (Union[str, List[str]], SphinxConfig, Sphinx, str, str, Any, Any) -> None
    def __init__(self, docstring: Union[str, List[str]], config: SphinxConfig = None,
                 app: Sphinx = None, what: str = '', name: str = '',
                 obj: Any = None, options: Any = None) -> None:
        self._config = config
        self._app = app

@ -175,8 +172,7 @@ class GoogleDocstring:

        self._parse()

    def __str__(self):
        # type: () -> str
    def __str__(self) -> str:
        """Return the parsed docstring in reStructuredText format.

        Returns
@ -187,8 +183,7 @@ class GoogleDocstring:
        """
        return '\n'.join(self.lines())

    def lines(self):
        # type: () -> List[str]
    def lines(self) -> List[str]:
        """Return the parsed lines of the docstring in reStructuredText format.

        Returns
@ -199,8 +194,7 @@ class GoogleDocstring:
        """
        return self._parsed_lines

    def _consume_indented_block(self, indent=1):
        # type: (int) -> List[str]
    def _consume_indented_block(self, indent: int = 1) -> List[str]:
        lines = []
        line = self._line_iter.peek()
        while(not self._is_section_break() and
@ -209,8 +203,7 @@ class GoogleDocstring:
            line = self._line_iter.peek()
        return lines

    def _consume_contiguous(self):
        # type: () -> List[str]
    def _consume_contiguous(self) -> List[str]:
        lines = []
        while (self._line_iter.has_next() and
               self._line_iter.peek() and
@ -218,8 +211,7 @@ class GoogleDocstring:
            lines.append(next(self._line_iter))
        return lines

    def _consume_empty(self):
        # type: () -> List[str]
    def _consume_empty(self) -> List[str]:
        lines = []
        line = self._line_iter.peek()
        while self._line_iter.has_next() and not line:
@ -227,8 +219,8 @@ class GoogleDocstring:
            line = self._line_iter.peek()
        return lines

    def _consume_field(self, parse_type=True, prefer_type=False):
        # type: (bool, bool) -> Tuple[str, str, List[str]]
    def _consume_field(self, parse_type: bool = True, prefer_type: bool = False
                       ) -> Tuple[str, str, List[str]]:
        line = next(self._line_iter)

        before, colon, after = self._partition_field_on_colon(line)
@ -249,8 +241,8 @@ class GoogleDocstring:
            _descs = self.__class__(_descs, self._config).lines()
        return _name, _type, _descs

    def _consume_fields(self, parse_type=True, prefer_type=False):
        # type: (bool, bool) -> List[Tuple[str, str, List[str]]]
    def _consume_fields(self, parse_type: bool = True, prefer_type: bool = False
                        ) -> List[Tuple[str, str, List[str]]]:
        self._consume_empty()
        fields = []
        while not self._is_section_break():
@ -259,8 +251,7 @@ class GoogleDocstring:
            fields.append((_name, _type, _desc,))
        return fields

    def _consume_inline_attribute(self):
        # type: () -> Tuple[str, List[str]]
    def _consume_inline_attribute(self) -> Tuple[str, List[str]]:
        line = next(self._line_iter)
        _type, colon, _desc = self._partition_field_on_colon(line)
        if not colon or not _desc:
@ -270,8 +261,7 @@ class GoogleDocstring:
        _descs = self.__class__(_descs, self._config).lines()
        return _type, _descs

    def _consume_returns_section(self):
        # type: () -> List[Tuple[str, str, List[str]]]
    def _consume_returns_section(self) -> List[Tuple[str, str, List[str]]]:
        lines = self._dedent(self._consume_to_next_section())
        if lines:
            before, colon, after = self._partition_field_on_colon(lines[0])
@ -290,44 +280,38 @@ class GoogleDocstring:
        else:
            return []

    def _consume_usage_section(self):
        # type: () -> List[str]
    def _consume_usage_section(self) -> List[str]:
        lines = self._dedent(self._consume_to_next_section())
        return lines

    def _consume_section_header(self):
        # type: () -> str
    def _consume_section_header(self) -> str:
        section = next(self._line_iter)
        stripped_section = section.strip(':')
        if stripped_section.lower() in self._sections:
            section = stripped_section
        return section

    def _consume_to_end(self):
        # type: () -> List[str]
    def _consume_to_end(self) -> List[str]:
        lines = []
        while self._line_iter.has_next():
            lines.append(next(self._line_iter))
        return lines

    def _consume_to_next_section(self):
        # type: () -> List[str]
    def _consume_to_next_section(self) -> List[str]:
        self._consume_empty()
        lines = []
        while not self._is_section_break():
            lines.append(next(self._line_iter))
        return lines + self._consume_empty()

    def _dedent(self, lines, full=False):
        # type: (List[str], bool) -> List[str]
    def _dedent(self, lines: List[str], full: bool = False) -> List[str]:
        if full:
            return [line.lstrip() for line in lines]
        else:
            min_indent = self._get_min_indent(lines)
            return [line[min_indent:] for line in lines]

    def _escape_args_and_kwargs(self, name):
        # type: (str) -> str
    def _escape_args_and_kwargs(self, name: str) -> str:
        if name.endswith('_'):
            name = name[:-1] + r'\_'

@ -338,8 +322,7 @@ class GoogleDocstring:
        else:
            return name

    def _fix_field_desc(self, desc):
        # type: (List[str]) -> List[str]
    def _fix_field_desc(self, desc: List[str]) -> List[str]:
        if self._is_list(desc):
            desc = [''] + desc
        elif desc[0].endswith('::'):
@ -352,8 +335,7 @@ class GoogleDocstring:
            desc = ['', desc[0]] + self._indent(desc_block, 4)
        return desc

    def _format_admonition(self, admonition, lines):
        # type: (str, List[str]) -> List[str]
    def _format_admonition(self, admonition: str, lines: List[str]) -> List[str]:
        lines = self._strip_empty(lines)
        if len(lines) == 1:
            return ['.. %s:: %s' % (admonition, lines[0].strip()), '']
@ -363,8 +345,7 @@ class GoogleDocstring:
        else:
            return ['.. %s::' % admonition, '']

    def _format_block(self, prefix, lines, padding=None):
        # type: (str, List[str], str) -> List[str]
    def _format_block(self, prefix: str, lines: List[str], padding: str = None) -> List[str]:
        if lines:
            if padding is None:
                padding = ' ' * len(prefix)
@ -380,9 +361,9 @@ class GoogleDocstring:
        else:
            return [prefix]

    def _format_docutils_params(self, fields, field_role='param',
                                type_role='type'):
        # type: (List[Tuple[str, str, List[str]]], str, str) -> List[str]
    def _format_docutils_params(self, fields: List[Tuple[str, str, List[str]]],
                                field_role: str = 'param', type_role: str = 'type'
                                ) -> List[str]:
        lines = []
        for _name, _type, _desc in fields:
            _desc = self._strip_empty(_desc)
@ -397,8 +378,7 @@ class GoogleDocstring:
                lines.append(':%s %s: %s' % (type_role, _name, _type))
        return lines + ['']

    def _format_field(self, _name, _type, _desc):
        # type: (str, str, List[str]) -> List[str]
    def _format_field(self, _name: str, _type: str, _desc: List[str]) -> List[str]:
        _desc = self._strip_empty(_desc)
        has_desc = any(_desc)
        separator = has_desc and ' -- ' or ''
@ -427,8 +407,8 @@ class GoogleDocstring:
        else:
            return [field]

    def _format_fields(self, field_type, fields):
        # type: (str, List[Tuple[str, str, List[str]]]) -> List[str]
    def _format_fields(self, field_type: str, fields: List[Tuple[str, str, List[str]]]
                       ) -> List[str]:
        field_type = ':%s:' % field_type.strip()
        padding = ' ' * len(field_type)
        multi = len(fields) > 1
@ -446,8 +426,7 @@ class GoogleDocstring:
            lines.append('')
        return lines

    def _get_current_indent(self, peek_ahead=0):
        # type: (int) -> int
    def _get_current_indent(self, peek_ahead: int = 0) -> int:
        line = self._line_iter.peek(peek_ahead + 1)[peek_ahead]
        while line != self._line_iter.sentinel:
            if line:
@ -456,22 +435,19 @@ class GoogleDocstring:
            line = self._line_iter.peek(peek_ahead + 1)[peek_ahead]
        return 0

    def _get_indent(self, line):
        # type: (str) -> int
    def _get_indent(self, line: str) -> int:
        for i, s in enumerate(line):
            if not s.isspace():
                return i
        return len(line)

    def _get_initial_indent(self, lines):
        # type: (List[str]) -> int
    def _get_initial_indent(self, lines: List[str]) -> int:
        for line in lines:
            if line:
                return self._get_indent(line)
        return 0

    def _get_min_indent(self, lines):
        # type: (List[str]) -> int
    def _get_min_indent(self, lines: List[str]) -> int:
        min_indent = None
        for line in lines:
            if line:
@ -482,12 +458,10 @@ class GoogleDocstring:
                    min_indent = indent
        return min_indent or 0

    def _indent(self, lines, n=4):
        # type: (List[str], int) -> List[str]
    def _indent(self, lines: List[str], n: int = 4) -> List[str]:
        return [(' ' * n) + line for line in lines]

    def _is_indented(self, line, indent=1):
        # type: (str, int) -> bool
    def _is_indented(self, line: str, indent: int = 1) -> bool:
        for i, s in enumerate(line):
            if i >= indent:
                return True
@ -495,8 +469,7 @@ class GoogleDocstring:
                return False
        return False

    def _is_list(self, lines):
        # type: (List[str]) -> bool
    def _is_list(self, lines: List[str]) -> bool:
        if not lines:
            return False
        if _bullet_list_regex.match(lines[0]):
@ -513,8 +486,7 @@ class GoogleDocstring:
                break
        return next_indent > indent

    def _is_section_header(self):
        # type: () -> bool
    def _is_section_header(self) -> bool:
        section = self._line_iter.peek().lower()
        match = _google_section_regex.match(section)
        if match and section.strip(':') in self._sections:
@ -528,8 +500,7 @@ class GoogleDocstring:
                        return True
        return False

    def _is_section_break(self):
        # type: () -> bool
    def _is_section_break(self) -> bool:
        line = self._line_iter.peek()
        return (not self._line_iter.has_next() or
                self._is_section_header() or
@ -537,9 +508,7 @@ class GoogleDocstring:
                    line and
                    not self._is_indented(line, self._section_indent)))

    def _load_custom_sections(self):
        # type: () -> None

    def _load_custom_sections(self) -> None:
        if self._config.napoleon_custom_sections is not None:
            for entry in self._config.napoleon_custom_sections:
                if isinstance(entry, str):
@ -554,8 +523,7 @@ class GoogleDocstring:
                        self._sections.get(entry[1].lower(),
                                           self._parse_custom_generic_section)

    def _parse(self):
        # type: () -> None
    def _parse(self) -> None:
        self._parsed_lines = self._consume_empty()

        if self._name and self._what in ('attribute', 'data', 'property'):
@ -594,16 +562,14 @@ class GoogleDocstring:
            lines = self._consume_to_next_section()
        return self._format_admonition(admonition, lines)

    def _parse_attribute_docstring(self):
        # type: () -> List[str]
    def _parse_attribute_docstring(self) -> List[str]:
        _type, _desc = self._consume_inline_attribute()
        lines = self._format_field('', '', _desc)
        if _type:
            lines.extend(['', ':type: %s' % _type])
        return lines

    def _parse_attributes_section(self, section):
        # type: (str) -> List[str]
    def _parse_attributes_section(self, section: str) -> List[str]:
        lines = []
        for _name, _type, _desc in self._consume_fields():
            if self._config.napoleon_use_ivar:
@ -624,8 +590,7 @@ class GoogleDocstring:
                lines.append('')
        return lines

    def _parse_examples_section(self, section):
        # type: (str) -> List[str]
    def _parse_examples_section(self, section: str) -> List[str]:
        labels = {
            'example': _('Example'),
            'examples': _('Examples'),
@ -638,16 +603,14 @@ class GoogleDocstring:
        # for now, no admonition for simple custom sections
        return self._parse_generic_section(section, False)

    def _parse_usage_section(self, section):
        # type: (str) -> List[str]
    def _parse_usage_section(self, section: str) -> List[str]:
        header = ['.. rubric:: Usage:', '']
        block = ['.. code-block:: python', '']
        lines = self._consume_usage_section()
        lines = self._indent(lines, 3)
        return header + block + lines + ['']

    def _parse_generic_section(self, section, use_admonition):
        # type: (str, bool) -> List[str]
    def _parse_generic_section(self, section: str, use_admonition: bool) -> List[str]:
        lines = self._strip_empty(self._consume_to_next_section())
        lines = self._dedent(lines)
        if use_admonition:
@ -660,8 +623,7 @@ class GoogleDocstring:
        else:
            return [header, '']

    def _parse_keyword_arguments_section(self, section):
        # type: (str) -> List[str]
    def _parse_keyword_arguments_section(self, section: str) -> List[str]:
        fields = self._consume_fields()
        if self._config.napoleon_use_keyword:
            return self._format_docutils_params(
@ -671,8 +633,7 @@ class GoogleDocstring:
        else:
            return self._format_fields(_('Keyword Arguments'), fields)

    def _parse_methods_section(self, section):
        # type: (str) -> List[str]
    def _parse_methods_section(self, section: str) -> List[str]:
        lines = []  # type: List[str]
        for _name, _type, _desc in self._consume_fields(parse_type=False):
            lines.append('.. method:: %s' % _name)
@ -681,25 +642,21 @@ class GoogleDocstring:
            lines.append('')
        return lines

    def _parse_notes_section(self, section):
        # type: (str) -> List[str]
    def _parse_notes_section(self, section: str) -> List[str]:
        use_admonition = self._config.napoleon_use_admonition_for_notes
        return self._parse_generic_section(_('Notes'), use_admonition)

    def _parse_other_parameters_section(self, section):
        # type: (str) -> List[str]
    def _parse_other_parameters_section(self, section: str) -> List[str]:
        return self._format_fields(_('Other Parameters'), self._consume_fields())

    def _parse_parameters_section(self, section):
        # type: (str) -> List[str]
    def _parse_parameters_section(self, section: str) -> List[str]:
        fields = self._consume_fields()
        if self._config.napoleon_use_param:
            return self._format_docutils_params(fields)
        else:
            return self._format_fields(_('Parameters'), fields)

    def _parse_raises_section(self, section):
        # type: (str) -> List[str]
    def _parse_raises_section(self, section: str) -> List[str]:
        fields = self._consume_fields(parse_type=False, prefer_type=True)
        lines = []  # type: List[str]
        for _name, _type, _desc in fields:
@ -714,13 +671,11 @@ class GoogleDocstring:
            lines.append('')
        return lines

    def _parse_references_section(self, section):
        # type: (str) -> List[str]
    def _parse_references_section(self, section: str) -> List[str]:
        use_admonition = self._config.napoleon_use_admonition_for_references
        return self._parse_generic_section(_('References'), use_admonition)

    def _parse_returns_section(self, section):
        # type: (str) -> List[str]
    def _parse_returns_section(self, section: str) -> List[str]:
        fields = self._consume_returns_section()
        multi = len(fields) > 1
        if multi:
@ -748,21 +703,17 @@ class GoogleDocstring:
            lines.append('')
        return lines

    def _parse_see_also_section(self, section):
        # type (str) -> List[str]
    def _parse_see_also_section(self, section: str) -> List[str]:
        return self._parse_admonition('seealso', section)

    def _parse_warns_section(self, section):
        # type: (str) -> List[str]
    def _parse_warns_section(self, section: str) -> List[str]:
        return self._format_fields(_('Warns'), self._consume_fields())

    def _parse_yields_section(self, section):
        # type: (str) -> List[str]
    def _parse_yields_section(self, section: str) -> List[str]:
        fields = self._consume_returns_section()
        return self._format_fields(_('Yields'), fields)

    def _partition_field_on_colon(self, line):
        # type: (str) -> Tuple[str, str, str]
    def _partition_field_on_colon(self, line: str) -> Tuple[str, str, str]:
        before_colon = []
        after_colon = []
        colon = ''
@ -784,8 +735,7 @@ class GoogleDocstring:
                colon,
                "".join(after_colon).strip())

    def _qualify_name(self, attr_name, klass):
        # type: (str, Type) -> str
    def _qualify_name(self, attr_name: str, klass: Type) -> str:
        if klass and '.' not in attr_name:
            if attr_name.startswith('~'):
                attr_name = attr_name[1:]
@ -796,8 +746,7 @@ class GoogleDocstring:
                return '~%s.%s' % (q, attr_name)
        return attr_name

    def _strip_empty(self, lines):
        # type: (List[str]) -> List[str]
    def _strip_empty(self, lines: List[str]) -> List[str]:
        if lines:
            start = -1
            for i, line in enumerate(lines):
@ -910,14 +859,14 @@ class NumpyDocstring(GoogleDocstring):
        The lines of the docstring in a list.

    """
    def __init__(self, docstring, config=None, app=None, what='', name='',
                 obj=None, options=None):
        # type: (Union[str, List[str]], SphinxConfig, Sphinx, str, str, Any, Any) -> None
    def __init__(self, docstring: Union[str, List[str]], config: SphinxConfig = None,
                 app: Sphinx = None, what: str = '', name: str = '',
                 obj: Any = None, options: Any = None) -> None:
        self._directive_sections = ['.. index::']
        super().__init__(docstring, config, app, what, name, obj, options)

    def _consume_field(self, parse_type=True, prefer_type=False):
        # type: (bool, bool) -> Tuple[str, str, List[str]]
    def _consume_field(self, parse_type: bool = True, prefer_type: bool = False
                       ) -> Tuple[str, str, List[str]]:
        line = next(self._line_iter)
        if parse_type:
            _name, _, _type = self._partition_field_on_colon(line)
@ -933,20 +882,17 @@ class NumpyDocstring(GoogleDocstring):
            _desc = self.__class__(_desc, self._config).lines()
        return _name, _type, _desc

    def _consume_returns_section(self):
        # type: () -> List[Tuple[str, str, List[str]]]
    def _consume_returns_section(self) -> List[Tuple[str, str, List[str]]]:
        return self._consume_fields(prefer_type=True)

    def _consume_section_header(self):
        # type: () -> str
    def _consume_section_header(self) -> str:
        section = next(self._line_iter)
        if not _directive_regex.match(section):
            # Consume the header underline
            next(self._line_iter)
        return section

    def _is_section_break(self):
        # type: () -> bool
    def _is_section_break(self) -> bool:
        line1, line2 = self._line_iter.peek(2)
        return (not self._line_iter.has_next() or
                self._is_section_header() or
@ -955,8 +901,7 @@ class NumpyDocstring(GoogleDocstring):
                    line1 and
                    not self._is_indented(line1, self._section_indent)))

    def _is_section_header(self):
        # type: () -> bool
    def _is_section_header(self) -> bool:
        section, underline = self._line_iter.peek(2)
        section = section.lower()
        if section in self._sections and isinstance(underline, str):
@ -968,16 +913,14 @@ class NumpyDocstring(GoogleDocstring):
                        return True
        return False

    def _parse_see_also_section(self, section):
        # type: (str) -> List[str]
    def _parse_see_also_section(self, section: str) -> List[str]:
        lines = self._consume_to_next_section()
        try:
            return self._parse_numpydoc_see_also_section(lines)
        except ValueError:
            return self._format_admonition('seealso', lines)

    def _parse_numpydoc_see_also_section(self, content):
        # type: (List[str]) -> List[str]
    def _parse_numpydoc_see_also_section(self, content: List[str]) -> List[str]:
        """
        Derived from the NumpyDoc implementation of _parse_see_also.

@ -991,8 +934,7 @@ class NumpyDocstring(GoogleDocstring):
        """
        items = []

        def parse_item_name(text):
            # type: (str) -> Tuple[str, str]
        def parse_item_name(text: str) -> Tuple[str, str]:
            """Match ':role:`name`' or 'name'"""
            m = self._name_rgx.match(text)
            if m:
@ -1003,8 +945,7 @@ class NumpyDocstring(GoogleDocstring):
                return g[2], g[1]
            raise ValueError("%s is not a item name" % text)

        def push_item(name, rest):
            # type: (str, List[str]) -> None
        def push_item(name: str, rest: List[str]) -> None:
            if not name:
                return
            name, role = parse_item_name(name)
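The annotation changes above are mechanical, so GoogleDocstring keeps its existing behaviour of turning a Google-style docstring into reStructuredText. A minimal standalone sketch of assumed usage for illustration (the Config options shown are the usual napoleon settings, not part of this diff):

# Sketch only: GoogleDocstring renders a Google-style docstring as reST.
from sphinx.ext.napoleon import Config, GoogleDocstring

config = Config(napoleon_use_param=True, napoleon_use_rtype=True)
docstring = """Example function.

Args:
    arg1 (int): Description of arg1.

Returns:
    bool: True if successful.
"""
# str(...) joins GoogleDocstring.lines() with newlines, yielding field
# lists such as ":param arg1:" and ":returns:".
print(GoogleDocstring(docstring, config))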

@ -11,10 +11,7 @@
"""

import collections

if False:
    # For type annotation
    from typing import Any, Iterable  # NOQA
from typing import Any, Iterable


class peek_iter:
@ -50,8 +47,7 @@ class peek_iter:
        be set to a new object instance: ``object()``.

    """
    def __init__(self, *args):
        # type: (Any) -> None
    def __init__(self, *args) -> None:
        """__init__(o, sentinel=None)"""
        self._iterable = iter(*args)  # type: Iterable
        self._cache = collections.deque()  # type: collections.deque
@ -60,18 +56,15 @@ class peek_iter:
        else:
            self.sentinel = object()

    def __iter__(self):
        # type: () -> peek_iter
    def __iter__(self) -> "peek_iter":
        return self

    def __next__(self, n=None):
        # type: (int) -> Any
    def __next__(self, n: int = None) -> Any:
        # note: prevent 2to3 to transform self.next() in next(self) which
        # causes an infinite loop !
        return getattr(self, 'next')(n)

    def _fillcache(self, n):
        # type: (int) -> None
    def _fillcache(self, n: int) -> None:
        """Cache `n` items. If `n` is 0 or None, then 1 item is cached."""
        if not n:
            n = 1
@ -82,8 +75,7 @@ class peek_iter:
            while len(self._cache) < n:
                self._cache.append(self.sentinel)

    def has_next(self):
        # type: () -> bool
    def has_next(self) -> bool:
        """Determine if iterator is exhausted.

        Returns
@ -98,8 +90,7 @@ class peek_iter:
        """
        return self.peek() != self.sentinel

    def next(self, n=None):
        # type: (int) -> Any
    def next(self, n: int = None) -> Any:
        """Get the next item or `n` items of the iterator.

        Parameters
@ -134,8 +125,7 @@ class peek_iter:
            result = [self._cache.popleft() for i in range(n)]
        return result

    def peek(self, n=None):
        # type: (int) -> Any
    def peek(self, n: int = None) -> Any:
        """Preview the next item or `n` items of the iterator.

        The iterator is not advanced when peek is called.
@ -218,8 +208,7 @@ class modify_iter(peek_iter):
        "whitespace."

    """
    def __init__(self, *args, **kwargs):
        # type: (Any, Any) -> None
    def __init__(self, *args, **kwargs) -> None:
        """__init__(o, sentinel=None, modifier=lambda x: x)"""
        if 'modifier' in kwargs:
            self.modifier = kwargs['modifier']
@ -233,8 +222,7 @@ class modify_iter(peek_iter):
                            'modifier must be callable')
        super().__init__(*args)

    def _fillcache(self, n):
        # type: (int) -> None
    def _fillcache(self, n: int) -> None:
        """Cache `n` modified items. If `n` is 0 or None, 1 item is cached.

        Each item returned by the iterator is passed through the
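For context, the two helpers touched above are small look-ahead wrappers around built-in iteration; a rough sketch of how they behave, assuming standalone usage and mirroring their docstring examples:

# Sketch only: peek without consuming, and strip items via a modifier.
from sphinx.ext.napoleon.iterators import modify_iter, peek_iter

it = peek_iter(['Args:', '    x (int): value'])
assert it.peek() == 'Args:'        # look ahead, iterator not advanced
assert it.has_next()
assert next(it) == 'Args:'         # now the item is consumed

stripped = modify_iter(['  a  ', '  b  '], modifier=lambda s: s.strip())
assert list(stripped) == ['a', 'b']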

@ -12,29 +12,27 @@
"""

import warnings
from typing import Any, Dict, Iterable, List, Tuple
from typing import cast

from docutils import nodes
from docutils.nodes import Element, Node
from docutils.parsers.rst import directives
from docutils.parsers.rst.directives.admonitions import BaseAdmonition

import sphinx
from sphinx.application import Sphinx
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.domains import Domain
from sphinx.environment import BuildEnvironment
from sphinx.errors import NoUri
from sphinx.locale import _, __
from sphinx.util import logging
from sphinx.util.docutils import SphinxDirective
from sphinx.util.nodes import make_refnode
from sphinx.util.texescape import tex_escape_map

if False:
    # For type annotation
    from typing import Any, Dict, Iterable, List, Tuple  # NOQA
    from sphinx.application import Sphinx  # NOQA
    from sphinx.environment import BuildEnvironment  # NOQA
    from sphinx.writers.html import HTMLTranslator  # NOQA
    from sphinx.writers.latex import LaTeXTranslator  # NOQA
from sphinx.writers.html import HTMLTranslator
from sphinx.writers.latex import LaTeXTranslator

logger = logging.getLogger(__name__)

@ -62,12 +60,11 @@ class Todo(BaseAdmonition, SphinxDirective):
        'name': directives.unchanged,
    }

    def run(self):
        # type: () -> List[nodes.Node]
    def run(self) -> List[Node]:
        if not self.options.get('class'):
            self.options['class'] = ['admonition-todo']

        (todo,) = super().run()  # type: Tuple[nodes.Node]
        (todo,) = super().run()  # type: Tuple[Node]
        if isinstance(todo, nodes.system_message):
            return [todo]
        elif isinstance(todo, todo_node):
@ -86,21 +83,18 @@ class TodoDomain:
    label = 'todo'

    @property
    def todos(self):
        # type: () -> Dict[str, List[todo_node]]
    def todos(self) -> Dict[str, List[todo_node]]:
        return self.data.setdefault('todos', {})

    def clear_doc(self, docname):
        # type: (str) -> None
    def clear_doc(self, docname: str) -> None:
        self.todos.pop(docname, None)

    def merge_domaindata(self, docnames, otherdata):
        # type: (List[str], Dict) -> None
    def merge_domaindata(self, docnames: List[str], otherdata: Dict) -> None:
        for docname in docnames:
            self.todos[docname] = otherdata['todos'][docname]

    def process_doc(self, env, docname, document):
        # type: (BuildEnvironment, str, nodes.document) -> None
    def process_doc(self, env: BuildEnvironment, docname: str,
                    document: nodes.document) -> None:
        todos = self.todos.setdefault(docname, [])
        for todo in document.traverse(todo_node):
            env.app.emit('todo-defined', todo)
@ -111,8 +105,7 @@ class TodoDomain:
                               location=todo)


def process_todos(app, doctree):
    # type: (Sphinx, nodes.document) -> None
def process_todos(app: Sphinx, doctree: nodes.document) -> None:
    warnings.warn('process_todos() is deprecated.', RemovedInSphinx40Warning)
    # collect all todos in the environment
    # this is not done in the directive itself because it some transformations
@ -150,16 +143,14 @@ class TodoList(SphinxDirective):
    final_argument_whitespace = False
    option_spec = {}  # type: Dict

    def run(self):
        # type: () -> List[nodes.Node]
    def run(self) -> List[Node]:
        # Simply insert an empty todolist node which will be replaced later
        # when process_todo_nodes is called
        return [todolist('')]


class TodoListProcessor:
    def __init__(self, app, doctree, docname):
        # type: (Sphinx, nodes.document, str) -> None
    def __init__(self, app: Sphinx, doctree: nodes.document, docname: str) -> None:
        self.builder = app.builder
        self.config = app.config
        self.env = app.env
@ -167,8 +158,7 @@ class TodoListProcessor:

        self.process(doctree, docname)

    def process(self, doctree, docname):
        # type: (nodes.document, str) -> None
    def process(self, doctree: nodes.document, docname: str) -> None:
        todos = sum(self.domain.todos.values(), [])
        for node in doctree.traverse(todolist):
            if not self.config.todo_include_todos:
@ -176,7 +166,7 @@ class TodoListProcessor:
                continue

            if node.get('ids'):
                content = [nodes.target()]  # type: List[nodes.Element]
                content = [nodes.target()]  # type: List[Element]
            else:
                content = []

@ -194,8 +184,7 @@ class TodoListProcessor:

            node.replace_self(content)

    def create_todo_reference(self, todo, docname):
        # type: (todo_node, str) -> nodes.paragraph
    def create_todo_reference(self, todo: todo_node, docname: str) -> nodes.paragraph:
        if self.config.todo_link_only:
            description = _('<<original entry>>')
        else:
@ -224,8 +213,7 @@ class TodoListProcessor:
        return para


def process_todo_nodes(app, doctree, fromdocname):
    # type: (Sphinx, nodes.document, str) -> None
def process_todo_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str) -> None:
    """Replace all todolist nodes with a list of the collected todos.
    Augment each todo with a backlink to the original location.
    """
@ -236,7 +224,7 @@ def process_todo_nodes(app, doctree, fromdocname):

    for node in doctree.traverse(todolist):
        if node.get('ids'):
            content = [nodes.target()]  # type: List[nodes.Element]
            content = [nodes.target()]  # type: List[Element]
        else:
            content = []

@ -280,8 +268,7 @@ def process_todo_nodes(app, doctree, fromdocname):
        node.replace_self(content)


def purge_todos(app, env, docname):
    # type: (Sphinx, BuildEnvironment, str) -> None
def purge_todos(app: Sphinx, env: BuildEnvironment, docname: str) -> None:
    warnings.warn('purge_todos() is deprecated.', RemovedInSphinx40Warning)
    if not hasattr(env, 'todo_all_todos'):
        return
@ -289,8 +276,8 @@ def purge_todos(app, env, docname):
                          if todo['docname'] != docname]


def merge_info(app, env, docnames, other):
    # type: (Sphinx, BuildEnvironment, Iterable[str], BuildEnvironment) -> None
def merge_info(app: Sphinx, env: BuildEnvironment, docnames: Iterable[str],
               other: BuildEnvironment) -> None:
    warnings.warn('merge_info() is deprecated.', RemovedInSphinx40Warning)
    if not hasattr(other, 'todo_all_todos'):
        return
@ -299,21 +286,18 @@ def merge_info(app, env, docnames, other):
    env.todo_all_todos.extend(other.todo_all_todos)  # type: ignore


def visit_todo_node(self, node):
    # type: (HTMLTranslator, todo_node) -> None
def visit_todo_node(self: HTMLTranslator, node: todo_node) -> None:
    if self.config.todo_include_todos:
        self.visit_admonition(node)
    else:
        raise nodes.SkipNode


def depart_todo_node(self, node):
    # type: (HTMLTranslator, todo_node) -> None
def depart_todo_node(self: HTMLTranslator, node: todo_node) -> None:
    self.depart_admonition(node)


def latex_visit_todo_node(self, node):
    # type: (LaTeXTranslator, todo_node) -> None
def latex_visit_todo_node(self: LaTeXTranslator, node: todo_node) -> None:
    if self.config.todo_include_todos:
        self.body.append('\n\\begin{sphinxadmonition}{note}{')
        self.body.append(self.hypertarget_to(node))
@ -324,13 +308,11 @@ def latex_visit_todo_node(self, node):
        raise nodes.SkipNode


def latex_depart_todo_node(self, node):
    # type: (LaTeXTranslator, todo_node) -> None
def latex_depart_todo_node(self: LaTeXTranslator, node: todo_node) -> None:
    self.body.append('\\end{sphinxadmonition}\n')


def setup(app):
    # type: (Sphinx) -> Dict[str, Any]
def setup(app: Sphinx) -> Dict[str, Any]:
    app.add_event('todo-defined')
    app.add_config_value('todo_include_todos', False, 'html')
    app.add_config_value('todo_link_only', False, 'html')
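The setup() shown above only registers the extension's machinery; the config values it adds are what a project toggles in conf.py. A hedged sketch of assumed typical usage, not part of this diff:

# conf.py
extensions = ['sphinx.ext.todo']

todo_include_todos = True   # actually render todo and todolist directives
todo_link_only = False      # backlinks include the original todo text, not just a link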

@ -9,28 +9,25 @@
"""

import traceback
from typing import Any, Dict, Iterable, Iterator, Set, Tuple

from docutils import nodes
from docutils.nodes import Element, Node

import sphinx
from sphinx import addnodes
from sphinx.application import Sphinx
from sphinx.environment import BuildEnvironment
from sphinx.locale import _, __
from sphinx.pycode import ModuleAnalyzer
from sphinx.util import get_full_modname, logging, status_iterator
from sphinx.util.nodes import make_refnode

if False:
    # For type annotation
    from typing import Any, Dict, Iterable, Iterator, Set, Tuple  # NOQA
    from sphinx.application import Sphinx  # NOQA
    from sphinx.config import Config  # NOQA
    from sphinx.environment import BuildEnvironment  # NOQA

logger = logging.getLogger(__name__)


def _get_full_modname(app, modname, attribute):
    # type: (Sphinx, str, str) -> str
def _get_full_modname(app: Sphinx, modname: str, attribute: str) -> str:
    try:
        return get_full_modname(modname, attribute)
    except AttributeError:
@ -48,8 +45,7 @@ def _get_full_modname(app, modname, attribute):
        return None


def doctree_read(app, doctree):
    # type: (Sphinx, nodes.Node) -> None
def doctree_read(app: Sphinx, doctree: Node) -> None:
    env = app.builder.env
    if not hasattr(env, '_viewcode_modules'):
        env._viewcode_modules = {}  # type: ignore
@ -120,8 +116,8 @@ def doctree_read(app, doctree):
            signode += onlynode


def env_merge_info(app, env, docnames, other):
    # type: (Sphinx, BuildEnvironment, Iterable[str], BuildEnvironment) -> None
def env_merge_info(app: Sphinx, env: BuildEnvironment, docnames: Iterable[str],
                   other: BuildEnvironment) -> None:
    if not hasattr(other, '_viewcode_modules'):
        return
    # create a _viewcode_modules dict on the main environment
@ -131,8 +127,8 @@ def env_merge_info(app, env, docnames, other):
    env._viewcode_modules.update(other._viewcode_modules)  # type: ignore


def missing_reference(app, env, node, contnode):
    # type: (Sphinx, BuildEnvironment, nodes.Element, nodes.Node) -> nodes.Node
def missing_reference(app: Sphinx, env: BuildEnvironment, node: Element, contnode: Node
                      ) -> Node:
    # resolve our "viewcode" reference nodes -- they need special treatment
    if node['reftype'] == 'viewcode':
        return make_refnode(app.builder, node['refdoc'], node['reftarget'],
@ -141,8 +137,7 @@ def missing_reference(app, env, node, contnode):
    return None


def collect_pages(app):
    # type: (Sphinx) -> Iterator[Tuple[str, Dict[str, Any], str]]
def collect_pages(app: Sphinx) -> Iterator[Tuple[str, Dict[str, Any], str]]:
    env = app.builder.env
    if not hasattr(env, '_viewcode_modules'):
        return
@ -236,8 +231,7 @@ def collect_pages(app):
    yield ('_modules/index', context, 'page.html')


def setup(app):
    # type: (Sphinx) -> Dict[str, Any]
def setup(app: Sphinx) -> Dict[str, Any]:
    app.add_config_value('viewcode_import', None, False)
    app.add_config_value('viewcode_enable_epub', False, False)
    app.add_config_value('viewcode_follow_imported_members', True, False)
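Likewise for viewcode: the config values registered in its setup() map to conf.py switches. A sketch of assumed typical usage, for illustration only:

# conf.py
extensions = ['sphinx.ext.viewcode']

viewcode_follow_imported_members = True   # resolve objects documented via imports
viewcode_enable_epub = False              # skip generated module pages for the epub builder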