Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

Commit d717f5ae31: Merge branch '2.0'
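The diff below updates CHANGES for the 2.3.1 release and, in the Python sources touched by the merge, replaces comment-style type hints with inline Python 3 annotations. As a rough illustration of that pattern (the function here is invented, not taken from the commit):

from typing import List


# Before the migration: Python 2-compatible comment hints
def head(entries, count=1):
    # type: (List[str], int) -> List[str]
    return entries[:count]


# After the migration: inline annotations, as applied throughout this commit
def head_annotated(entries: List[str], count: int = 1) -> List[str]:
    return entries[:count]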
CHANGES
10
CHANGES
@ -68,7 +68,7 @@ Bugs fixed
|
|||||||
Testing
|
Testing
|
||||||
--------
|
--------
|
||||||
|
|
||||||
Release 2.3.1 (in development)
|
Release 2.3.2 (in development)
|
||||||
==============================
|
==============================
|
||||||
|
|
||||||
Dependencies
|
Dependencies
|
||||||
@ -89,6 +89,14 @@ Bugs fixed
|
|||||||
Testing
|
Testing
|
||||||
--------
|
--------
|
||||||
|
|
||||||
|
Release 2.3.1 (released Dec 22, 2019)
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
Bugs fixed
|
||||||
|
----------
|
||||||
|
|
||||||
|
* #6936: sphinx-autogen: raises AttributeError
|
||||||
|
|
||||||
Release 2.3.0 (released Dec 15, 2019)
|
Release 2.3.0 (released Dec 15, 2019)
|
||||||
=====================================
|
=====================================
|
||||||
|
|
||||||
sphinx/addnodes.py

@@ -9,6 +9,7 @@
 """
 
 import warnings
+from typing import Any, Dict, List, Sequence
 
 from docutils import nodes
 
@@ -16,8 +17,7 @@ from sphinx.deprecation import RemovedInSphinx40Warning
 
 if False:
     # For type annotation
-    from typing import Any, Dict, List, Sequence  # NOQA
-    from sphinx.application import Sphinx  # NOQA
+    from sphinx.application import Sphinx
 
 
 class translatable(nodes.Node):
@@ -34,18 +34,15 @@ class translatable(nodes.Node):
     Because they are used at final step; extraction.
     """
 
-    def preserve_original_messages(self):
-        # type: () -> None
+    def preserve_original_messages(self) -> None:
         """Preserve original translatable messages."""
         raise NotImplementedError
 
-    def apply_translated_message(self, original_message, translated_message):
-        # type: (str, str) -> None
+    def apply_translated_message(self, original_message: str, translated_message: str) -> None:
         """Apply translated message."""
         raise NotImplementedError
 
-    def extract_original_messages(self):
-        # type: () -> Sequence[str]
+    def extract_original_messages(self) -> Sequence[str]:
         """Extract translation messages.
 
         :returns: list of extracted messages or messages generator
@@ -61,8 +58,7 @@ class not_smartquotable:
 class toctree(nodes.General, nodes.Element, translatable):
     """Node for inserting a "TOC tree"."""
 
-    def preserve_original_messages(self):
-        # type: () -> None
+    def preserve_original_messages(self) -> None:
         # toctree entries
         rawentries = self.setdefault('rawentries', [])
         for title, docname in self['entries']:
@@ -73,8 +69,7 @@ class toctree(nodes.General, nodes.Element, translatable):
         if self.get('caption'):
             self['rawcaption'] = self['caption']
 
-    def apply_translated_message(self, original_message, translated_message):
-        # type: (str, str) -> None
+    def apply_translated_message(self, original_message: str, translated_message: str) -> None:
         # toctree entries
         for i, (title, docname) in enumerate(self['entries']):
             if title == original_message:
@@ -84,8 +79,7 @@ class toctree(nodes.General, nodes.Element, translatable):
         if self.get('rawcaption') == original_message:
             self['caption'] = translated_message
 
-    def extract_original_messages(self):
-        # type: () -> List[str]
+    def extract_original_messages(self) -> List[str]:
         messages = []  # type: List[str]
 
         # toctree entries
@@ -143,8 +137,7 @@ class desc_type(nodes.Part, nodes.Inline, nodes.FixedTextElement):
 
 class desc_returns(desc_type):
     """Node for a "returns" annotation (a la -> in Python)."""
-    def astext(self):
-        # type: () -> str
+    def astext(self) -> str:
         return ' -> ' + super().astext()
 
 
@@ -165,8 +158,7 @@ class desc_optional(nodes.Part, nodes.Inline, nodes.FixedTextElement):
     """Node for marking optional parts of the parameter list."""
     child_text_separator = ', '
 
-    def astext(self):
-        # type: () -> str
+    def astext(self) -> str:
         return '[' + super().astext() + ']'
 
 
@@ -313,8 +305,7 @@ class abbreviation(nodes.abbreviation):
     .. deprecated:: 2.0
     """
 
-    def __init__(self, rawsource='', text='', *children, **attributes):
-        # type: (str, str, *nodes.Node, **Any) -> None
+    def __init__(self, rawsource: str = '', text: str = '', *children, **attributes) -> None:
         warnings.warn("abbrevition node for Sphinx was replaced by docutils'.",
                       RemovedInSphinx40Warning, stacklevel=2)
 
@@ -325,8 +316,7 @@ class manpage(nodes.Inline, nodes.FixedTextElement):
    """Node for references to manpages."""
 
 
-def setup(app):
-    # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
     app.add_node(toctree)
     app.add_node(desc)
     app.add_node(desc_signature)
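The translatable interface retyped above is what custom nodes implement so that i18n extraction can find and replace their messages. A minimal sketch of a hypothetical subclass (the titled_note node and its attributes are invented for illustration, not part of the commit):

from typing import List

from docutils import nodes

from sphinx.addnodes import translatable


class titled_note(nodes.Element, translatable):
    """Hypothetical node whose 'title' attribute should be translated."""

    def preserve_original_messages(self) -> None:
        # remember the untranslated title so extraction can still see it
        self['rawtitle'] = self.get('title', '')

    def apply_translated_message(self, original_message: str, translated_message: str) -> None:
        if self.get('rawtitle') == original_message:
            self['title'] = translated_message

    def extract_original_messages(self) -> List[str]:
        rawtitle = self.get('rawtitle')
        return [rawtitle] if rawtitle else []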
sphinx/application.py

@@ -18,8 +18,12 @@ import warnings
 from collections import deque
 from io import StringIO
 from os import path
+from typing import Any, Callable, Dict, IO, List, Tuple, Union
 
+from docutils import nodes
+from docutils.nodes import Element, TextElement
 from docutils.parsers.rst import Directive, roles
+from docutils.transforms import Transform
 from pygments.lexer import Lexer
 
 import sphinx
@@ -27,12 +31,16 @@ from sphinx import package_dir, locale
 from sphinx.config import Config
 from sphinx.deprecation import RemovedInSphinx40Warning
 from sphinx.environment import BuildEnvironment
+from sphinx.environment.collectors import EnvironmentCollector
 from sphinx.errors import ApplicationError, ConfigError, VersionRequirementError
 from sphinx.events import EventManager
+from sphinx.extension import Extension
 from sphinx.highlighting import lexer_classes, lexers
 from sphinx.locale import __
 from sphinx.project import Project
 from sphinx.registry import SphinxComponentRegistry
+from sphinx.roles import XRefRole
+from sphinx.theming import Theme
 from sphinx.util import docutils
 from sphinx.util import logging
 from sphinx.util import progress_message
@@ -42,21 +50,14 @@ from sphinx.util.i18n import CatalogRepository
 from sphinx.util.logging import prefixed_warnings
 from sphinx.util.osutil import abspath, ensuredir, relpath
 from sphinx.util.tags import Tags
+from sphinx.util.typing import RoleFunction, TitleGetter
 
 if False:
     # For type annotation
-    from typing import Any, Callable, Dict, IO, Iterable, Iterator, List, Tuple, Union  # NOQA
+    from docutils.nodes import Node  # NOQA
     from typing import Type  # for python3.5.1
-    from docutils import nodes  # NOQA
-    from docutils.parsers import Parser  # NOQA
-    from docutils.transforms import Transform  # NOQA
-    from sphinx.builders import Builder  # NOQA
-    from sphinx.domains import Domain, Index  # NOQA
-    from sphinx.environment.collectors import EnvironmentCollector  # NOQA
-    from sphinx.extension import Extension  # NOQA
-    from sphinx.roles import XRefRole  # NOQA
-    from sphinx.theming import Theme  # NOQA
-    from sphinx.util.typing import RoleFunction, TitleGetter  # NOQA
+    from sphinx.builders import Builder
 
 builtin_extensions = (
     'sphinx.addnodes',
@@ -132,11 +133,11 @@ class Sphinx:
     :ivar outdir: Directory for storing build documents.
     """
 
-    def __init__(self, srcdir, confdir, outdir, doctreedir, buildername,
-                 confoverrides=None, status=sys.stdout, warning=sys.stderr,
-                 freshenv=False, warningiserror=False, tags=None, verbosity=0,
-                 parallel=0, keep_going=False):
-        # type: (str, str, str, str, str, Dict, IO, IO, bool, bool, List[str], int, int, bool) -> None  # NOQA
+    def __init__(self, srcdir: str, confdir: str, outdir: str, doctreedir: str,
+                 buildername: str, confoverrides: Dict = None,
+                 status: IO = sys.stdout, warning: IO = sys.stderr,
+                 freshenv: bool = False, warningiserror: bool = False, tags: List[str] = None,
+                 verbosity: int = 0, parallel: int = 0, keep_going: bool = False) -> None:
         self.phase = BuildPhase.INITIALIZATION
         self.verbosity = verbosity
         self.extensions = {}  # type: Dict[str, Extension]
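The constructor signature rewritten just above is also the entry point for driving a build programmatically; a hedged sketch (the directory names are placeholders, not part of the diff):

from sphinx.application import Sphinx

app = Sphinx(srcdir='docs', confdir='docs',
             outdir='docs/_build/html', doctreedir='docs/_build/doctrees',
             buildername='html', freshenv=True, parallel=2)
app.build(force_all=True)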
@@ -270,8 +271,7 @@ class Sphinx:
         # set up the builder
         self._init_builder()
 
-    def _init_i18n(self):
-        # type: () -> None
+    def _init_i18n(self) -> None:
         """Load translated strings from the configured localedirs if enabled in
         the configuration.
         """
@@ -296,8 +296,7 @@ class Sphinx:
         else:
             logger.info(__('not available for built-in messages'))
 
-    def _init_env(self, freshenv):
-        # type: (bool) -> None
+    def _init_env(self, freshenv: bool) -> None:
         filename = path.join(self.doctreedir, ENV_PICKLE_FILENAME)
         if freshenv or not os.path.exists(filename):
             self.env = BuildEnvironment()
@@ -313,28 +312,24 @@ class Sphinx:
             logger.info(__('failed: %s'), err)
             self._init_env(freshenv=True)
 
-    def preload_builder(self, name):
-        # type: (str) -> None
+    def preload_builder(self, name: str) -> None:
         self.registry.preload_builder(self, name)
 
-    def create_builder(self, name):
-        # type: (str) -> Builder
+    def create_builder(self, name: str) -> "Builder":
         if name is None:
             logger.info(__('No builder selected, using default: html'))
             name = 'html'
 
         return self.registry.create_builder(self, name)
 
-    def _init_builder(self):
-        # type: () -> None
+    def _init_builder(self) -> None:
         self.builder.set_environment(self.env)
         self.builder.init()
         self.events.emit('builder-inited')
 
     # ---- main "build" method -------------------------------------------------
 
-    def build(self, force_all=False, filenames=None):
-        # type: (bool, List[str]) -> None
+    def build(self, force_all: bool = False, filenames: List[str] = None) -> None:
         self.phase = BuildPhase.READING
         try:
             if force_all:
@@ -385,8 +380,7 @@ class Sphinx:
 
     # ---- general extensibility interface -------------------------------------
 
-    def setup_extension(self, extname):
-        # type: (str) -> None
+    def setup_extension(self, extname: str) -> None:
         """Import and setup a Sphinx extension module.
 
         Load the extension given by the module *name*. Use this if your
@@ -396,8 +390,7 @@ class Sphinx:
         logger.debug('[app] setting up extension: %r', extname)
         self.registry.load_extension(self, extname)
 
-    def require_sphinx(self, version):
-        # type: (str) -> None
+    def require_sphinx(self, version: str) -> None:
         """Check the Sphinx version if requested.
 
         Compare *version* (which must be a ``major.minor`` version string, e.g.
@@ -410,8 +403,7 @@ class Sphinx:
             raise VersionRequirementError(version)
 
     # event interface
-    def connect(self, event, callback):
-        # type: (str, Callable) -> int
+    def connect(self, event: str, callback: Callable) -> int:
         """Register *callback* to be called when *event* is emitted.
 
         For details on available core events and the arguments of callback
@@ -424,14 +416,12 @@ class Sphinx:
         logger.debug('[app] connecting event %r: %r [id=%s]', event, callback, listener_id)
         return listener_id
 
-    def disconnect(self, listener_id):
-        # type: (int) -> None
+    def disconnect(self, listener_id: int) -> None:
         """Unregister callback by *listener_id*."""
         logger.debug('[app] disconnecting event: [id=%s]', listener_id)
         self.events.disconnect(listener_id)
 
-    def emit(self, event, *args):
-        # type: (str, Any) -> List
+    def emit(self, event: str, *args) -> List:
         """Emit *event* and pass *arguments* to the callback functions.
 
         Return the return values of all callbacks as a list. Do not emit core
@@ -439,8 +429,7 @@ class Sphinx:
         """
         return self.events.emit(event, *args)
 
-    def emit_firstresult(self, event, *args):
-        # type: (str, Any) -> Any
+    def emit_firstresult(self, event: str, *args) -> Any:
         """Emit *event* and pass *arguments* to the callback functions.
 
         Return the result of the first callback that doesn't return ``None``.
@@ -451,8 +440,7 @@ class Sphinx:
 
     # registering addon parts
 
-    def add_builder(self, builder, override=False):
-        # type: (Type[Builder], bool) -> None
+    def add_builder(self, builder: "Type[Builder]", override: bool = False) -> None:
         """Register a new builder.
 
         *builder* must be a class that inherits from
@@ -464,8 +452,8 @@ class Sphinx:
         self.registry.add_builder(builder, override=override)
 
     # TODO(stephenfin): Describe 'types' parameter
-    def add_config_value(self, name, default, rebuild, types=()):
-        # type: (str, Any, Union[bool, str], Any) -> None
+    def add_config_value(self, name: str, default: Any, rebuild: Union[bool, str],
+                         types: Any = ()) -> None:
         """Register a configuration value.
 
         This is necessary for Sphinx to recognize new values and set default
@@ -497,8 +485,7 @@ class Sphinx:
             rebuild = 'env' if rebuild else ''
         self.config.add(name, default, rebuild, types)
 
-    def add_event(self, name):
-        # type: (str) -> None
+    def add_event(self, name: str) -> None:
         """Register an event called *name*.
 
         This is needed to be able to emit it.
@@ -506,8 +493,8 @@ class Sphinx:
         logger.debug('[app] adding event: %r', name)
         self.events.add(name)
 
-    def set_translator(self, name, translator_class, override=False):
-        # type: (str, Type[nodes.NodeVisitor], bool) -> None
+    def set_translator(self, name: str, translator_class: "Type[nodes.NodeVisitor]",
+                       override: bool = False) -> None:
         """Register or override a Docutils translator class.
 
         This is used to register a custom output translator or to replace a
@@ -520,8 +507,7 @@ class Sphinx:
         """
         self.registry.add_translator(name, translator_class, override=override)
 
-    def add_node(self, node, override=False, **kwds):
-        # type: (Type[nodes.Element], bool, Any) -> None
+    def add_node(self, node: "Type[Element]", override: bool = False, **kwds) -> None:
         """Register a Docutils node class.
 
         This is necessary for Docutils internals. It may also be used in the
@@ -559,8 +545,9 @@ class Sphinx:
         docutils.register_node(node)
         self.registry.add_translation_handlers(node, **kwds)
 
-    def add_enumerable_node(self, node, figtype, title_getter=None, override=False, **kwds):
-        # type: (Type[nodes.Element], str, TitleGetter, bool, Any) -> None
+    def add_enumerable_node(self, node: "Type[Element]", figtype: str,
+                            title_getter: TitleGetter = None, override: bool = False,
+                            **kwds) -> None:
         """Register a Docutils node class as a numfig target.
 
         Sphinx numbers the node automatically. And then the users can refer it
@@ -587,8 +574,7 @@ class Sphinx:
         self.registry.add_enumerable_node(node, figtype, title_getter, override=override)
         self.add_node(node, override=override, **kwds)
 
-    def add_directive(self, name, cls, override=False):
-        # type: (str, Type[Directive], bool) -> None
+    def add_directive(self, name: str, cls: "Type[Directive]", override: bool = False):
         """Register a Docutils directive.
 
         *name* must be the prospective directive name. *cls* is a directive
@@ -632,8 +618,7 @@ class Sphinx:
 
         docutils.register_directive(name, cls)
 
-    def add_role(self, name, role, override=False):
-        # type: (str, Any, bool) -> None
+    def add_role(self, name: str, role: Any, override: bool = False) -> None:
         """Register a Docutils role.
 
         *name* must be the role name that occurs in the source, *role* the role
@@ -650,8 +635,7 @@ class Sphinx:
                        name, type='app', subtype='add_role')
         docutils.register_role(name, role)
 
-    def add_generic_role(self, name, nodeclass, override=False):
-        # type: (str, Any, bool) -> None
+    def add_generic_role(self, name: str, nodeclass: Any, override: bool = False) -> None:
         """Register a generic Docutils role.
 
         Register a Docutils role that does nothing but wrap its contents in the
@@ -670,8 +654,7 @@ class Sphinx:
         role = roles.GenericRole(name, nodeclass)
         docutils.register_role(name, role)
 
-    def add_domain(self, domain, override=False):
-        # type: (Type[Domain], bool) -> None
+    def add_domain(self, domain: "Type[Domain]", override: bool = False) -> None:
         """Register a domain.
 
         Make the given *domain* (which must be a class; more precisely, a
@@ -683,8 +666,8 @@ class Sphinx:
         """
         self.registry.add_domain(domain, override=override)
 
-    def add_directive_to_domain(self, domain, name, cls, override=False):
-        # type: (str, str, Type[Directive], bool) -> None
+    def add_directive_to_domain(self, domain: str, name: str,
+                                cls: "Type[Directive]", override: bool = False) -> None:
         """Register a Docutils directive in a domain.
 
         Like :meth:`add_directive`, but the directive is added to the domain
@@ -696,8 +679,8 @@ class Sphinx:
         """
         self.registry.add_directive_to_domain(domain, name, cls, override=override)
 
-    def add_role_to_domain(self, domain, name, role, override=False):
-        # type: (str, str, Union[RoleFunction, XRefRole], bool) -> None
+    def add_role_to_domain(self, domain: str, name: str, role: Union[RoleFunction, XRefRole],
+                           override: bool = False) -> None:
         """Register a Docutils role in a domain.
 
         Like :meth:`add_role`, but the role is added to the domain named
@@ -709,8 +692,8 @@ class Sphinx:
         """
         self.registry.add_role_to_domain(domain, name, role, override=override)
 
-    def add_index_to_domain(self, domain, index, override=False):
-        # type: (str, Type[Index], bool) -> None
+    def add_index_to_domain(self, domain: str, index: "Type[Index]", override: bool = False
+                            ) -> None:
         """Register a custom index for a domain.
 
         Add a custom *index* class to the domain named *domain*. *index* must
@@ -722,10 +705,10 @@ class Sphinx:
         """
         self.registry.add_index_to_domain(domain, index)
 
-    def add_object_type(self, directivename, rolename, indextemplate='',
-                        parse_node=None, ref_nodeclass=None, objname='',
-                        doc_field_types=[], override=False):
-        # type: (str, str, str, Callable, Type[nodes.TextElement], str, List, bool) -> None
+    def add_object_type(self, directivename: str, rolename: str, indextemplate: str = '',
+                        parse_node: Callable = None, ref_nodeclass: "Type[TextElement]" = None,
+                        objname: str = '', doc_field_types: List = [], override: bool = False
+                        ) -> None:
         """Register a new object type.
 
         This method is a very convenient way to add a new :term:`object` type
@@ -786,9 +769,9 @@ class Sphinx:
                                       ref_nodeclass, objname, doc_field_types,
                                       override=override)
 
-    def add_crossref_type(self, directivename, rolename, indextemplate='',
-                          ref_nodeclass=None, objname='', override=False):
-        # type: (str, str, str, Type[nodes.TextElement], str, bool) -> None
+    def add_crossref_type(self, directivename: str, rolename: str, indextemplate: str = '',
+                          ref_nodeclass: "Type[TextElement]" = None, objname: str = '',
+                          override: bool = False) -> None:
         """Register a new crossref object type.
 
         This method is very similar to :meth:`add_object_type` except that the
@@ -822,8 +805,7 @@ class Sphinx:
                                         indextemplate, ref_nodeclass, objname,
                                         override=override)
 
-    def add_transform(self, transform):
-        # type: (Type[Transform]) -> None
+    def add_transform(self, transform: "Type[Transform]") -> None:
         """Register a Docutils transform to be applied after parsing.
 
         Add the standard docutils :class:`Transform` subclass *transform* to
@@ -856,8 +838,7 @@ class Sphinx:
         """  # NOQA
         self.registry.add_transform(transform)
 
-    def add_post_transform(self, transform):
-        # type: (Type[Transform]) -> None
+    def add_post_transform(self, transform: "Type[Transform]") -> None:
         """Register a Docutils transform to be applied before writing.
 
         Add the standard docutils :class:`Transform` subclass *transform* to
@@ -866,16 +847,14 @@ class Sphinx:
         """
         self.registry.add_post_transform(transform)
 
-    def add_javascript(self, filename, **kwargs):
-        # type: (str, **str) -> None
+    def add_javascript(self, filename: str, **kwargs: str) -> None:
         """An alias of :meth:`add_js_file`."""
         warnings.warn('The app.add_javascript() is deprecated. '
                       'Please use app.add_js_file() instead.',
                       RemovedInSphinx40Warning, stacklevel=2)
         self.add_js_file(filename, **kwargs)
 
-    def add_js_file(self, filename, **kwargs):
-        # type: (str, **str) -> None
+    def add_js_file(self, filename: str, **kwargs: str) -> None:
         """Register a JavaScript file to include in the HTML output.
 
         Add *filename* to the list of JavaScript files that the default HTML
@@ -901,8 +880,7 @@ class Sphinx:
         if hasattr(self.builder, 'add_js_file'):
             self.builder.add_js_file(filename, **kwargs)  # type: ignore
 
-    def add_css_file(self, filename, **kwargs):
-        # type: (str, **str) -> None
+    def add_css_file(self, filename: str, **kwargs: str) -> None:
         """Register a stylesheet to include in the HTML output.
 
         Add *filename* to the list of CSS files that the default HTML template
@@ -941,8 +919,8 @@ class Sphinx:
         if hasattr(self.builder, 'add_css_file'):
             self.builder.add_css_file(filename, **kwargs)  # type: ignore
 
-    def add_stylesheet(self, filename, alternate=False, title=None):
-        # type: (str, bool, str) -> None
+    def add_stylesheet(self, filename: str, alternate: bool = False, title: str = None
+                       ) -> None:
         """An alias of :meth:`add_css_file`."""
         warnings.warn('The app.add_stylesheet() is deprecated. '
                       'Please use app.add_css_file() instead.',
@@ -959,8 +937,7 @@ class Sphinx:
 
         self.add_css_file(filename, **attributes)
 
-    def add_latex_package(self, packagename, options=None):
-        # type: (str, str) -> None
+    def add_latex_package(self, packagename: str, options: str = None) -> None:
         r"""Register a package to include in the LaTeX source code.
 
         Add *packagename* to the list of packages that LaTeX source code will
@@ -978,8 +955,7 @@ class Sphinx:
         """
         self.registry.add_latex_package(packagename, options)
 
-    def add_lexer(self, alias, lexer):
-        # type: (str, Union[Lexer, Type[Lexer]]) -> None
+    def add_lexer(self, alias: str, lexer: Union[Lexer, "Type[Lexer]"]) -> None:
         """Register a new lexer for source code.
 
         Use *lexer* to highlight code blocks with the given language *alias*.
@@ -998,8 +974,7 @@ class Sphinx:
         else:
             lexer_classes[alias] = lexer
 
-    def add_autodocumenter(self, cls, override=False):
-        # type: (Any, bool) -> None
+    def add_autodocumenter(self, cls: Any, override: bool = False) -> None:
         """Register a new documenter class for the autodoc extension.
 
         Add *cls* as a new documenter class for the :mod:`sphinx.ext.autodoc`
@@ -1019,8 +994,8 @@ class Sphinx:
         self.registry.add_documenter(cls.objtype, cls)
         self.add_directive('auto' + cls.objtype, AutodocDirective, override=override)
 
-    def add_autodoc_attrgetter(self, typ, getter):
-        # type: (Type, Callable[[Any, str, Any], Any]) -> None
+    def add_autodoc_attrgetter(self, typ: "Type", getter: Callable[[Any, str, Any], Any]
+                               ) -> None:
         """Register a new ``getattr``-like function for the autodoc extension.
 
         Add *getter*, which must be a function with an interface compatible to
@@ -1034,8 +1009,7 @@ class Sphinx:
         logger.debug('[app] adding autodoc attrgetter: %r', (typ, getter))
         self.registry.add_autodoc_attrgetter(typ, getter)
 
-    def add_search_language(self, cls):
-        # type: (Any) -> None
+    def add_search_language(self, cls: Any) -> None:
         """Register a new language for the HTML search index.
 
         Add *cls*, which must be a subclass of
@@ -1051,8 +1025,7 @@ class Sphinx:
         assert issubclass(cls, SearchLanguage)
         languages[cls.lang] = cls
 
-    def add_source_suffix(self, suffix, filetype, override=False):
-        # type: (str, str, bool) -> None
+    def add_source_suffix(self, suffix: str, filetype: str, override: bool = False) -> None:
         """Register a suffix of source files.
 
         Same as :confval:`source_suffix`. The users can override this
@@ -1062,8 +1035,7 @@ class Sphinx:
         """
        self.registry.add_source_suffix(suffix, filetype, override=override)
 
-    def add_source_parser(self, *args, **kwargs):
-        # type: (Any, Any) -> None
+    def add_source_parser(self, *args, **kwargs) -> None:
         """Register a parser class.
 
         .. versionadded:: 1.4
@@ -1075,8 +1047,7 @@ class Sphinx:
         """
         self.registry.add_source_parser(*args, **kwargs)
 
-    def add_env_collector(self, collector):
-        # type: (Type[EnvironmentCollector]) -> None
+    def add_env_collector(self, collector: "Type[EnvironmentCollector]") -> None:
         """Register an environment collector class.
 
         Refer to :ref:`collector-api`.
@@ -1086,8 +1057,7 @@ class Sphinx:
         logger.debug('[app] adding environment collector: %r', collector)
         collector().enable(self)
 
-    def add_html_theme(self, name, theme_path):
-        # type: (str, str) -> None
+    def add_html_theme(self, name: str, theme_path: str) -> None:
         """Register a HTML Theme.
 
         The *name* is a name of theme, and *path* is a full path to the theme
@@ -1098,8 +1068,9 @@ class Sphinx:
         logger.debug('[app] adding HTML theme: %r, %r', name, theme_path)
         self.html_themes[name] = theme_path
 
-    def add_html_math_renderer(self, name, inline_renderers=None, block_renderers=None):
-        # type: (str, Tuple[Callable, Callable], Tuple[Callable, Callable]) -> None
+    def add_html_math_renderer(self, name: str,
+                               inline_renderers: Tuple[Callable, Callable] = None,
+                               block_renderers: Tuple[Callable, Callable] = None) -> None:
         """Register a math renderer for HTML.
 
         The *name* is a name of math renderer. Both *inline_renderers* and
@@ -1113,8 +1084,7 @@ class Sphinx:
         """
         self.registry.add_html_math_renderer(name, inline_renderers, block_renderers)
 
-    def add_message_catalog(self, catalog, locale_dir):
-        # type: (str, str) -> None
+    def add_message_catalog(self, catalog: str, locale_dir: str) -> None:
         """Register a message catalog.
 
         The *catalog* is a name of catalog, and *locale_dir* is a base path
@@ -1127,8 +1097,7 @@ class Sphinx:
         locale.init_console(locale_dir, catalog)
 
     # ---- other methods -------------------------------------------------
-    def is_parallel_allowed(self, typ):
-        # type: (str) -> bool
+    def is_parallel_allowed(self, typ: str) -> bool:
         """Check parallel processing is allowed or not.
 
         ``typ`` is a type of processing; ``'read'`` or ``'write'``.
@@ -1170,8 +1139,7 @@ class TemplateBridge:
     that renders templates given a template name and a context.
     """
 
-    def init(self, builder, theme=None, dirs=None):
-        # type: (Builder, Theme, List[str]) -> None
+    def init(self, builder: "Builder", theme: Theme = None, dirs: List[str] = None) -> None:
         """Called by the builder to initialize the template system.
 
         *builder* is the builder object; you'll probably want to look at the
@@ -1182,23 +1150,20 @@ class TemplateBridge:
         """
         raise NotImplementedError('must be implemented in subclasses')
 
-    def newest_template_mtime(self):
-        # type: () -> float
+    def newest_template_mtime(self) -> float:
         """Called by the builder to determine if output files are outdated
         because of template changes. Return the mtime of the newest template
         file that was changed. The default implementation returns ``0``.
         """
         return 0
 
-    def render(self, template, context):
-        # type: (str, Dict) -> None
+    def render(self, template: str, context: Dict) -> None:
         """Called by the builder to render a template given as a filename with
         a specified context (a Python dictionary).
         """
         raise NotImplementedError('must be implemented in subclasses')
 
-    def render_string(self, template, context):
-        # type: (str, Dict) -> str
+    def render_string(self, template: str, context: Dict) -> str:
         """Called by the builder to render a template given as a string with a
         specified context (a Python dictionary).
         """
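Collectively, the registration methods retyped above are what an extension's setup() function calls; a small hedged sketch (the config value, callback, and metadata below are invented, not part of the commit):

from typing import Any, Dict

from sphinx.application import Sphinx


def setup(app: Sphinx) -> Dict[str, Any]:
    # invented config value: changing it forces an environment rebuild
    app.add_config_value('example_show_badges', True, 'env')
    # run a callback once the builder has been created
    app.connect('builder-inited', lambda app: None)
    return {'version': '0.1', 'parallel_read_safe': True}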
sphinx/config.py

@@ -14,7 +14,9 @@ import types
 import warnings
 from collections import OrderedDict
 from os import path, getenv
-from typing import Any, NamedTuple, Union
+from typing import (
+    Any, Callable, Dict, Generator, Iterator, List, NamedTuple, Set, Tuple, Union
+)
 
 from sphinx.deprecation import RemovedInSphinx40Warning
 from sphinx.errors import ConfigError, ExtensionError
@@ -23,14 +25,13 @@ from sphinx.util import logging
 from sphinx.util.i18n import format_date
 from sphinx.util.osutil import cd
 from sphinx.util.pycompat import execfile_
+from sphinx.util.tags import Tags
 from sphinx.util.typing import NoneType
 
 if False:
     # For type annotation
-    from typing import Callable, Dict, Generator, Iterator, List, Set, Tuple  # NOQA
-    from sphinx.application import Sphinx  # NOQA
-    from sphinx.environment import BuildEnvironment  # NOQA
-    from sphinx.util.tags import Tags  # NOQA
+    from sphinx.application import Sphinx
+    from sphinx.environment import BuildEnvironment
 
 logger = logging.getLogger(__name__)
 
@@ -43,8 +44,7 @@ ConfigValue = NamedTuple('ConfigValue', [('name', str),
                                          ('rebuild', Union[bool, str])])
 
 
-def is_serializable(obj):
-    # type: (Any) -> bool
+def is_serializable(obj: Any) -> bool:
     """Check if object is serializable or not."""
     if isinstance(obj, UNSERIALIZABLE_TYPES):
         return False
@@ -64,12 +64,10 @@ class ENUM:
     Example:
         app.add_config_value('latex_show_urls', 'no', None, ENUM('no', 'footnote', 'inline'))
     """
-    def __init__(self, *candidates):
-        # type: (str) -> None
+    def __init__(self, *candidates: str) -> None:
         self.candidates = candidates
 
-    def match(self, value):
-        # type: (Union[str, List, Tuple]) -> bool
+    def match(self, value: Union[str, List, Tuple]) -> bool:
         if isinstance(value, (list, tuple)):
             return all(item in self.candidates for item in value)
         else:
@@ -156,8 +154,7 @@ class Config:
                                  'env', []),
     }  # type: Dict[str, Tuple]
 
-    def __init__(self, config={}, overrides={}):
-        # type: (Dict[str, Any], Dict[str, Any]) -> None
+    def __init__(self, config: Dict[str, Any] = {}, overrides: Dict[str, Any] = {}) -> None:
         self.overrides = dict(overrides)
         self.values = Config.config_values.copy()
         self._raw_config = config
@@ -171,15 +168,13 @@ class Config:
         self.extensions = config.get('extensions', [])  # type: List[str]
 
     @classmethod
-    def read(cls, confdir, overrides=None, tags=None):
-        # type: (str, Dict, Tags) -> Config
+    def read(cls, confdir: str, overrides: Dict = None, tags: Tags = None) -> "Config":
         """Create a Config object from configuration file."""
         filename = path.join(confdir, CONFIG_FILENAME)
         namespace = eval_config_file(filename, tags)
         return cls(namespace, overrides or {})
 
-    def convert_overrides(self, name, value):
-        # type: (str, Any) -> Any
+    def convert_overrides(self, name: str, value: Any) -> Any:
         if not isinstance(value, str):
             return value
         else:
@@ -212,8 +207,7 @@ class Config:
             else:
                 return value
 
-    def pre_init_values(self):
-        # type: () -> None
+    def pre_init_values(self) -> None:
         """
         Initialize some limited config variables before initialize i18n and loading extensions
         """
@@ -227,8 +221,7 @@ class Config:
             except ValueError as exc:
                 logger.warning("%s", exc)
 
-    def init_values(self):
-        # type: () -> None
+    def init_values(self) -> None:
         config = self._raw_config
         for valname, value in self.overrides.items():
             try:
@@ -250,8 +243,7 @@ class Config:
             if name in self.values:
                 self.__dict__[name] = config[name]
 
-    def __getattr__(self, name):
-        # type: (str) -> Any
+    def __getattr__(self, name: str) -> Any:
         if name.startswith('_'):
             raise AttributeError(name)
         if name not in self.values:
@@ -261,42 +253,34 @@ class Config:
                 return default(self)
             return default
 
-    def __getitem__(self, name):
-        # type: (str) -> str
+    def __getitem__(self, name: str) -> str:
         return getattr(self, name)
 
-    def __setitem__(self, name, value):
-        # type: (str, Any) -> None
+    def __setitem__(self, name: str, value: Any) -> None:
         setattr(self, name, value)
 
-    def __delitem__(self, name):
-        # type: (str) -> None
+    def __delitem__(self, name: str) -> None:
         delattr(self, name)
 
-    def __contains__(self, name):
-        # type: (str) -> bool
+    def __contains__(self, name: str) -> bool:
         return name in self.values
 
-    def __iter__(self):
-        # type: () -> Generator[ConfigValue, None, None]
+    def __iter__(self) -> Generator[ConfigValue, None, None]:
         for name, value in self.values.items():
             yield ConfigValue(name, getattr(self, name), value[1])
 
-    def add(self, name, default, rebuild, types):
-        # type: (str, Any, Union[bool, str], Any) -> None
+    def add(self, name: str, default: Any, rebuild: Union[bool, str], types: Any) -> None:
         if name in self.values:
             raise ExtensionError(__('Config value %r already present') % name)
         else:
             self.values[name] = (default, rebuild, types)
 
-    def filter(self, rebuild):
-        # type: (Union[str, List[str]]) -> Iterator[ConfigValue]
+    def filter(self, rebuild: Union[str, List[str]]) -> Iterator[ConfigValue]:
         if isinstance(rebuild, str):
             rebuild = [rebuild]
         return (value for value in self if value.rebuild in rebuild)
 
-    def __getstate__(self):
-        # type: () -> Dict
+    def __getstate__(self) -> Dict:
         """Obtains serializable data for pickling."""
         # remove potentially pickling-problematic values from config
         __dict__ = {}
@@ -319,13 +303,11 @@ class Config:
 
         return __dict__
 
-    def __setstate__(self, state):
-        # type: (Dict) -> None
+    def __setstate__(self, state: Dict) -> None:
         self.__dict__.update(state)
 
 
-def eval_config_file(filename, tags):
-    # type: (str, Tags) -> Dict[str, Any]
+def eval_config_file(filename: str, tags: Tags) -> Dict[str, Any]:
     """Evaluate a config file."""
     namespace = {}  # type: Dict[str, Any]
     namespace['__file__'] = filename
@@ -349,8 +331,7 @@ def eval_config_file(filename, tags):
     return namespace
 
 
-def convert_source_suffix(app, config):
-    # type: (Sphinx, Config) -> None
+def convert_source_suffix(app: "Sphinx", config: Config) -> None:
     """This converts old styled source_suffix to new styled one.
 
     * old style: str or list
@@ -375,8 +356,7 @@ def convert_source_suffix(app, config):
                              "But `%r' is given." % source_suffix))
 
 
-def init_numfig_format(app, config):
-    # type: (Sphinx, Config) -> None
+def init_numfig_format(app: "Sphinx", config: Config) -> None:
     """Initialize :confval:`numfig_format`."""
     numfig_format = {'section': _('Section %s'),
                      'figure': _('Fig. %s'),
@@ -388,8 +368,7 @@ def init_numfig_format(app, config):
     config.numfig_format = numfig_format  # type: ignore
 
 
-def correct_copyright_year(app, config):
-    # type: (Sphinx, Config) -> None
+def correct_copyright_year(app: "Sphinx", config: Config) -> None:
     """correct values of copyright year that are not coherent with
     the SOURCE_DATE_EPOCH environment variable (if set)
 
@@ -402,8 +381,7 @@ def correct_copyright_year(app, config):
             config[k] = copyright_year_re.sub(replace, config[k])
 
 
-def check_confval_types(app, config):
-    # type: (Sphinx, Config) -> None
+def check_confval_types(app: "Sphinx", config: Config) -> None:
     """check all values for deviation from the default value's type, since
     that can result in TypeErrors all over the place NB.
     """
@@ -458,8 +436,7 @@ def check_confval_types(app, config):
                                   default=type(default)))
 
 
-def check_unicode(config):
-    # type: (Config) -> None
+def check_unicode(config: Config) -> None:
     """check all string values for non-ASCII characters in bytestrings,
     since that can result in UnicodeErrors all over the place
     """
@@ -475,16 +452,15 @@ def check_unicode(config):
                               'Please use Unicode strings, e.g. %r.'), name, 'Content')
 
 
-def check_primary_domain(app, config):
-    # type: (Sphinx, Config) -> None
+def check_primary_domain(app: "Sphinx", config: Config) -> None:
     primary_domain = config.primary_domain
     if primary_domain and not app.registry.has_domain(primary_domain):
         logger.warning(__('primary_domain %r not found, ignored.'), primary_domain)
         config.primary_domain = None  # type: ignore
 
 
-def check_master_doc(app, env, added, changed, removed):
-    # type: (Sphinx, BuildEnvironment, Set[str], Set[str], Set[str]) -> Set[str]
+def check_master_doc(app: "Sphinx", env: "BuildEnvironment", added: Set[str],
+                     changed: Set[str], removed: Set[str]) -> Set[str]:
     """Adjust master_doc to 'contents' to support an old project which does not have
     no master_doc setting.
     """
@@ -498,8 +474,7 @@ def check_master_doc(app, env, added, changed, removed):
     return changed
 
 
-def setup(app):
-    # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
     app.connect('config-inited', convert_source_suffix)
     app.connect('config-inited', init_numfig_format)
     app.connect('config-inited', correct_copyright_year)
|
@@ -11,11 +11,8 @@
 import sys
 import warnings
 from importlib import import_module
+from typing import Any, Dict
-if False:
-    # For type annotation
-    from typing import Any, Dict  # NOQA
-    from typing import Type  # for python3.5.1
+from typing import Type  # for python3.5.1


 class RemovedInSphinx40Warning(DeprecationWarning):
@@ -29,22 +26,20 @@ class RemovedInSphinx50Warning(PendingDeprecationWarning):
 RemovedInNextVersionWarning = RemovedInSphinx40Warning


-def deprecated_alias(modname, objects, warning):
-    # type: (str, Dict, Type[Warning]) -> None
+def deprecated_alias(modname: str, objects: Dict, warning: Type[Warning]) -> None:
     module = import_module(modname)
     sys.modules[modname] = _ModuleWrapper(module, modname, objects, warning)  # type: ignore


 class _ModuleWrapper:
-    def __init__(self, module, modname, objects, warning):
-        # type: (Any, str, Dict, Type[Warning]) -> None
+    def __init__(self, module: Any, modname: str, objects: Dict, warning: Type[Warning]
+                 ) -> None:
         self._module = module
         self._modname = modname
         self._objects = objects
         self._warning = warning

-    def __getattr__(self, name):
-        # type: (str) -> Any
+    def __getattr__(self, name: str) -> Any:
         if name in self._objects:
             warnings.warn("%s.%s is deprecated. Check CHANGES for Sphinx "
                           "API modifications." % (self._modname, name),
@@ -57,33 +52,27 @@ class _ModuleWrapper:
 class DeprecatedDict(dict):
     """A deprecated dict which warns on each access."""

-    def __init__(self, data, message, warning):
-        # type: (Dict, str, Type[Warning]) -> None
+    def __init__(self, data: Dict, message: str, warning: Type[Warning]) -> None:
         self.message = message
         self.warning = warning
         super().__init__(data)

-    def __setitem__(self, key, value):
-        # type: (str, Any) -> None
+    def __setitem__(self, key: str, value: Any) -> None:
         warnings.warn(self.message, self.warning, stacklevel=2)
         super().__setitem__(key, value)

-    def setdefault(self, key, default=None):
-        # type: (str, Any) -> None
+    def setdefault(self, key: str, default: Any = None) -> Any:
         warnings.warn(self.message, self.warning, stacklevel=2)
         return super().setdefault(key, default)

-    def __getitem__(self, key):
-        # type: (str) -> None
+    def __getitem__(self, key: str) -> None:
         warnings.warn(self.message, self.warning, stacklevel=2)
         return super().__getitem__(key)

-    def get(self, key, default=None):
-        # type: (str, Any) -> None
+    def get(self, key: str, default: Any = None) -> Any:
         warnings.warn(self.message, self.warning, stacklevel=2)
         return super().get(key, default)

-    def update(self, other=None):  # type: ignore
-        # type: (Dict) -> None
+    def update(self, other: Dict = None) -> None:  # type: ignore
         warnings.warn(self.message, self.warning, stacklevel=2)
         super().update(other)
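
For context (not part of the diff), a minimal sketch of how the annotated `deprecated_alias()` helper above is typically used; the module name `demo_compat` and the alias `OldName` are hypothetical stand-ins:

    import sys
    import types
    import warnings

    from sphinx.deprecation import RemovedInSphinx40Warning, deprecated_alias

    # Register a throw-away module so import_module() inside deprecated_alias()
    # has something to wrap.
    sys.modules['demo_compat'] = types.ModuleType('demo_compat')

    # Route the deprecated attribute "OldName" to its replacement (here: str).
    deprecated_alias('demo_compat', {'OldName': str}, RemovedInSphinx40Warning)

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        import demo_compat
        assert demo_compat.OldName is str  # resolved via _ModuleWrapper.__getattr__
        assert issubclass(caught[-1].category, RemovedInSphinx40Warning)
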
@@ -640,7 +640,7 @@ class BuildEnvironment:
     @property
     def indexentries(self) -> Dict[str, List[Tuple[str, str, str, str, str]]]:
         warnings.warn('env.indexentries() is deprecated. Please use IndexDomain instead.',
-                      RemovedInSphinx40Warning)
+                      RemovedInSphinx40Warning, stacklevel=2)
         from sphinx.domains.index import IndexDomain
         domain = cast(IndexDomain, self.get_domain('index'))
         return domain.entries
@@ -12,8 +12,10 @@ import re
 import unicodedata
 from itertools import groupby
 from typing import Any, Dict, Pattern, List, Tuple
+from typing import cast

 from sphinx.builders import Builder
+from sphinx.domains.index import IndexDomain
 from sphinx.environment import BuildEnvironment
 from sphinx.errors import NoUri
 from sphinx.locale import _, __
@@ -53,7 +55,8 @@ class IndexEntries:
                 # maintain links in sorted/deterministic order
                 bisect.insort(entry[0], (main, uri))

-        for fn, entries in self.env.indexentries.items():
+        domain = cast(IndexDomain, self.env.get_domain('index'))
+        for fn, entries in domain.entries.items():
             # new entry types must be listed in directives/other.py!
             for type, value, tid, main, index_key in entries:
                 try:
@@ -12,9 +12,12 @@ from typing import Dict, List, Set

 from docutils import nodes

-from sphinx.application import Sphinx
 from sphinx.environment import BuildEnvironment

+if False:
+    # For type annotation
+    from sphinx.application import Sphinx


 class EnvironmentCollector:
     """An EnvironmentCollector is a specific data collector from each document.
@@ -27,7 +30,7 @@ class EnvironmentCollector:

     listener_ids = None  # type: Dict[str, int]

-    def enable(self, app: Sphinx) -> None:
+    def enable(self, app: "Sphinx") -> None:
         assert self.listener_ids is None
         self.listener_ids = {
             'doctree-read': app.connect('doctree-read', self.process_doc),
@@ -37,38 +40,38 @@ class EnvironmentCollector:
             'env-get-outdated': app.connect('env-get-outdated', self.get_outdated_docs),
         }

-    def disable(self, app: Sphinx) -> None:
+    def disable(self, app: "Sphinx") -> None:
         assert self.listener_ids is not None
         for listener_id in self.listener_ids.values():
             app.disconnect(listener_id)
         self.listener_ids = None

-    def clear_doc(self, app: Sphinx, env: BuildEnvironment, docname: str) -> None:
+    def clear_doc(self, app: "Sphinx", env: BuildEnvironment, docname: str) -> None:
         """Remove specified data of a document.

         This method is called on the removal of the document."""
         raise NotImplementedError

-    def merge_other(self, app: Sphinx, env: BuildEnvironment,
+    def merge_other(self, app: "Sphinx", env: BuildEnvironment,
                     docnames: Set[str], other: BuildEnvironment) -> None:
         """Merge in specified data regarding docnames from a different `BuildEnvironment`
         object which coming from a subprocess in parallel builds."""
         raise NotImplementedError

-    def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
+    def process_doc(self, app: "Sphinx", doctree: nodes.document) -> None:
         """Process a document and gather specific data from it.

         This method is called after the document is read."""
         raise NotImplementedError

-    def get_updated_docs(self, app: Sphinx, env: BuildEnvironment) -> List[str]:
+    def get_updated_docs(self, app: "Sphinx", env: BuildEnvironment) -> List[str]:
         """Return a list of docnames to re-read.

         This methods is called after reading the whole of documents (experimental).
         """
         return []

-    def get_outdated_docs(self, app: Sphinx, env: BuildEnvironment,
+    def get_outdated_docs(self, app: "Sphinx", env: BuildEnvironment,
                           added: Set[str], changed: Set[str], removed: Set[str]) -> List[str]:
         """Return a list of docnames to re-read.

@@ -9,9 +9,7 @@
     :license: BSD, see LICENSE for details.
 """

-if False:
-    # For type annotation
-    from typing import Any  # NOQA
+from typing import Any


 class SphinxError(Exception):
@@ -51,21 +49,18 @@ class ExtensionError(SphinxError):
     """Extension error."""
     category = 'Extension error'

-    def __init__(self, message, orig_exc=None):
-        # type: (str, Exception) -> None
+    def __init__(self, message: str, orig_exc: Exception = None) -> None:
         super().__init__(message)
         self.message = message
         self.orig_exc = orig_exc

-    def __repr__(self):
-        # type: () -> str
+    def __repr__(self) -> str:
         if self.orig_exc:
             return '%s(%r, %r)' % (self.__class__.__name__,
                                    self.message, self.orig_exc)
         return '%s(%r)' % (self.__class__.__name__, self.message)

-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         parent_str = super().__str__()
         if self.orig_exc:
             return '%s (exception: %s)' % (parent_str, self.orig_exc)
@@ -102,21 +97,18 @@ class SphinxParallelError(SphinxError):

     category = 'Sphinx parallel build error'

-    def __init__(self, message, traceback):
-        # type: (str, Any) -> None
+    def __init__(self, message: str, traceback: Any) -> None:
         self.message = message
         self.traceback = traceback

-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         return self.message


 class PycodeError(Exception):
     """Pycode Python source code analyser error."""

-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         res = self.args[0]
         if len(self.args) > 1:
             res += ' (exception was: %r)' % self.args[1]
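
For context (not part of the diff), a minimal sketch of the `ExtensionError` behaviour whose signatures are annotated above; the message text is made up:

    from sphinx.errors import ExtensionError

    err = ExtensionError('could not set up the demo extension',   # message
                         orig_exc=ValueError('bad value'))        # wrapped original error

    print(str(err))    # -> could not set up the demo extension (exception: bad value)
    print(repr(err))   # -> ExtensionError('could not set up the demo extension', ValueError('bad value'))
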
@@ -12,6 +12,7 @@

 import warnings
 from collections import OrderedDict, defaultdict
+from typing import Any, Callable, Dict, List

 from sphinx.deprecation import RemovedInSphinx40Warning
 from sphinx.errors import ExtensionError
@@ -20,8 +21,8 @@ from sphinx.util import logging

 if False:
     # For type annotation
-    from typing import Any, Callable, Dict, List  # NOQA
-    from sphinx.application import Sphinx  # NOQA
+    from sphinx.application import Sphinx

 logger = logging.getLogger(__name__)

@@ -50,8 +51,7 @@ core_events = {
 class EventManager:
     """Event manager for Sphinx."""

-    def __init__(self, app=None):
-        # type: (Sphinx) -> None
+    def __init__(self, app: "Sphinx" = None) -> None:
         if app is None:
             warnings.warn('app argument is required for EventManager.',
                           RemovedInSphinx40Warning)
@@ -60,15 +60,13 @@ class EventManager:
         self.listeners = defaultdict(OrderedDict)  # type: Dict[str, Dict[int, Callable]]
         self.next_listener_id = 0

-    def add(self, name):
-        # type: (str) -> None
+    def add(self, name: str) -> None:
         """Register a custom Sphinx event."""
         if name in self.events:
             raise ExtensionError(__('Event %r already present') % name)
         self.events[name] = ''

-    def connect(self, name, callback):
-        # type: (str, Callable) -> int
+    def connect(self, name: str, callback: Callable) -> int:
         """Connect a handler to specific event."""
         if name not in self.events:
             raise ExtensionError(__('Unknown event name: %s') % name)
@@ -78,14 +76,12 @@ class EventManager:
         self.listeners[name][listener_id] = callback
         return listener_id

-    def disconnect(self, listener_id):
-        # type: (int) -> None
+    def disconnect(self, listener_id: int) -> None:
         """Disconnect a handler."""
         for event in self.listeners.values():
             event.pop(listener_id, None)

-    def emit(self, name, *args):
-        # type: (str, Any) -> List
+    def emit(self, name: str, *args) -> List:
         """Emit a Sphinx event."""
         try:
             logger.debug('[app] emitting event: %r%s', name, repr(args)[:100])
@@ -103,8 +99,7 @@ class EventManager:
             results.append(callback(self.app, *args))
         return results

-    def emit_firstresult(self, name, *args):
-        # type: (str, Any) -> Any
+    def emit_firstresult(self, name: str, *args) -> Any:
         """Emit a Sphinx event and returns first result.

         This returns the result of the first handler that doesn't return ``None``.
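
For context (not part of the diff), a minimal extension sketch of the event machinery that `EventManager` backs; the handler and the extension itself are hypothetical, while `app.connect('config-inited', ...)` and the `setup()` return dict follow the patterns shown elsewhere in this commit:

    from typing import Any, Dict

    from sphinx.application import Sphinx
    from sphinx.config import Config


    def on_config_inited(app: Sphinx, config: Config) -> None:
        # called once the configuration has been read and post-processed
        print('building project:', config.project)


    def setup(app: Sphinx) -> Dict[str, Any]:
        app.connect('config-inited', on_config_inited)
        return {'version': '0.1', 'parallel_read_safe': True}
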
@@ -59,6 +59,11 @@ class DummyApplication:
         self.registry = SphinxComponentRegistry()
         self.messagelog = []  # type: List[str]
         self.verbosity = 0
+        self._warncount = 0
+        self.warningiserror = False
+
+    def emit_firstresult(self, *args) -> None:
+        pass


 def setup_documenters(app: Any) -> None:
@@ -8,22 +8,22 @@
     :license: BSD, see LICENSE for details.
 """

+from typing import Any, Dict
+
+from sphinx.config import Config
 from sphinx.errors import VersionRequirementError
 from sphinx.locale import __
 from sphinx.util import logging

 if False:
     # For type annotation
-    from typing import Any, Dict  # NOQA
-    from sphinx.application import Sphinx  # NOQA
-    from sphinx.config import Config  # NOQA
+    from sphinx.application import Sphinx

 logger = logging.getLogger(__name__)


 class Extension:
-    def __init__(self, name, module, **kwargs):
-        # type: (str, Any, Any) -> None
+    def __init__(self, name: str, module: Any, **kwargs) -> None:
         self.name = name
         self.module = module
         self.metadata = kwargs
@@ -40,8 +40,7 @@ class Extension:
         self.parallel_write_safe = kwargs.pop('parallel_write_safe', True)


-def verify_needs_extensions(app, config):
-    # type: (Sphinx, Config) -> None
+def verify_needs_extensions(app: "Sphinx", config: Config) -> None:
     """Verify the required Sphinx extensions are loaded."""
     if config.needs_extensions is None:
         return
@@ -60,8 +59,7 @@ def verify_needs_extensions(app, config):
                               (extname, reqversion, extension.version))


-def setup(app):
-    # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
     app.connect('config-inited', verify_needs_extensions)

     return {
@@ -10,14 +10,17 @@

 from functools import partial
 from importlib import import_module
+from typing import Any, Dict

 from pygments import highlight
 from pygments.filters import ErrorToken
+from pygments.formatter import Formatter
 from pygments.formatters import HtmlFormatter, LatexFormatter
 from pygments.lexer import Lexer
 from pygments.lexers import get_lexer_by_name, guess_lexer
 from pygments.lexers import PythonLexer, Python3Lexer, PythonConsoleLexer, \
     CLexer, TextLexer, RstLexer
+from pygments.style import Style
 from pygments.styles import get_style_by_name
 from pygments.util import ClassNotFound

@@ -25,12 +28,6 @@ from sphinx.locale import __
 from sphinx.pygments_styles import SphinxStyle, NoneStyle
 from sphinx.util import logging, texescape

-if False:
-    # For type annotation
-    from typing import Any, Dict  # NOQA
-    from pygments.formatter import Formatter  # NOQA
-    from pygments.style import Style  # NOQA


 logger = logging.getLogger(__name__)

@@ -63,8 +60,8 @@ class PygmentsBridge:
     html_formatter = HtmlFormatter
     latex_formatter = LatexFormatter

-    def __init__(self, dest='html', stylename='sphinx', latex_engine=None):
-        # type: (str, str, str) -> None
+    def __init__(self, dest: str = 'html', stylename: str = 'sphinx',
+                 latex_engine: str = None) -> None:
         self.dest = dest
         self.latex_engine = latex_engine

@@ -76,8 +73,7 @@ class PygmentsBridge:
             self.formatter = self.latex_formatter
             self.formatter_args['commandprefix'] = 'PYG'

-    def get_style(self, stylename):
-        # type: (str) -> Style
+    def get_style(self, stylename: str) -> Style:
         if stylename is None or stylename == 'sphinx':
             return SphinxStyle
         elif stylename == 'none':
@@ -88,13 +84,12 @@ class PygmentsBridge:
         else:
             return get_style_by_name(stylename)

-    def get_formatter(self, **kwargs):
-        # type: (Any) -> Formatter
+    def get_formatter(self, **kwargs) -> Formatter:
         kwargs.update(self.formatter_args)
         return self.formatter(**kwargs)

-    def get_lexer(self, source, lang, opts=None, force=False, location=None):
-        # type: (str, str, Dict, bool, Any) -> Lexer
+    def get_lexer(self, source: str, lang: str, opts: Dict = None,
+                  force: bool = False, location: Any = None) -> Lexer:
         if not opts:
             opts = {}

@@ -137,8 +132,8 @@ class PygmentsBridge:

         return lexer

-    def highlight_block(self, source, lang, opts=None, force=False, location=None, **kwargs):
-        # type: (str, str, Dict, bool, Any, Any) -> str
+    def highlight_block(self, source: str, lang: str, opts: Dict = None,
+                        force: bool = False, location: Any = None, **kwargs) -> str:
         if not isinstance(source, str):
             source = source.decode()

@@ -167,8 +162,7 @@ class PygmentsBridge:
             # MEMO: this is done to escape Unicode chars with non-Unicode engines
             return texescape.hlescape(hlsource, self.latex_engine)

-    def get_stylesheet(self):
-        # type: () -> str
+    def get_stylesheet(self) -> str:
         formatter = self.get_formatter()
         if self.dest == 'html':
             return formatter.get_style_defs('.highlight')
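
For context (not part of the diff), a minimal sketch of the `PygmentsBridge` calls whose signatures are annotated above; the snippet being highlighted is arbitrary:

    from sphinx.highlighting import PygmentsBridge

    bridge = PygmentsBridge(dest='html', stylename='sphinx')
    html = bridge.highlight_block("print('hello world')", 'python')  # highlighted markup
    css = bridge.get_stylesheet()                                    # matching style rules
    print(html)
    print(css[:80])
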
64 sphinx/io.py
@@ -9,16 +9,23 @@
 """
 import codecs
 import warnings
-from typing import Any
+from typing import Any, List, Tuple
+from typing import Type  # for python3.5.1

+from docutils import nodes
 from docutils.core import Publisher
-from docutils.io import FileInput, NullOutput
+from docutils.frontend import Values
+from docutils.io import FileInput, Input, NullOutput
+from docutils.parsers import Parser
 from docutils.parsers.rst import Parser as RSTParser
 from docutils.readers import standalone
+from docutils.statemachine import StringList, string2lines
+from docutils.transforms import Transform
 from docutils.transforms.references import DanglingReferences
 from docutils.writers import UnfilteredWriter

 from sphinx.deprecation import RemovedInSphinx40Warning, deprecated_alias
+from sphinx.environment import BuildEnvironment
 from sphinx.errors import FiletypeNotFoundError
 from sphinx.transforms import (
     AutoIndexUpgrader, DoctreeReadEvent, FigureAligner, SphinxTransformer
@@ -34,15 +41,7 @@ from sphinx.versioning import UIDTransform

 if False:
     # For type annotation
-    from typing import Dict, List, Tuple  # NOQA
-    from typing import Type  # for python3.5.1
-    from docutils import nodes  # NOQA
-    from docutils.frontend import Values  # NOQA
-    from docutils.io import Input  # NOQA
-    from docutils.parsers import Parser  # NOQA
-    from docutils.transforms import Transform  # NOQA
-    from sphinx.application import Sphinx  # NOQA
-    from sphinx.environment import BuildEnvironment  # NOQA
+    from sphinx.application import Sphinx


 logger = logging.getLogger(__name__)
@@ -57,8 +56,7 @@ class SphinxBaseReader(standalone.Reader):

     transforms = []  # type: List[Type[Transform]]

-    def __init__(self, *args, **kwargs):
-        # type: (Any, Any) -> None
+    def __init__(self, *args, **kwargs) -> None:
         from sphinx.application import Sphinx
         if len(args) > 0 and isinstance(args[0], Sphinx):
             self._app = args[0]
@@ -68,26 +66,22 @@ class SphinxBaseReader(standalone.Reader):
         super().__init__(*args, **kwargs)

     @property
-    def app(self):
-        # type: () -> Sphinx
+    def app(self) -> "Sphinx":
         warnings.warn('SphinxBaseReader.app is deprecated.',
                       RemovedInSphinx40Warning, stacklevel=2)
         return self._app

     @property
-    def env(self):
-        # type: () -> BuildEnvironment
+    def env(self) -> BuildEnvironment:
         warnings.warn('SphinxBaseReader.env is deprecated.',
                       RemovedInSphinx40Warning, stacklevel=2)
         return self._env

-    def setup(self, app):
-        # type: (Sphinx) -> None
+    def setup(self, app: "Sphinx") -> None:
         self._app = app      # hold application object only for compatibility
         self._env = app.env

-    def get_transforms(self):
-        # type: () -> List[Type[Transform]]
+    def get_transforms(self) -> List[Type[Transform]]:
         transforms = super().get_transforms() + self.transforms

         # remove transforms which is not needed for Sphinx
@@ -98,8 +92,7 @@ class SphinxBaseReader(standalone.Reader):

         return transforms

-    def new_document(self):
-        # type: () -> nodes.document
+    def new_document(self) -> nodes.document:
         """Creates a new document object which having a special reporter object good
         for logging.
         """
@@ -121,13 +114,11 @@ class SphinxStandaloneReader(SphinxBaseReader):
     A basic document reader for Sphinx.
     """

-    def setup(self, app):
-        # type: (Sphinx) -> None
+    def setup(self, app: "Sphinx") -> None:
         self.transforms = self.transforms + app.registry.get_transforms()
         super().setup(app)

-    def read(self, source, parser, settings):
-        # type: (Input, Parser, Values) -> nodes.document
+    def read(self, source: Input, parser: Parser, settings: Values) -> nodes.document:
         self.source = source
         if not self.parser:
             self.parser = parser
@@ -136,8 +127,7 @@ class SphinxStandaloneReader(SphinxBaseReader):
         self.parse()
         return self.document

-    def read_source(self, env):
-        # type: (BuildEnvironment) -> str
+    def read_source(self, env: BuildEnvironment) -> str:
         """Read content from source and do post-process."""
         content = self.source.read()

@@ -156,8 +146,7 @@ class SphinxI18nReader(SphinxBaseReader):
     Because the translated texts are partial and they don't have correct line numbers.
     """

-    def setup(self, app):
-        # type: (Sphinx) -> None
+    def setup(self, app: "Sphinx") -> None:
         super().setup(app)

         self.transforms = self.transforms + app.registry.get_transforms()
@@ -174,27 +163,24 @@ class SphinxDummyWriter(UnfilteredWriter):

     supported = ('html',)  # needed to keep "meta" nodes

-    def translate(self):
-        # type: () -> None
+    def translate(self) -> None:
         pass


-def SphinxDummySourceClass(source, *args, **kwargs):
-    # type: (Any, Any, Any) -> Any
+def SphinxDummySourceClass(source: Any, *args, **kwargs) -> Any:
     """Bypass source object as is to cheat Publisher."""
     return source


 class SphinxFileInput(FileInput):
     """A basic FileInput for Sphinx."""
-    def __init__(self, *args, **kwargs):
-        # type: (Any, Any) -> None
+    def __init__(self, *args, **kwargs) -> None:
         kwargs['error_handler'] = 'sphinx'
         super().__init__(*args, **kwargs)


-def read_doc(app, env, filename):
-    # type: (Sphinx, BuildEnvironment, str) -> nodes.document
+def read_doc(app: "Sphinx", env: BuildEnvironment, filename: str) -> nodes.document:
     """Parse a document and convert to doctree."""
     # set up error_handler for the target document
     error_handler = UnicodeDecodeErrorHandler(env.docname)
@@ -10,42 +10,37 @@

 from os import path
 from pprint import pformat
-from typing import Any, Callable, Iterator, Tuple  # NOQA
+from typing import Any, Callable, Dict, Iterator, List, Tuple, Union

-from jinja2 import FileSystemLoader, BaseLoader, TemplateNotFound, \
-    contextfunction
+from jinja2 import FileSystemLoader, BaseLoader, TemplateNotFound, contextfunction
+from jinja2.environment import Environment
 from jinja2.sandbox import SandboxedEnvironment
 from jinja2.utils import open_if_exists

 from sphinx.application import TemplateBridge
+from sphinx.theming import Theme
 from sphinx.util import logging
 from sphinx.util.osutil import mtimes_of_files

 if False:
     # For type annotation
-    from typing import Dict, List, Union  # NOQA
-    from jinja2.environment import Environment  # NOQA
-    from sphinx.builders import Builder  # NOQA
-    from sphinx.theming import Theme  # NOQA
+    from sphinx.builders import Builder


-def _tobool(val):
-    # type: (str) -> bool
+def _tobool(val: str) -> bool:
     if isinstance(val, str):
         return val.lower() in ('true', '1', 'yes', 'on')
     return bool(val)


-def _toint(val):
-    # type: (str) -> int
+def _toint(val: str) -> int:
     try:
         return int(val)
     except ValueError:
         return 0


-def _todim(val):
-    # type: (Union[int, str]) -> str
+def _todim(val: Union[int, str]) -> str:
     """
     Make val a css dimension. In particular the following transformations
     are performed:
@@ -63,8 +58,7 @@ def _todim(val):
     return val  # type: ignore


-def _slice_index(values, slices):
-    # type: (List, int) -> Iterator[List]
+def _slice_index(values: List, slices: int) -> Iterator[List]:
     seq = list(values)
     length = 0
     for value in values:
@@ -85,8 +79,7 @@ def _slice_index(values, slices):
         yield seq[start:offset]


-def accesskey(context, key):
-    # type: (Any, str) -> str
+def accesskey(context: Any, key: str) -> str:
     """Helper to output each access key only once."""
     if '_accesskeys' not in context:
         context.vars['_accesskeys'] = {}
@@ -97,24 +90,20 @@ def accesskey(context, key):


 class idgen:
-    def __init__(self):
-        # type: () -> None
+    def __init__(self) -> None:
         self.id = 0

-    def current(self):
-        # type: () -> int
+    def current(self) -> int:
         return self.id

-    def __next__(self):
-        # type: () -> int
+    def __next__(self) -> int:
         self.id += 1
         return self.id
     next = __next__  # Python 2/Jinja compatibility


 @contextfunction
-def warning(context, message, *args, **kwargs):
-    # type: (Dict, str, Any, Any) -> str
+def warning(context: Dict, message: str, *args, **kwargs) -> str:
     if 'pagename' in context:
         filename = context.get('pagename') + context.get('file_suffix', '')
         message = 'in rendering %s: %s' % (filename, message)
@@ -129,8 +118,7 @@ class SphinxFileSystemLoader(FileSystemLoader):
     template names.
     """

-    def get_source(self, environment, template):
-        # type: (Environment, str) -> Tuple[str, str, Callable]
+    def get_source(self, environment: Environment, template: str) -> Tuple[str, str, Callable]:
         for searchpath in self.searchpath:
             filename = path.join(searchpath, template)
             f = open_if_exists(filename)
@@ -141,8 +129,7 @@ class SphinxFileSystemLoader(FileSystemLoader):

             mtime = path.getmtime(filename)

-            def uptodate():
-                # type: () -> bool
+            def uptodate() -> bool:
                 try:
                     return path.getmtime(filename) == mtime
                 except OSError:
@@ -158,8 +145,7 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):

     # TemplateBridge interface

-    def init(self, builder, theme=None, dirs=None):
-        # type: (Builder, Theme, List[str]) -> None
+    def init(self, builder: "Builder", theme: Theme = None, dirs: List[str] = None) -> None:
         # create a chain of paths to search
         if theme:
             # the theme's own dir and its bases' dirs
@@ -202,22 +188,18 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
         if use_i18n:
             self.environment.install_gettext_translations(builder.app.translator)  # type: ignore  # NOQA

-    def render(self, template, context):  # type: ignore
-        # type: (str, Dict) -> str
+    def render(self, template: str, context: Dict) -> str:  # type: ignore
         return self.environment.get_template(template).render(context)

-    def render_string(self, source, context):
-        # type: (str, Dict) -> str
+    def render_string(self, source: str, context: Dict) -> str:
         return self.environment.from_string(source).render(context)

-    def newest_template_mtime(self):
-        # type: () -> float
+    def newest_template_mtime(self) -> float:
         return max(mtimes_of_files(self.pathchain, '.html'))

     # Loader interface

-    def get_source(self, environment, template):
-        # type: (Environment, str) -> Tuple[str, str, Callable]
+    def get_source(self, environment: Environment, template: str) -> Tuple[str, str, Callable]:
         loaders = self.loaders
         # exclamation mark starts search from theme
         if template.startswith('!'):
@@ -12,10 +12,9 @@ import gettext
 import locale
 from collections import UserString, defaultdict
 from gettext import NullTranslations
+from typing import Any, Callable, Dict, Iterable, List, Tuple, Union

-if False:
-    # For type annotation
-    from typing import Any, Callable, Dict, Iterable, List, Tuple, Union  # NOQA
+from sphinx.deprecation import RemovedInSphinx30Warning


 class _TranslationProxy(UserString):
@@ -32,32 +31,27 @@ class _TranslationProxy(UserString):
     """
     __slots__ = ('_func', '_args')

-    def __new__(cls, func, *args):  # type: ignore
-        # type: (Callable, str) -> object
+    def __new__(cls, func: Callable, *args: str) -> object:  # type: ignore
         if not args:
             # not called with "function" and "arguments", but a plain string
             return str(func)
         return object.__new__(cls)

-    def __getnewargs__(self):
-        # type: () -> Tuple[str]
+    def __getnewargs__(self) -> Tuple[str]:
         return (self._func,) + self._args  # type: ignore

-    def __init__(self, func, *args):
-        # type: (Callable, str) -> None
+    def __init__(self, func: Callable, *args: str) -> None:
         self._func = func
         self._args = args

     @property
-    def data(self):  # type: ignore
-        # type: () -> str
+    def data(self) -> str:  # type: ignore
         return self._func(*self._args)

     # replace function from UserString; it instantiates a self.__class__
     # for the encoding result

-    def encode(self, encoding=None, errors=None):  # type: ignore
-        # type: (str, str) -> bytes
+    def encode(self, encoding: str = None, errors: str = None) -> bytes:  # type: ignore
         if encoding:
             if errors:
                 return self.data.encode(encoding, errors)
@@ -66,58 +60,45 @@ class _TranslationProxy(UserString):
         else:
             return self.data.encode()

-    def __dir__(self):
-        # type: () -> List[str]
+    def __dir__(self) -> List[str]:
         return dir(str)

-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         return str(self.data)

-    def __add__(self, other):  # type: ignore
-        # type: (str) -> str
+    def __add__(self, other: str) -> str:  # type: ignore
         return self.data + other

-    def __radd__(self, other):
-        # type: (str) -> str
+    def __radd__(self, other: str) -> str:
         return other + self.data

-    def __mod__(self, other):  # type: ignore
-        # type: (str) -> str
+    def __mod__(self, other: str) -> str:  # type: ignore
         return self.data % other

-    def __rmod__(self, other):
-        # type: (str) -> str
+    def __rmod__(self, other: str) -> str:
         return other % self.data

-    def __mul__(self, other):  # type: ignore
-        # type: (Any) -> str
+    def __mul__(self, other: Any) -> str:  # type: ignore
         return self.data * other

-    def __rmul__(self, other):
-        # type: (Any) -> str
+    def __rmul__(self, other: Any) -> str:
         return other * self.data

-    def __getattr__(self, name):
-        # type: (str) -> Any
+    def __getattr__(self, name: str) -> Any:
         if name == '__members__':
             return self.__dir__()
         return getattr(self.data, name)

-    def __getstate__(self):
-        # type: () -> Tuple[Callable, Tuple[str, ...]]
+    def __getstate__(self) -> Tuple[Callable, Tuple[str, ...]]:
         return self._func, self._args

-    def __setstate__(self, tup):
-        # type: (Tuple[Callable, Tuple[str]]) -> None
+    def __setstate__(self, tup: Tuple[Callable, Tuple[str]]) -> None:
         self._func, self._args = tup

-    def __copy__(self):
-        # type: () -> _TranslationProxy
+    def __copy__(self) -> "_TranslationProxy":
         return self

-    def __repr__(self):
-        # type: () -> str
+    def __repr__(self) -> str:
         try:
             return 'i' + repr(str(self.data))
         except Exception:
@@ -127,8 +108,8 @@ class _TranslationProxy(UserString):
 translators = defaultdict(NullTranslations)  # type: Dict[Tuple[str, str], NullTranslations]


-def init(locale_dirs, language, catalog='sphinx', namespace='general'):
-    # type: (List[str], str, str, str) -> Tuple[NullTranslations, bool]
+def init(locale_dirs: List[str], language: str,
+         catalog: str = 'sphinx', namespace: str = 'general') -> Tuple[NullTranslations, bool]:
     """Look for message catalogs in `locale_dirs` and *ensure* that there is at
     least a NullTranslations catalog set in `translators`. If called multiple
     times or if several ``.mo`` files are found, their contents are merged
@@ -167,8 +148,7 @@ def init(locale_dirs, language, catalog='sphinx', namespace='general'):
     return translator, has_translation


-def setlocale(category, value=None):
-    # type: (int, Union[str, Iterable[str]]) -> None
+def setlocale(category: int, value: Union[str, Iterable[str]] = None) -> None:
     """Update locale settings.

     This does not throw any exception even if update fails.
@@ -188,8 +168,7 @@ def setlocale(category, value=None):
         pass


-def init_console(locale_dir, catalog):
-    # type: (str, str) -> Tuple[NullTranslations, bool]
+def init_console(locale_dir: str, catalog: str) -> Tuple[NullTranslations, bool]:
     """Initialize locale for console.

     .. versionadded:: 1.8
@@ -204,18 +183,15 @@ def init_console(locale_dir, catalog):
     return init([locale_dir], language, catalog, 'console')


-def get_translator(catalog='sphinx', namespace='general'):
-    # type: (str, str) -> NullTranslations
+def get_translator(catalog: str = 'sphinx', namespace: str = 'general') -> NullTranslations:
     return translators[(namespace, catalog)]


-def is_translator_registered(catalog='sphinx', namespace='general'):
-    # type: (str, str) -> bool
+def is_translator_registered(catalog: str = 'sphinx', namespace: str = 'general') -> bool:
     return (namespace, catalog) in translators


-def _lazy_translate(catalog, namespace, message):
-    # type: (str, str, str) -> str
+def _lazy_translate(catalog: str, namespace: str, message: str) -> str:
     """Used instead of _ when creating TranslationProxy, because _ is
     not bound yet at that time.
     """
@@ -248,8 +224,7 @@ def get_translation(catalog, namespace='general'):

     .. versionadded:: 1.8
     """
-    def gettext(message, *args):
-        # type: (str, *Any) -> str
+    def gettext(message: str, *args) -> str:
         if not is_translator_registered(catalog, namespace):
             # not initialized yet
             return _TranslationProxy(_lazy_translate, catalog, namespace, message)  # type: ignore  # NOQA
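
For context (not part of the diff), a minimal sketch of the `get_translation()` helper annotated above, as an extension would use it; the catalog name is hypothetical:

    from sphinx.locale import get_translation

    MESSAGE_CATALOG_NAME = 'demo_extension'   # hypothetical catalog shipped by the extension
    _ = get_translation(MESSAGE_CATALOG_NAME)

    # Before the catalog is registered this returns a lazy _TranslationProxy;
    # afterwards it returns the translated string.
    print(_('Welcome'))
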
@ -8,8 +8,11 @@
|
|||||||
:license: BSD, see LICENSE for details.
|
:license: BSD, see LICENSE for details.
|
||||||
"""
|
"""
|
||||||
|
|
+from typing import Any, Dict, List, Union

 import docutils.parsers
 import docutils.parsers.rst
+from docutils import nodes
 from docutils.parsers.rst import states
 from docutils.statemachine import StringList
 from docutils.transforms.universal import SmartQuotes
@@ -18,11 +21,9 @@ from sphinx.util.rst import append_epilog, prepend_prolog

 if False:
     # For type annotation
-    from typing import Any, Dict, List, Union  # NOQA
-    from typing import Type  # for python3.5.1
-    from docutils import nodes  # NOQA
     from docutils.transforms import Transform  # NOQA
-    from sphinx.application import Sphinx  # NOQA
+    from typing import Type  # NOQA # for python3.5.1
+    from sphinx.application import Sphinx


 class Parser(docutils.parsers.Parser):
@@ -48,8 +49,7 @@ class Parser(docutils.parsers.Parser):
     ``warn()`` and ``info()`` is deprecated. Use :mod:`sphinx.util.logging` instead.
     """

-    def set_application(self, app):
-        # type: (Sphinx) -> None
+    def set_application(self, app: "Sphinx") -> None:
         """set_application will be called from Sphinx to set app and other instance variables

         :param sphinx.application.Sphinx app: Sphinx application object
@@ -62,8 +62,7 @@ class Parser(docutils.parsers.Parser):
 class RSTParser(docutils.parsers.rst.Parser, Parser):
     """A reST parser for Sphinx."""

-    def get_transforms(self):
-        # type: () -> List[Type[Transform]]
+    def get_transforms(self) -> List["Type[Transform]"]:
         """Sphinx's reST parser replaces a transform class for smart-quotes by own's

         refs: sphinx.io.SphinxStandaloneReader
@@ -72,8 +71,7 @@ class RSTParser(docutils.parsers.rst.Parser, Parser):
         transforms.remove(SmartQuotes)
         return transforms

-    def parse(self, inputstring, document):
-        # type: (Union[str, StringList], nodes.document) -> None
+    def parse(self, inputstring: Union[str, StringList], document: nodes.document) -> None:
         """Parse text and generate a document tree."""
         self.setup_parse(inputstring, document)  # type: ignore
         self.statemachine = states.RSTStateMachine(
@@ -95,15 +93,13 @@ class RSTParser(docutils.parsers.rst.Parser, Parser):
         self.statemachine.run(inputlines, document, inliner=self.inliner)
         self.finish_parse()

-    def decorate(self, content):
-        # type: (StringList) -> None
+    def decorate(self, content: StringList) -> None:
         """Preprocess reST content before parsing."""
         prepend_prolog(content, self.config.rst_prolog)
         append_epilog(content, self.config.rst_epilog)


-def setup(app):
-    # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
     app.add_source_parser(RSTParser)

     return {
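The hunks above move sphinx.parsers from comment-style type hints to real annotations. For orientation only, a minimal sketch (not part of this commit) of a custom source parser written against that annotated API; the class name and the `.mine` suffix/filetype are invented for the example.

    # Sketch only: a tiny parser registered the same way RSTParser is above.
    from typing import Any, Dict

    from docutils import nodes
    from sphinx.application import Sphinx
    from sphinx.parsers import Parser


    class MyParser(Parser):          # hypothetical class
        supported = ('mine',)        # hypothetical filetype

        def parse(self, inputstring: str, document: nodes.document) -> None:
            # a real parser would build a full doctree; this stores the raw text
            document += nodes.paragraph(text=inputstring)


    def setup(app: Sphinx) -> Dict[str, Any]:
        app.add_source_suffix('.mine', 'mine')
        app.add_source_parser(MyParser)
        return {'version': '0.1', 'parallel_read_safe': True}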
sphinx/registry.py
@@ -11,36 +11,37 @@
 import traceback
 from importlib import import_module
 from types import MethodType
+from typing import Any, Callable, Dict, Iterator, List, Tuple, Union

+from docutils import nodes
+from docutils.io import Input
+from docutils.nodes import Element, Node, TextElement
+from docutils.parsers import Parser
 from docutils.parsers.rst import Directive
+from docutils.transforms import Transform
 from pkg_resources import iter_entry_points

-from sphinx.domains import ObjType
+from sphinx.builders import Builder
+from sphinx.config import Config
+from sphinx.deprecation import RemovedInSphinx30Warning
+from sphinx.domains import Domain, Index, ObjType
 from sphinx.domains.std import GenericObject, Target
+from sphinx.environment import BuildEnvironment
 from sphinx.errors import ExtensionError, SphinxError, VersionRequirementError
 from sphinx.extension import Extension
+from sphinx.io import SphinxFileInput
 from sphinx.locale import __
 from sphinx.parsers import Parser as SphinxParser
 from sphinx.roles import XRefRole
 from sphinx.util import logging
 from sphinx.util.logging import prefixed_warnings
+from sphinx.util.typing import RoleFunction, TitleGetter

 if False:
     # For type annotation
-    from typing import Any, Callable, Dict, Iterator, List, Tuple, Union  # NOQA
     from typing import Type  # for python3.5.1
-    from docutils import nodes  # NOQA
-    from docutils.io import Input  # NOQA
-    from docutils.parsers import Parser  # NOQA
-    from docutils.transforms import Transform  # NOQA
-    from sphinx.application import Sphinx  # NOQA
-    from sphinx.builders import Builder  # NOQA
-    from sphinx.config import Config  # NOQA
-    from sphinx.domains import Domain, Index  # NOQA
-    from sphinx.environment import BuildEnvironment  # NOQA
-    from sphinx.ext.autodoc import Documenter  # NOQA
-    from sphinx.io import SphinxFileInput  # NOQA
-    from sphinx.util.typing import RoleFunction, TitleGetter  # NOQA
+    from sphinx.application import Sphinx
+    from sphinx.ext.autodoc import Documenter

 logger = logging.getLogger(__name__)

@@ -52,8 +53,7 @@ EXTENSION_BLACKLIST = {


 class SphinxComponentRegistry:
-    def __init__(self):
-        # type: () -> None
+    def __init__(self) -> None:
         #: special attrgetter for autodoc; class object -> attrgetter
         self.autodoc_attrgettrs = {}    # type: Dict[Type, Callable[[Any, str, Any], Any]]

@@ -87,7 +87,7 @@ class SphinxComponentRegistry:

         #: additional enumerable nodes
         #: a dict of node class -> tuple of figtype and title_getter function
-        self.enumerable_nodes = {}      # type: Dict[Type[nodes.Node], Tuple[str, TitleGetter]]
+        self.enumerable_nodes = {}      # type: Dict[Type[Node], Tuple[str, TitleGetter]]

         #: HTML inline and block math renderers
         #: a dict of name -> tuple of visit function and depart function
@@ -122,8 +122,7 @@ class SphinxComponentRegistry:
         #: additional transforms; list of transforms
         self.transforms = []            # type: List[Type[Transform]]

-    def add_builder(self, builder, override=False):
-        # type: (Type[Builder], bool) -> None
+    def add_builder(self, builder: "Type[Builder]", override: bool = False) -> None:
         logger.debug('[app] adding builder: %r', builder)
         if not hasattr(builder, 'name'):
             raise ExtensionError(__('Builder class %s has no "name" attribute') % builder)
@@ -132,8 +131,7 @@ class SphinxComponentRegistry:
                                  (builder.name, self.builders[builder.name].__module__))
         self.builders[builder.name] = builder

-    def preload_builder(self, app, name):
-        # type: (Sphinx, str) -> None
+    def preload_builder(self, app: "Sphinx", name: str) -> None:
         if name is None:
             return

@@ -147,26 +145,22 @@ class SphinxComponentRegistry:

                 self.load_extension(app, entry_point.module_name)

-    def create_builder(self, app, name):
-        # type: (Sphinx, str) -> Builder
+    def create_builder(self, app: "Sphinx", name: str) -> Builder:
         if name not in self.builders:
             raise SphinxError(__('Builder name %s not registered') % name)

         return self.builders[name](app)

-    def add_domain(self, domain, override=False):
-        # type: (Type[Domain], bool) -> None
+    def add_domain(self, domain: "Type[Domain]", override: bool = False) -> None:
         logger.debug('[app] adding domain: %r', domain)
         if domain.name in self.domains and not override:
             raise ExtensionError(__('domain %s already registered') % domain.name)
         self.domains[domain.name] = domain

-    def has_domain(self, domain):
-        # type: (str) -> bool
+    def has_domain(self, domain: str) -> bool:
         return domain in self.domains

-    def create_domains(self, env):
-        # type: (BuildEnvironment) -> Iterator[Domain]
+    def create_domains(self, env: BuildEnvironment) -> Iterator[Domain]:
         for DomainClass in self.domains.values():
             domain = DomainClass(env)

@@ -179,8 +173,8 @@ class SphinxComponentRegistry:

             yield domain

-    def add_directive_to_domain(self, domain, name, cls, override=False):
-        # type: (str, str, Type[Directive], bool) -> None
+    def add_directive_to_domain(self, domain: str, name: str,
+                                cls: "Type[Directive]", override: bool = False) -> None:
         logger.debug('[app] adding directive to domain: %r', (domain, name, cls))
         if domain not in self.domains:
             raise ExtensionError(__('domain %s not yet registered') % domain)
@@ -191,8 +185,9 @@ class SphinxComponentRegistry:
                                  (name, domain))
         directives[name] = cls

-    def add_role_to_domain(self, domain, name, role, override=False):
-        # type: (str, str, Union[RoleFunction, XRefRole], bool) -> None
+    def add_role_to_domain(self, domain: str, name: str,
+                           role: Union[RoleFunction, XRefRole], override: bool = False
+                           ) -> None:
         logger.debug('[app] adding role to domain: %r', (domain, name, role))
         if domain not in self.domains:
             raise ExtensionError(__('domain %s not yet registered') % domain)
@@ -202,8 +197,8 @@ class SphinxComponentRegistry:
                                  (name, domain))
         roles[name] = role

-    def add_index_to_domain(self, domain, index, override=False):
-        # type: (str, Type[Index], bool) -> None
+    def add_index_to_domain(self, domain: str, index: "Type[Index]",
+                            override: bool = False) -> None:
         logger.debug('[app] adding index to domain: %r', (domain, index))
         if domain not in self.domains:
             raise ExtensionError(__('domain %s not yet registered') % domain)
@@ -213,10 +208,10 @@ class SphinxComponentRegistry:
                                  (index.name, domain))
         indices.append(index)

-    def add_object_type(self, directivename, rolename, indextemplate='',
-                        parse_node=None, ref_nodeclass=None, objname='',
-                        doc_field_types=[], override=False):
-        # type: (str, str, str, Callable, Type[nodes.TextElement], str, List, bool) -> None
+    def add_object_type(self, directivename: str, rolename: str, indextemplate: str = '',
+                        parse_node: Callable = None, ref_nodeclass: "Type[TextElement]" = None,
+                        objname: str = '', doc_field_types: List = [], override: bool = False
+                        ) -> None:
         logger.debug('[app] adding object type: %r',
                      (directivename, rolename, indextemplate, parse_node,
                       ref_nodeclass, objname, doc_field_types))
@@ -237,9 +232,9 @@ class SphinxComponentRegistry:
                                  directivename)
         object_types[directivename] = ObjType(objname or directivename, rolename)

-    def add_crossref_type(self, directivename, rolename, indextemplate='',
-                          ref_nodeclass=None, objname='', override=False):
-        # type: (str, str, str, Type[nodes.TextElement], str, bool) -> None
+    def add_crossref_type(self, directivename: str, rolename: str, indextemplate: str = '',
+                          ref_nodeclass: "Type[TextElement]" = None, objname: str = '',
+                          override: bool = False) -> None:
         logger.debug('[app] adding crossref type: %r',
                      (directivename, rolename, indextemplate, ref_nodeclass, objname))

@@ -257,17 +252,16 @@ class SphinxComponentRegistry:
                                  directivename)
         object_types[directivename] = ObjType(objname or directivename, rolename)

-    def add_source_suffix(self, suffix, filetype, override=False):
-        # type: (str, str, bool) -> None
+    def add_source_suffix(self, suffix: str, filetype: str, override: bool = False) -> None:
         logger.debug('[app] adding source_suffix: %r, %r', suffix, filetype)
         if suffix in self.source_suffix and not override:
             raise ExtensionError(__('source_suffix %r is already registered') % suffix)
         else:
             self.source_suffix[suffix] = filetype

-    def add_source_parser(self, parser, **kwargs):
-        # type: (Type[Parser], bool) -> None
+    def add_source_parser(self, parser: "Type[Parser]", **kwargs) -> None:
         logger.debug('[app] adding search source_parser: %r', parser)

         # create a map from filetype to parser
         for filetype in parser.supported:
             if filetype in self.source_parsers and not kwargs.get('override'):
@@ -276,27 +270,23 @@ class SphinxComponentRegistry:
             else:
                 self.source_parsers[filetype] = parser

-    def get_source_parser(self, filetype):
-        # type: (str) -> Type[Parser]
+    def get_source_parser(self, filetype: str) -> "Type[Parser]":
         try:
             return self.source_parsers[filetype]
         except KeyError:
             raise SphinxError(__('Source parser for %s not registered') % filetype)

-    def get_source_parsers(self):
-        # type: () -> Dict[str, Type[Parser]]
+    def get_source_parsers(self) -> Dict[str, "Type[Parser]"]:
         return self.source_parsers

-    def create_source_parser(self, app, filename):
-        # type: (Sphinx, str) -> Parser
+    def create_source_parser(self, app: "Sphinx", filename: str) -> Parser:
         parser_class = self.get_source_parser(filename)
         parser = parser_class()
         if isinstance(parser, SphinxParser):
             parser.set_application(app)
         return parser

-    def get_source_input(self, filetype):
-        # type: (str) -> Type[Input]
+    def get_source_input(self, filetype: str) -> "Type[Input]":
         try:
             return self.source_inputs[filetype]
         except KeyError:
@@ -306,15 +296,14 @@ class SphinxComponentRegistry:
             except KeyError:
                 return None

-    def add_translator(self, name, translator, override=False):
-        # type: (str, Type[nodes.NodeVisitor], bool) -> None
+    def add_translator(self, name: str, translator: "Type[nodes.NodeVisitor]",
+                       override: bool = False) -> None:
         logger.debug('[app] Change of translator for the %s builder.' % name)
         if name in self.translators and not override:
             raise ExtensionError(__('Translator for %r already exists') % name)
         self.translators[name] = translator

-    def add_translation_handlers(self, node, **kwargs):
-        # type: (Type[nodes.Element], Any) -> None
+    def add_translation_handlers(self, node: "Type[Element]", **kwargs) -> None:
         logger.debug('[app] adding translation_handlers: %r, %r', node, kwargs)
         for builder_name, handlers in kwargs.items():
             translation_handlers = self.translation_handlers.setdefault(builder_name, {})
@@ -325,13 +314,11 @@ class SphinxComponentRegistry:
                 raise ExtensionError(__('kwargs for add_node() must be a (visit, depart) '
                                         'function tuple: %r=%r') % builder_name, handlers)

-    def get_translator_class(self, builder):
-        # type: (Builder) -> Type[nodes.NodeVisitor]
+    def get_translator_class(self, builder: Builder) -> "Type[nodes.NodeVisitor]":
         return self.translators.get(builder.name,
                                     builder.default_translator_class)

-    def create_translator(self, builder, *args):
-        # type: (Builder, Any) -> nodes.NodeVisitor
+    def create_translator(self, builder: Builder, *args) -> nodes.NodeVisitor:
         translator_class = self.get_translator_class(builder)
         assert translator_class, "translator not found for %s" % builder.name
         translator = translator_class(*args)
@@ -349,54 +336,48 @@ class SphinxComponentRegistry:

         return translator

-    def add_transform(self, transform):
-        # type: (Type[Transform]) -> None
+    def add_transform(self, transform: "Type[Transform]") -> None:
         logger.debug('[app] adding transform: %r', transform)
         self.transforms.append(transform)

-    def get_transforms(self):
-        # type: () -> List[Type[Transform]]
+    def get_transforms(self) -> List["Type[Transform]"]:
         return self.transforms

-    def add_post_transform(self, transform):
-        # type: (Type[Transform]) -> None
+    def add_post_transform(self, transform: "Type[Transform]") -> None:
         logger.debug('[app] adding post transform: %r', transform)
         self.post_transforms.append(transform)

-    def get_post_transforms(self):
-        # type: () -> List[Type[Transform]]
+    def get_post_transforms(self) -> List["Type[Transform]"]:
         return self.post_transforms

-    def add_documenter(self, objtype, documenter):
-        # type: (str, Type[Documenter]) -> None
+    def add_documenter(self, objtype: str, documenter: "Type[Documenter]") -> None:
         self.documenters[objtype] = documenter

-    def add_autodoc_attrgetter(self, typ, attrgetter):
-        # type: (Type, Callable[[Any, str, Any], Any]) -> None
+    def add_autodoc_attrgetter(self, typ: "Type",
+                               attrgetter: Callable[[Any, str, Any], Any]) -> None:
         self.autodoc_attrgettrs[typ] = attrgetter

     def add_css_files(self, filename, **attributes):
         self.css_files.append((filename, attributes))

-    def add_js_file(self, filename, **attributes):
-        # type: (str, **str) -> None
+    def add_js_file(self, filename: str, **attributes: str) -> None:
         logger.debug('[app] adding js_file: %r, %r', filename, attributes)
         self.js_files.append((filename, attributes))

-    def add_latex_package(self, name, options):
-        # type: (str, str) -> None
+    def add_latex_package(self, name: str, options: str) -> None:
         logger.debug('[app] adding latex package: %r', name)
         self.latex_packages.append((name, options))

-    def add_enumerable_node(self, node, figtype, title_getter=None, override=False):
-        # type: (Type[nodes.Node], str, TitleGetter, bool) -> None
+    def add_enumerable_node(self, node: "Type[Node]", figtype: str,
+                            title_getter: TitleGetter = None, override: bool = False) -> None:
         logger.debug('[app] adding enumerable node: (%r, %r, %r)', node, figtype, title_getter)
         if node in self.enumerable_nodes and not override:
             raise ExtensionError(__('enumerable_node %r already registered') % node)
         self.enumerable_nodes[node] = (figtype, title_getter)

-    def add_html_math_renderer(self, name, inline_renderers, block_renderers):
-        # type: (str, Tuple[Callable, Callable], Tuple[Callable, Callable]) -> None
+    def add_html_math_renderer(self, name: str,
+                               inline_renderers: Tuple[Callable, Callable],
+                               block_renderers: Tuple[Callable, Callable]) -> None:
         logger.debug('[app] adding html_math_renderer: %s, %r, %r',
                      name, inline_renderers, block_renderers)
         if name in self.html_inline_math_renderers:
@@ -405,8 +386,7 @@ class SphinxComponentRegistry:
         self.html_inline_math_renderers[name] = inline_renderers
         self.html_block_math_renderers[name] = block_renderers

-    def load_extension(self, app, extname):
-        # type: (Sphinx, str) -> None
+    def load_extension(self, app: "Sphinx", extname: str) -> None:
         """Load a Sphinx extension."""
         if extname in app.extensions:  # alread loaded
             return
@@ -451,8 +431,7 @@ class SphinxComponentRegistry:

         app.extensions[extname] = Extension(extname, mod, **metadata)

-    def get_envversion(self, app):
-        # type: (Sphinx) -> Dict[str, str]
+    def get_envversion(self, app: "Sphinx") -> Dict[str, str]:
         from sphinx.environment import ENV_VERSION
         envversion = {ext.name: ext.metadata['env_version'] for ext in app.extensions.values()
                       if ext.metadata.get('env_version')}
@@ -460,8 +439,7 @@ class SphinxComponentRegistry:
         return envversion


-def merge_source_suffix(app, config):
-    # type: (Sphinx, Config) -> None
+def merge_source_suffix(app: "Sphinx", config: Config) -> None:
     """Merge source_suffix which specified by user and added by extensions."""
     for suffix, filetype in app.registry.source_suffix.items():
         if suffix not in app.config.source_suffix:
@@ -475,8 +453,7 @@ def merge_source_suffix(app, config):
     app.registry.source_suffix = app.config.source_suffix


-def setup(app):
-    # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
     app.connect('config-inited', merge_source_suffix)

     return {
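Most of the SphinxComponentRegistry.add_*() methods annotated above are reached indirectly, through the corresponding hooks on the Sphinx application object. A rough usage sketch, not part of the commit (assumes the Sphinx 2.x extension API; the directive and object-type names are invented):

    # Sketch only: each call below is forwarded to a registry.add_*() method above.
    from typing import Any, Dict, List

    from docutils import nodes
    from docutils.nodes import Node
    from docutils.parsers.rst import Directive
    from sphinx.application import Sphinx


    class HelloDirective(Directive):                 # hypothetical directive
        def run(self) -> List[Node]:
            return [nodes.paragraph(text='hello')]


    def setup(app: Sphinx) -> Dict[str, Any]:
        app.add_directive_to_domain('std', 'hello', HelloDirective)
        app.add_object_type('confvar', 'confvar', objname='configuration variable')
        app.add_latex_package('tikz')
        return {'version': '0.1'}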
117  sphinx/roles.py
@@ -10,25 +10,27 @@

 import re
 import warnings
+from typing import Any, Dict, List, Tuple
+from typing import Type  # for python3.5.1

 from docutils import nodes, utils
+from docutils.nodes import Element, Node, TextElement, system_message
+from docutils.parsers.rst.states import Inliner

 from sphinx import addnodes
 from sphinx.deprecation import RemovedInSphinx40Warning
 from sphinx.locale import _
 from sphinx.util import ws_re
 from sphinx.util.docutils import ReferenceRole, SphinxRole
-from sphinx.util.nodes import split_explicit_title, process_index_entry, \
-    set_role_source_info
+from sphinx.util.nodes import (
+    split_explicit_title, process_index_entry, set_role_source_info
+)
+from sphinx.util.typing import RoleFunction

 if False:
     # For type annotation
-    from typing import Any, Dict, List, Tuple  # NOQA
-    from typing import Type  # for python3.5.1
-    from docutils.parsers.rst.states import Inliner  # NOQA
-    from sphinx.application import Sphinx  # NOQA
-    from sphinx.environment import BuildEnvironment  # NOQA
-    from sphinx.util.typing import RoleFunction  # NOQA
+    from sphinx.application import Sphinx
+    from sphinx.environment import BuildEnvironment


 generic_docroles = {
@@ -71,12 +73,12 @@ class XRefRole(ReferenceRole):
     * Subclassing and overwriting `process_link()` and/or `result_nodes()`.
     """

-    nodeclass = addnodes.pending_xref   # type: Type[nodes.Element]
-    innernodeclass = nodes.literal      # type: Type[nodes.TextElement]
+    nodeclass = addnodes.pending_xref   # type: Type[Element]
+    innernodeclass = nodes.literal      # type: Type[TextElement]

-    def __init__(self, fix_parens=False, lowercase=False,
-                 nodeclass=None, innernodeclass=None, warn_dangling=False):
-        # type: (bool, bool, Type[nodes.Element], Type[nodes.TextElement], bool) -> None
+    def __init__(self, fix_parens: bool = False, lowercase: bool = False,
+                 nodeclass: Type[Element] = None, innernodeclass: Type[TextElement] = None,
+                 warn_dangling: bool = False) -> None:
         self.fix_parens = fix_parens
         self.lowercase = lowercase
         self.warn_dangling = warn_dangling
@@ -87,8 +89,8 @@ class XRefRole(ReferenceRole):

         super().__init__()

-    def _fix_parens(self, env, has_explicit_title, title, target):
-        # type: (BuildEnvironment, bool, str, str) -> Tuple[str, str]
+    def _fix_parens(self, env: "BuildEnvironment", has_explicit_title: bool, title: str,
+                    target: str) -> Tuple[str, str]:
         warnings.warn('XRefRole._fix_parens() is deprecated.',
                       RemovedInSphinx40Warning, stacklevel=2)
         if not has_explicit_title:
@@ -103,8 +105,7 @@ class XRefRole(ReferenceRole):
                 target = target[:-2]
         return title, target

-    def update_title_and_target(self, title, target):
-        # type: (str, str) -> Tuple[str, str]
+    def update_title_and_target(self, title: str, target: str) -> Tuple[str, str]:
         if not self.has_explicit_title:
             if title.endswith('()'):
                 # remove parentheses
@@ -117,8 +118,7 @@ class XRefRole(ReferenceRole):
                 target = target[:-2]
         return title, target

-    def run(self):
-        # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+    def run(self) -> Tuple[List[Node], List[system_message]]:
         if ':' not in self.name:
             self.refdomain, self.reftype = '', self.name
             self.classes = ['xref', self.reftype]
@@ -132,8 +132,7 @@ class XRefRole(ReferenceRole):
         else:
             return self.create_xref_node()

-    def create_non_xref_node(self):
-        # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+    def create_non_xref_node(self) -> Tuple[List[Node], List[system_message]]:
         text = utils.unescape(self.text[1:])
         if self.fix_parens:
             self.has_explicit_title = False  # treat as implicit
@@ -142,8 +141,7 @@ class XRefRole(ReferenceRole):
         node = self.innernodeclass(self.rawtext, text, classes=self.classes)
         return self.result_nodes(self.inliner.document, self.env, node, is_ref=False)

-    def create_xref_node(self):
-        # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+    def create_xref_node(self) -> Tuple[List[Node], List[system_message]]:
         target = self.target
         title = self.title
         if self.lowercase:
@@ -170,8 +168,8 @@ class XRefRole(ReferenceRole):

     # methods that can be overwritten

-    def process_link(self, env, refnode, has_explicit_title, title, target):
-        # type: (BuildEnvironment, nodes.Element, bool, str, str) -> Tuple[str, str]
+    def process_link(self, env: "BuildEnvironment", refnode: Element, has_explicit_title: bool,
+                     title: str, target: str) -> Tuple[str, str]:
         """Called after parsing title and target text, and creating the
         reference node (given in *refnode*).  This method can alter the
         reference node and must return a new (or the same) ``(title, target)``
@@ -179,8 +177,8 @@ class XRefRole(ReferenceRole):
         """
         return title, ws_re.sub(' ', target)

-    def result_nodes(self, document, env, node, is_ref):
-        # type: (nodes.document, BuildEnvironment, nodes.Element, bool) -> Tuple[List[nodes.Node], List[nodes.system_message]]  # NOQA
+    def result_nodes(self, document: nodes.document, env: "BuildEnvironment", node: Element,
+                     is_ref: bool) -> Tuple[List[Node], List[system_message]]:
         """Called before returning the finished nodes.  *node* is the reference
         node if one was created (*is_ref* is then true), else the content node.
         This method can add other nodes and must return a ``(nodes, messages)``
@@ -190,16 +188,17 @@ class XRefRole(ReferenceRole):


 class AnyXRefRole(XRefRole):
-    def process_link(self, env, refnode, has_explicit_title, title, target):
-        # type: (BuildEnvironment, nodes.Element, bool, str, str) -> Tuple[str, str]
+    def process_link(self, env: "BuildEnvironment", refnode: Element, has_explicit_title: bool,
+                     title: str, target: str) -> Tuple[str, str]:
         result = super().process_link(env, refnode, has_explicit_title, title, target)
         # add all possible context info (i.e. std:program, py:module etc.)
         refnode.attributes.update(env.ref_context)
         return result


-def indexmarkup_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
-    # type: (str, str, str, int, Inliner, Dict, List[str]) -> Tuple[List[nodes.Node], List[nodes.system_message]]  # NOQA
+def indexmarkup_role(typ: str, rawtext: str, text: str, lineno: int, inliner: Inliner,
+                     options: Dict = {}, content: List[str] = []
+                     ) -> Tuple[List[Node], List[system_message]]:
     """Role for PEP/RFC references that generate an index entry."""
     warnings.warn('indexmarkup_role() is deprecated.  Please use PEP or RFC class instead.',
                   RemovedInSphinx40Warning, stacklevel=2)
@@ -267,8 +266,7 @@ def indexmarkup_role(typ, rawtext, text, lineno, inliner, options={}, content=[]


 class PEP(ReferenceRole):
-    def run(self):
-        # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+    def run(self) -> Tuple[List[Node], List[system_message]]:
         target_id = 'index-%s' % self.env.new_serialno('index')
         entries = [('single', _('Python Enhancement Proposals; PEP %s') % self.target,
                     target_id, '', None)]
@@ -293,8 +291,7 @@ class PEP(ReferenceRole):

         return [index, target, reference], []

-    def build_uri(self):
-        # type: () -> str
+    def build_uri(self) -> str:
         base_url = self.inliner.document.settings.pep_base_url
         ret = self.target.split('#', 1)
         if len(ret) == 2:
@@ -304,8 +301,7 @@ class PEP(ReferenceRole):


 class RFC(ReferenceRole):
-    def run(self):
-        # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]  # NOQA
+    def run(self) -> Tuple[List[Node], List[system_message]]:
         target_id = 'index-%s' % self.env.new_serialno('index')
         entries = [('single', 'RFC; RFC %s' % self.target, target_id, '', None)]

@@ -329,8 +325,7 @@ class RFC(ReferenceRole):

         return [index, target, reference], []

-    def build_uri(self):
-        # type: () -> str
+    def build_uri(self) -> str:
         base_url = self.inliner.document.settings.rfc_base_url
         ret = self.target.split('#', 1)
         if len(ret) == 2:
@@ -342,8 +337,9 @@ class RFC(ReferenceRole):
 _amp_re = re.compile(r'(?<!&)&(?![&\s])')


-def menusel_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
-    # type: (str, str, str, int, Inliner, Dict, List[str]) -> Tuple[List[nodes.Node], List[nodes.system_message]]  # NOQA
+def menusel_role(typ: str, rawtext: str, text: str, lineno: int, inliner: Inliner,
+                 options: Dict = {}, content: List[str] = []
+                 ) -> Tuple[List[Node], List[system_message]]:
     warnings.warn('menusel_role() is deprecated. '
                   'Please use MenuSelection or GUILabel class instead.',
                   RemovedInSphinx40Warning, stacklevel=2)
@@ -382,8 +378,7 @@ def menusel_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
 class GUILabel(SphinxRole):
     amp_re = re.compile(r'(?<!&)&(?![&\s])')

-    def run(self):
-        # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+    def run(self) -> Tuple[List[Node], List[system_message]]:
         node = nodes.inline(rawtext=self.rawtext, classes=[self.name])
         spans = self.amp_re.split(self.text)
         node += nodes.Text(spans.pop(0))
@@ -399,8 +394,7 @@ class GUILabel(SphinxRole):


 class MenuSelection(GUILabel):
-    def run(self):
-        # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+    def run(self) -> Tuple[List[Node], List[system_message]]:
         self.text = self.text.replace('-->', '\N{TRIANGULAR BULLET}')
         return super().run()

@@ -409,9 +403,9 @@ _litvar_re = re.compile('{([^}]+)}')
 parens_re = re.compile(r'(\\*{|\\*})')


-def emph_literal_role(typ, rawtext, text, lineno, inliner,
-                      options={}, content=[]):
-    # type: (str, str, str, int, Inliner, Dict, List[str]) -> Tuple[List[nodes.Node], List[nodes.system_message]]  # NOQA
+def emph_literal_role(typ: str, rawtext: str, text: str, lineno: int, inliner: Inliner,
+                      options: Dict = {}, content: List[str] = []
+                      ) -> Tuple[List[Node], List[system_message]]:
     warnings.warn('emph_literal_role() is deprecated. '
                   'Please use EmphasizedLiteral class instead.',
                   RemovedInSphinx40Warning, stacklevel=2)
@@ -465,17 +459,15 @@ def emph_literal_role(typ, rawtext, text, lineno, inliner,
 class EmphasizedLiteral(SphinxRole):
     parens_re = re.compile(r'(\\\\|\\{|\\}|{|})')

-    def run(self):
-        # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+    def run(self) -> Tuple[List[Node], List[system_message]]:
         children = self.parse(self.text)
         node = nodes.literal(self.rawtext, '', *children,
                              role=self.name.lower(), classes=[self.name])

         return [node], []

-    def parse(self, text):
-        # type: (str) -> List[nodes.Node]
-        result = []  # type: List[nodes.Node]
+    def parse(self, text: str) -> List[Node]:
+        result = []  # type: List[Node]

         stack = ['']
         for part in self.parens_re.split(text):
@@ -517,8 +509,9 @@ class EmphasizedLiteral(SphinxRole):
 _abbr_re = re.compile(r'\((.*)\)$', re.S)


-def abbr_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
-    # type: (str, str, str, int, Inliner, Dict, List[str]) -> Tuple[List[nodes.Node], List[nodes.system_message]]  # NOQA
+def abbr_role(typ: str, rawtext: str, text: str, lineno: int, inliner: Inliner,
+              options: Dict = {}, content: List[str] = []
+              ) -> Tuple[List[Node], List[system_message]]:
     warnings.warn('abbr_role() is deprecated.  Please use Abbrevation class instead.',
                   RemovedInSphinx40Warning, stacklevel=2)
     text = utils.unescape(text)
@@ -535,8 +528,7 @@ def abbr_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
 class Abbreviation(SphinxRole):
     abbr_re = re.compile(r'\((.*)\)$', re.S)

-    def run(self):
-        # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+    def run(self) -> Tuple[List[Node], List[system_message]]:
         matched = self.abbr_re.search(self.text)
         if matched:
             text = self.text[:matched.start()].strip()
@@ -547,8 +539,9 @@ class Abbreviation(SphinxRole):
         return [nodes.abbreviation(self.rawtext, text, **self.options)], []


-def index_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
-    # type: (str, str, str, int, Inliner, Dict, List[str]) -> Tuple[List[nodes.Node], List[nodes.system_message]]  # NOQA
+def index_role(typ: str, rawtext: str, text: str, lineno: int, inliner: Inliner,
+               options: Dict = {}, content: List[str] = []
+               ) -> Tuple[List[Node], List[system_message]]:
     warnings.warn('index_role() is deprecated.  Please use Index class instead.',
                   RemovedInSphinx40Warning, stacklevel=2)
     # create new reference target
@@ -579,8 +572,7 @@ def index_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):


 class Index(ReferenceRole):
-    def run(self):
-        # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+    def run(self) -> Tuple[List[Node], List[system_message]]:
         target_id = 'index-%s' % self.env.new_serialno('index')
         if self.has_explicit_title:
             # if an explicit target is given, process it as a full entry
@@ -619,8 +611,7 @@ specific_docroles = {
 }  # type: Dict[str, RoleFunction]


-def setup(app):
-    # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
     from docutils.parsers.rst import roles

     for rolename, nodeclass in generic_docroles.items():
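The XRefRole signatures above are the extension points most custom roles override. A hedged sketch of a subclass written against the annotated process_link() hook, not part of the commit (the class name and the lower-casing behaviour are invented):

    # Sketch only: overriding the annotated process_link() hook shown above.
    from typing import Tuple

    from docutils.nodes import Element
    from sphinx.environment import BuildEnvironment
    from sphinx.roles import XRefRole


    class LowercaseXRefRole(XRefRole):               # hypothetical role class
        def process_link(self, env: BuildEnvironment, refnode: Element,
                         has_explicit_title: bool, title: str,
                         target: str) -> Tuple[str, str]:
            # normalize the target before the domain tries to resolve it
            return title, target.lower()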
@ -13,21 +13,21 @@ import re
|
|||||||
import warnings
|
import warnings
|
||||||
from importlib import import_module
|
from importlib import import_module
|
||||||
from os import path
|
from os import path
|
||||||
|
from typing import Any, Dict, IO, Iterable, List, Tuple, Set
|
||||||
|
|
||||||
from docutils import nodes
|
from docutils import nodes
|
||||||
|
from docutils.nodes import Node
|
||||||
|
|
||||||
from sphinx import addnodes
|
from sphinx import addnodes
|
||||||
from sphinx import package_dir
|
from sphinx import package_dir
|
||||||
from sphinx.deprecation import RemovedInSphinx40Warning
|
from sphinx.deprecation import RemovedInSphinx40Warning
|
||||||
|
from sphinx.environment import BuildEnvironment
|
||||||
from sphinx.search.jssplitter import splitter_code
|
from sphinx.search.jssplitter import splitter_code
|
||||||
from sphinx.util import jsdump, rpartition
|
from sphinx.util import jsdump, rpartition
|
||||||
|
|
||||||
if False:
|
if False:
|
||||||
# For type annotation
|
# For type annotation
|
||||||
from typing import Any, Dict, IO, Iterable, List, Tuple, Set # NOQA
|
|
||||||
from typing import Type # for python3.5.1
|
from typing import Type # for python3.5.1
|
||||||
from docutils import nodes # NOQA
|
|
||||||
from sphinx.environment import BuildEnvironment # NOQA
|
|
||||||
|
|
||||||
|
|
||||||
class SearchLanguage:
|
class SearchLanguage:
|
||||||
@ -69,19 +69,16 @@ var Stemmer = function() {
|
|||||||
|
|
||||||
_word_re = re.compile(r'(?u)\w+')
|
_word_re = re.compile(r'(?u)\w+')
|
||||||
|
|
||||||
def __init__(self, options):
|
def __init__(self, options: Dict) -> None:
|
||||||
# type: (Dict) -> None
|
|
||||||
self.options = options
|
self.options = options
|
||||||
self.init(options)
|
self.init(options)
|
||||||
|
|
||||||
def init(self, options):
|
def init(self, options: Dict) -> None:
|
||||||
# type: (Dict) -> None
|
|
||||||
"""
|
"""
|
||||||
Initialize the class with the options the user has given.
|
Initialize the class with the options the user has given.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def split(self, input):
|
def split(self, input: str) -> List[str]:
|
||||||
# type: (str) -> List[str]
|
|
||||||
"""
|
"""
|
||||||
This method splits a sentence into words. Default splitter splits input
|
This method splits a sentence into words. Default splitter splits input
|
||||||
at white spaces, which should be enough for most languages except CJK
|
at white spaces, which should be enough for most languages except CJK
|
||||||
@ -89,8 +86,7 @@ var Stemmer = function() {
|
|||||||
"""
|
"""
|
||||||
return self._word_re.findall(input)
|
return self._word_re.findall(input)
|
||||||
|
|
||||||
def stem(self, word):
|
def stem(self, word: str) -> str:
|
||||||
# type: (str) -> str
|
|
||||||
"""
|
"""
|
||||||
This method implements stemming algorithm of the Python version.
|
This method implements stemming algorithm of the Python version.
|
||||||
|
|
||||||
@ -103,8 +99,7 @@ var Stemmer = function() {
|
|||||||
"""
|
"""
|
||||||
return word
|
return word
|
||||||
|
|
||||||
def word_filter(self, word):
|
def word_filter(self, word: str) -> bool:
|
||||||
# type: (str) -> bool
|
|
||||||
"""
|
"""
|
||||||
Return true if the target word should be registered in the search index.
|
Return true if the target word should be registered in the search index.
|
||||||
This method is called after stemming.
|
This method is called after stemming.
|
||||||
@ -121,8 +116,7 @@ var Stemmer = function() {
|
|||||||
from sphinx.search.en import SearchEnglish
|
from sphinx.search.en import SearchEnglish
|
||||||
|
|
||||||
|
|
||||||
def parse_stop_word(source):
|
def parse_stop_word(source: str) -> Set[str]:
|
||||||
# type: (str) -> Set[str]
|
|
||||||
"""
|
"""
|
||||||
parse snowball style word list like this:
|
parse snowball style word list like this:
|
||||||
|
|
||||||
@ -166,24 +160,20 @@ class _JavaScriptIndex:
|
|||||||
PREFIX = 'Search.setIndex('
|
PREFIX = 'Search.setIndex('
|
||||||
SUFFIX = ')'
|
SUFFIX = ')'
|
||||||
|
|
||||||
def dumps(self, data):
|
def dumps(self, data: Any) -> str:
|
||||||
# type: (Any) -> str
|
|
||||||
return self.PREFIX + jsdump.dumps(data) + self.SUFFIX
|
return self.PREFIX + jsdump.dumps(data) + self.SUFFIX
|
||||||
|
|
||||||
def loads(self, s):
|
def loads(self, s: str) -> Any:
|
||||||
# type: (str) -> Any
|
|
||||||
data = s[len(self.PREFIX):-len(self.SUFFIX)]
|
data = s[len(self.PREFIX):-len(self.SUFFIX)]
|
||||||
if not data or not s.startswith(self.PREFIX) or not \
|
if not data or not s.startswith(self.PREFIX) or not \
|
||||||
s.endswith(self.SUFFIX):
|
s.endswith(self.SUFFIX):
|
||||||
raise ValueError('invalid data')
|
raise ValueError('invalid data')
|
||||||
return jsdump.loads(data)
|
return jsdump.loads(data)
|
||||||
|
|
||||||
def dump(self, data, f):
|
def dump(self, data: Any, f: IO) -> None:
|
||||||
# type: (Any, IO) -> None
|
|
||||||
f.write(self.dumps(data))
|
f.write(self.dumps(data))
|
||||||
|
|
||||||
def load(self, f):
|
def load(self, f: IO) -> Any:
|
||||||
# type: (IO) -> Any
|
|
||||||
return self.loads(f.read())
|
return self.loads(f.read())
|
||||||
|
|
||||||
|
|
||||||
@ -195,15 +185,13 @@ class WordCollector(nodes.NodeVisitor):
|
|||||||
A special visitor that collects words for the `IndexBuilder`.
|
A special visitor that collects words for the `IndexBuilder`.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, document, lang):
|
def __init__(self, document: nodes.document, lang: SearchLanguage) -> None:
|
||||||
# type: (nodes.document, SearchLanguage) -> None
|
|
||||||
super().__init__(document)
|
super().__init__(document)
|
||||||
self.found_words = [] # type: List[str]
|
self.found_words = [] # type: List[str]
|
||||||
self.found_title_words = [] # type: List[str]
|
self.found_title_words = [] # type: List[str]
|
||||||
self.lang = lang
|
self.lang = lang
|
||||||
|
|
||||||
def is_meta_keywords(self, node, nodetype=None):
|
def is_meta_keywords(self, node: addnodes.meta, nodetype: Any = None) -> bool:
|
||||||
# type: (addnodes.meta, Any) -> bool
|
|
||||||
if nodetype is not None:
|
if nodetype is not None:
|
||||||
warnings.warn('"nodetype" argument for WordCollector.is_meta_keywords() '
|
warnings.warn('"nodetype" argument for WordCollector.is_meta_keywords() '
|
||||||
'is deprecated.', RemovedInSphinx40Warning)
|
'is deprecated.', RemovedInSphinx40Warning)
|
||||||
@ -217,8 +205,7 @@ class WordCollector(nodes.NodeVisitor):
|
|||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def dispatch_visit(self, node):
|
def dispatch_visit(self, node: Node) -> None:
|
||||||
# type: (nodes.Node) -> None
|
|
||||||
if isinstance(node, nodes.comment):
|
if isinstance(node, nodes.comment):
|
||||||
raise nodes.SkipNode
|
raise nodes.SkipNode
|
||||||
elif isinstance(node, nodes.raw):
|
elif isinstance(node, nodes.raw):
|
||||||
@ -251,8 +238,7 @@ class IndexBuilder:
|
|||||||
'pickle': pickle
|
'pickle': pickle
|
||||||
}
|
}
|
||||||
|
|
||||||
def __init__(self, env, lang, options, scoring):
|
def __init__(self, env: BuildEnvironment, lang: str, options: Dict, scoring: str) -> None:
|
||||||
# type: (BuildEnvironment, str, Dict, str) -> None
|
|
||||||
self.env = env
|
self.env = env
|
||||||
self._titles = {} # type: Dict[str, str]
|
self._titles = {} # type: Dict[str, str]
|
||||||
# docname -> title
|
# docname -> title
|
||||||
@ -292,8 +278,7 @@ class IndexBuilder:
|
|||||||
self.js_scorer_code = ''
|
self.js_scorer_code = ''
|
||||||
self.js_splitter_code = splitter_code
|
self.js_splitter_code = splitter_code
|
||||||
|
|
||||||
def load(self, stream, format):
|
def load(self, stream: IO, format: Any) -> None:
|
||||||
# type: (IO, Any) -> None
|
|
||||||
"""Reconstruct from frozen data."""
|
"""Reconstruct from frozen data."""
|
||||||
if isinstance(format, str):
|
if isinstance(format, str):
|
||||||
format = self.formats[format]
|
format = self.formats[format]
|
||||||
@@ -306,8 +291,7 @@ class IndexBuilder:
         self._filenames = dict(zip(index2fn, frozen['filenames']))
         self._titles = dict(zip(index2fn, frozen['titles']))
 
-        def load_terms(mapping):
-            # type: (Dict[str, Any]) -> Dict[str, Set[str]]
+        def load_terms(mapping: Dict[str, Any]) -> Dict[str, Set[str]]:
             rv = {}
             for k, v in mapping.items():
                 if isinstance(v, int):
@@ -320,15 +304,14 @@ class IndexBuilder:
         self._title_mapping = load_terms(frozen['titleterms'])
         # no need to load keywords/objtypes
 
-    def dump(self, stream, format):
-        # type: (IO, Any) -> None
+    def dump(self, stream: IO, format: Any) -> None:
         """Dump the frozen index to a stream."""
         if isinstance(format, str):
             format = self.formats[format]
         format.dump(self.freeze(), stream)
 
-    def get_objects(self, fn2index):
-        # type: (Dict[str, int]) -> Dict[str, Dict[str, Tuple[int, int, int, str]]]
+    def get_objects(self, fn2index: Dict[str, int]
+                    ) -> Dict[str, Dict[str, Tuple[int, int, int, str]]]:
         rv = {}  # type: Dict[str, Dict[str, Tuple[int, int, int, str]]]
         otypes = self._objtypes
         onames = self._objnames
@@ -364,8 +347,7 @@ class IndexBuilder:
             pdict[name] = (fn2index[docname], typeindex, prio, shortanchor)
         return rv
 
-    def get_terms(self, fn2index):
-        # type: (Dict) -> Tuple[Dict[str, List[str]], Dict[str, List[str]]]
+    def get_terms(self, fn2index: Dict) -> Tuple[Dict[str, List[str]], Dict[str, List[str]]]:
         rvs = {}, {}  # type: Tuple[Dict[str, List[str]], Dict[str, List[str]]]
         for rv, mapping in zip(rvs, (self._mapping, self._title_mapping)):
             for k, v in mapping.items():
@@ -377,8 +359,7 @@ class IndexBuilder:
                 rv[k] = sorted([fn2index[fn] for fn in v if fn in fn2index])
         return rvs
 
-    def freeze(self):
-        # type: () -> Dict[str, Any]
+    def freeze(self) -> Dict[str, Any]:
         """Create a usable data structure for serializing."""
         docnames, titles = zip(*sorted(self._titles.items()))
         filenames = [self._filenames.get(docname) for docname in docnames]
@@ -392,12 +373,10 @@ class IndexBuilder:
                     objects=objects, objtypes=objtypes, objnames=objnames,
                     titleterms=title_terms, envversion=self.env.version)
 
-    def label(self):
-        # type: () -> str
+    def label(self) -> str:
         return "%s (code: %s)" % (self.lang.language_name, self.lang.lang)
 
-    def prune(self, docnames):
-        # type: (Iterable[str]) -> None
+    def prune(self, docnames: Iterable[str]) -> None:
         """Remove data for all docnames not in the list."""
         new_titles = {}
         new_filenames = {}
@@ -412,8 +391,7 @@ class IndexBuilder:
         for wordnames in self._title_mapping.values():
             wordnames.intersection_update(docnames)
 
-    def feed(self, docname, filename, title, doctree):
-        # type: (str, str, str, nodes.document) -> None
+    def feed(self, docname: str, filename: str, title: str, doctree: nodes.document) -> None:
         """Feed a doctree to the index."""
         self._titles[docname] = title
         self._filenames[docname] = filename
@@ -422,8 +400,7 @@ class IndexBuilder:
         doctree.walk(visitor)
 
         # memoize self.lang.stem
-        def stem(word):
-            # type: (str) -> str
+        def stem(word: str) -> str:
             try:
                 return self._stem_cache[word]
             except KeyError:
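
The nested `stem` helper above memoizes the language stemmer through `self._stem_cache`, so each distinct word is stemmed only once per build. A standalone sketch of the same try/except caching idiom, with a hypothetical `slow_stem` standing in for `self.lang.stem`:

    from typing import Dict

    _stem_cache = {}  # type: Dict[str, str]

    def slow_stem(word: str) -> str:
        # stand-in for an expensive stemmer call
        return word.lower()

    def stem(word: str) -> str:
        # consult the cache first; on a miss, stem once and remember the result
        try:
            return _stem_cache[word]
        except KeyError:
            _stem_cache[word] = slow_stem(word)
            return _stem_cache[word]
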
@@ -447,8 +424,7 @@ class IndexBuilder:
             if _filter(stemmed_word) and not already_indexed:
                 self._mapping.setdefault(stemmed_word, set()).add(docname)
 
-    def context_for_searchtool(self):
-        # type: () -> Dict[str, Any]
+    def context_for_searchtool(self) -> Dict[str, Any]:
         return {
             'search_language_stemming_code': self.lang.js_stemmer_code,
             'search_language_stop_words': jsdump.dumps(sorted(self.lang.stopwords)),
@@ -456,8 +432,7 @@ class IndexBuilder:
             'search_word_splitter_code': self.js_splitter_code,
         }
 
-    def get_js_stemmer_rawcode(self):
-        # type: () -> str
+    def get_js_stemmer_rawcode(self) -> str:
         if self.lang.js_stemmer_rawcode:
             return path.join(package_dir, 'search', 'non-minified-js',
                              self.lang.js_stemmer_rawcode)
@@ -8,13 +8,11 @@
     :license: BSD, see LICENSE for details.
 """
 
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
 
 import snowballstemmer
 
-if False:
-    # For type annotation
-    from typing import Any  # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
 
 
 danish_stopwords = parse_stop_word('''
@@ -128,10 +126,8 @@ class SearchDanish(SearchLanguage):
     js_stemmer_code = js_stemmer
     stopwords = danish_stopwords
 
-    def init(self, options):
-        # type: (Any) -> None
+    def init(self, options: Dict) -> None:
         self.stemmer = snowballstemmer.stemmer('danish')
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
         return self.stemmer.stemWord(word.lower())
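
The language modules that follow all receive the same two-method change to `init` and `stem`; runtime behaviour is unchanged: `init` builds a Snowball stemmer once and `stem` lowercases the word before stemming. A self-contained sketch of that behaviour using only the snowballstemmer package (the class name is illustrative, not part of Sphinx):

    from typing import Dict

    import snowballstemmer


    class DanishStemmerDemo:
        """Stand-in for SearchDanish: same init/stem logic, no Sphinx dependency."""

        def init(self, options: Dict) -> None:
            self.stemmer = snowballstemmer.stemmer('danish')

        def stem(self, word: str) -> str:
            return self.stemmer.stemWord(word.lower())


    demo = DanishStemmerDemo()
    demo.init({})
    print(demo.stem('Huset'))  # prints the stemmed, lowercased form
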
@@ -8,13 +8,11 @@
     :license: BSD, see LICENSE for details.
 """
 
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
 
 import snowballstemmer
 
-if False:
-    # For type annotation
-    from typing import Any  # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
 
 
 german_stopwords = parse_stop_word('''
@@ -311,10 +309,8 @@ class SearchGerman(SearchLanguage):
     js_stemmer_code = js_stemmer
     stopwords = german_stopwords
 
-    def init(self, options):
-        # type: (Any) -> None
+    def init(self, options: Dict) -> None:
         self.stemmer = snowballstemmer.stemmer('german')
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
         return self.stemmer.stemWord(word.lower())
@@ -8,13 +8,11 @@
     :license: BSD, see LICENSE for details.
 """
 
+from typing import Dict
+
 from sphinx.search import SearchLanguage
 from sphinx.util.stemmer import get_stemmer
 
-if False:
-    # For type annotation
-    from typing import Dict  # NOQA
-
 english_stopwords = set("""
 a and are as at
 be but by
@@ -220,10 +218,8 @@ class SearchEnglish(SearchLanguage):
     js_stemmer_code = js_porter_stemmer
     stopwords = english_stopwords
 
-    def init(self, options):
-        # type: (Dict) -> None
+    def init(self, options: Dict) -> None:
         self.stemmer = get_stemmer()
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
         return self.stemmer.stem(word.lower())
@@ -8,13 +8,11 @@
     :license: BSD, see LICENSE for details.
 """
 
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
 
 import snowballstemmer
 
-if False:
-    # For type annotation
-    from typing import Any  # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
 
 
 spanish_stopwords = parse_stop_word('''
@@ -371,10 +369,8 @@ class SearchSpanish(SearchLanguage):
     js_stemmer_code = js_stemmer
     stopwords = spanish_stopwords
 
-    def init(self, options):
-        # type: (Any) -> None
+    def init(self, options: Dict) -> None:
         self.stemmer = snowballstemmer.stemmer('spanish')
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
         return self.stemmer.stemWord(word.lower())
@@ -8,13 +8,11 @@
     :license: BSD, see LICENSE for details.
 """
 
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
 
 import snowballstemmer
 
-if False:
-    # For type annotation
-    from typing import Any  # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
 
 
 finnish_stopwords = parse_stop_word('''
@@ -121,10 +119,8 @@ class SearchFinnish(SearchLanguage):
     js_stemmer_code = js_stemmer
     stopwords = finnish_stopwords
 
-    def init(self, options):
-        # type: (Any) -> None
+    def init(self, options: Dict) -> None:
         self.stemmer = snowballstemmer.stemmer('finnish')
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
         return self.stemmer.stemWord(word.lower())
@@ -8,13 +8,11 @@
     :license: BSD, see LICENSE for details.
 """
 
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
 
 import snowballstemmer
 
-if False:
-    # For type annotation
-    from typing import Any  # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
 
 
 french_stopwords = parse_stop_word('''
@@ -207,10 +205,8 @@ class SearchFrench(SearchLanguage):
     js_stemmer_code = js_stemmer
     stopwords = french_stopwords
 
-    def init(self, options):
-        # type: (Any) -> None
+    def init(self, options: Dict) -> None:
         self.stemmer = snowballstemmer.stemmer('french')
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
         return self.stemmer.stemWord(word.lower())
@@ -8,13 +8,11 @@
     :license: BSD, see LICENSE for details.
 """
 
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
 
 import snowballstemmer
 
-if False:
-    # For type annotation
-    from typing import Any  # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
 
 
 hungarian_stopwords = parse_stop_word('''
@@ -235,10 +233,8 @@ class SearchHungarian(SearchLanguage):
     js_stemmer_code = js_stemmer
     stopwords = hungarian_stopwords
 
-    def init(self, options):
-        # type: (Any) -> None
+    def init(self, options: Dict) -> None:
         self.stemmer = snowballstemmer.stemmer('hungarian')
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
         return self.stemmer.stemWord(word.lower())
@@ -8,13 +8,11 @@
     :license: BSD, see LICENSE for details.
 """
 
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
 
 import snowballstemmer
 
-if False:
-    # For type annotation
-    from typing import Any  # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
 
 
 italian_stopwords = parse_stop_word('''
@@ -324,10 +322,8 @@ class SearchItalian(SearchLanguage):
     js_stemmer_code = js_stemmer
     stopwords = italian_stopwords
 
-    def init(self, options):
-        # type: (Any) -> None
+    def init(self, options: Dict) -> None:
         self.stemmer = snowballstemmer.stemmer('italian')
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
         return self.stemmer.stemWord(word.lower())
@@ -19,6 +19,7 @@
 import os
 import re
 import sys
+from typing import Any, Dict, List
 
 try:
     import MeCab
@@ -36,21 +37,13 @@ from sphinx.errors import SphinxError, ExtensionError
 from sphinx.search import SearchLanguage
 from sphinx.util import import_object
 
-if False:
-    # For type annotation
-    from typing import Any, Dict, List  # NOQA
-
 
 class BaseSplitter:
-
-    def __init__(self, options):
-        # type: (Dict) -> None
+    def __init__(self, options: Dict) -> None:
         self.options = options
 
-    def split(self, input):
-        # type: (str) -> List[str]
+    def split(self, input: str) -> List[str]:
         """
 
         :param str input:
         :return:
         :rtype: list[str]
@@ -59,8 +52,7 @@ class BaseSplitter:
 
 
 class MecabSplitter(BaseSplitter):
-    def __init__(self, options):
-        # type: (Dict) -> None
+    def __init__(self, options: Dict) -> None:
         super().__init__(options)
         self.ctypes_libmecab = None  # type: Any
         self.ctypes_mecab = None  # type: Any
@@ -70,8 +62,7 @@ class MecabSplitter(BaseSplitter):
             self.init_native(options)
         self.dict_encode = options.get('dic_enc', 'utf-8')
 
-    def split(self, input):
-        # type: (str) -> List[str]
+    def split(self, input: str) -> List[str]:
         if native_module:
             result = self.native.parse(input)
         else:
@@ -79,16 +70,14 @@ class MecabSplitter(BaseSplitter):
                 self.ctypes_mecab, input.encode(self.dict_encode))
         return result.split(' ')
 
-    def init_native(self, options):
-        # type: (Dict) -> None
+    def init_native(self, options: Dict) -> None:
         param = '-Owakati'
         dict = options.get('dict')
         if dict:
             param += ' -d %s' % dict
         self.native = MeCab.Tagger(param)
 
-    def init_ctypes(self, options):
-        # type: (Dict) -> None
+    def init_ctypes(self, options: Dict) -> None:
         import ctypes.util
 
         lib = options.get('lib')
@@ -124,8 +113,7 @@ class MecabSplitter(BaseSplitter):
         if self.ctypes_mecab is None:
             raise SphinxError('mecab initialization failed')
 
-    def __del__(self):
-        # type: () -> None
+    def __del__(self) -> None:
         if self.ctypes_libmecab:
             self.ctypes_libmecab.mecab_destroy(self.ctypes_mecab)
 
@@ -133,21 +121,18 @@ MeCabBinder = MecabSplitter  # keep backward compatibility until Sphinx-1.6
 
 
 class JanomeSplitter(BaseSplitter):
-    def __init__(self, options):
-        # type: (Dict) -> None
+    def __init__(self, options: Dict) -> None:
         super().__init__(options)
         self.user_dict = options.get('user_dic')
         self.user_dict_enc = options.get('user_dic_enc', 'utf8')
         self.init_tokenizer()
 
-    def init_tokenizer(self):
-        # type: () -> None
+    def init_tokenizer(self) -> None:
         if not janome_module:
             raise RuntimeError('Janome is not available')
         self.tokenizer = janome.tokenizer.Tokenizer(udic=self.user_dict, udic_enc=self.user_dict_enc)
 
-    def split(self, input):
-        # type: (str) -> List[str]
+    def split(self, input: str) -> List[str]:
         result = ' '.join(token.surface for token in self.tokenizer.tokenize(input))
         return result.split(' ')
 
@@ -423,23 +408,20 @@ class DefaultSplitter(BaseSplitter):
               '郎': 1082, '1': -270, 'E1': 306, 'ル': -673, 'ン': -496}
 
     # ctype_
-    def ctype_(self, char):
-        # type: (str) -> str
+    def ctype_(self, char: str) -> str:
         for pattern, value in self.patterns_.items():
             if pattern.match(char):
                 return value
         return 'O'
 
     # ts_
-    def ts_(self, dict, key):
-        # type: (Dict[str, int], str) -> int
+    def ts_(self, dict: Dict[str, int], key: str) -> int:
         if key in dict:
             return dict[key]
         return 0
 
     # segment
-    def split(self, input):
-        # type: (str) -> List[str]
+    def split(self, input: str) -> List[str]:
         if not input:
             return []
 
@@ -542,8 +524,7 @@ class SearchJapanese(SearchLanguage):
     lang = 'ja'
     language_name = 'Japanese'
 
-    def init(self, options):
-        # type: (Dict) -> None
+    def init(self, options: Dict) -> None:
         dotted_path = options.get('type', 'sphinx.search.ja.DefaultSplitter')
         try:
             self.splitter = import_object(dotted_path)(options)
@@ -551,14 +532,11 @@ class SearchJapanese(SearchLanguage):
             raise ExtensionError("Splitter module %r can't be imported" %
                                  dotted_path)
 
-    def split(self, input):
-        # type: (str) -> List[str]
+    def split(self, input: str) -> List[str]:
         return self.splitter.split(input)
 
-    def word_filter(self, stemmed_word):
-        # type: (str) -> bool
+    def word_filter(self, stemmed_word: str) -> bool:
         return len(stemmed_word) > 1
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
         return word
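
SearchJapanese.init above chooses its word splitter at runtime: the dotted path in options['type'] (defaulting to sphinx.search.ja.DefaultSplitter) is imported and instantiated with the same options dict. A simplified sketch of that dispatch; `import_object` below is a stand-in for the helper imported from sphinx.util:

    import importlib
    from typing import Any, Dict


    def import_object(dotted_path: str) -> Any:
        # simplified stand-in for sphinx.util.import_object
        module_name, _, attr = dotted_path.rpartition('.')
        return getattr(importlib.import_module(module_name), attr)


    def make_splitter(options: Dict) -> Any:
        # same dispatch as SearchJapanese.init: dotted path -> class -> instance
        dotted_path = options.get('type', 'sphinx.search.ja.DefaultSplitter')
        return import_object(dotted_path)(options)
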
@@ -8,13 +8,11 @@
     :license: BSD, see LICENSE for details.
 """
 
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
 
 import snowballstemmer
 
-if False:
-    # For type annotation
-    from typing import Any  # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
 
 
 dutch_stopwords = parse_stop_word('''
@@ -135,10 +133,8 @@ class SearchDutch(SearchLanguage):
     js_stemmer_code = js_stemmer
     stopwords = dutch_stopwords
 
-    def init(self, options):
-        # type: (Any) -> None
+    def init(self, options: Dict) -> None:
         self.stemmer = snowballstemmer.stemmer('dutch')
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
         return self.stemmer.stemWord(word.lower())
@@ -8,13 +8,11 @@
     :license: BSD, see LICENSE for details.
 """
 
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
 
 import snowballstemmer
 
-if False:
-    # For type annotation
-    from typing import Any  # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
 
 
 norwegian_stopwords = parse_stop_word('''
@@ -210,10 +208,8 @@ class SearchNorwegian(SearchLanguage):
     js_stemmer_code = js_stemmer
     stopwords = norwegian_stopwords
 
-    def init(self, options):
-        # type: (Any) -> None
+    def init(self, options: Dict) -> None:
         self.stemmer = snowballstemmer.stemmer('norwegian')
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
         return self.stemmer.stemWord(word.lower())
@@ -8,13 +8,11 @@
     :license: BSD, see LICENSE for details.
 """
 
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
 
 import snowballstemmer
 
-if False:
-    # For type annotation
-    from typing import Any  # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
 
 
 portuguese_stopwords = parse_stop_word('''
@@ -270,10 +268,8 @@ class SearchPortuguese(SearchLanguage):
     js_stemmer_code = js_stemmer
     stopwords = portuguese_stopwords
 
-    def init(self, options):
-        # type: (Any) -> None
+    def init(self, options: Dict) -> None:
         self.stemmer = snowballstemmer.stemmer('portuguese')
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
         return self.stemmer.stemWord(word.lower())
File diff suppressed because one or more lines are too long
@@ -8,13 +8,11 @@
     :license: BSD, see LICENSE for details.
 """
 
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
 
 import snowballstemmer
 
-if False:
-    # For type annotation
-    from typing import Any  # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
 
 
 russian_stopwords = parse_stop_word('''
@@ -259,10 +257,8 @@ class SearchRussian(SearchLanguage):
     js_stemmer_code = js_stemmer
     stopwords = russian_stopwords
 
-    def init(self, options):
-        # type: (Any) -> None
+    def init(self, options: Dict) -> None:
         self.stemmer = snowballstemmer.stemmer('russian')
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
         return self.stemmer.stemWord(word.lower())
@@ -8,13 +8,12 @@
     :license: BSD, see LICENSE for details.
 """
 
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
 
 import snowballstemmer
 
-if False:
-    # For type annotation
-    from typing import Any
+from sphinx.search import SearchLanguage, parse_stop_word
 
 swedish_stopwords = parse_stop_word('''
 | source: http://snowball.tartarus.org/algorithms/swedish/stop.txt
@@ -147,10 +146,8 @@ class SearchSwedish(SearchLanguage):
     js_stemmer_code = js_stemmer
     stopwords = swedish_stopwords
 
-    def init(self, options):
-        # type: (Any) -> None
+    def init(self, options: Dict) -> None:
         self.stemmer = snowballstemmer.stemmer('swedish')
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
         return self.stemmer.stemWord(word.lower())
File diff suppressed because one or more lines are too long
@@ -10,6 +10,7 @@
 
 import os
 import re
+from typing import Dict, List
 
 from sphinx.search import SearchLanguage
 from sphinx.util.stemmer import get_stemmer
@@ -20,10 +21,6 @@ try:
 except ImportError:
     JIEBA = False
 
-if False:
-    # For type annotation
-    from typing import Dict, List  # NOQA
-
 english_stopwords = set("""
 a and are as at
 be but by
@@ -235,8 +232,7 @@ class SearchChinese(SearchLanguage):
     latin1_letters = re.compile(r'[a-zA-Z0-9_]+')
     latin_terms = []  # type: List[str]
 
-    def init(self, options):
-        # type: (Dict) -> None
+    def init(self, options: Dict) -> None:
         if JIEBA:
             dict_path = options.get('dict')
             if dict_path and os.path.isfile(dict_path):
@@ -244,8 +240,7 @@ class SearchChinese(SearchLanguage):
 
         self.stemmer = get_stemmer()
 
-    def split(self, input):
-        # type: (str) -> List[str]
+    def split(self, input: str) -> List[str]:
         chinese = []  # type: List[str]
         if JIEBA:
             chinese = list(jieba.cut_for_search(input))
@@ -255,13 +250,10 @@ class SearchChinese(SearchLanguage):
         self.latin_terms.extend(latin1)
         return chinese + latin1
 
-    def word_filter(self, stemmed_word):
-        # type: (str) -> bool
+    def word_filter(self, stemmed_word: str) -> bool:
         return len(stemmed_word) > 1
 
-    def stem(self, word):
-        # type: (str) -> str
+    def stem(self, word: str) -> str:
 
         # Don't stem Latin words that are long enough to be relevant for search
         # if not stemmed, but would be too short after being stemmed
         # avoids some issues with acronyms
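
SearchChinese.split above combines two tokenizations: jieba's search-mode cut for CJK text (when jieba is importable) and a Latin-word regex for identifiers and acronyms. A rough standalone sketch of the same idea; the regex is the one from the hunk, and the jieba call is optional:

    import re
    from typing import List

    latin1_letters = re.compile(r'[a-zA-Z0-9_]+')

    try:
        import jieba
        JIEBA = True
    except ImportError:
        JIEBA = False


    def split(input: str) -> List[str]:
        # CJK tokens from jieba (if available) plus Latin identifiers from the regex
        chinese = []  # type: List[str]
        if JIEBA:
            chinese = list(jieba.cut_for_search(input))
        latin1 = latin1_letters.findall(input)
        return chinese + latin1
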
@@ -13,6 +13,7 @@ import os
 import shutil
 import tempfile
 from os import path
+from typing import Any, Dict, List
 from zipfile import ZipFile
 
 import pkg_resources
@@ -23,19 +24,18 @@ from sphinx.locale import __
 from sphinx.util import logging
 from sphinx.util.osutil import ensuredir
 
-logger = logging.getLogger(__name__)
-
 if False:
     # For type annotation
-    from typing import Any, Dict, List  # NOQA
-    from sphinx.application import Sphinx  # NOQA
+    from sphinx.application import Sphinx
 
+logger = logging.getLogger(__name__)
 
 NODEFAULT = object()
 THEMECONF = 'theme.conf'
 
 
-def extract_zip(filename, targetdir):
-    # type: (str, str) -> None
+def extract_zip(filename: str, targetdir: str) -> None:
     """Extract zip file to target directory."""
     ensuredir(targetdir)
 
@@ -54,8 +54,7 @@ class Theme:
 
     This class supports both theme directory and theme archive (zipped theme)."""
 
-    def __init__(self, name, theme_path, factory):
-        # type: (str, str, HTMLThemeFactory) -> None
+    def __init__(self, name: str, theme_path: str, factory: "HTMLThemeFactory") -> None:
         self.name = name
         self.base = None
         self.rootdir = None
@@ -87,8 +86,7 @@ class Theme:
             raise ThemeError(__('no theme named %r found, inherited by %r') %
                              (inherit, name))
 
-    def get_theme_dirs(self):
-        # type: () -> List[str]
+    def get_theme_dirs(self) -> List[str]:
         """Return a list of theme directories, beginning with this theme's,
         then the base theme's, then that one's base theme's, etc.
         """
@@ -97,8 +95,7 @@ class Theme:
         else:
             return [self.themedir] + self.base.get_theme_dirs()
 
-    def get_config(self, section, name, default=NODEFAULT):
-        # type: (str, str, Any) -> Any
+    def get_config(self, section: str, name: str, default: Any = NODEFAULT) -> Any:
         """Return the value for a theme configuration setting, searching the
         base theme chain.
         """
@@ -114,8 +111,7 @@ class Theme:
         else:
             return default
 
-    def get_options(self, overrides={}):
-        # type: (Dict[str, Any]) -> Dict[str, Any]
+    def get_options(self, overrides: Dict[str, Any] = {}) -> Dict[str, Any]:
         """Return a dictionary of theme options and their values."""
         if self.base:
             options = self.base.get_options()
@@ -135,8 +131,7 @@ class Theme:
 
         return options
 
-    def cleanup(self):
-        # type: () -> None
+    def cleanup(self) -> None:
         """Remove temporary directories."""
         if self.rootdir:
             try:
@@ -147,8 +142,7 @@ class Theme:
             self.base.cleanup()
 
 
-def is_archived_theme(filename):
-    # type: (str) -> bool
+def is_archived_theme(filename: str) -> bool:
    """Check the specified file is an archived theme file or not."""
     try:
         with ZipFile(filename) as f:
@@ -160,23 +154,20 @@ def is_archived_theme(filename):
 class HTMLThemeFactory:
     """A factory class for HTML Themes."""
 
-    def __init__(self, app):
-        # type: (Sphinx) -> None
+    def __init__(self, app: "Sphinx") -> None:
         self.app = app
         self.themes = app.html_themes
         self.load_builtin_themes()
         if getattr(app.config, 'html_theme_path', None):
             self.load_additional_themes(app.config.html_theme_path)
 
-    def load_builtin_themes(self):
-        # type: () -> None
+    def load_builtin_themes(self) -> None:
         """Load built-in themes."""
         themes = self.find_themes(path.join(package_dir, 'themes'))
         for name, theme in themes.items():
             self.themes[name] = theme
 
-    def load_additional_themes(self, theme_paths):
-        # type: (str) -> None
+    def load_additional_themes(self, theme_paths: str) -> None:
         """Load additional themes placed at specified directories."""
         for theme_path in theme_paths:
             abs_theme_path = path.abspath(path.join(self.app.confdir, theme_path))
@@ -184,8 +175,7 @@ class HTMLThemeFactory:
         for name, theme in themes.items():
             self.themes[name] = theme
 
-    def load_extra_theme(self, name):
-        # type: (str) -> None
+    def load_extra_theme(self, name: str) -> None:
         """Try to load a theme having specifed name."""
         if name == 'alabaster':
             self.load_alabaster_theme()
@@ -194,14 +184,12 @@ class HTMLThemeFactory:
         else:
             self.load_external_theme(name)
 
-    def load_alabaster_theme(self):
-        # type: () -> None
+    def load_alabaster_theme(self) -> None:
         """Load alabaster theme."""
         import alabaster
         self.themes['alabaster'] = path.join(alabaster.get_path(), 'alabaster')
 
-    def load_sphinx_rtd_theme(self):
-        # type: () -> None
+    def load_sphinx_rtd_theme(self) -> None:
         """Load sphinx_rtd_theme theme (if exists)."""
         try:
             import sphinx_rtd_theme
@@ -210,8 +198,7 @@ class HTMLThemeFactory:
         except ImportError:
             pass
 
-    def load_external_theme(self, name):
-        # type: (str) -> None
+    def load_external_theme(self, name: str) -> None:
         """Try to load a theme using entry_points.
 
         Sphinx refers to ``sphinx_themes`` entry_points.
@@ -225,8 +212,7 @@ class HTMLThemeFactory:
         except StopIteration:
             pass
 
-    def find_themes(self, theme_path):
-        # type: (str) -> Dict[str, str]
+    def find_themes(self, theme_path: str) -> Dict[str, str]:
         """Search themes from specified directory."""
         themes = {}  # type: Dict[str, str]
         if not path.isdir(theme_path):
@@ -247,8 +233,7 @@ class HTMLThemeFactory:
 
         return themes
 
-    def create(self, name):
-        # type: (str) -> Theme
+    def create(self, name: str) -> Theme:
         """Create an instance of theme."""
         if name not in self.themes:
             self.load_extra_theme(name)
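
Theme.get_config above searches the base-theme chain: the current theme's configuration is consulted first, and a missing setting is delegated to the parent theme until a value, a default, or an error is reached. A simplified sketch of that chained lookup using a plain dict per theme (the class and names are illustrative, not the Sphinx API):

    from typing import Any, Dict, Optional

    NODEFAULT = object()


    class MiniTheme:
        def __init__(self, settings: Dict[str, Any], base: Optional["MiniTheme"] = None) -> None:
            self.settings = settings
            self.base = base

        def get_config(self, name: str, default: Any = NODEFAULT) -> Any:
            # try this theme first, then walk up the inheritance chain
            if name in self.settings:
                return self.settings[name]
            if self.base is not None:
                return self.base.get_config(name, default)
            if default is NODEFAULT:
                raise KeyError(name)
            return default


    basic = MiniTheme({'sidebarwidth': '230'})
    child = MiniTheme({'stylesheet': 'custom.css'}, base=basic)
    print(child.get_config('sidebarwidth'))  # '230', inherited from the base theme
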
@@ -273,8 +273,7 @@ class DoctestTransform(SphinxTransform):
     """Set "doctest" style to each doctest_block node"""
     default_priority = 500
 
-    def apply(self, **kwargs):
-        # type: (Any) -> None
+    def apply(self, **kwargs) -> None:
         for node in self.document.traverse(nodes.doctest_block):
             node['classes'].append('doctest')
 
@@ -119,8 +119,7 @@ def get_matching_docs(dirname: str, suffixes: List[str],
             break
 
 
-def get_filetype(source_suffix, filename):
-    # type: (Dict[str, str], str) -> str
+def get_filetype(source_suffix: Dict[str, str], filename: str) -> str:
     for suffix, filetype in source_suffix.items():
         if filename.endswith(suffix):
             # If default filetype (None), considered as restructuredtext.
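
get_filetype above resolves a document's source parser by matching the filename against the configured source_suffix mapping; a None filetype falls back to reStructuredText, as the inline comment notes. A small sketch of the same lookup outside Sphinx:

    from typing import Dict, Optional


    def get_filetype(source_suffix: Dict[str, Optional[str]], filename: str) -> str:
        # first matching suffix wins; None is treated as reStructuredText
        for suffix, filetype in source_suffix.items():
            if filename.endswith(suffix):
                return filetype or 'restructuredtext'
        raise ValueError('unknown file suffix: %s' % filename)


    suffixes = {'.rst': 'restructuredtext', '.md': 'markdown', '.txt': None}
    print(get_filetype(suffixes, 'index.md'))   # markdown
    print(get_filetype(suffixes, 'notes.txt'))  # restructuredtext
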
@@ -12,15 +12,17 @@ import pickle
 from itertools import product, zip_longest
 from operator import itemgetter
 from os import path
+from typing import Any, Dict, Iterator
 from uuid import uuid4
 
+from docutils import nodes
+from docutils.nodes import Node
+
 from sphinx.transforms import SphinxTransform
 
 if False:
     # For type annotation
-    from typing import Any, Dict, Iterator  # NOQA
-    from docutils import nodes  # NOQA
-    from sphinx.application import Sphinx  # NOQA
+    from sphinx.application import Sphinx
 
 try:
     import Levenshtein
@@ -32,8 +34,7 @@ except ImportError:
 VERSIONING_RATIO = 65
 
 
-def add_uids(doctree, condition):
-    # type: (nodes.Node, Any) -> Iterator[nodes.Node]
+def add_uids(doctree: Node, condition: Any) -> Iterator[Node]:
     """Add a unique id to every node in the `doctree` which matches the
     condition and yield the nodes.
 
@@ -48,8 +49,7 @@ def add_uids(doctree, condition):
         yield node
 
 
-def merge_doctrees(old, new, condition):
-    # type: (nodes.Node, nodes.Node, Any) -> Iterator[nodes.Node]
+def merge_doctrees(old: Node, new: Node, condition: Any) -> Iterator[Node]:
     """Merge the `old` doctree with the `new` one while looking at nodes
     matching the `condition`.
 
|
|||||||
yield new_node
|
yield new_node
|
||||||
|
|
||||||
|
|
||||||
def get_ratio(old, new):
|
def get_ratio(old: str, new: str) -> float:
|
||||||
# type: (str, str) -> float
|
|
||||||
"""Return a "similiarity ratio" (in percent) representing the similarity
|
"""Return a "similiarity ratio" (in percent) representing the similarity
|
||||||
between the two strings where 0 is equal and anything above less than equal.
|
between the two strings where 0 is equal and anything above less than equal.
|
||||||
"""
|
"""
|
||||||
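
The body of get_ratio appears as the first context line of the next hunk: levenshtein_distance(old, new) / (len(old) / 100.0), i.e. the number of edits per 100 characters of the old string, so 0.0 means identical and larger values mean less similar. Sphinx compares this value against VERSIONING_RATIO (65, declared earlier in this file) when deciding whether an old and a new node should be paired. A quick worked example using the classic kitten/sitting distance of 3:

    old, distance = 'kitten', 3          # levenshtein_distance('kitten', 'sitting') == 3

    ratio = distance / (len(old) / 100.0)
    print(ratio)                         # ~50.0 edits per 100 characters
    assert ratio < 65                    # below VERSIONING_RATIO, so the nodes count as similar
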
@@ -130,8 +129,7 @@ def get_ratio(old, new):
     return levenshtein_distance(old, new) / (len(old) / 100.0)
 
 
-def levenshtein_distance(a, b):
-    # type: (str, str) -> int
+def levenshtein_distance(a: str, b: str) -> int:
     """Return the Levenshtein edit distance between two strings *a* and *b*."""
     if a == b:
         return 0
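
The hunk above shows only the head of levenshtein_distance; its body is elided. For reference, a standard single-row dynamic-programming implementation of the same edit distance (a generic textbook version, not necessarily the exact code in this module):

    def levenshtein_distance(a: str, b: str) -> int:
        """Return the Levenshtein edit distance between two strings *a* and *b*."""
        if a == b:
            return 0
        if not a:
            return len(b)
        if not b:
            return len(a)
        previous = list(range(len(b) + 1))
        for i, ca in enumerate(a, start=1):
            current = [i]
            for j, cb in enumerate(b, start=1):
                insert_cost = current[j - 1] + 1
                delete_cost = previous[j] + 1
                substitute_cost = previous[j - 1] + (ca != cb)
                current.append(min(insert_cost, delete_cost, substitute_cost))
            previous = current
        return previous[-1]


    assert levenshtein_distance('kitten', 'sitting') == 3
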
@@ -155,8 +153,7 @@ class UIDTransform(SphinxTransform):
     """Add UIDs to doctree for versioning."""
     default_priority = 880
 
-    def apply(self, **kwargs):
-        # type: (Any) -> None
+    def apply(self, **kwargs) -> None:
         env = self.env
         old_doctree = None
         if not env.versioning_condition:
@@ -178,8 +175,7 @@ class UIDTransform(SphinxTransform):
         list(merge_doctrees(old_doctree, self.document, env.versioning_condition))
 
 
-def setup(app):
-    # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
     app.add_transform(UIDTransform)
 
     return {
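
The setup function above is the hook Sphinx calls when the module is registered: it installs UIDTransform and returns the extension metadata dictionary whose contents the hunk elides. A sketch of the typical shape of that return value, offered as an assumption about the elided lines rather than a verbatim copy:

    from typing import Any, Dict

    from sphinx.application import Sphinx
    from sphinx.versioning import UIDTransform


    def setup(app: Sphinx) -> Dict[str, Any]:
        app.add_transform(UIDTransform)

        # typical extension metadata; the exact keys returned by this module
        # are not shown in the hunk above
        return {
            'version': 'builtin',
            'parallel_read_safe': True,
            'parallel_write_safe': True,
        }
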