Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
🔧 Ruff format select sphinx modules with minimal diffs (#12146)
This commit removes select sphinx top-level modules from the `ruff format` exclude list. They were selected because they produce the smallest diffs when formatted and are not believed to introduce changes that would adversely affect any existing PRs.
parent 392358d4de
commit 2d6f73ed68
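The selection criterion described above can be approximated locally. Below is a minimal sketch (not part of this commit; it assumes a repository checkout with `ruff` on the PATH) that ranks top-level sphinx modules by the size of the diff `ruff format --diff` would produce, which is one way to shortlist the files with the fewest formatting changes:

```python
# Hypothetical helper, not Sphinx tooling: rank files by the size of the
# diff that `ruff format --diff` would apply to them.
import subprocess
from pathlib import Path


def diff_size(path: Path) -> int:
    # `ruff format --diff` prints the would-be changes without applying them.
    proc = subprocess.run(
        ['ruff', 'format', '--diff', str(path)],
        capture_output=True,
        text=True,
        check=False,
    )
    # Count only added/removed lines, not the unified-diff file headers.
    return sum(
        1
        for line in proc.stdout.splitlines()
        if line.startswith(('+', '-')) and not line.startswith(('+++', '---'))
    )


if __name__ == '__main__':
    candidates = sorted(Path('sphinx').glob('*.py'), key=diff_size)
    for path in candidates:
        print(f'{diff_size(path):4d}  {path}')
```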
.flake8 (4 lines changed)
@@ -2,8 +2,10 @@
max-line-length = 95
ignore =
E116,
E203,
E241,
E251,
E501,
E741,
W503,
W504,
@@ -29,5 +31,3 @@ exclude =
doc/_build/*,
sphinx/search/*,
doc/usage/extensions/example*.py,
per-file-ignores =
tests/*: E501
.ruff.toml (14 lines changed)
@@ -426,37 +426,23 @@ forced-separate = [
preview = true
quote-style = "single"
exclude = [
"sphinx/__init__.py",
"sphinx/addnodes.py",
"sphinx/application.py",
"sphinx/builders/**/*",
"sphinx/cmd/**/*",
"sphinx/config.py",
"sphinx/deprecation.py",
"sphinx/directives/**/*",
"sphinx/domains/**/*",
"sphinx/environment/**/*",
"sphinx/errors.py",
"sphinx/events.py",
"sphinx/ext/**/*",
"sphinx/extension.py",
"sphinx/highlighting.py",
"sphinx/io.py",
"sphinx/jinja2glue.py",
"sphinx/locale/__init__.py",
"sphinx/parsers.py",
"sphinx/project.py",
"sphinx/pycode/**/*",
"sphinx/pygments_styles.py",
"sphinx/registry.py",
"sphinx/roles.py",
"sphinx/search/**/*",
"sphinx/templates/**/*",
"sphinx/testing/**/*",
"sphinx/theming.py",
"sphinx/transforms/**/*",
"sphinx/util/**/*",
"sphinx/versioning.py",
"sphinx/writers/**/*",
"tests/certs/**/*",
"tests/conftest.py",
@@ -13,8 +13,9 @@ from .deprecation import RemovedInNextVersionWarning
# Users can avoid this by using environment variable: PYTHONWARNINGS=
if 'PYTHONWARNINGS' not in os.environ:
warnings.filterwarnings('default', category=RemovedInNextVersionWarning)
warnings.filterwarnings('ignore', 'The frontend.Option class .*',
DeprecationWarning, module='docutils.frontend')
warnings.filterwarnings(
'ignore', 'The frontend.Option class .*', DeprecationWarning, module='docutils.frontend'
)

__version__ = '7.3.0'
__display_version__ = __version__ # used for command line version
@@ -56,10 +56,12 @@ def _deprecation_warning(

qualified_name = f'{module}.{attribute}'
if canonical_name:
message = (f'The alias {qualified_name!r} is deprecated, '
f'use {canonical_name!r} instead.')
message = (
f'The alias {qualified_name!r} is deprecated, use {canonical_name!r} instead.'
)
else:
message = f'{qualified_name!r} is deprecated.'

warnings.warn(message + " Check CHANGES for Sphinx API modifications.",
warning_class, stacklevel=3)
warnings.warn(
message + ' Check CHANGES for Sphinx API modifications.', warning_class, stacklevel=3
)
@@ -45,7 +45,7 @@ class ExtensionError(SphinxError):
"""Extension error."""

def __init__(
self, message: str, orig_exc: Exception | None = None, modname: str | None = None,
self, message: str, orig_exc: Exception | None = None, modname: str | None = None
) -> None:
super().__init__(message)
self.message = message
@@ -81,8 +81,9 @@ class EventManager:
if listener.id == listener_id:
listeners.remove(listener)

def emit(self, name: str, *args: Any,
allowed_exceptions: tuple[type[Exception], ...] = ()) -> list:
def emit(
self, name: str, *args: Any, allowed_exceptions: tuple[type[Exception], ...] = ()
) -> list:
"""Emit a Sphinx event."""
# not every object likes to be repr()'d (think
# random stuff coming via autodoc)
@@ -90,7 +91,7 @@ class EventManager:
logger.debug('[app] emitting event: %r%s', name, repr(args)[:100])

results = []
listeners = sorted(self.listeners[name], key=attrgetter("priority"))
listeners = sorted(self.listeners[name], key=attrgetter('priority'))
for listener in listeners:
try:
results.append(listener.handler(self.app, *args))
@@ -104,12 +105,17 @@ class EventManager:
# Just pass through the error, so that it can be debugged.
raise
modname = safe_getattr(listener.handler, '__module__', None)
raise ExtensionError(__("Handler %r for event %r threw an exception") %
(listener.handler, name), exc, modname=modname) from exc
raise ExtensionError(
__('Handler %r for event %r threw an exception')
% (listener.handler, name),
exc,
modname=modname,
) from exc
return results

def emit_firstresult(self, name: str, *args: Any,
allowed_exceptions: tuple[type[Exception], ...] = ()) -> Any:
def emit_firstresult(
self, name: str, *args: Any, allowed_exceptions: tuple[type[Exception], ...] = ()
) -> Any:
"""Emit a Sphinx event and returns first result.

This returns the result of the first handler that doesn't return ``None``.
@@ -51,8 +51,13 @@ def verify_needs_extensions(app: Sphinx, config: Config) -> None:
for extname, reqversion in config.needs_extensions.items():
extension = app.extensions.get(extname)
if extension is None:
logger.warning(__('The %s extension is required by needs_extensions settings, '
'but it is not loaded.'), extname)
logger.warning(
__(
'The %s extension is required by needs_extensions settings, '
'but it is not loaded.'
),
extname,
)
continue

fulfilled = True
@@ -67,10 +72,14 @@ def verify_needs_extensions(app: Sphinx, config: Config) -> None:
fulfilled = False

if not fulfilled:
raise VersionRequirementError(__('This project needs the extension %s at least in '
'version %s and therefore cannot be built with '
'the loaded version (%s).') %
(extname, reqversion, extension.version))
raise VersionRequirementError(
__(
'This project needs the extension %s at least in '
'version %s and therefore cannot be built with '
'the loaded version (%s).'
)
% (extname, reqversion, extension.version)
)


def setup(app: Sphinx) -> dict[str, Any]:
@@ -42,9 +42,7 @@ lexer_classes: dict[str, type[Lexer] | partial[Lexer]] = {
}


escape_hl_chars = {ord('\\'): '\\PYGZbs{}',
ord('{'): '\\PYGZob{}',
ord('}'): '\\PYGZcb{}'}
escape_hl_chars = {ord('\\'): '\\PYGZbs{}', ord('{'): '\\PYGZob{}', ord('}'): '\\PYGZcb{}'}

# used if Pygments is available
# MEMO: no use of \protected here to avoid having to do hyperref extras,
@@ -57,7 +55,7 @@ escape_hl_chars = {ord('\\'): '\\PYGZbs{}',
# MEMO: the Pygments escapes with \char`\<char> syntax, if the document
# uses old OT1 font encoding, work correctly only in monospace font.
# MEMO: the Pygmentize output mark-up is always with a {} after.
_LATEX_ADD_STYLES = r'''
_LATEX_ADD_STYLES = r"""
% Sphinx redefinitions
% Originally to obtain a straight single quote via package textcomp, then
% to fix problems for the 5.0.0 inline code highlighting (captions!).
@@ -82,7 +80,7 @@ _LATEX_ADD_STYLES = r'''
% use \protected to allow syntax highlighting in captions
\protected\def\PYG#1#2{\PYG@reset\PYG@toks#1+\relax+{\PYG@do{#2}}}
\makeatother
'''
"""


class PygmentsBridge:
@@ -91,8 +89,9 @@ class PygmentsBridge:
html_formatter = HtmlFormatter
latex_formatter = LatexFormatter

def __init__(self, dest: str = 'html', stylename: str = 'sphinx',
latex_engine: str | None = None) -> None:
def __init__(
self, dest: str = 'html', stylename: str = 'sphinx', latex_engine: str | None = None
) -> None:
self.dest = dest
self.latex_engine = latex_engine

@@ -119,8 +118,14 @@ class PygmentsBridge:
kwargs.update(self.formatter_args)
return self.formatter(**kwargs)

def get_lexer(self, source: str, lang: str, opts: dict | None = None,
force: bool = False, location: Any = None) -> Lexer:
def get_lexer(
self,
source: str,
lang: str,
opts: dict | None = None,
force: bool = False,
location: Any = None,
) -> Lexer:
if not opts:
opts = {}

@@ -146,8 +151,9 @@ class PygmentsBridge:
else:
lexer = get_lexer_by_name(lang, **opts)
except ClassNotFound:
logger.warning(__('Pygments lexer name %r is not known'), lang,
location=location)
logger.warning(
__('Pygments lexer name %r is not known'), lang, location=location
)
lexer = lexer_classes['none'](**opts)

if not force:
@@ -155,8 +161,15 @@ class PygmentsBridge:

return lexer

def highlight_block(self, source: str, lang: str, opts: dict | None = None,
force: bool = False, location: Any = None, **kwargs: Any) -> str:
def highlight_block(
self,
source: str,
lang: str,
opts: dict | None = None,
force: bool = False,
location: Any = None,
**kwargs: Any,
) -> str:
if not isinstance(source, str):
source = source.decode()

@@ -173,11 +186,17 @@ class PygmentsBridge:
lang = 'none' # automatic highlighting failed.
else:
logger.warning(
__('Lexing literal_block %r as "%s" resulted in an error at token: %r. '
'Retrying in relaxed mode.'),
source, lang, str(err),
type='misc', subtype='highlighting_failure',
location=location)
__(
'Lexing literal_block %r as "%s" resulted in an error at token: %r. '
'Retrying in relaxed mode.'
),
source,
lang,
str(err),
type='misc',
subtype='highlighting_failure',
location=location,
)
if force:
lang = 'none'
else:
sphinx/io.py (18 lines changed)
@@ -1,4 +1,5 @@
"""Input/Output files"""

from __future__ import annotations

from typing import TYPE_CHECKING, Any
@@ -46,6 +47,7 @@ class SphinxBaseReader(standalone.Reader):

def __init__(self, *args: Any, **kwargs: Any) -> None:
from sphinx.application import Sphinx

if len(args) > 0 and isinstance(args[0], Sphinx):
self._app = args[0]
self._env = self._app.env
@@ -54,7 +56,7 @@ class SphinxBaseReader(standalone.Reader):
super().__init__(*args, **kwargs)

def setup(self, app: Sphinx) -> None:
self._app = app # hold application object only for compatibility
self._app = app  # hold application object only for compatibility
self._env = app.env

def get_transforms(self) -> list[type[Transform]]:
@@ -128,9 +130,15 @@ class SphinxI18nReader(SphinxBaseReader):
super().setup(app)

self.transforms = self.transforms + app.registry.get_transforms()
unused = [PreserveTranslatableMessages, Locale, RemoveTranslatableInline,
AutoIndexUpgrader, SphinxDomains, DoctreeReadEvent,
UIDTransform]
unused = [
PreserveTranslatableMessages,
Locale,
RemoveTranslatableInline,
AutoIndexUpgrader,
SphinxDomains,
DoctreeReadEvent,
UIDTransform,
]
for transform in unused:
if transform in self.transforms:
self.transforms.remove(transform)
@@ -181,7 +189,7 @@ def create_publisher(app: Sphinx, filetype: str) -> Publisher:
destination=NullOutput(),
)
# Propagate exceptions by default when used programmatically:
defaults = {"traceback": True, **app.env.settings}
defaults = {'traceback': True, **app.env.settings}
# Set default settings
if docutils.__version_info__[:2] >= (0, 19):
pub.get_settings(**defaults)
@@ -95,6 +95,7 @@ class idgen:
def __next__(self) -> int:
self.id += 1
return self.id

next = __next__ # Python 2/Jinja compatibility


@@ -133,6 +134,7 @@ class SphinxFileSystemLoader(FileSystemLoader):
return path.getmtime(filename) == mtime
except OSError:
return False

return contents, filename, uptodate


@@ -165,8 +167,9 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
# prepend explicit template paths
self.templatepathlen = len(builder.config.templates_path)
if builder.config.templates_path:
cfg_templates_path = [path.join(builder.confdir, tp)
for tp in builder.config.templates_path]
cfg_templates_path = [
path.join(builder.confdir, tp) for tp in builder.config.templates_path
]
pathchain[0:0] = cfg_templates_path
loaderchain[0:0] = cfg_templates_path

@@ -178,8 +181,7 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):

use_i18n = builder.app.translator is not None
extensions = ['jinja2.ext.i18n'] if use_i18n else []
self.environment = SandboxedEnvironment(loader=self,
extensions=extensions)
self.environment = SandboxedEnvironment(loader=self, extensions=extensions)
self.environment.filters['tobool'] = _tobool
self.environment.filters['toint'] = _toint
self.environment.filters['todim'] = _todim
@@ -191,7 +193,8 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
if use_i18n:
# ``install_gettext_translations`` is injected by the ``jinja2.ext.i18n`` extension
self.environment.install_gettext_translations( # type: ignore[attr-defined]
builder.app.translator)
builder.app.translator
)

def render(self, template: str, context: dict) -> str: # type: ignore[override]
return self.environment.get_template(template).render(context)
@@ -208,13 +211,12 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
loaders = self.loaders
# exclamation mark starts search from theme
if template.startswith('!'):
loaders = loaders[self.templatepathlen:]
loaders = loaders[self.templatepathlen :]
template = template[1:]
for loader in loaders:
try:
return loader.get_source(environment, template)
except TemplateNotFound:
pass
msg = (f"{template!r} not found in "
f"{self.environment.loader.pathchain}") # type: ignore[union-attr]
msg = f'{template!r} not found in {self.environment.loader.pathchain}' # type: ignore[union-attr]
raise TemplateNotFound(msg)
@@ -51,8 +51,10 @@ class _TranslationProxy:
try:
return f'i{self.__str__()!r}'
except Exception:
return (self.__class__.__name__
+ f'({self._catalogue}, {self._namespace}, {self._message})')
return (
self.__class__.__name__
+ f'({self._catalogue}, {self._namespace}, {self._message})'
)

def __add__(self, other: str) -> str:
return self.__str__() + other
@@ -197,6 +199,7 @@ def get_translation(catalog: str, namespace: str = 'general') -> Callable[[str],

.. versionadded:: 1.8
"""

def gettext(message: str) -> str:
if not is_translator_registered(catalog, namespace):
# not initialized yet
@@ -220,13 +223,13 @@ __ = get_translation('sphinx', 'console')
# labels
admonitionlabels = {
'attention': _('Attention'),
'caution': _('Caution'),
'danger': _('Danger'),
'error': _('Error'),
'hint': _('Hint'),
'caution': _('Caution'),
'danger': _('Danger'),
'error': _('Error'),
'hint': _('Hint'),
'important': _('Important'),
'note': _('Note'),
'seealso': _('See also'),
'tip': _('Tip'),
'warning': _('Warning'),
'note': _('Note'),
'seealso': _('See also'),
'tip': _('Tip'),
'warning': _('Warning'),
}
@@ -65,13 +65,14 @@ class RSTParser(docutils.parsers.rst.Parser, Parser):
self.statemachine = states.RSTStateMachine(
state_classes=self.state_classes,
initial_state=self.initial_state,
debug=document.reporter.debug_flag)
debug=document.reporter.debug_flag,
)

# preprocess inputstring
if isinstance(inputstring, str):
lines = docutils.statemachine.string2lines(
inputstring, tab_width=document.settings.tab_width,
convert_whitespace=True)
inputstring, tab_width=document.settings.tab_width, convert_whitespace=True
)

inputlines = StringList(lines, document.current_source)
else:
@@ -28,7 +28,7 @@ class Project:

#: source_suffix. Same as :confval:`source_suffix`.
self.source_suffix = tuple(source_suffix)
self._first_source_suffix = next(iter(self.source_suffix), "")
self._first_source_suffix = next(iter(self.source_suffix), '')

#: The name of documents belonging to this project.
self.docnames: set[str] = set()
@@ -43,8 +43,9 @@ class Project:
self._path_to_docname = other._path_to_docname
self._docname_to_path = other._docname_to_path

def discover(self, exclude_paths: Iterable[str] = (),
include_paths: Iterable[str] = ("**",)) -> set[str]:
def discover(
self, exclude_paths: Iterable[str] = (), include_paths: Iterable[str] = ('**',)
) -> set[str]:
"""Find all document files in the source directory and put them in
:attr:`docnames`.
"""
@@ -61,17 +62,24 @@ class Project:
if docname in self.docnames:
pattern = os.path.join(self.srcdir, docname) + '.*'
files = [relpath(f, self.srcdir) for f in glob(pattern)]
logger.warning(__('multiple files found for the document "%s": %r\n'
'Use %r for the build.'),
docname, files, self.doc2path(docname, absolute=True),
once=True)
logger.warning(
__(
'multiple files found for the document "%s": %r\n'
'Use %r for the build.'
),
docname,
files,
self.doc2path(docname, absolute=True),
once=True,
)
elif os.access(os.path.join(self.srcdir, filename), os.R_OK):
self.docnames.add(docname)
self._path_to_docname[filename] = docname
self._docname_to_path[docname] = filename
else:
logger.warning(__("Ignored unreadable document %r."),
filename, location=docname)
logger.warning(
__('Ignored unreadable document %r.'), filename, location=docname
)

return self.docnames
sphinx/roles.py (114 lines changed)
@@ -40,6 +40,7 @@ generic_docroles = {

# -- generic cross-reference role ----------------------------------------------


class XRefRole(ReferenceRole):
"""
A generic cross-referencing role. To create a callable that can be used as
@@ -67,10 +68,14 @@ class XRefRole(ReferenceRole):
nodeclass: type[Element] = addnodes.pending_xref
innernodeclass: type[TextElement] = nodes.literal

def __init__(self, fix_parens: bool = False, lowercase: bool = False,
nodeclass: type[Element] | None = None,
innernodeclass: type[TextElement] | None = None,
warn_dangling: bool = False) -> None:
def __init__(
self,
fix_parens: bool = False,
lowercase: bool = False,
nodeclass: type[Element] | None = None,
innernodeclass: type[TextElement] | None = None,
warn_dangling: bool = False,
) -> None:
self.fix_parens = fix_parens
self.lowercase = lowercase
self.warn_dangling = warn_dangling
@@ -111,7 +116,7 @@ class XRefRole(ReferenceRole):
text = utils.unescape(self.text[1:])
if self.fix_parens:
self.has_explicit_title = False # treat as implicit
text, target = self.update_title_and_target(text, "")
text, target = self.update_title_and_target(text, '')

node = self.innernodeclass(self.rawtext, text, classes=self.classes)
return self.result_nodes(self.inliner.document, self.env, node, is_ref=False)
@@ -125,17 +130,20 @@ class XRefRole(ReferenceRole):
title, target = self.update_title_and_target(title, target)

# create the reference node
options = {'refdoc': self.env.docname,
'refdomain': self.refdomain,
'reftype': self.reftype,
'refexplicit': self.has_explicit_title,
'refwarn': self.warn_dangling}
options = {
'refdoc': self.env.docname,
'refdomain': self.refdomain,
'reftype': self.reftype,
'refexplicit': self.has_explicit_title,
'refwarn': self.warn_dangling,
}
refnode = self.nodeclass(self.rawtext, **options)
self.set_source_info(refnode)

# determine the target and title for the class
title, target = self.process_link(self.env, refnode, self.has_explicit_title,
title, target)
title, target = self.process_link(
self.env, refnode, self.has_explicit_title, title, target
)
refnode['reftarget'] = target
refnode += self.innernodeclass(self.rawtext, title, classes=self.classes)

@@ -143,8 +151,14 @@ class XRefRole(ReferenceRole):

# methods that can be overwritten

def process_link(self, env: BuildEnvironment, refnode: Element, has_explicit_title: bool,
title: str, target: str) -> tuple[str, str]:
def process_link(
self,
env: BuildEnvironment,
refnode: Element,
has_explicit_title: bool,
title: str,
target: str,
) -> tuple[str, str]:
"""Called after parsing title and target text, and creating the
reference node (given in *refnode*). This method can alter the
reference node and must return a new (or the same) ``(title, target)``
@@ -152,8 +166,9 @@ class XRefRole(ReferenceRole):
"""
return title, ws_re.sub(' ', target)

def result_nodes(self, document: nodes.document, env: BuildEnvironment, node: Element,
is_ref: bool) -> tuple[list[Node], list[system_message]]:
def result_nodes(
self, document: nodes.document, env: BuildEnvironment, node: Element, is_ref: bool
) -> tuple[list[Node], list[system_message]]:
"""Called before returning the finished nodes. *node* is the reference
node if one was created (*is_ref* is then true), else the content node.
This method can add other nodes and must return a ``(nodes, messages)``
@@ -163,8 +178,14 @@ class XRefRole(ReferenceRole):


class AnyXRefRole(XRefRole):
def process_link(self, env: BuildEnvironment, refnode: Element, has_explicit_title: bool,
title: str, target: str) -> tuple[str, str]:
def process_link(
self,
env: BuildEnvironment,
refnode: Element,
has_explicit_title: bool,
title: str,
target: str,
) -> tuple[str, str]:
result = super().process_link(env, refnode, has_explicit_title, title, target)
# add all possible context info (i.e. std:program, py:module etc.)
refnode.attributes.update(env.ref_context)
@@ -174,8 +195,15 @@ class AnyXRefRole(XRefRole):
class PEP(ReferenceRole):
def run(self) -> tuple[list[Node], list[system_message]]:
target_id = 'index-%s' % self.env.new_serialno('index')
entries = [('single', _('Python Enhancement Proposals; PEP %s') % self.target,
target_id, '', None)]
entries = [
(
'single',
_('Python Enhancement Proposals; PEP %s') % self.target,
target_id,
'',
None,
)
]

index = addnodes.index(entries=entries)
target = nodes.target('', '', ids=[target_id])
@@ -187,11 +215,12 @@ class PEP(ReferenceRole):
if self.has_explicit_title:
reference += nodes.strong(self.title, self.title)
else:
title = "PEP " + self.title
title = 'PEP ' + self.title
reference += nodes.strong(title, title)
except ValueError:
msg = self.inliner.reporter.error(__('invalid PEP number %s') % self.target,
line=self.lineno)
msg = self.inliner.reporter.error(
__('invalid PEP number %s') % self.target, line=self.lineno
)
prb = self.inliner.problematic(self.rawtext, self.rawtext, msg)
return [prb], [msg]

@@ -221,11 +250,12 @@ class RFC(ReferenceRole):
if self.has_explicit_title:
reference += nodes.strong(self.title, self.title)
else:
title = "RFC " + self.title
title = 'RFC ' + self.title
reference += nodes.strong(title, title)
except ValueError:
msg = self.inliner.reporter.error(__('invalid RFC number %s') % self.target,
line=self.lineno)
msg = self.inliner.reporter.error(
__('invalid RFC number %s') % self.target, line=self.lineno
)
prb = self.inliner.problematic(self.rawtext, self.rawtext, msg)
return [prb], [msg]

@@ -271,8 +301,9 @@ class EmphasizedLiteral(SphinxRole):

def run(self) -> tuple[list[Node], list[system_message]]:
children = self.parse(self.text)
node = nodes.literal(self.rawtext, '', *children,
role=self.name.lower(), classes=[self.name])
node = nodes.literal(
self.rawtext, '', *children, role=self.name.lower(), classes=[self.name]
)

return [node], []

@@ -284,13 +315,13 @@ class EmphasizedLiteral(SphinxRole):
if part == '\\\\': # escaped backslash
stack[-1] += '\\'
elif part == '{':
if len(stack) >= 2 and stack[-2] == "{": # nested
stack[-1] += "{"
if len(stack) >= 2 and stack[-2] == '{': # nested
stack[-1] += '{'
else:
# start emphasis
stack.extend(('{', ''))
elif part == '}':
if len(stack) == 3 and stack[1] == "{" and len(stack[2]) > 0:
if len(stack) == 3 and stack[1] == '{' and len(stack[2]) > 0:
# emphasized word found
if stack[0]:
result.append(nodes.Text(stack[0]))
@@ -322,7 +353,7 @@ class Abbreviation(SphinxRole):
options = self.options.copy()
matched = self.abbr_re.search(self.text)
if matched:
text = self.text[:matched.start()].strip()
text = self.text[: matched.start()].strip()
options['explanation'] = matched.group(1)
else:
text = self.text
@@ -344,11 +375,10 @@ class Manpage(ReferenceRole):
text = self.title[1:] if self.disabled else self.title
if not self.disabled and self.config.manpages_url:
uri = self.config.manpages_url.format(**info)
inner = nodes.reference('', text, classes=[self.name], refuri=uri)
inner = nodes.reference('', text, classes=[self.name], refuri=uri)
else:
inner = nodes.Text(text)
node = addnodes.manpage(self.rawtext, '', inner,
classes=[self.name], **info)
node = addnodes.manpage(self.rawtext, '', inner, classes=[self.name], **info)

return [node], []

@@ -377,10 +407,15 @@ class Manpage(ReferenceRole):
# way as the Sphinx `code-block` directive.
#
# TODO: Change to use `SphinxRole` once SphinxRole is fixed to support options.
def code_role(name: str, rawtext: str, text: str, lineno: int,
inliner: docutils.parsers.rst.states.Inliner,
options: dict[str, Any] | None = None, content: Sequence[str] = (),
) -> tuple[list[Node], list[system_message]]:
def code_role(
name: str,
rawtext: str,
text: str,
lineno: int,
inliner: docutils.parsers.rst.states.Inliner,
options: dict[str, Any] | None = None,
content: Sequence[str] = (),
) -> tuple[list[Node], list[system_message]]:
if options is None:
options = {}
options = options.copy()
@@ -411,7 +446,6 @@ specific_docroles: dict[str, RoleFunction] = {
'download': XRefRole(nodeclass=addnodes.download_reference),
# links to anything
'any': AnyXRefRole(warn_dangling=True),

'pep': PEP(),
'rfc': RFC(),
'guilabel': GUILabel(),
@@ -80,9 +80,10 @@ class Theme:
else:
value = _NO_DEFAULT
if value is _NO_DEFAULT:
msg = __(
'setting %s.%s occurs in none of the searched theme configs',
) % (section, name)
msg = __('setting %s.%s occurs in none of the searched theme configs') % (
section,
name,
)
raise ThemeError(msg)
return value

@@ -159,8 +160,13 @@ class HTMLThemeFactory:
name = entry[:-4]
themes[name] = pathname
else:
logger.warning(__('file %r on theme path is not a valid '
'zipfile or contains no theme'), entry)
logger.warning(
__(
'file %r on theme path is not a valid '
'zipfile or contains no theme'
),
entry,
)
else:
if path.isfile(path.join(pathname, _THEME_CONF)):
themes[entry] = pathname
@@ -189,8 +195,7 @@ def _is_archived_theme(filename: str, /) -> bool:


def _load_theme_with_ancestors(
theme_paths: dict[str, str],
name: str, /,
theme_paths: dict[str, str], name: str, /
) -> tuple[dict[str, configparser.RawConfigParser], list[str], list[str]]:
themes: dict[str, configparser.RawConfigParser] = {}
theme_dirs: list[str] = []
@@ -211,7 +216,7 @@ def _load_theme_with_ancestors(
if inherit not in theme_paths:
msg = __(
'The %r theme inherits from %r, which is not a loaded theme. '
'Loaded themes are: %s',
'Loaded themes are: %s'
) % (name, inherit, ', '.join(sorted(theme_paths)))
raise ThemeError(msg)
name = inherit
@@ -223,7 +228,7 @@ def _load_theme_with_ancestors(


def _load_theme(
name: str, theme_path: str, /,
name: str, theme_path: str, /
) -> tuple[str, str, str | None, configparser.RawConfigParser]:
if path.isdir(theme_path):
# already a directory, do nothing
@@ -1,4 +1,5 @@
"""Implements the low-level algorithms Sphinx uses for versioning doctrees."""

from __future__ import annotations

import pickle
@@ -19,6 +20,7 @@ if TYPE_CHECKING:

try:
import Levenshtein # type: ignore[import-not-found]

IS_SPEEDUP = True
except ImportError:
IS_SPEEDUP = False