Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
🔧 Ruff format select sphinx modules with minimal diffs (#12146)
This commit removes select sphinx top-level modules from the `ruff format` exclude list. They were selected because they have the smallest diffs when formatted and are not believed to introduce changes that would adversely affect any existing PRs.
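For context, here is a minimal sketch (not part of this commit) of how one might rank modules by the size of their `ruff format` diff before deciding which ones to un-exclude. The candidate list and the line-counting heuristic are illustrative assumptions; only the `ruff format --diff` invocation itself is standard ruff usage.

import subprocess

# Hypothetical sample of candidate modules; the actual selection covered 14 top-level files.
CANDIDATES = ['sphinx/io.py', 'sphinx/errors.py', 'sphinx/project.py']

def diff_size(path: str) -> int:
    # `ruff format --diff` prints the changes it would make without rewriting the file.
    proc = subprocess.run(
        ['ruff', 'format', '--diff', path], capture_output=True, text=True, check=False
    )
    # Count added/removed lines as a rough measure of how invasive formatting would be.
    return sum(line.startswith(('+', '-')) for line in proc.stdout.splitlines())

for path in sorted(CANDIDATES, key=diff_size):
    print(f'{diff_size(path):4d}  {path}')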
.flake8 (4 changed lines)
@@ -2,8 +2,10 @@
 max-line-length = 95
 ignore =
     E116,
+    E203,
     E241,
     E251,
+    E501,
     E741,
     W503,
     W504,
@@ -29,5 +31,3 @@ exclude =
     doc/_build/*,
     sphinx/search/*,
     doc/usage/extensions/example*.py,
-per-file-ignores =
-    tests/*: E501
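The two new ignore codes keep flake8 quiet about formatter output: ruff format, like Black, puts a space before ':' when a slice bound is a complex expression (pycodestyle E203), and it can leave the occasional line over the 95-character limit (E501), which also makes the removed per-file `tests/*: E501` override redundant. A small runnable illustration of the slice spacing, with made-up names:

# Illustration only: formatter-style slice spacing that E203 would otherwise flag.
class TemplateChain:
    templatepathlen = 2  # hypothetical stand-in for an attribute-style slice bound

loaders = ['a.html', 'b.html', 'theme/a.html', 'theme/b.html']
theme_loaders = loaders[TemplateChain.templatepathlen :]  # space kept before ':'
print(theme_loaders)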
.ruff.toml (14 changed lines)
@@ -426,37 +426,23 @@ forced-separate = [
 preview = true
 quote-style = "single"
 exclude = [
-    "sphinx/__init__.py",
     "sphinx/addnodes.py",
     "sphinx/application.py",
     "sphinx/builders/**/*",
     "sphinx/cmd/**/*",
     "sphinx/config.py",
-    "sphinx/deprecation.py",
     "sphinx/directives/**/*",
     "sphinx/domains/**/*",
     "sphinx/environment/**/*",
-    "sphinx/errors.py",
-    "sphinx/events.py",
     "sphinx/ext/**/*",
-    "sphinx/extension.py",
-    "sphinx/highlighting.py",
-    "sphinx/io.py",
-    "sphinx/jinja2glue.py",
-    "sphinx/locale/__init__.py",
-    "sphinx/parsers.py",
-    "sphinx/project.py",
     "sphinx/pycode/**/*",
     "sphinx/pygments_styles.py",
     "sphinx/registry.py",
-    "sphinx/roles.py",
     "sphinx/search/**/*",
     "sphinx/templates/**/*",
     "sphinx/testing/**/*",
-    "sphinx/theming.py",
     "sphinx/transforms/**/*",
     "sphinx/util/**/*",
-    "sphinx/versioning.py",
     "sphinx/writers/**/*",
     "tests/certs/**/*",
     "tests/conftest.py",
sphinx/__init__.py
@@ -13,8 +13,9 @@ from .deprecation import RemovedInNextVersionWarning
 # Users can avoid this by using environment variable: PYTHONWARNINGS=
 if 'PYTHONWARNINGS' not in os.environ:
     warnings.filterwarnings('default', category=RemovedInNextVersionWarning)
-    warnings.filterwarnings('ignore', 'The frontend.Option class .*',
-                            DeprecationWarning, module='docutils.frontend')
+    warnings.filterwarnings(
+        'ignore', 'The frontend.Option class .*', DeprecationWarning, module='docutils.frontend'
+    )
 
 __version__ = '7.3.0'
 __display_version__ = __version__  # used for command line version
sphinx/deprecation.py
@@ -56,10 +56,12 @@ def _deprecation_warning(
 
     qualified_name = f'{module}.{attribute}'
     if canonical_name:
-        message = (f'The alias {qualified_name!r} is deprecated, '
-                   f'use {canonical_name!r} instead.')
+        message = (
+            f'The alias {qualified_name!r} is deprecated, use {canonical_name!r} instead.'
+        )
     else:
         message = f'{qualified_name!r} is deprecated.'
 
-    warnings.warn(message + " Check CHANGES for Sphinx API modifications.",
-                  warning_class, stacklevel=3)
+    warnings.warn(
+        message + ' Check CHANGES for Sphinx API modifications.', warning_class, stacklevel=3
+    )
sphinx/errors.py
@@ -45,7 +45,7 @@ class ExtensionError(SphinxError):
     """Extension error."""
 
     def __init__(
-        self, message: str, orig_exc: Exception | None = None, modname: str | None = None,
+        self, message: str, orig_exc: Exception | None = None, modname: str | None = None
     ) -> None:
         super().__init__(message)
         self.message = message
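The signature change above appears to be driven by ruff format's "magic trailing comma": a trailing comma after the last parameter makes the formatter keep one parameter per line, while dropping it lets a signature that fits the line limit stay collapsed. A short sketch with invented names:

from __future__ import annotations

# Without a trailing comma, a signature that fits the line limit stays as written.
def compact(message: str, modname: str | None = None) -> None:
    print(message, modname)


# With a trailing comma after the last parameter, the formatter keeps the
# exploded, one-parameter-per-line layout.
def exploded(
    message: str,
    modname: str | None = None,
) -> None:
    print(message, modname)


compact('deprecated', 'sphinx.io')
exploded('deprecated', 'sphinx.io')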
sphinx/events.py
@@ -81,8 +81,9 @@ class EventManager:
                 if listener.id == listener_id:
                     listeners.remove(listener)
 
-    def emit(self, name: str, *args: Any,
-             allowed_exceptions: tuple[type[Exception], ...] = ()) -> list:
+    def emit(
+        self, name: str, *args: Any, allowed_exceptions: tuple[type[Exception], ...] = ()
+    ) -> list:
         """Emit a Sphinx event."""
         # not every object likes to be repr()'d (think
         # random stuff coming via autodoc)
@@ -90,7 +91,7 @@ class EventManager:
         logger.debug('[app] emitting event: %r%s', name, repr(args)[:100])
 
         results = []
-        listeners = sorted(self.listeners[name], key=attrgetter("priority"))
+        listeners = sorted(self.listeners[name], key=attrgetter('priority'))
         for listener in listeners:
             try:
                 results.append(listener.handler(self.app, *args))
@@ -104,12 +105,17 @@ class EventManager:
                     # Just pass through the error, so that it can be debugged.
                     raise
                 modname = safe_getattr(listener.handler, '__module__', None)
-                raise ExtensionError(__("Handler %r for event %r threw an exception") %
-                                     (listener.handler, name), exc, modname=modname) from exc
+                raise ExtensionError(
+                    __('Handler %r for event %r threw an exception')
+                    % (listener.handler, name),
+                    exc,
+                    modname=modname,
+                ) from exc
         return results
 
-    def emit_firstresult(self, name: str, *args: Any,
-                         allowed_exceptions: tuple[type[Exception], ...] = ()) -> Any:
+    def emit_firstresult(
+        self, name: str, *args: Any, allowed_exceptions: tuple[type[Exception], ...] = ()
+    ) -> Any:
         """Emit a Sphinx event and returns first result.
 
         This returns the result of the first handler that doesn't return ``None``.
sphinx/extension.py
@@ -51,8 +51,13 @@ def verify_needs_extensions(app: Sphinx, config: Config) -> None:
     for extname, reqversion in config.needs_extensions.items():
         extension = app.extensions.get(extname)
         if extension is None:
-            logger.warning(__('The %s extension is required by needs_extensions settings, '
-                              'but it is not loaded.'), extname)
+            logger.warning(
+                __(
+                    'The %s extension is required by needs_extensions settings, '
+                    'but it is not loaded.'
+                ),
+                extname,
+            )
             continue
 
         fulfilled = True
@@ -67,10 +72,14 @@ def verify_needs_extensions(app: Sphinx, config: Config) -> None:
             fulfilled = False
 
         if not fulfilled:
-            raise VersionRequirementError(__('This project needs the extension %s at least in '
-                                             'version %s and therefore cannot be built with '
-                                             'the loaded version (%s).') %
-                                          (extname, reqversion, extension.version))
+            raise VersionRequirementError(
+                __(
+                    'This project needs the extension %s at least in '
+                    'version %s and therefore cannot be built with '
+                    'the loaded version (%s).'
+                )
+                % (extname, reqversion, extension.version)
+            )
 
 
 def setup(app: Sphinx) -> dict[str, Any]:
sphinx/highlighting.py
@@ -42,9 +42,7 @@ lexer_classes: dict[str, type[Lexer] | partial[Lexer]] = {
 }
 
 
-escape_hl_chars = {ord('\\'): '\\PYGZbs{}',
-                   ord('{'): '\\PYGZob{}',
-                   ord('}'): '\\PYGZcb{}'}
+escape_hl_chars = {ord('\\'): '\\PYGZbs{}', ord('{'): '\\PYGZob{}', ord('}'): '\\PYGZcb{}'}
 
 # used if Pygments is available
 # MEMO: no use of \protected here to avoid having to do hyperref extras,
@@ -57,7 +55,7 @@ escape_hl_chars = {ord('\\'): '\\PYGZbs{}',
 # MEMO: the Pygments escapes with \char`\<char> syntax, if the document
 # uses old OT1 font encoding, work correctly only in monospace font.
 # MEMO: the Pygmentize output mark-up is always with a {} after.
-_LATEX_ADD_STYLES = r'''
+_LATEX_ADD_STYLES = r"""
 % Sphinx redefinitions
 % Originally to obtain a straight single quote via package textcomp, then
 % to fix problems for the 5.0.0 inline code highlighting (captions!).
@@ -82,7 +80,7 @@ _LATEX_ADD_STYLES = r'''
 % use \protected to allow syntax highlighting in captions
 \protected\def\PYG#1#2{\PYG@reset\PYG@toks#1+\relax+{\PYG@do{#2}}}
 \makeatother
-'''
+"""
 
 
 class PygmentsBridge:
@@ -91,8 +89,9 @@ class PygmentsBridge:
     html_formatter = HtmlFormatter
     latex_formatter = LatexFormatter
 
-    def __init__(self, dest: str = 'html', stylename: str = 'sphinx',
-                 latex_engine: str | None = None) -> None:
+    def __init__(
+        self, dest: str = 'html', stylename: str = 'sphinx', latex_engine: str | None = None
+    ) -> None:
         self.dest = dest
         self.latex_engine = latex_engine
 
@@ -119,8 +118,14 @@ class PygmentsBridge:
         kwargs.update(self.formatter_args)
         return self.formatter(**kwargs)
 
-    def get_lexer(self, source: str, lang: str, opts: dict | None = None,
-                  force: bool = False, location: Any = None) -> Lexer:
+    def get_lexer(
+        self,
+        source: str,
+        lang: str,
+        opts: dict | None = None,
+        force: bool = False,
+        location: Any = None,
+    ) -> Lexer:
         if not opts:
             opts = {}
 
@@ -146,8 +151,9 @@ class PygmentsBridge:
             else:
                 lexer = get_lexer_by_name(lang, **opts)
         except ClassNotFound:
-            logger.warning(__('Pygments lexer name %r is not known'), lang,
-                           location=location)
+            logger.warning(
+                __('Pygments lexer name %r is not known'), lang, location=location
+            )
             lexer = lexer_classes['none'](**opts)
 
         if not force:
@@ -155,8 +161,15 @@ class PygmentsBridge:
 
         return lexer
 
-    def highlight_block(self, source: str, lang: str, opts: dict | None = None,
-                        force: bool = False, location: Any = None, **kwargs: Any) -> str:
+    def highlight_block(
+        self,
+        source: str,
+        lang: str,
+        opts: dict | None = None,
+        force: bool = False,
+        location: Any = None,
+        **kwargs: Any,
+    ) -> str:
         if not isinstance(source, str):
             source = source.decode()
 
@@ -173,11 +186,17 @@ class PygmentsBridge:
                 lang = 'none'  # automatic highlighting failed.
             else:
                 logger.warning(
-                    __('Lexing literal_block %r as "%s" resulted in an error at token: %r. '
-                       'Retrying in relaxed mode.'),
-                    source, lang, str(err),
-                    type='misc', subtype='highlighting_failure',
-                    location=location)
+                    __(
+                        'Lexing literal_block %r as "%s" resulted in an error at token: %r. '
+                        'Retrying in relaxed mode.'
+                    ),
+                    source,
+                    lang,
+                    str(err),
+                    type='misc',
+                    subtype='highlighting_failure',
+                    location=location,
+                )
                 if force:
                     lang = 'none'
                 else:
sphinx/io.py (18 changed lines)
@@ -1,4 +1,5 @@
 """Input/Output files"""
+
 from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any
@@ -46,6 +47,7 @@ class SphinxBaseReader(standalone.Reader):
 
     def __init__(self, *args: Any, **kwargs: Any) -> None:
         from sphinx.application import Sphinx
+
         if len(args) > 0 and isinstance(args[0], Sphinx):
             self._app = args[0]
             self._env = self._app.env
@@ -54,7 +56,7 @@ class SphinxBaseReader(standalone.Reader):
         super().__init__(*args, **kwargs)
 
     def setup(self, app: Sphinx) -> None:
-        self._app = app      # hold application object only for compatibility
+        self._app = app  # hold application object only for compatibility
         self._env = app.env
 
     def get_transforms(self) -> list[type[Transform]]:
@@ -128,9 +130,15 @@ class SphinxI18nReader(SphinxBaseReader):
         super().setup(app)
 
         self.transforms = self.transforms + app.registry.get_transforms()
-        unused = [PreserveTranslatableMessages, Locale, RemoveTranslatableInline,
-                  AutoIndexUpgrader, SphinxDomains, DoctreeReadEvent,
-                  UIDTransform]
+        unused = [
+            PreserveTranslatableMessages,
+            Locale,
+            RemoveTranslatableInline,
+            AutoIndexUpgrader,
+            SphinxDomains,
+            DoctreeReadEvent,
+            UIDTransform,
+        ]
         for transform in unused:
             if transform in self.transforms:
                 self.transforms.remove(transform)
@@ -181,7 +189,7 @@ def create_publisher(app: Sphinx, filetype: str) -> Publisher:
         destination=NullOutput(),
     )
     # Propagate exceptions by default when used programmatically:
-    defaults = {"traceback": True, **app.env.settings}
+    defaults = {'traceback': True, **app.env.settings}
     # Set default settings
     if docutils.__version_info__[:2] >= (0, 19):
         pub.get_settings(**defaults)
sphinx/jinja2glue.py
@@ -95,6 +95,7 @@ class idgen:
     def __next__(self) -> int:
         self.id += 1
         return self.id
+
     next = __next__  # Python 2/Jinja compatibility
 
 
@@ -133,6 +134,7 @@ class SphinxFileSystemLoader(FileSystemLoader):
                 return path.getmtime(filename) == mtime
             except OSError:
                 return False
+
         return contents, filename, uptodate
 
 
@@ -165,8 +167,9 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
         # prepend explicit template paths
         self.templatepathlen = len(builder.config.templates_path)
         if builder.config.templates_path:
-            cfg_templates_path = [path.join(builder.confdir, tp)
-                                  for tp in builder.config.templates_path]
+            cfg_templates_path = [
+                path.join(builder.confdir, tp) for tp in builder.config.templates_path
+            ]
             pathchain[0:0] = cfg_templates_path
             loaderchain[0:0] = cfg_templates_path
 
@@ -178,8 +181,7 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
 
         use_i18n = builder.app.translator is not None
         extensions = ['jinja2.ext.i18n'] if use_i18n else []
-        self.environment = SandboxedEnvironment(loader=self,
-                                                extensions=extensions)
+        self.environment = SandboxedEnvironment(loader=self, extensions=extensions)
         self.environment.filters['tobool'] = _tobool
         self.environment.filters['toint'] = _toint
         self.environment.filters['todim'] = _todim
@@ -191,7 +193,8 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
         if use_i18n:
             # ``install_gettext_translations`` is injected by the ``jinja2.ext.i18n`` extension
             self.environment.install_gettext_translations(  # type: ignore[attr-defined]
-                builder.app.translator)
+                builder.app.translator
+            )
 
     def render(self, template: str, context: dict) -> str:  # type: ignore[override]
         return self.environment.get_template(template).render(context)
@@ -208,13 +211,12 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
         loaders = self.loaders
         # exclamation mark starts search from theme
         if template.startswith('!'):
-            loaders = loaders[self.templatepathlen:]
+            loaders = loaders[self.templatepathlen :]
             template = template[1:]
         for loader in loaders:
            try:
                return loader.get_source(environment, template)
            except TemplateNotFound:
                pass
-        msg = (f"{template!r} not found in "
-               f"{self.environment.loader.pathchain}")  # type: ignore[union-attr]
+        msg = f'{template!r} not found in {self.environment.loader.pathchain}'  # type: ignore[union-attr]
         raise TemplateNotFound(msg)
sphinx/locale/__init__.py
@@ -51,8 +51,10 @@ class _TranslationProxy:
         try:
             return f'i{self.__str__()!r}'
         except Exception:
-            return (self.__class__.__name__
-                    + f'({self._catalogue}, {self._namespace}, {self._message})')
+            return (
+                self.__class__.__name__
+                + f'({self._catalogue}, {self._namespace}, {self._message})'
+            )
 
     def __add__(self, other: str) -> str:
         return self.__str__() + other
@@ -197,6 +199,7 @@ def get_translation(catalog: str, namespace: str = 'general') -> Callable[[str],
 
     .. versionadded:: 1.8
     """
+
     def gettext(message: str) -> str:
         if not is_translator_registered(catalog, namespace):
             # not initialized yet
@@ -220,13 +223,13 @@ __ = get_translation('sphinx', 'console')
 # labels
 admonitionlabels = {
     'attention': _('Attention'),
-    'caution':   _('Caution'),
-    'danger':    _('Danger'),
-    'error':     _('Error'),
-    'hint':      _('Hint'),
+    'caution': _('Caution'),
+    'danger': _('Danger'),
+    'error': _('Error'),
+    'hint': _('Hint'),
     'important': _('Important'),
-    'note':      _('Note'),
-    'seealso':   _('See also'),
-    'tip':       _('Tip'),
-    'warning':   _('Warning'),
+    'note': _('Note'),
+    'seealso': _('See also'),
+    'tip': _('Tip'),
+    'warning': _('Warning'),
 }
sphinx/parsers.py
@@ -65,13 +65,14 @@ class RSTParser(docutils.parsers.rst.Parser, Parser):
         self.statemachine = states.RSTStateMachine(
             state_classes=self.state_classes,
             initial_state=self.initial_state,
-            debug=document.reporter.debug_flag)
+            debug=document.reporter.debug_flag,
+        )
 
         # preprocess inputstring
         if isinstance(inputstring, str):
             lines = docutils.statemachine.string2lines(
-                inputstring, tab_width=document.settings.tab_width,
-                convert_whitespace=True)
+                inputstring, tab_width=document.settings.tab_width, convert_whitespace=True
+            )
 
             inputlines = StringList(lines, document.current_source)
         else:
sphinx/project.py
@@ -28,7 +28,7 @@ class Project:
 
         #: source_suffix. Same as :confval:`source_suffix`.
         self.source_suffix = tuple(source_suffix)
-        self._first_source_suffix = next(iter(self.source_suffix), "")
+        self._first_source_suffix = next(iter(self.source_suffix), '')
 
         #: The name of documents belonging to this project.
         self.docnames: set[str] = set()
@@ -43,8 +43,9 @@ class Project:
         self._path_to_docname = other._path_to_docname
         self._docname_to_path = other._docname_to_path
 
-    def discover(self, exclude_paths: Iterable[str] = (),
-                 include_paths: Iterable[str] = ("**",)) -> set[str]:
+    def discover(
+        self, exclude_paths: Iterable[str] = (), include_paths: Iterable[str] = ('**',)
+    ) -> set[str]:
         """Find all document files in the source directory and put them in
         :attr:`docnames`.
         """
@@ -61,17 +62,24 @@
                 if docname in self.docnames:
                     pattern = os.path.join(self.srcdir, docname) + '.*'
                     files = [relpath(f, self.srcdir) for f in glob(pattern)]
-                    logger.warning(__('multiple files found for the document "%s": %r\n'
-                                      'Use %r for the build.'),
-                                   docname, files, self.doc2path(docname, absolute=True),
-                                   once=True)
+                    logger.warning(
+                        __(
+                            'multiple files found for the document "%s": %r\n'
+                            'Use %r for the build.'
+                        ),
+                        docname,
+                        files,
+                        self.doc2path(docname, absolute=True),
+                        once=True,
+                    )
                 elif os.access(os.path.join(self.srcdir, filename), os.R_OK):
                     self.docnames.add(docname)
                     self._path_to_docname[filename] = docname
                     self._docname_to_path[docname] = filename
                 else:
-                    logger.warning(__("Ignored unreadable document %r."),
-                                   filename, location=docname)
+                    logger.warning(
+                        __('Ignored unreadable document %r.'), filename, location=docname
+                    )
 
         return self.docnames
 
sphinx/roles.py (114 changed lines)
@@ -40,6 +40,7 @@ generic_docroles = {
 
 # -- generic cross-reference role ----------------------------------------------
 
+
 class XRefRole(ReferenceRole):
     """
     A generic cross-referencing role. To create a callable that can be used as
@@ -67,10 +68,14 @@ class XRefRole(ReferenceRole):
     nodeclass: type[Element] = addnodes.pending_xref
     innernodeclass: type[TextElement] = nodes.literal
 
-    def __init__(self, fix_parens: bool = False, lowercase: bool = False,
-                 nodeclass: type[Element] | None = None,
-                 innernodeclass: type[TextElement] | None = None,
-                 warn_dangling: bool = False) -> None:
+    def __init__(
+        self,
+        fix_parens: bool = False,
+        lowercase: bool = False,
+        nodeclass: type[Element] | None = None,
+        innernodeclass: type[TextElement] | None = None,
+        warn_dangling: bool = False,
+    ) -> None:
         self.fix_parens = fix_parens
         self.lowercase = lowercase
         self.warn_dangling = warn_dangling
@@ -111,7 +116,7 @@ class XRefRole(ReferenceRole):
        text = utils.unescape(self.text[1:])
        if self.fix_parens:
            self.has_explicit_title = False  # treat as implicit
-            text, target = self.update_title_and_target(text, "")
+            text, target = self.update_title_and_target(text, '')
 
        node = self.innernodeclass(self.rawtext, text, classes=self.classes)
        return self.result_nodes(self.inliner.document, self.env, node, is_ref=False)
@@ -125,17 +130,20 @@ class XRefRole(ReferenceRole):
            title, target = self.update_title_and_target(title, target)
 
        # create the reference node
-        options = {'refdoc': self.env.docname,
-                   'refdomain': self.refdomain,
-                   'reftype': self.reftype,
-                   'refexplicit': self.has_explicit_title,
-                   'refwarn': self.warn_dangling}
+        options = {
+            'refdoc': self.env.docname,
+            'refdomain': self.refdomain,
+            'reftype': self.reftype,
+            'refexplicit': self.has_explicit_title,
+            'refwarn': self.warn_dangling,
+        }
        refnode = self.nodeclass(self.rawtext, **options)
        self.set_source_info(refnode)
 
        # determine the target and title for the class
-        title, target = self.process_link(self.env, refnode, self.has_explicit_title,
-                                          title, target)
+        title, target = self.process_link(
+            self.env, refnode, self.has_explicit_title, title, target
+        )
        refnode['reftarget'] = target
        refnode += self.innernodeclass(self.rawtext, title, classes=self.classes)
 
@@ -143,8 +151,14 @@ class XRefRole(ReferenceRole):
 
    # methods that can be overwritten
 
-    def process_link(self, env: BuildEnvironment, refnode: Element, has_explicit_title: bool,
-                     title: str, target: str) -> tuple[str, str]:
+    def process_link(
+        self,
+        env: BuildEnvironment,
+        refnode: Element,
+        has_explicit_title: bool,
+        title: str,
+        target: str,
+    ) -> tuple[str, str]:
        """Called after parsing title and target text, and creating the
        reference node (given in *refnode*). This method can alter the
        reference node and must return a new (or the same) ``(title, target)``
@@ -152,8 +166,9 @@ class XRefRole(ReferenceRole):
        """
        return title, ws_re.sub(' ', target)
 
-    def result_nodes(self, document: nodes.document, env: BuildEnvironment, node: Element,
-                     is_ref: bool) -> tuple[list[Node], list[system_message]]:
+    def result_nodes(
+        self, document: nodes.document, env: BuildEnvironment, node: Element, is_ref: bool
+    ) -> tuple[list[Node], list[system_message]]:
        """Called before returning the finished nodes. *node* is the reference
        node if one was created (*is_ref* is then true), else the content node.
        This method can add other nodes and must return a ``(nodes, messages)``
@@ -163,8 +178,14 @@ class XRefRole(ReferenceRole):
 
 
 class AnyXRefRole(XRefRole):
-    def process_link(self, env: BuildEnvironment, refnode: Element, has_explicit_title: bool,
-                     title: str, target: str) -> tuple[str, str]:
+    def process_link(
+        self,
+        env: BuildEnvironment,
+        refnode: Element,
+        has_explicit_title: bool,
+        title: str,
+        target: str,
+    ) -> tuple[str, str]:
        result = super().process_link(env, refnode, has_explicit_title, title, target)
        # add all possible context info (i.e. std:program, py:module etc.)
        refnode.attributes.update(env.ref_context)
@@ -174,8 +195,15 @@ class AnyXRefRole(XRefRole):
 class PEP(ReferenceRole):
    def run(self) -> tuple[list[Node], list[system_message]]:
        target_id = 'index-%s' % self.env.new_serialno('index')
-        entries = [('single', _('Python Enhancement Proposals; PEP %s') % self.target,
-                    target_id, '', None)]
+        entries = [
+            (
+                'single',
+                _('Python Enhancement Proposals; PEP %s') % self.target,
+                target_id,
+                '',
+                None,
+            )
+        ]
 
        index = addnodes.index(entries=entries)
        target = nodes.target('', '', ids=[target_id])
@@ -187,11 +215,12 @@ class PEP(ReferenceRole):
            if self.has_explicit_title:
                reference += nodes.strong(self.title, self.title)
            else:
-                title = "PEP " + self.title
+                title = 'PEP ' + self.title
                reference += nodes.strong(title, title)
        except ValueError:
-            msg = self.inliner.reporter.error(__('invalid PEP number %s') % self.target,
-                                              line=self.lineno)
+            msg = self.inliner.reporter.error(
+                __('invalid PEP number %s') % self.target, line=self.lineno
+            )
            prb = self.inliner.problematic(self.rawtext, self.rawtext, msg)
            return [prb], [msg]
 
@@ -221,11 +250,12 @@ class RFC(ReferenceRole):
            if self.has_explicit_title:
                reference += nodes.strong(self.title, self.title)
            else:
-                title = "RFC " + self.title
+                title = 'RFC ' + self.title
                reference += nodes.strong(title, title)
        except ValueError:
-            msg = self.inliner.reporter.error(__('invalid RFC number %s') % self.target,
-                                              line=self.lineno)
+            msg = self.inliner.reporter.error(
+                __('invalid RFC number %s') % self.target, line=self.lineno
+            )
            prb = self.inliner.problematic(self.rawtext, self.rawtext, msg)
            return [prb], [msg]
 
@@ -271,8 +301,9 @@ class EmphasizedLiteral(SphinxRole):
 
    def run(self) -> tuple[list[Node], list[system_message]]:
        children = self.parse(self.text)
-        node = nodes.literal(self.rawtext, '', *children,
-                             role=self.name.lower(), classes=[self.name])
+        node = nodes.literal(
+            self.rawtext, '', *children, role=self.name.lower(), classes=[self.name]
+        )
 
        return [node], []
 
@@ -284,13 +315,13 @@ class EmphasizedLiteral(SphinxRole):
            if part == '\\\\':  # escaped backslash
                stack[-1] += '\\'
            elif part == '{':
-                if len(stack) >= 2 and stack[-2] == "{":  # nested
-                    stack[-1] += "{"
+                if len(stack) >= 2 and stack[-2] == '{':  # nested
+                    stack[-1] += '{'
                else:
                    # start emphasis
                    stack.extend(('{', ''))
            elif part == '}':
-                if len(stack) == 3 and stack[1] == "{" and len(stack[2]) > 0:
+                if len(stack) == 3 and stack[1] == '{' and len(stack[2]) > 0:
                    # emphasized word found
                    if stack[0]:
                        result.append(nodes.Text(stack[0]))
@@ -322,7 +353,7 @@ class Abbreviation(SphinxRole):
        options = self.options.copy()
        matched = self.abbr_re.search(self.text)
        if matched:
-            text = self.text[:matched.start()].strip()
+            text = self.text[: matched.start()].strip()
            options['explanation'] = matched.group(1)
        else:
            text = self.text
@@ -344,11 +375,10 @@ class Manpage(ReferenceRole):
        text = self.title[1:] if self.disabled else self.title
        if not self.disabled and self.config.manpages_url:
            uri = self.config.manpages_url.format(**info)
            inner = nodes.reference('', text, classes=[self.name], refuri=uri)
        else:
            inner = nodes.Text(text)
-        node = addnodes.manpage(self.rawtext, '', inner,
-                                classes=[self.name], **info)
+        node = addnodes.manpage(self.rawtext, '', inner, classes=[self.name], **info)
 
        return [node], []
 
@@ -377,10 +407,15 @@ class Manpage(ReferenceRole):
 # way as the Sphinx `code-block` directive.
 #
 # TODO: Change to use `SphinxRole` once SphinxRole is fixed to support options.
-def code_role(name: str, rawtext: str, text: str, lineno: int,
-              inliner: docutils.parsers.rst.states.Inliner,
-              options: dict[str, Any] | None = None, content: Sequence[str] = (),
-              ) -> tuple[list[Node], list[system_message]]:
+def code_role(
+    name: str,
+    rawtext: str,
+    text: str,
+    lineno: int,
+    inliner: docutils.parsers.rst.states.Inliner,
+    options: dict[str, Any] | None = None,
+    content: Sequence[str] = (),
+) -> tuple[list[Node], list[system_message]]:
    if options is None:
        options = {}
    options = options.copy()
@@ -411,7 +446,6 @@ specific_docroles: dict[str, RoleFunction] = {
    'download': XRefRole(nodeclass=addnodes.download_reference),
    # links to anything
    'any': AnyXRefRole(warn_dangling=True),
-
    'pep': PEP(),
    'rfc': RFC(),
    'guilabel': GUILabel(),
sphinx/theming.py
@@ -80,9 +80,10 @@ class Theme:
         else:
             value = _NO_DEFAULT
         if value is _NO_DEFAULT:
-            msg = __(
-                'setting %s.%s occurs in none of the searched theme configs',
-            ) % (section, name)
+            msg = __('setting %s.%s occurs in none of the searched theme configs') % (
+                section,
+                name,
+            )
             raise ThemeError(msg)
         return value
 
@@ -159,8 +160,13 @@ class HTMLThemeFactory:
                     name = entry[:-4]
                     themes[name] = pathname
                 else:
-                    logger.warning(__('file %r on theme path is not a valid '
-                                      'zipfile or contains no theme'), entry)
+                    logger.warning(
+                        __(
+                            'file %r on theme path is not a valid '
+                            'zipfile or contains no theme'
+                        ),
+                        entry,
+                    )
             else:
                 if path.isfile(path.join(pathname, _THEME_CONF)):
                     themes[entry] = pathname
@@ -189,8 +195,7 @@ def _is_archived_theme(filename: str, /) -> bool:
 
 
 def _load_theme_with_ancestors(
-    theme_paths: dict[str, str],
-    name: str, /,
+    theme_paths: dict[str, str], name: str, /
 ) -> tuple[dict[str, configparser.RawConfigParser], list[str], list[str]]:
     themes: dict[str, configparser.RawConfigParser] = {}
     theme_dirs: list[str] = []
@@ -211,7 +216,7 @@ def _load_theme_with_ancestors(
         if inherit not in theme_paths:
             msg = __(
                 'The %r theme inherits from %r, which is not a loaded theme. '
-                'Loaded themes are: %s',
+                'Loaded themes are: %s'
             ) % (name, inherit, ', '.join(sorted(theme_paths)))
             raise ThemeError(msg)
         name = inherit
@@ -223,7 +228,7 @@
 
 
 def _load_theme(
-    name: str, theme_path: str, /,
+    name: str, theme_path: str, /
 ) -> tuple[str, str, str | None, configparser.RawConfigParser]:
     if path.isdir(theme_path):
         # already a directory, do nothing
sphinx/versioning.py
@@ -1,4 +1,5 @@
 """Implements the low-level algorithms Sphinx uses for versioning doctrees."""
+
 from __future__ import annotations
 
 import pickle
@@ -19,6 +20,7 @@ if TYPE_CHECKING:
 
 try:
     import Levenshtein  # type: ignore[import-not-found]
+
     IS_SPEEDUP = True
 except ImportError:
     IS_SPEEDUP = False