Reduce the formatter's target line length to 88 (#12757)
@@ -1,5 +1,5 @@
 target-version = "py310" # Pin Ruff to Python 3.10
-line-length = 95
+line-length = 88
 output-format = "full"

 extend-exclude = [
@@ -427,6 +427,9 @@ select = [
     "ANN", # utilities don't need annotations
 ]

+[lint.pycodestyle]
+max-line-length = 95
+
 [lint.flake8-quotes]
 inline-quotes = "single"
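The hunk above lowers the formatter's target from 95 to 88 columns, while the pycodestyle setting keeps the lint limit at 95 so that lines the formatter cannot shorten (such as long implicit string concatenations) do not trip E501. As a minimal sketch with hypothetical names that are not part of this commit, the two wrapped shapes that recur in the hunks below look like this: without a trailing comma the formatter folds the arguments onto a single indented line when they fit, and a trailing comma after the last argument keeps the call expanded, one argument per line.

# Hypothetical helper and values, only to illustrate wrapping at the new limit.
def make_reference(rawsource, text, *, internal, refuri, classes):
    return (rawsource, text, internal, refuri, classes)


refuri = 'https://peps.python.org/pep-0008/'

# No trailing comma: the arguments are folded onto one indented line when they fit.
reference = make_reference(
    '', '', internal=False, refuri=refuri, classes=['pep']
)

# A trailing comma after the last argument keeps the call expanded on later runs
# (the "magic trailing comma").
expanded = make_reference(
    '',
    '',
    internal=False,
    refuri=refuri,
    classes=['pep'],
)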
doc/conf.py
@@ -77,7 +77,14 @@ epub_use_index = False
 epub_description = 'Sphinx documentation generator system manual'

 latex_documents = [
-    ('index', 'sphinx.tex', 'Sphinx Documentation', 'the Sphinx developers', 'manual', 1)
+    (
+        'index',
+        'sphinx.tex',
+        'Sphinx Documentation',
+        'the Sphinx developers',
+        'manual',
+        1,
+    )
 ]
 latex_logo = '_static/sphinx.png'
 latex_elements = {
@@ -324,7 +331,9 @@ def setup(app: Sphinx) -> None:
     app.connect('autodoc-process-docstring', cut_lines(4, what=['module']))
     app.connect('include-read', linkify_issues_in_changelog)
     app.connect('build-finished', build_redirects)
-    fdesc = GroupedField('parameter', label='Parameters', names=['param'], can_collapse=True)
+    fdesc = GroupedField(
+        'parameter', label='Parameters', names=['param'], can_collapse=True
+    )
     app.add_object_type(
         'event',
         'event',
@@ -62,7 +62,15 @@ class IngredientIndex(Index):
         for ingredient, recipe_names in ingredient_recipes.items():
             for recipe_name in recipe_names:
                 dispname, typ, docname, anchor = recipes[recipe_name]
-                content[ingredient].append((dispname, 0, docname, anchor, docname, '', typ))
+                content[ingredient].append((
+                    dispname,
+                    0,
+                    docname,
+                    anchor,
+                    docname,
+                    '',
+                    typ,
+                ))

         # convert the dict to the sorted list of tuples expected
         content = sorted(content.items())
@@ -153,7 +161,14 @@ class RecipeDomain(Domain):

         self.data['recipe_ingredients'][name] = ingredients
         # name, dispname, type, docname, anchor, priority
-        self.data['recipes'].append((name, signature, 'Recipe', self.env.docname, anchor, 0))
+        self.data['recipes'].append((
+            name,
+            signature,
+            'Recipe',
+            self.env.docname,
+            anchor,
+            0,
+        ))


 def setup(app: Sphinx) -> ExtensionMetadata:
@@ -57,7 +57,9 @@ def purge_todos(app, env, docname):
     if not hasattr(env, 'todo_all_todos'):
         return

-    env.todo_all_todos = [todo for todo in env.todo_all_todos if todo['docname'] != docname]
+    env.todo_all_todos = [
+        todo for todo in env.todo_all_todos if todo['docname'] != docname
+    ]


 def merge_todos(app, env, docnames, other):
@@ -98,7 +100,9 @@ def process_todo_nodes(app, doctree, fromdocname):
             newnode = nodes.reference('', '')
             innernode = nodes.emphasis(_('here'), _('here'))
             newnode['refdocname'] = todo_info['docname']
-            newnode['refuri'] = app.builder.get_relative_uri(fromdocname, todo_info['docname'])
+            newnode['refuri'] = app.builder.get_relative_uri(
+                fromdocname, todo_info['docname']
+            )
             newnode['refuri'] += '#' + todo_info['target']['refid']
             newnode.append(innernode)
             para += newnode
@@ -16,7 +16,10 @@ if 'PYTHONWARNINGS' not in os.environ:
     warnings.filterwarnings('default', category=RemovedInNextVersionWarning)
     warnings.filterwarnings(
-        'ignore', 'The frontend.Option class .*', DeprecationWarning, module='docutils.frontend'
+        'ignore',
+        'The frontend.Option class .*',
+        DeprecationWarning,
+        module='docutils.frontend',
     )

 #: Version info for better programmatic use.
@@ -16,14 +16,25 @@ from sphinx.environment import CONFIG_CHANGED_REASON, CONFIG_OK, BuildEnvironment
 from sphinx.environment.adapters.asset import ImageAdapter
 from sphinx.errors import SphinxError
 from sphinx.locale import __
-from sphinx.util import UnicodeDecodeErrorHandler, get_filetype, import_object, logging, rst
+from sphinx.util import (
+    UnicodeDecodeErrorHandler,
+    get_filetype,
+    import_object,
+    logging,
+    rst,
+)
 from sphinx.util.build_phase import BuildPhase
 from sphinx.util.console import bold
 from sphinx.util.display import progress_message, status_iterator
 from sphinx.util.docutils import sphinx_domains
 from sphinx.util.i18n import CatalogInfo, CatalogRepository, docname_to_domain
 from sphinx.util.osutil import SEP, canon_path, ensuredir, relative_uri, relpath
-from sphinx.util.parallel import ParallelTasks, SerialTasks, make_chunks, parallel_available
+from sphinx.util.parallel import (
+    ParallelTasks,
+    SerialTasks,
+    make_chunks,
+    parallel_available,
+)

 # side effect: registers roles and directives
 from sphinx import directives # NoQA: F401 isort:skip
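Every import hunk in this commit takes the same shape. As a small illustration using a standard-library module rather than anything from the diff, an import that no longer fits within 88 columns becomes a parenthesized block with one name per line and a trailing comma, which keeps it expanded on later formatter runs:

# Standard-library example, shown only to illustrate the parenthesized import style.
from collections import (
    OrderedDict,
    defaultdict,
    namedtuple,
)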
@@ -26,7 +26,11 @@ from docutils.utils import relative_path
 from sphinx import __display_version__, package_dir
 from sphinx import version_info as sphinx_version
 from sphinx.builders import Builder
-from sphinx.builders.html._assets import _CascadingStyleSheet, _file_checksum, _JavaScript
+from sphinx.builders.html._assets import (
+    _CascadingStyleSheet,
+    _file_checksum,
+    _JavaScript,
+)
 from sphinx.config import ENUM, Config
 from sphinx.deprecation import _deprecation_warning
 from sphinx.domains import Domain, Index, IndexEntry
@@ -12,7 +12,11 @@ from docutils.frontend import OptionParser
 import sphinx.builders.latex.nodes # NoQA: F401,E501 # Workaround: import this before writer to avoid ImportError
 from sphinx import addnodes, highlighting, package_dir
 from sphinx.builders import Builder
-from sphinx.builders.latex.constants import ADDITIONAL_SETTINGS, DEFAULT_SETTINGS, SHORTHANDOFF
+from sphinx.builders.latex.constants import (
+    ADDITIONAL_SETTINGS,
+    DEFAULT_SETTINGS,
+    SHORTHANDOFF,
+)
 from sphinx.builders.latex.theming import Theme, ThemeFactory
 from sphinx.builders.latex.util import ExtBabel
 from sphinx.config import ENUM, Config
@@ -40,7 +40,11 @@ def _deprecation_warning(

     # deprecated name -> (object to return, canonical path or empty string, removal version)
     _DEPRECATED_OBJECTS = {
-        'deprecated_name': (object_to_return, 'fully_qualified_replacement_name', (9, 0)),
+        'deprecated_name': (
+            object_to_return,
+            'fully_qualified_replacement_name',
+            (9, 0),
+        ),
     }

@@ -65,7 +69,9 @@ def _deprecation_warning(

     qualname = f'{module}.{attribute}'
     if canonical_name:
-        message = f'The alias {qualname!r} is deprecated, use {canonical_name!r} instead.'
+        message = (
+            f'The alias {qualname!r} is deprecated, use {canonical_name!r} instead.'
+        )
     else:
         message = f'{qualname!r} is deprecated.'

@@ -12,7 +12,12 @@ from typing import TYPE_CHECKING, Any, NoReturn

 from sphinx import addnodes
 from sphinx.environment.adapters import toctree as toctree_adapters
-from sphinx.errors import BuildEnvironmentError, DocumentError, ExtensionError, SphinxError
+from sphinx.errors import (
+    BuildEnvironmentError,
+    DocumentError,
+    ExtensionError,
+    SphinxError,
+)
 from sphinx.locale import __
 from sphinx.transforms import SphinxTransformer
 from sphinx.util import DownloadFiles, FilenameUniqDict, logging
@@ -45,7 +45,10 @@ class ExtensionError(SphinxError):
     """Extension error."""

     def __init__(
-        self, message: str, orig_exc: Exception | None = None, modname: str | None = None
+        self,
+        message: str,
+        orig_exc: Exception | None = None,
+        modname: str | None = None,
     ) -> None:
         super().__init__(message)
         self.message = message
@@ -84,7 +84,10 @@ class EventManager:
         listeners.remove(listener)

     def emit(
-        self, name: str, *args: Any, allowed_exceptions: tuple[type[Exception], ...] = ()
+        self,
+        name: str,
+        *args: Any,
+        allowed_exceptions: tuple[type[Exception], ...] = (),
     ) -> list:
         """Emit a Sphinx event."""
         # not every object likes to be repr()'d (think
@@ -120,7 +123,10 @@ class EventManager:
         return results

     def emit_firstresult(
-        self, name: str, *args: Any, allowed_exceptions: tuple[type[Exception], ...] = ()
+        self,
+        name: str,
+        *args: Any,
+        allowed_exceptions: tuple[type[Exception], ...] = (),
     ) -> Any:
         """Emit a Sphinx event and returns first result.

@@ -96,7 +96,10 @@ def write_file(name: str, text: str, opts: CliOptions) -> Path:


 def create_module_file(
-    package: str | None, basename: str, opts: CliOptions, user_template_dir: str | None = None
+    package: str | None,
+    basename: str,
+    opts: CliOptions,
+    user_template_dir: str | None = None,
 ) -> Path:
     """Build the text of the file and write the file."""
     options = copy(OPTIONS)
@@ -148,7 +151,9 @@ def create_package_file(
         if not is_skipped_module(Path(root, sub), opts, excludes) and not is_initpy(sub)
     ]
     submodules = sorted(set(submodules))
-    submodules = [module_join(master_package, subroot, modname) for modname in submodules]
+    submodules = [
+        module_join(master_package, subroot, modname) for modname in submodules
+    ]
     options = copy(OPTIONS)
     if opts.includeprivate and 'private-members' not in options:
         options.append('private-members')
@@ -316,7 +321,9 @@ def recurse_tree(
         if is_pkg or is_namespace:
             # we are in a package with something to document
             if subs or len(files) > 1 or not is_skipped_package(root, opts):
-                subpackage = root[len(rootpath) :].lstrip(path.sep).replace(path.sep, '.')
+                subpackage = (
+                    root[len(rootpath) :].lstrip(path.sep).replace(path.sep, '.')
+                )
                 # if this is not a namespace or
                 # a namespace and there is something there to document
                 if not is_namespace or has_child_module(root, excludes, opts):
@@ -342,7 +349,9 @@ def recurse_tree(
                if not is_skipped_module(Path(rootpath, py_file), opts, excludes):
                    module = py_file.split('.')[0]
                    written_files.append(
-                       create_module_file(root_package, module, opts, user_template_dir)
+                       create_module_file(
+                           root_package, module, opts, user_template_dir
+                       )
                    )
                    toplevels.append(module)

@@ -361,7 +370,7 @@ def is_excluded(root: str | Path, excludes: Sequence[re.Pattern[str]]) -> bool:

 def get_parser() -> argparse.ArgumentParser:
     parser = argparse.ArgumentParser(
-        usage='%(prog)s [OPTIONS] -o <OUTPUT_PATH> <MODULE_PATH> ' '[EXCLUDE_PATTERN, ...]',
+        usage='%(prog)s [OPTIONS] -o <OUTPUT_PATH> <MODULE_PATH> [EXCLUDE_PATTERN, ...]',
         epilog=__('For more information, visit <https://www.sphinx-doc.org/>.'),
         description=__("""
 Look recursively in <MODULE_PATH> for Python modules and packages and create
@@ -384,7 +393,9 @@ Note: By default this script will not overwrite already created files."""),
     parser.add_argument(
         'exclude_pattern',
         nargs='*',
-        help=__('fnmatch-style file and/or directory patterns ' 'to exclude from generation'),
+        help=__(
+            'fnmatch-style file and/or directory patterns ' 'to exclude from generation'
+        ),
     )

     parser.add_argument(
@@ -411,7 +422,11 @@ Note: By default this script will not overwrite already created files."""),
         help=__('maximum depth of submodules to show in the TOC ' '(default: 4)'),
     )
     parser.add_argument(
-        '-f', '--force', action='store_true', dest='force', help=__('overwrite existing files')
+        '-f',
+        '--force',
+        action='store_true',
+        dest='force',
+        help=__('overwrite existing files'),
     )
     parser.add_argument(
         '-l',
@@ -420,7 +435,8 @@ Note: By default this script will not overwrite already created files."""),
         dest='followlinks',
         default=False,
         help=__(
-            'follow symbolic links. Powerful when combined ' 'with collective.recipe.omelette.'
+            'follow symbolic links. Powerful when combined '
+            'with collective.recipe.omelette.'
         ),
     )
     parser.add_argument(
@@ -481,7 +497,8 @@ Note: By default this script will not overwrite already created files."""),
         action='store_true',
         dest='implicit_namespaces',
         help=__(
-            'interpret module paths according to PEP-0420 ' 'implicit namespaces specification'
+            'interpret module paths according to PEP-0420 '
+            'implicit namespaces specification'
         ),
     )
     parser.add_argument(
@@ -497,7 +514,9 @@ Note: By default this script will not overwrite already created files."""),
         '--remove-old',
         action='store_true',
         dest='remove_old',
-        help=__('Remove existing files in the output directory that were not generated'),
+        help=__(
+            'Remove existing files in the output directory that were not generated'
+        ),
     )
     exclusive_group.add_argument(
         '-F',
@@ -539,7 +558,9 @@ Note: By default this script will not overwrite already created files."""),
         '--doc-release',
         action='store',
         dest='release',
-        help=__('project release, used when --full is given, ' 'defaults to --doc-version'),
+        help=__(
+            'project release, used when --full is given, ' 'defaults to --doc-version'
+        ),
     )

     group = parser.add_argument_group(__('extension options'))
@@ -649,7 +670,11 @@ def main(argv: Sequence[str] = (), /) -> int:
            'suffix': '.' + args.suffix,
            'master': 'index',
            'epub': True,
-           'extensions': ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.todo'],
+           'extensions': [
+               'sphinx.ext.autodoc',
+               'sphinx.ext.viewcode',
+               'sphinx.ext.todo',
+           ],
            'makefile': True,
            'batchfile': True,
            'make_mode': True,
@@ -670,7 +695,9 @@ def main(argv: Sequence[str] = (), /) -> int:
            d['extensions'].extend(ext.split(','))

        if not args.dryrun:
-           qs.generate(d, silent=True, overwrite=args.force, templatedir=args.templatedir)
+           qs.generate(
+               d, silent=True, overwrite=args.force, templatedir=args.templatedir
+           )
    elif args.tocfile:
        written_files.append(
            create_modules_toc_file(modules, args, args.tocfile, args.templatedir)
@@ -131,7 +131,9 @@ class AutosummaryRenderer:
            msg = 'Expected a Sphinx application object!'
            raise ValueError(msg)

-       system_templates_path = [os.path.join(package_dir, 'ext', 'autosummary', 'templates')]
+       system_templates_path = [
+           os.path.join(package_dir, 'ext', 'autosummary', 'templates')
+       ]
        loader = SphinxTemplateLoader(
            app.srcdir, app.config.templates_path, system_templates_path
        )
@@ -297,7 +299,9 @@ def generate_autosummary_content(
        ns['members'] = scanner.scan(imported_members)

        respect_module_all = not app.config.autosummary_ignore_module_all
-       imported_members = imported_members or ('__all__' in dir(obj) and respect_module_all)
+       imported_members = imported_members or (
+           '__all__' in dir(obj) and respect_module_all
+       )

        ns['functions'], ns['all_functions'] = _get_members(
            doc, app, obj, {'function'}, imported=imported_members
@@ -378,7 +382,9 @@ def generate_autosummary_content(

 def _skip_member(app: Sphinx, obj: Any, name: str, objtype: str) -> bool:
     try:
-        return app.emit_firstresult('autodoc-skip-member', objtype, name, obj, False, {})
+        return app.emit_firstresult(
+            'autodoc-skip-member', objtype, name, obj, False, {}
+        )
     except Exception as exc:
         logger.warning(
             __(
@@ -465,7 +471,11 @@ def _get_module_attrs(name: str, members: Any) -> tuple[list[str], list[str]]:


 def _get_modules(
-    obj: Any, *, skip: Sequence[str], name: str, public_members: Sequence[str] | None = None
+    obj: Any,
+    *,
+    skip: Sequence[str],
+    name: str,
+    public_members: Sequence[str] | None = None,
 ) -> tuple[list[str], list[str]]:
     items: list[str] = []
     public: list[str] = []
@@ -511,7 +521,9 @@ def generate_autosummary_docs(
     showed_sources = sorted(sources)
     if len(showed_sources) > 20:
         showed_sources = showed_sources[:10] + ['...'] + showed_sources[-10:]
-    logger.info(__('[autosummary] generating autosummary for: %s'), ', '.join(showed_sources))
+    logger.info(
+        __('[autosummary] generating autosummary for: %s'), ', '.join(showed_sources)
+    )

     if output_dir:
         logger.info(__('[autosummary] writing to %s'), output_dir)
@@ -736,7 +748,9 @@ def find_autosummary_in_lines(
        if m:
            current_module = m.group(1).strip()
            # recurse into the automodule docstring
-           documented.extend(find_autosummary_in_docstring(current_module, filename=filename))
+           documented.extend(
+               find_autosummary_in_docstring(current_module, filename=filename)
+           )
            continue

        m = module_re.match(line)
@@ -823,7 +837,9 @@ The format of the autosummary directive is documented in the
         action='store_true',
         dest='remove_old',
         default=False,
-        help=__('Remove existing files in the output directory that were not generated'),
+        help=__(
+            'Remove existing files in the output directory that were not generated'
+        ),
     )

     return parser
@@ -48,7 +48,11 @@ lexer_classes: dict[str, type[Lexer] | partial[Lexer]] = {
 }


-escape_hl_chars = {ord('\\'): '\\PYGZbs{}', ord('{'): '\\PYGZob{}', ord('}'): '\\PYGZcb{}'}
+escape_hl_chars = {
+    ord('\\'): '\\PYGZbs{}',
+    ord('{'): '\\PYGZob{}',
+    ord('}'): '\\PYGZcb{}',
+}

 # used if Pygments is available
 # MEMO: no use of \protected here to avoid having to do hyperref extras,
@@ -96,7 +100,10 @@ class PygmentsBridge:
     latex_formatter = LatexFormatter[str]

     def __init__(
-        self, dest: str = 'html', stylename: str = 'sphinx', latex_engine: str | None = None
+        self,
+        dest: str = 'html',
+        stylename: str = 'sphinx',
+        latex_engine: str | None = None,
     ) -> None:
         self.dest = dest
         self.latex_engine = latex_engine
@@ -116,7 +116,9 @@ class SphinxFileSystemLoader(FileSystemLoader):
     template names.
     """

-    def get_source(self, environment: Environment, template: str) -> tuple[str, str, Callable]:
+    def get_source(
+        self, environment: Environment, template: str
+    ) -> tuple[str, str, Callable]:
         for searchpath in self.searchpath:
             filename = path.join(searchpath, template)
             f = open_if_exists(filename)
@@ -220,7 +222,9 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):

     # Loader interface

-    def get_source(self, environment: Environment, template: str) -> tuple[str, str, Callable]:
+    def get_source(
+        self, environment: Environment, template: str
+    ) -> tuple[str, str, Callable]:
         loaders = self.loaders
         # exclamation mark starts search from theme
         if template.startswith('!'):
@@ -166,11 +166,15 @@ def init_console(
     return init([locale_dir], language, catalog, 'console')


-def get_translator(catalog: str = 'sphinx', namespace: str = 'general') -> NullTranslations:
+def get_translator(
+    catalog: str = 'sphinx', namespace: str = 'general'
+) -> NullTranslations:
     return translators.get((namespace, catalog), NullTranslations())


-def is_translator_registered(catalog: str = 'sphinx', namespace: str = 'general') -> bool:
+def is_translator_registered(
+    catalog: str = 'sphinx', namespace: str = 'general'
+) -> bool:
     return (namespace, catalog) in translators

@@ -72,7 +72,9 @@ class RSTParser(docutils.parsers.rst.Parser, Parser):
        # preprocess inputstring
        if isinstance(inputstring, str):
            lines = docutils.statemachine.string2lines(
-               inputstring, tab_width=document.settings.tab_width, convert_whitespace=True
+               inputstring,
+               tab_width=document.settings.tab_width,
+               convert_whitespace=True,
            )

        inputlines = StringList(lines, document.current_source)
@@ -23,7 +23,9 @@ EXCLUDE_PATHS = ['**/_sources', '.#*', '**/.#*', '*.lproj/**']
 class Project:
     """A project is the source code set of the Sphinx document(s)."""

-    def __init__(self, srcdir: str | os.PathLike[str], source_suffix: Iterable[str]) -> None:
+    def __init__(
+        self, srcdir: str | os.PathLike[str], source_suffix: Iterable[str]
+    ) -> None:
         #: Source directory.
         self.srcdir = _StrPath(srcdir)

@@ -82,7 +84,9 @@ class Project:
                    self._docname_to_path[docname] = path
            else:
                logger.warning(
-                   __('Ignored unreadable document %r.'), filename, location=docname
+                   __('Ignored unreadable document %r.'),
+                   filename,
+                   location=docname,
                )

        return self.docnames
@@ -167,7 +167,11 @@ class XRefRole(ReferenceRole):
         return title, ws_re.sub(' ', target)

     def result_nodes(
-        self, document: nodes.document, env: BuildEnvironment, node: Element, is_ref: bool
+        self,
+        document: nodes.document,
+        env: BuildEnvironment,
+        node: Element,
+        is_ref: bool,
     ) -> tuple[list[Node], list[system_message]]:
         """Called before returning the finished nodes. *node* is the reference
         node if one was created (*is_ref* is then true), else the content node.
@@ -211,7 +215,9 @@ class PEP(ReferenceRole):

         try:
             refuri = self.build_uri()
-            reference = nodes.reference('', '', internal=False, refuri=refuri, classes=['pep'])
+            reference = nodes.reference(
+                '', '', internal=False, refuri=refuri, classes=['pep']
+            )
             if self.has_explicit_title:
                 reference += nodes.strong(self.title, self.title)
             else:
@@ -246,7 +252,9 @@ class RFC(ReferenceRole):

         try:
             refuri = self.build_uri()
-            reference = nodes.reference('', '', internal=False, refuri=refuri, classes=['rfc'])
+            reference = nodes.reference(
+                '', '', internal=False, refuri=refuri, classes=['rfc']
+            )
             if self.has_explicit_title:
                 reference += nodes.strong(self.title, self.title)
             else:
@@ -293,7 +293,9 @@ def _load_theme_with_ancestors(
     return themes, theme_dirs, tmp_dirs


-def _load_theme(name: str, theme_path: str, /) -> tuple[str, str, str | None, _ConfigFile]:
+def _load_theme(
+    name: str, theme_path: str, /
+) -> tuple[str, str, str | None, _ConfigFile]:
     if path.isdir(theme_path):
         # already a directory, do nothing
         tmp_dir = None
@@ -371,7 +373,9 @@ def _convert_theme_toml(cfg: _ThemeToml, /) -> _ConfigFile:
     pygments_table = theme.get('pygments_style', {})
     if isinstance(pygments_table, str):
         hint = f'pygments_style = {{ default = "{pygments_table}" }}'
-        msg = __('The "theme.pygments_style" setting must be a table. Hint: "%s"') % hint
+        msg = (
+            __('The "theme.pygments_style" setting must be a table. Hint: "%s"') % hint
+        )
         raise ThemeError(msg)
     pygments_style_default: str | None = pygments_table.get('default')
     pygments_style_dark: str | None = pygments_table.get('dark')
@@ -401,15 +405,23 @@ def _validate_theme_conf(cfg: configparser.RawConfigParser, name: str) -> str:

 def _convert_theme_conf(cfg: configparser.RawConfigParser, /) -> _ConfigFile:
     if stylesheet := cfg.get('theme', 'stylesheet', fallback=''):
-        stylesheets: tuple[str, ...] | None = tuple(map(str.strip, stylesheet.split(',')))
+        stylesheets: tuple[str, ...] | None = tuple(
+            map(str.strip, stylesheet.split(','))
+        )
     else:
         stylesheets = None
     if sidebar := cfg.get('theme', 'sidebars', fallback=''):
-        sidebar_templates: tuple[str, ...] | None = tuple(map(str.strip, sidebar.split(',')))
+        sidebar_templates: tuple[str, ...] | None = tuple(
+            map(str.strip, sidebar.split(','))
+        )
     else:
         sidebar_templates = None
-    pygments_style_default: str | None = cfg.get('theme', 'pygments_style', fallback=None)
-    pygments_style_dark: str | None = cfg.get('theme', 'pygments_dark_style', fallback=None)
+    pygments_style_default: str | None = cfg.get(
+        'theme', 'pygments_style', fallback=None
+    )
+    pygments_style_dark: str | None = cfg.get(
+        'theme', 'pygments_dark_style', fallback=None
+    )
     options = dict(cfg.items('options')) if cfg.has_section('options') else {}
     return _ConfigFile(
         stylesheets=stylesheets,
@@ -45,7 +45,9 @@ def add_uids(doctree: Node, condition: Callable[[Node], bool]) -> Iterator[Node]:
         yield node


-def merge_doctrees(old: Node, new: Node, condition: Callable[[Node], bool]) -> Iterator[Node]:
+def merge_doctrees(
+    old: Node, new: Node, condition: Callable[[Node], bool]
+) -> Iterator[Node]:
     """Merge the `old` doctree with the `new` one while looking at nodes
     matching the `condition`.

@@ -7,7 +7,12 @@ import pytest

 from sphinx.builders.html import StandaloneHTMLBuilder
 from sphinx.builders.latex import LaTeXBuilder
-from sphinx.environment import CONFIG_CHANGED, CONFIG_EXTENSIONS_CHANGED, CONFIG_NEW, CONFIG_OK
+from sphinx.environment import (
+    CONFIG_CHANGED,
+    CONFIG_EXTENSIONS_CHANGED,
+    CONFIG_NEW,
+    CONFIG_OK,
+)


 @pytest.mark.sphinx('dummy', testroot='basic')
@@ -1,6 +1,10 @@
 """Test sphinx.util.docstrings."""

-from sphinx.util.docstrings import prepare_commentdoc, prepare_docstring, separate_metadata
+from sphinx.util.docstrings import (
+    prepare_commentdoc,
+    prepare_docstring,
+    separate_metadata,
+)


 def test_separate_metadata():
@@ -4,7 +4,12 @@ from types import SimpleNamespace

 from docutils import nodes
 from docutils.parsers.rst.languages import en as english # type: ignore[attr-defined]
-from docutils.parsers.rst.states import Inliner, RSTState, RSTStateMachine, state_classes
+from docutils.parsers.rst.states import (
+    Inliner,
+    RSTState,
+    RSTStateMachine,
+    state_classes,
+)
 from docutils.statemachine import StringList

 from sphinx.util.docutils import SphinxDirective, new_document
@@ -14,7 +14,11 @@ from typing import Callable, List, Optional, Union # NoQA: UP035
 import pytest

 from sphinx.util import inspect
-from sphinx.util.inspect import TypeAliasForwardRef, TypeAliasNamespace, stringify_signature
+from sphinx.util.inspect import (
+    TypeAliasForwardRef,
+    TypeAliasNamespace,
+    stringify_signature,
+)
 from sphinx.util.typing import stringify_annotation

@@ -183,7 +183,9 @@ def run_compile() -> None:
            catalogue = read_po(infile, locale=locale.name)

        if catalogue.fuzzy:
-           log.info('catalogue %s is marked as fuzzy, skipping', po_file.relative_to(ROOT))
+           log.info(
+               'catalogue %s is marked as fuzzy, skipping', po_file.relative_to(ROOT)
+           )
            continue

        locale_errors = 0
@@ -250,7 +252,9 @@ def run_compile() -> None:
        _write_pr_body_line('## Babel catalogue errors')
        _write_pr_body_line('')
        for locale_name, err_count in total_errors.items():
-           log.error('error: %d errors encountered in %r locale.', err_count, locale_name)
+           log.error(
+               'error: %d errors encountered in %r locale.', err_count, locale_name
+           )
            s = 's' if err_count != 1 else ''
            _write_pr_body_line(f'* {locale_name}: {err_count} error{s}')

@@ -80,7 +80,9 @@ def parse_version(version: str) -> VersionInfo:
     raise RuntimeError(msg)


-def bump_version(path: Path, version_info: VersionInfo, in_develop: bool = True) -> None:
+def bump_version(
+    path: Path, version_info: VersionInfo, in_develop: bool = True
+) -> None:
     if in_develop or version_info.is_final:
         version = version_info.version
     else:
@@ -150,7 +152,9 @@ class Changes:

     @staticmethod
     def filter_empty_sections(body: str) -> str:
-        return re.sub('^\n.+\n-{3,}\n+(?=\n.+\n[-=]{3,}\n)', '', body, flags=re.MULTILINE)
+        return re.sub(
+            '^\n.+\n-{3,}\n+(?=\n.+\n[-=]{3,}\n)', '', body, flags=re.MULTILINE
+        )


 class Skip(Exception):
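Not part of the commit: a minimal stand-alone sketch for spotting lines that still exceed the new 88-column target in a single file. In the repository itself this is Ruff's job; the script below only mirrors the length check for quick local inspection.

# Reports every line in the given file that is longer than the 88-column target.
import sys


def report_long_lines(path: str, limit: int = 88) -> int:
    count = 0
    with open(path, encoding='utf-8') as f:
        for lineno, line in enumerate(f, start=1):
            length = len(line.rstrip('\n'))
            if length > limit:
                print(f'{path}:{lineno}: {length} characters')
                count += 1
    return count


if __name__ == '__main__':
    sys.exit(1 if report_long_lines(sys.argv[1]) else 0)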