Enable various pydocstyle (D) rules (#11878)

Co-authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com>
danieleades 2024-01-14 21:13:46 +00:00 committed by GitHub
parent 587e01177d
commit f7fbfaa471
GPG Key ID: 4AEE18F83AFDEB23
100 changed files with 301 additions and 127 deletions


@ -73,26 +73,25 @@ select = [
# "D107", # Missing docstring in `__init__`
# "D200", # One-line docstring should fit on one line
"D201", # No blank lines allowed before function docstring (found {num_lines})
# "D202", # No blank lines allowed after function docstring (found {num_lines})
# "D203", # 1 blank line required before class docstring
# "D204", # 1 blank line required after class docstring
"D202", # No blank lines allowed after function docstring (found {num_lines})
"D204", # 1 blank line required after class docstring
# "D205", # 1 blank line required between summary line and description
"D206", # Docstring should be indented with spaces, not tabs
# "D207", # Docstring is under-indented
# "D208", # Docstring is over-indented
# "D209", # Multi-line docstring closing quotes should be on a separate line
# "D210", # No whitespaces allowed surrounding docstring text
"D207", # Docstring is under-indented
"D208", # Docstring is over-indented
"D209", # Multi-line docstring closing quotes should be on a separate line
"D210", # No whitespaces allowed surrounding docstring text
"D211", # No blank lines allowed before class docstring
# "D212", # Multi-line docstring summary should start at the first line
# "D213", # Multi-line docstring summary should start at the second line
# "D214", # Section is over-indented ("{name}")
# "D215", # Section underline is over-indented ("{name}")
# "D300", # Use triple double quotes `"""`
# "D301", # Use `r"""` if any backslashes in a docstring
"D300", # Use triple double quotes `"""`
"D301", # Use `r"""` if any backslashes in a docstring
# "D400", # First line should end with a period
# "D401", # First line of docstring should be in imperative mood: "{first_line}"
# "D402", # First line should not be the function's signature
# "D403", # First word of the first line should be capitalized: `{}` -> `{}`
"D402", # First line should not be the function's signature
"D403", # First word of the first line should be capitalized: `{}` -> `{}`
# "D404", # First word of the docstring should not be "This"
"D405", # Section name should be properly capitalized ("{name}")
# "D406", # Section name should end with a newline ("{name}")
@ -499,9 +498,14 @@ select = [
# Ruff bug: https://github.com/astral-sh/ruff/issues/6540
"sphinx/transforms/i18n.py" = ["PGH004"]
# Function wrappers
"sphinx/ext/autodoc/importer.py" = ["D402"]
"sphinx/util/requests.py" = ["D402"]
"tests/*" = [
"E501",
"ANN", # tests don't need annotations
"D402",
"T201", # whitelist ``print`` for tests
]
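
As a rough illustration (not part of this commit), the sort of docstring each newly enabled rule flags; the function names below are hypothetical:

def build():
    """Run the build."""

    return 1  # D202: blank line between the docstring and the body is flagged

def render():
    """render the page."""  # D403: first word of the first line should be capitalized ("Render")

def parse():
    """ Parse the source. """  # D210: no whitespace allowed surrounding the docstring text

def publish():
    'Publish the site.'  # D300: use triple double quotes

def get(url, **kwargs):
    """get(url, **kwargs) -- send a GET request."""  # D402: first line should not be the
                                                      # signature (hence the per-file ignores above)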


@ -210,8 +210,7 @@ def parse_event(env, sig, signode):
def linkify_issues_in_changelog(app, docname, source):
""" Linkify issue references like #123 in changelog to GitHub. """
"""Linkify issue references like #123 in changelog to GitHub."""
if docname == 'changes':
changelog_path = os.path.join(os.path.dirname(__file__), "../CHANGES.rst")
# this path trickery is needed because this script can


@ -79,6 +79,7 @@ class translatable(nodes.Node):
class not_smartquotable:
"""A node which does not support smart-quotes."""
support_smartquotes = False
@ -163,6 +164,7 @@ class desc_signature(_desc_classes_injector, nodes.Part, nodes.Inline, nodes.Tex
This node always has the classes ``sig``, ``sig-object``, and the domain it belongs to.
"""
# Note: the domain name is being added through a post-transform DescSigAddDomainAsClass
classes = ['sig', 'sig-object']
@ -181,6 +183,7 @@ class desc_signature_line(nodes.Part, nodes.Inline, nodes.FixedTextElement):
with ``is_multiline`` set to ``True``.
Set ``add_permalink = True`` for the line that should get the permalink.
"""
sphinx_line_type = ''
@ -199,6 +202,7 @@ class desc_inline(_desc_classes_injector, nodes.Inline, nodes.TextElement):
This node always has the classes ``sig``, ``sig-inline``,
and the name of the domain it belongs to.
"""
classes = ['sig', 'sig-inline']
def __init__(self, domain: str, *args: Any, **kwargs: Any) -> None:
@ -219,6 +223,7 @@ class desc_name(_desc_classes_injector, nodes.Part, nodes.Inline, nodes.FixedTex
This node always has the class ``sig-name``.
"""
classes = ['sig-name', 'descname'] # 'descname' is for backwards compatibility
@ -230,6 +235,7 @@ class desc_addname(_desc_classes_injector, nodes.Part, nodes.Inline, nodes.Fixed
This node always has the class ``sig-prename``.
"""
# 'descclassname' is for backwards compatibility
classes = ['sig-prename', 'descclassname']
@ -256,6 +262,7 @@ class desc_parameterlist(nodes.Part, nodes.Inline, nodes.FixedTextElement):
Set ``multi_line_parameter_list = True`` to describe a multi-line parameter list.
In that case each parameter will then be written on its own, indented line.
"""
child_text_separator = ', '
def astext(self) -> str:
@ -269,6 +276,7 @@ class desc_type_parameter_list(nodes.Part, nodes.Inline, nodes.FixedTextElement)
Set ``multi_line_parameter_list = True`` to describe a multi-line type parameters list.
In that case each type parameter will then be written on its own, indented line.
"""
child_text_separator = ', '
def astext(self) -> str:
@ -285,6 +293,7 @@ class desc_type_parameter(nodes.Part, nodes.Inline, nodes.FixedTextElement):
class desc_optional(nodes.Part, nodes.Inline, nodes.FixedTextElement):
"""Node for marking optional parts of the parameter list."""
child_text_separator = ', '
def astext(self) -> str:
@ -315,6 +324,7 @@ SIG_ELEMENTS: set[type[desc_sig_element]] = set()
class desc_sig_element(nodes.inline, _desc_classes_injector):
"""Common parent class of nodes for inline text of a signature."""
classes: list[str] = []
def __init__(self, rawsource: str = '', text: str = '',
@ -334,6 +344,7 @@ class desc_sig_element(nodes.inline, _desc_classes_injector):
class desc_sig_space(desc_sig_element, _sig_element=True):
"""Node for a space in a signature."""
classes = ["w"]
def __init__(self, rawsource: str = '', text: str = ' ',
@ -343,41 +354,49 @@ class desc_sig_space(desc_sig_element, _sig_element=True):
class desc_sig_name(desc_sig_element, _sig_element=True):
"""Node for an identifier in a signature."""
classes = ["n"]
class desc_sig_operator(desc_sig_element, _sig_element=True):
"""Node for an operator in a signature."""
classes = ["o"]
class desc_sig_punctuation(desc_sig_element, _sig_element=True):
"""Node for punctuation in a signature."""
classes = ["p"]
class desc_sig_keyword(desc_sig_element, _sig_element=True):
"""Node for a general keyword in a signature."""
classes = ["k"]
class desc_sig_keyword_type(desc_sig_element, _sig_element=True):
"""Node for a keyword which is a built-in type in a signature."""
classes = ["kt"]
class desc_sig_literal_number(desc_sig_element, _sig_element=True):
"""Node for a numeric literal in a signature."""
classes = ["m"]
class desc_sig_literal_string(desc_sig_element, _sig_element=True):
"""Node for a string literal in a signature."""
classes = ["s"]
class desc_sig_literal_char(desc_sig_element, _sig_element=True):
"""Node for a character literal in a signature."""
classes = ["sc"]
@ -425,7 +444,7 @@ class index(nodes.Invisible, nodes.Inline, nodes.TextElement):
class centered(nodes.Part, nodes.TextElement):
"""This node is deprecated."""
"""Deprecated."""
class acks(nodes.Element):
@ -479,6 +498,7 @@ class pending_xref(nodes.Inline, nodes.Element):
These nodes are resolved before writing output, in
BuildEnvironment.resolve_references.
"""
child_text_separator = ''


@ -317,7 +317,8 @@ class EpubBuilder(StandaloneHTMLBuilder):
def footnote_spot(tree: nodes.document) -> tuple[Element, int]:
"""Find or create a spot to place footnotes.
The function returns the tuple (parent, index)."""
The function returns the tuple (parent, index).
"""
# The code uses the following heuristic:
# a) place them after the last existing footnote
# b) place them after an (empty) Footnotes rubric
@ -480,7 +481,6 @@ class EpubBuilder(StandaloneHTMLBuilder):
"""Create a dictionary with all metadata for the content.opf
file properly escaped.
"""
if (source_date_epoch := os.getenv('SOURCE_DATE_EPOCH')) is not None:
time_tuple = time.gmtime(int(source_date_epoch))
else:


@ -26,6 +26,7 @@ class ChangesBuilder(Builder):
"""
Write a summary with all versionadded/changed directives.
"""
name = 'changes'
epilog = __('The overview file is in %(outdir)s.')


@ -21,6 +21,7 @@ class DirectoryHTMLBuilder(StandaloneHTMLBuilder):
a directory given by their pagename, so that generated URLs don't have
``.html`` in them.
"""
name = 'dirhtml'
def get_target_uri(self, docname: str, typ: str | None = None) -> str:


@ -75,6 +75,7 @@ class Epub3Builder(_epub_base.EpubBuilder):
and META-INF/container.xml. Afterwards, all necessary files are zipped to
an epub file.
"""
name = 'epub'
epilog = __('The ePub file is in %(outdir)s.')
@ -240,7 +241,7 @@ def validate_config_values(app: Sphinx) -> None:
def convert_epub_css_files(app: Sphinx, config: Config) -> None:
"""This converts string styled epub_css_files to tuple styled one."""
"""Convert string styled epub_css_files to tuple styled one."""
epub_css_files: list[tuple[str, dict[str, Any]]] = []
for entry in config.epub_css_files:
if isinstance(entry, str):


@ -39,6 +39,7 @@ logger = logging.getLogger(__name__)
class Message:
"""An entry of translatable message."""
def __init__(self, text: str, locations: list[tuple[str, int]], uuids: list[str]):
self.text = text
self.locations = locations
@ -119,6 +120,7 @@ class I18nTags(Tags):
To translate all text inside of only nodes, this class
always returns True value even if no tags are defined.
"""
def eval_condition(self, condition: Any) -> bool:
return True
@ -127,6 +129,7 @@ class I18nBuilder(Builder):
"""
General i18n builder.
"""
name = 'i18n'
versioning_method = 'text'
use_message_catalog = False
@ -212,6 +215,7 @@ class MessageCatalogBuilder(I18nBuilder):
"""
Builds gettext-style message catalogs (.pot files).
"""
name = 'gettext'
epilog = __('The message catalogs are in %(outdir)s.')


@ -160,6 +160,7 @@ class StandaloneHTMLBuilder(Builder):
"""
Builds standalone HTML docs.
"""
name = 'html'
format = 'html'
epilog = __('The HTML pages are in %(outdir)s.')
@ -782,7 +783,7 @@ class StandaloneHTMLBuilder(Builder):
path.join(self.srcdir, src), err)
def create_pygments_style_file(self) -> None:
"""create a style file for pygments."""
"""Create a style file for pygments."""
with open(path.join(self.outdir, '_static', 'pygments.css'), 'w',
encoding="utf-8") as f:
f.write(self.highlighter.get_stylesheet())
@ -863,7 +864,7 @@ class StandaloneHTMLBuilder(Builder):
logger.warning(__('cannot copy static file %r'), err)
def copy_extra_files(self) -> None:
"""copy html_extra_path files."""
"""Copy html_extra_path files."""
try:
with progress_message(__('copying extra files')):
excluded = Matcher(self.config.exclude_patterns)
@ -1169,7 +1170,7 @@ class StandaloneHTMLBuilder(Builder):
def convert_html_css_files(app: Sphinx, config: Config) -> None:
"""This converts string styled html_css_files to tuple styled one."""
"""Convert string styled html_css_files to tuple styled one."""
html_css_files: list[tuple[str, dict]] = []
for entry in config.html_css_files:
if isinstance(entry, str):
@ -1192,7 +1193,7 @@ def _format_modified_time(timestamp: float) -> str:
def convert_html_js_files(app: Sphinx, config: Config) -> None:
"""This converts string styled html_js_files to tuple styled one."""
"""Convert string styled html_js_files to tuple styled one."""
html_js_files: list[tuple[str, dict]] = []
for entry in config.html_js_files:
if isinstance(entry, str):


@ -31,6 +31,7 @@ class KeyboardTransform(SphinxPostTransform):
<literal class="kbd">
x
"""
default_priority = 400
formats = ('html',)
pattern = re.compile(r'(?<=.)(-|\+|\^|\s+)(?=.)')


@ -108,6 +108,7 @@ class LaTeXBuilder(Builder):
"""
Builds LaTeX output to create PDF.
"""
name = 'latex'
format = 'latex'
epilog = __('The LaTeX files are in %(outdir)s.')
@ -389,7 +390,7 @@ class LaTeXBuilder(Builder):
@progress_message(__('copying TeX support files'))
def copy_support_files(self) -> None:
"""copy TeX support files from texinputs."""
"""Copy TeX support files from texinputs."""
# configure usage of xindy (impacts Makefile and latexmkrc)
# FIXME: convert this rather to a confval with suitable default
# according to language ? but would require extra documentation
@ -479,7 +480,7 @@ def install_packages_for_ja(app: Sphinx) -> None:
def default_latex_engine(config: Config) -> str:
""" Better default latex_engine settings for specific languages. """
"""Better default latex_engine settings for specific languages."""
if config.language == 'ja':
return 'uplatex'
if config.language.startswith('zh'):
@ -490,7 +491,7 @@ def default_latex_engine(config: Config) -> str:
def default_latex_docclass(config: Config) -> dict[str, str]:
""" Better default latex_docclass settings for specific languages. """
"""Better default latex_docclass settings for specific languages."""
if config.language == 'ja':
if config.latex_engine == 'uplatex':
return {'manual': 'ujbook',
@ -503,12 +504,12 @@ def default_latex_docclass(config: Config) -> dict[str, str]:
def default_latex_use_xindy(config: Config) -> bool:
""" Better default latex_use_xindy settings for specific engines. """
"""Better default latex_use_xindy settings for specific engines."""
return config.latex_engine in {'xelatex', 'lualatex'}
def default_latex_documents(config: Config) -> list[tuple[str, str, str, str, str]]:
""" Better default latex_documents settings. """
"""Better default latex_documents settings."""
project = texescape.escape(config.project, config.latex_engine)
author = texescape.escape(config.author, config.latex_engine)
return [(config.root_doc,


@ -5,26 +5,30 @@ from docutils import nodes
class captioned_literal_block(nodes.container):
"""A node for a container of literal_block having a caption."""
pass
class footnotemark(nodes.Inline, nodes.Referential, nodes.TextElement):
"""A node represents ``\footnotemark``."""
r"""A node represents ``\footnotemark``."""
pass
class footnotetext(nodes.General, nodes.BackLinkable, nodes.Element,
nodes.Labeled, nodes.Targetable):
"""A node represents ``\footnotetext``."""
r"""A node represents ``\footnotetext``."""
class math_reference(nodes.Inline, nodes.Referential, nodes.TextElement):
"""A node for a reference for equation."""
pass
class thebibliography(nodes.container):
"""A node for wrapping bibliographies."""
pass
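
A quick sketch (not from the diff) of why D301 asks for r""" once a docstring contains a backslash: in a plain string literal, \f is an escape sequence, so the docstring text would silently change.

plain = "A node represents ``\footnotemark``."  # "\f" is parsed as a form feed (U+000C)
raw = r"A node represents ``\footnotemark``."   # the raw string keeps the backslash as written
assert "\x0c" in plain
assert "\\footnotemark" in raw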


@ -31,6 +31,7 @@ URI_SCHEMES = ('mailto:', 'http:', 'https:', 'ftp:')
class FootnoteDocnameUpdater(SphinxTransform):
"""Add docname to footnote and footnote_reference nodes."""
default_priority = 700
TARGET_NODES = (nodes.footnote, nodes.footnote_reference)
@ -59,6 +60,7 @@ class ShowUrlsTransform(SphinxPostTransform):
.. note:: This transform is used for integrated doctree
"""
default_priority = 400
formats = ('latex',)
@ -509,6 +511,7 @@ class BibliographyTransform(SphinxPostTransform):
<citation>
...
"""
default_priority = 750
formats = ('latex',)
@ -528,6 +531,7 @@ class CitationReferenceTransform(SphinxPostTransform):
To handle citation reference easily on LaTeX writer, this converts
pending_xref nodes to citation_reference.
"""
default_priority = 5 # before ReferencesResolver
formats = ('latex',)
@ -548,6 +552,7 @@ class MathReferenceTransform(SphinxPostTransform):
To handle math reference easily on LaTeX writer, this converts pending_xref
nodes to math_reference.
"""
default_priority = 5 # before ReferencesResolver
formats = ('latex',)
@ -563,6 +568,7 @@ class MathReferenceTransform(SphinxPostTransform):
class LiteralBlockTransform(SphinxPostTransform):
"""Replace container nodes for literal_block by captioned_literal_block."""
default_priority = 400
formats = ('latex',)
@ -575,6 +581,7 @@ class LiteralBlockTransform(SphinxPostTransform):
class DocumentTargetTransform(SphinxPostTransform):
"""Add :doc label to the first section of each document."""
default_priority = 400
formats = ('latex',)
@ -586,10 +593,10 @@ class DocumentTargetTransform(SphinxPostTransform):
class IndexInSectionTitleTransform(SphinxPostTransform):
"""Move index nodes in section title to outside of the title.
r"""Move index nodes in section title to outside of the title.
LaTeX index macro is not compatible with some handling of section titles
such as uppercasing done on LaTeX side (cf. fncychap handling of ``\\chapter``).
such as uppercasing done on LaTeX side (cf. fncychap handling of ``\chapter``).
Moving the index node to after the title node fixes that.
Before::
@ -611,6 +618,7 @@ class IndexInSectionTitleTransform(SphinxPostTransform):
blah blah blah
...
"""
default_priority = 400
formats = ('latex',)


@ -35,7 +35,7 @@ class ExtBabel(Babel):
return 'english' # fallback to english
def get_mainlanguage_options(self) -> str | None:
"""Return options for polyglossia's ``\\setmainlanguage``."""
r"""Return options for polyglossia's ``\setmainlanguage``."""
if self.use_polyglossia is False:
return None
elif self.language == 'german':


@ -59,6 +59,7 @@ class CheckExternalLinksBuilder(DummyBuilder):
"""
Checks for broken external links.
"""
name = 'linkcheck'
epilog = __('Look for any errors in the above output or in '
'%(outdir)s/output.txt')
@ -582,7 +583,6 @@ def _get_request_headers(
def contains_anchor(response: Response, anchor: str) -> bool:
"""Determine if an anchor is contained within an HTTP response."""
parser = AnchorCheckParser(unquote(anchor))
# Read file in chunks. If we find a matching anchor, we break
# the loop early in hopes not to have to download the whole thing.


@ -30,6 +30,7 @@ class ManualPageBuilder(Builder):
"""
Builds groff output in manual page format.
"""
name = 'man'
format = 'man'
epilog = __('The manual pages are in %(outdir)s.')
@ -107,7 +108,7 @@ class ManualPageBuilder(Builder):
def default_man_pages(config: Config) -> list[tuple[str, str, str, list[str], int]]:
""" Better default man_pages settings. """
"""Better default man_pages settings."""
filename = make_filename_from_project(config.project)
return [(config.root_doc, filename, f'{config.project} {config.release}',
[config.author], 1)]


@ -28,6 +28,7 @@ class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
A StandaloneHTMLBuilder subclass that puts the whole document tree on one
HTML page.
"""
name = 'singlehtml'
epilog = __('The HTML page is in %(outdir)s.')


@ -41,6 +41,7 @@ class TexinfoBuilder(Builder):
"""
Builds Texinfo output to create Info documentation.
"""
name = 'texinfo'
format = 'texinfo'
epilog = __('The Texinfo files are in %(outdir)s.')
@ -205,7 +206,7 @@ class TexinfoBuilder(Builder):
def default_texinfo_documents(
config: Config,
) -> list[tuple[str, str, str, str, str, str, str]]:
""" Better default texinfo_documents settings. """
"""Better default texinfo_documents settings."""
filename = make_filename_from_project(config.project)
return [(config.root_doc, filename, config.project, config.author, filename,
'One line description of project', 'Miscellaneous')]


@ -29,6 +29,7 @@ class XMLBuilder(Builder):
"""
Builds Docutils-native XML.
"""
name = 'xml'
format = 'xml'
epilog = __('The XML files are in %(outdir)s.')
@ -101,6 +102,7 @@ class PseudoXMLBuilder(XMLBuilder):
"""
Builds pseudo-XML for display purposes.
"""
name = 'pseudoxml'
format = 'pseudoxml'
epilog = __('The pseudo-XML files are in %(outdir)s.')


@ -211,7 +211,6 @@ def ask_user(d: dict[str, Any]) -> None:
* makefile: make Makefile
* batchfile: make command file
"""
print(bold(__('Welcome to the Sphinx %s quickstart utility.')) % __display_version__)
print()
print(__('Please enter values for the following settings (just press Enter to\n'


@ -71,6 +71,7 @@ class ENUM:
Example:
app.add_config_value('latex_show_urls', 'no', None, ENUM('no', 'footnote', 'inline'))
"""
def __init__(self, *candidates: str | bool | None) -> None:
self.candidates = candidates


@ -46,7 +46,6 @@ def _deprecation_warning(
_deprecation_warning(__name__, name, canonical_name, remove=remove)
return deprecated_object
"""
if remove == (8, 0):
warning_class: type[Warning] = RemovedInSphinx80Warning
elif remove == (9, 0):


@ -43,6 +43,7 @@ class TocTree(SphinxDirective):
Directive to notify Sphinx about the hierarchical structure of the docs,
and to include a table-of-contents like tree in the current document.
"""
has_content = True
required_arguments = 0
optional_arguments = 0
@ -173,6 +174,7 @@ class Author(SphinxDirective):
Directive to give the name of the author of the current document
or section. Shown in the output only if the show_authors option is on.
"""
has_content = False
required_arguments = 1
optional_arguments = 0
@ -206,6 +208,7 @@ class SeeAlso(BaseAdmonition):
"""
An admonition mentioning things to look at as reference.
"""
node_class = addnodes.seealso
@ -213,6 +216,7 @@ class TabularColumns(SphinxDirective):
"""
Directive to give an explicit tabulary column definition to LaTeX.
"""
has_content = False
required_arguments = 1
optional_arguments = 0
@ -230,6 +234,7 @@ class Centered(SphinxDirective):
"""
Directive to create a centered line of bold text.
"""
has_content = False
required_arguments = 1
optional_arguments = 0
@ -252,6 +257,7 @@ class Acks(SphinxDirective):
"""
Directive for a list of names.
"""
has_content = True
required_arguments = 0
optional_arguments = 0
@ -274,6 +280,7 @@ class HList(SphinxDirective):
"""
Directive for a list that gets compacted horizontally.
"""
has_content = True
required_arguments = 0
optional_arguments = 0
@ -311,6 +318,7 @@ class Only(SphinxDirective):
"""
Directive to only include text if the given tag(s) are enabled.
"""
has_content = True
required_arguments = 1
optional_arguments = 0


@ -80,6 +80,7 @@ class Code(SphinxDirective):
This is compatible with docutils' :rst:dir:`code` directive.
"""
optional_arguments = 1
option_spec: OptionSpec = {
'class': directives.class_option,


@ -3721,6 +3721,7 @@ class CExprRole(SphinxRole):
class CDomain(Domain):
"""C language domain."""
name = 'c'
label = 'C'
object_types = {


@ -45,6 +45,7 @@ class VersionChange(SphinxDirective):
"""
Directive to describe a change/addition/deprecation in a specific version.
"""
has_content = True
required_arguments = 1
optional_arguments = 1


@ -103,6 +103,7 @@ class CitationDomain(Domain):
class CitationDefinitionTransform(SphinxTransform):
"""Mark citation definition labels as not smartquoted."""
default_priority = 619
def apply(self, **kwargs: Any) -> None:
@ -122,6 +123,7 @@ class CitationReferenceTransform(SphinxTransform):
Replace citation references by pending_xref nodes before the default
docutils transform tries to resolve them.
"""
default_priority = 619
def apply(self, **kwargs: Any) -> None:


@ -7913,6 +7913,7 @@ class CPPDomain(Domain):
object_types dict below. They are the core different types of declarations in C++ that
one can document.
"""
name = 'cpp'
label = 'C++'
object_types = {


@ -29,6 +29,7 @@ logger = logging.getLogger(__name__)
class IndexDomain(Domain):
"""Mathematics domain."""
name = 'index'
label = 'index'
@ -62,6 +63,7 @@ class IndexDirective(SphinxDirective):
"""
Directive to add entries to the index.
"""
has_content = False
required_arguments = 1
optional_arguments = 0


@ -37,6 +37,7 @@ class JSObject(ObjectDescription[tuple[str, str]]):
"""
Description of a JavaScript object.
"""
#: If set to ``True`` this object is callable and a `desc_parameterlist` is
#: added
has_arguments = False
@ -247,6 +248,7 @@ class JSObject(ObjectDescription[tuple[str, str]]):
class JSCallable(JSObject):
"""Description of a JavaScript function, method or constructor."""
has_arguments = True
doc_field_types = [
@ -358,6 +360,7 @@ class JSXRefRole(XRefRole):
class JavaScriptDomain(Domain):
"""JavaScript language domain."""
name = 'js'
label = 'JavaScript'
# if you add a new object type make sure to edit JSObject.get_index_string


@ -34,6 +34,7 @@ class MathReferenceRole(XRefRole):
class MathDomain(Domain):
"""Mathematics domain."""
name = 'math'
label = 'mathematics'


@ -667,6 +667,7 @@ class PyObject(ObjectDescription[tuple[str, str]]):
:cvar allow_nesting: Class is an object that allows for nested namespaces
:vartype allow_nesting: bool
"""
option_spec: OptionSpec = {
'no-index': directives.flag,
'no-index-entry': directives.flag,
@ -1452,6 +1453,7 @@ class PythonModuleIndex(Index):
class PythonDomain(Domain):
"""Python language domain."""
name = 'py'
label = 'Python'
object_types: dict[str, ObjType] = {


@ -35,6 +35,7 @@ class ReSTMarkup(ObjectDescription[str]):
"""
Description of generic reST markup.
"""
option_spec: OptionSpec = {
'no-index': directives.flag,
'no-index-entry': directives.flag,
@ -112,6 +113,7 @@ class ReSTDirective(ReSTMarkup):
"""
Description of a reST directive.
"""
def handle_signature(self, sig: str, signode: desc_signature) -> str:
name, args = parse_directive(sig)
desc_name = f'.. {name}::'
@ -139,6 +141,7 @@ class ReSTDirectiveOption(ReSTMarkup):
"""
Description of an option for reST directive.
"""
option_spec: OptionSpec = ReSTMarkup.option_spec.copy()
option_spec.update({
'type': directives.unchanged,
@ -199,6 +202,7 @@ class ReSTRole(ReSTMarkup):
"""
Description of a reST role.
"""
def handle_signature(self, sig: str, signode: desc_signature) -> str:
desc_name = f':{sig}:'
signode['fullname'] = sig.strip()
@ -211,6 +215,7 @@ class ReSTRole(ReSTMarkup):
class ReSTDomain(Domain):
"""ReStructuredText domain."""
name = 'rst'
label = 'reStructuredText'


@ -43,6 +43,7 @@ class GenericObject(ObjectDescription[str]):
"""
A generic x-ref directive registered with Sphinx.add_object_type().
"""
indextemplate: str = ''
parse_node: Callable[[BuildEnvironment, str, desc_signature], str] | None = None
@ -104,6 +105,7 @@ class Target(SphinxDirective):
"""
Generic target for user-defined cross-reference types.
"""
indextemplate = ''
has_content = False


@ -340,7 +340,7 @@ class BuildEnvironment:
self.settings.setdefault('smart_quotes', True)
def set_versioning_method(self, method: str | Callable, compare: bool) -> None:
"""This sets the doctree versioning method for this environment.
"""Set the doctree versioning method for this environment.
Versioning methods are a builder property; only builders with the same
versioning method can share the same doctree directory. Therefore, we
@ -426,7 +426,7 @@ class BuildEnvironment:
@property
def found_docs(self) -> set[str]:
"""contains all existing docnames."""
"""Contains all existing docnames."""
return self.project.docnames
def find_files(self, config: Config, builder: Builder) -> None:
@ -745,7 +745,6 @@ def _last_modified_time(filename: str | os.PathLike[str]) -> int:
We prefer to err on the side of re-rendering a file,
so we round up to the nearest microsecond.
"""
# upside-down floor division to get the ceiling
return -(os.stat(filename).st_mtime_ns // -1_000)
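
A small aside (not part of the diff) on the "upside-down floor division" comment above: negating both operands of // turns floor division into ceiling division, which is what rounds the modification time up to the next microsecond.

assert -(7 // -2) == 4                  # ceil(7 / 2) using only floor division
assert -(1_234_567 // -1_000) == 1_235  # 1 234 567 ns rounded up to 1235 microseconds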


@ -129,7 +129,7 @@ def _add_entry(word: str, subword: str, main: str | None, *,
def _key_func_0(entry: tuple[str, str]) -> tuple[bool, str]:
"""sort the index entries for same keyword."""
"""Sort the index entries for same keyword."""
main, uri = entry
return not main, uri # show main entries at first
@ -156,7 +156,7 @@ def _key_func_1(entry: tuple[str, list]) -> tuple[tuple[int, str], str]:
def _key_func_2(entry: tuple[str, list]) -> str:
"""sort the sub-index entries"""
"""Sort the sub-index entries"""
key = unicodedata.normalize('NFD', entry[0].lower())
if key.startswith('\N{RIGHT-TO-LEFT MARK}'):
key = key[1:]


@ -47,7 +47,6 @@ def document_toc(env: BuildEnvironment, docname: str, tags: Tags) -> Node:
For a ToC tree that shows the document's place in the
ToC structure, use `get_toctree_for`.
"""
tocdepth = env.metadata[docname].get('tocdepth', 0)
try:
toc = _toctree_copy(env.tocs[docname], 2, tocdepth, False, tags)
@ -74,7 +73,6 @@ def global_toctree_for_doc(
This gives the global ToC, with all ancestors and their siblings.
"""
toctrees: list[Element] = []
for toctree_node in env.master_doctree.findall(addnodes.toctree):
if toctree := _resolve_toctree(
@ -113,7 +111,6 @@ def _resolve_toctree(
If *collapse* is True, all branches not containing docname will
be collapsed.
"""
if toctree.get('hidden', False) and not includehidden:
return None


@ -41,19 +41,22 @@ class EnvironmentCollector:
def clear_doc(self, app: Sphinx, env: BuildEnvironment, docname: str) -> None:
"""Remove specified data of a document.
This method is called on the removal of the document."""
This method is called on the removal of the document.
"""
raise NotImplementedError
def merge_other(self, app: Sphinx, env: BuildEnvironment,
docnames: set[str], other: BuildEnvironment) -> None:
"""Merge in specified data regarding docnames from a different `BuildEnvironment`
object which coming from a subprocess in parallel builds."""
object which coming from a subprocess in parallel builds.
"""
raise NotImplementedError
def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
"""Process a document and gather specific data from it.
This method is called after the document is read."""
This method is called after the document is read.
"""
raise NotImplementedError
def get_updated_docs(self, app: Sphinx, env: BuildEnvironment) -> list[str]:


@ -121,7 +121,7 @@ class DownloadFileCollector(EnvironmentCollector):
env.dlfiles.merge_other(docnames, other.dlfiles)
def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
"""Process downloadable file paths. """
"""Process downloadable file paths."""
for node in doctree.findall(addnodes.download_reference):
targetname = node['reftarget']
if '://' in targetname:


@ -25,16 +25,19 @@ class SphinxError(Exception):
exception to a string ("category: message"). Should be set accordingly
in subclasses.
"""
category = 'Sphinx error'
class SphinxWarning(SphinxError):
"""Warning, treated as error."""
category = 'Warning, treated as error'
class ApplicationError(SphinxError):
"""Application initialization error."""
category = 'Application error'
@ -70,26 +73,31 @@ class ExtensionError(SphinxError):
class BuildEnvironmentError(SphinxError):
"""BuildEnvironment error."""
category = 'BuildEnvironment error'
class ConfigError(SphinxError):
"""Configuration error."""
category = 'Configuration error'
class DocumentError(SphinxError):
"""Document error."""
category = 'Document error'
class ThemeError(SphinxError):
"""Theme error."""
category = 'Theme error'
class VersionRequirementError(SphinxError):
"""Incompatible Sphinx version error."""
category = 'Sphinx version error'
@ -118,10 +126,13 @@ class PycodeError(Exception):
class NoUri(Exception):
"""Raised by builder.get_relative_uri() or from missing-reference handlers
if there is no URI available."""
if there is no URI available.
"""
pass
class FiletypeNotFoundError(Exception):
"""Raised by get_filetype() if a filename matches no source suffix."""
pass


@ -84,7 +84,6 @@ class EventManager:
def emit(self, name: str, *args: Any,
allowed_exceptions: tuple[type[Exception], ...] = ()) -> list:
"""Emit a Sphinx event."""
# not every object likes to be repr()'d (think
# random stuff coming via autodoc)
with contextlib.suppress(Exception):


@ -245,6 +245,7 @@ def between(
# But we define this class here to keep compatibility (see #4538)
class Options(dict):
"""A dict/attribute hybrid that returns None on nonexisting keys."""
def copy(self) -> Options:
return Options(super().copy())
@ -299,6 +300,7 @@ class Documenter:
in fact, it will be used to parse an auto directive's options that matches
the Documenter.
"""
#: name by which the directive is called (auto...) and the default
#: generated directive name
objtype = 'object'
@ -969,6 +971,7 @@ class ModuleDocumenter(Documenter):
"""
Specialized Documenter subclass for modules.
"""
objtype = 'module'
content_indent = ''
_extra_indent = ' '
@ -1131,6 +1134,7 @@ class ModuleLevelDocumenter(Documenter):
Specialized Documenter subclass for objects on module level (functions,
classes, data/constants).
"""
def resolve_name(self, modname: str | None, parents: Any, path: str, base: str,
) -> tuple[str | None, list[str]]:
if modname is not None:
@ -1154,6 +1158,7 @@ class ClassLevelDocumenter(Documenter):
Specialized Documenter subclass for objects on class level (methods,
attributes).
"""
def resolve_name(self, modname: str | None, parents: Any, path: str, base: str,
) -> tuple[str | None, list[str]]:
if modname is not None:
@ -1189,6 +1194,7 @@ class DocstringSignatureMixin:
Mixin for FunctionDocumenter and MethodDocumenter to provide the
feature of reading the signature from the docstring.
"""
_new_docstrings: list[list[str]] | None = None
_signatures: list[str] = []
@ -1270,6 +1276,7 @@ class DocstringStripSignatureMixin(DocstringSignatureMixin):
Mixin for AttributeDocumenter to provide the
feature of stripping any function signature from the docstring.
"""
def format_signature(self, **kwargs: Any) -> str:
if (
self.args is None
@ -1290,6 +1297,7 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
"""
Specialized Documenter subclass for functions.
"""
objtype = 'function'
member_order = 30
@ -1419,6 +1427,7 @@ class DecoratorDocumenter(FunctionDocumenter):
"""
Specialized Documenter subclass for decorator functions.
"""
objtype = 'decorator'
# must be lower than FunctionDocumenter
@ -1450,6 +1459,7 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
"""
Specialized Documenter subclass for classes.
"""
objtype = 'class'
member_order = 20
option_spec: OptionSpec = {
@ -1515,7 +1525,7 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
return None, None, None
def get_user_defined_function_or_method(obj: Any, attr: str) -> Any:
""" Get the `attr` function or method from `obj`, if it is user-defined. """
"""Get the `attr` function or method from `obj`, if it is user-defined."""
if inspect.is_builtin_class_method(obj, attr):
return None
attr = self.get_attr(obj, attr, None)
@ -1910,6 +1920,7 @@ class ExceptionDocumenter(ClassDocumenter):
"""
Specialized ClassDocumenter subclass for exceptions.
"""
objtype = 'exception'
member_order = 10
@ -2023,6 +2034,7 @@ class DataDocumenter(GenericAliasMixin,
"""
Specialized Documenter subclass for data items.
"""
objtype = 'data'
member_order = 40
priority = -10
@ -2144,6 +2156,7 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
"""
Specialized Documenter subclass for methods (normal, static and class).
"""
objtype = 'method'
directivetype = 'method'
member_order = 50
@ -2474,7 +2487,8 @@ class RuntimeInstanceAttributeMixin(DataDocumenterMixinBase):
def import_object(self, raiseerror: bool = False) -> bool:
"""Check the existence of runtime instance attribute after failing to import the
attribute."""
attribute.
"""
try:
return super().import_object(raiseerror=True) # type: ignore[misc]
except ImportError as exc:
@ -2527,7 +2541,8 @@ class UninitializedInstanceAttributeMixin(DataDocumenterMixinBase):
def import_object(self, raiseerror: bool = False) -> bool:
"""Check the exisitence of uninitialized instance attribute when failed to import
the attribute."""
the attribute.
"""
try:
return super().import_object(raiseerror=True) # type: ignore[misc]
except ImportError as exc:
@ -2566,6 +2581,7 @@ class AttributeDocumenter(GenericAliasMixin, SlotsMixin, # type: ignore[misc]
"""
Specialized Documenter subclass for attributes.
"""
objtype = 'attribute'
member_order = 60
option_spec: OptionSpec = dict(ModuleLevelDocumenter.option_spec)
@ -2723,6 +2739,7 @@ class PropertyDocumenter(DocstringStripSignatureMixin, # type: ignore[misc]
"""
Specialized Documenter subclass for properties.
"""
objtype = 'property'
member_order = 60
@ -2745,7 +2762,8 @@ class PropertyDocumenter(DocstringStripSignatureMixin, # type: ignore[misc]
def import_object(self, raiseerror: bool = False) -> bool:
"""Check the exisitence of uninitialized instance attribute when failed to import
the attribute."""
the attribute.
"""
ret = super().import_object(raiseerror)
if ret and not inspect.isproperty(self.object):
__dict__ = safe_getattr(self.parent, '__dict__', {})


@ -104,6 +104,7 @@ class AutodocDirective(SphinxDirective):
It invokes a Documenter upon running. After the processing, it parses and returns
the content generated by Documenter.
"""
option_spec = DummyOptionSpec()
has_content = True
required_arguments = 1


@ -61,9 +61,7 @@ def unmangle(subject: Any, name: str) -> str | None:
def import_module(modname: str, warningiserror: bool = False) -> Any:
"""
Call importlib.import_module(modname), convert exceptions to ImportError
"""
"""Call importlib.import_module(modname), convert exceptions to ImportError."""
try:
with logging.skip_warningiserror(not warningiserror):
return importlib.import_module(modname)


@ -80,6 +80,7 @@ def _make_subclass(name: str, module: str, superclass: Any = _MockObject,
class _MockModule(ModuleType):
"""Used by autodoc_mock_imports."""
__file__ = os.devnull
__sphinx_mock__ = True
@ -97,6 +98,7 @@ class _MockModule(ModuleType):
class MockLoader(Loader):
"""A loader for mocking."""
def __init__(self, finder: MockFinder) -> None:
super().__init__()
self.finder = finder
@ -138,9 +140,9 @@ class MockFinder(MetaPathFinder):
def mock(modnames: list[str]) -> Generator[None, None, None]:
"""Insert mock modules during context::
with mock(['target.module.name']):
# mock modules are enabled here
...
with mock(['target.module.name']):
# mock modules are enabled here
...
"""
try:
finder = MockFinder(modnames)


@ -745,6 +745,7 @@ class AutoLink(SphinxRole):
Expands to ':obj:`text`' if `text` is an object that can be imported;
otherwise expands to '*text*'.
"""
def run(self) -> tuple[list[Node], list[system_message]]:
pyobj_role = self.env.get_domain('py').role('obj')
assert pyobj_role is not None


@ -248,8 +248,8 @@ class ModuleScanner:
def members_of(obj: Any, conf: Config) -> Sequence[str]:
"""Get the members of ``obj``, possibly ignoring the ``__all__`` module attribute
Follows the ``conf.autosummary_ignore_module_all`` setting."""
Follows the ``conf.autosummary_ignore_module_all`` setting.
"""
if conf.autosummary_ignore_module_all:
return dir(obj)
else:


@ -69,6 +69,7 @@ class CoverageBuilder(Builder):
"""
Evaluates coverage of code in the documentation.
"""
name = 'coverage'
epilog = __('Testing of coverage in the sources finished, look at the '
'results in %(outdir)s' + path.sep + 'python.txt.')
@ -270,7 +271,7 @@ class CoverageBuilder(Builder):
self.py_documented[mod_name] = documented_objects
def _write_py_statistics(self, op: TextIO) -> None:
""" Outputs the table of ``op``."""
"""Outputs the table of ``op``."""
all_modules = set(self.py_documented.keys()).union(
set(self.py_undocumented.keys()))
all_objects: set[str] = set()


@ -276,6 +276,7 @@ class DocTestBuilder(Builder):
"""
Runs test snippets in the documentation.
"""
name = 'doctest'
epilog = __('Testing of doctests in the sources finished, look at the '
'results in %(outdir)s/output.txt.')
@ -361,7 +362,8 @@ Doctest summary
def get_filename_for_node(self, node: Node, docname: str) -> str:
"""Try to get the file which actually contains the doctest, not the
filename of the document it's included in."""
filename of the document it's included in.
"""
try:
filename = relpath(node.source, self.env.srcdir)\
.rsplit(':docstring of ', maxsplit=1)[0]


@ -22,6 +22,7 @@ logger = logging.getLogger(__name__)
class DurationDomain(Domain):
"""A domain for durations of Sphinx processing."""
name = 'duration'
@property


@ -47,6 +47,7 @@ class GraphvizError(SphinxError):
class ClickableMapDefinition:
"""A manipulator for clickable map file of graphviz."""
maptag_re = re.compile('<map id="(.*?)"')
href_re = re.compile('href=".*?"')
@ -111,6 +112,7 @@ class Graphviz(SphinxDirective):
"""
Directive to insert arbitrary dot markup.
"""
has_content = True
required_arguments = 0
optional_arguments = 1
@ -179,6 +181,7 @@ class GraphvizSimple(SphinxDirective):
"""
Directive to insert arbitrary dot markup.
"""
has_content = True
required_arguments = 1
optional_arguments = 0


@ -139,6 +139,7 @@ class InheritanceGraph:
from all the way to the root "object", and then is able to generate a
graphviz dot graph from them.
"""
def __init__(self, class_names: list[str], currmodule: str, show_builtins: bool = False,
private_bases: bool = False, parts: int = 0,
aliases: dict[str, str] | None = None, top_classes: Sequence[Any] = (),
@ -334,6 +335,7 @@ class inheritance_diagram(graphviz):
"""
A docutils node to use as a placeholder for the inheritance diagram.
"""
pass
@ -341,6 +343,7 @@ class InheritanceDiagram(SphinxDirective):
"""
Run when the inheritance_diagram directive is first encountered.
"""
has_content = False
required_arguments = 1
optional_arguments = 0


@ -479,7 +479,6 @@ def resolve_reference_detect_inventory(env: BuildEnvironment,
to form ``inv_name:newtarget``. If ``inv_name`` is a named inventory, then resolution
is tried in that inventory with the new target.
"""
# ordinary direct lookup, use data as is
res = resolve_reference_any_inventory(env, True, node, contnode)
if res is not None:
@ -501,7 +500,6 @@ def resolve_reference_detect_inventory(env: BuildEnvironment,
def missing_reference(app: Sphinx, env: BuildEnvironment, node: pending_xref,
contnode: TextElement) -> nodes.reference | None:
"""Attempt to resolve a missing reference via intersphinx references."""
return resolve_reference_detect_inventory(env, node, contnode)


@ -264,7 +264,8 @@ class Config:
Use the type annotations of class attributes that are documented in the docstring
but do not have a type in the docstring.
"""
""" # NoQA: D301
_config_values: dict[str, tuple[Any, _ConfigRebuild]] = {
'napoleon_google_docstring': (True, 'env'),
'napoleon_numpy_docstring': (True, 'env'),


@ -1157,6 +1157,7 @@ class NumpyDocstring(GoogleDocstring):
The lines of the docstring in a list.
"""
def __init__(
self,
docstring: str | list[str],


@ -185,6 +185,7 @@ def env_purge_doc(app: Sphinx, env: BuildEnvironment, docname: str) -> None:
class ViewcodeAnchorTransform(SphinxPostTransform):
"""Convert or remove viewcode_anchor nodes depends on builder."""
default_priority = 100
def run(self, **kwargs: Any) -> None:


@ -152,6 +152,7 @@ def SphinxDummySourceClass(source: Any, *args: Any, **kwargs: Any) -> Any:
class SphinxFileInput(FileInput):
"""A basic FileInput for Sphinx."""
def __init__(self, *args: Any, **kwargs: Any) -> None:
kwargs['error_handler'] = 'sphinx'
super().__init__(*args, **kwargs)


@ -17,6 +17,7 @@ class _TranslationProxy:
The proxy implementation attempts to be as complete as possible, so that
the lazy objects should mostly work as expected, for example for sorting.
"""
__slots__ = '_catalogue', '_namespace', '_message'
def __init__(self, catalogue: str, namespace: str, message: str) -> None:


@ -48,7 +48,6 @@ class Project:
"""Find all document files in the source directory and put them in
:attr:`docnames`.
"""
self.docnames.clear()
self._path_to_docname.clear()
self._docname_to_path.clear()


@ -66,7 +66,6 @@ def app_params(request: Any, test_params: dict, shared_result: SharedResult,
Parameters that are specified by 'pytest.mark.sphinx' for
sphinx.application.Sphinx initialization
"""
# ##### process pytest.mark.sphinx
pargs: dict[int, Any] = {}


@ -75,6 +75,7 @@ class SphinxTestApp(application.Sphinx):
A subclass of :class:`Sphinx` that runs on the test root, with some
better default values for the initialization parameters.
"""
_status: StringIO
_warning: StringIO
@ -148,10 +149,10 @@ class SphinxTestApp(application.Sphinx):
class SphinxTestAppWrapperForSkipBuilding:
"""
This class is a wrapper for SphinxTestApp to speed up the test by skipping
`app.build` process if it is already built and there is even one output
file.
"""A wrapper for SphinxTestApp.
This class is used to speed up the test by skipping ``app.build()``
if it has already been built and there are any output files.
"""
def __init__(self, app_: SphinxTestApp) -> None:


@ -52,7 +52,8 @@ def extract_zip(filename: str, targetdir: str) -> None:
class Theme:
"""A Theme is a set of HTML templates and configurations.
This class supports both theme directory and theme archive (zipped theme)."""
This class supports both theme directory and theme archive (zipped theme).
"""
def __init__(self, name: str, theme_path: str, factory: HTMLThemeFactory) -> None:
self.name = name


@ -98,6 +98,7 @@ class DefaultSubstitutions(SphinxTransform):
"""
Replace some substitutions if they aren't defined in the document.
"""
# run before the default Substitutions
default_priority = 210
@ -139,6 +140,7 @@ class MoveModuleTargets(SphinxTransform):
XXX Python specific
"""
default_priority = 210
def apply(self, **kwargs: Any) -> None:
@ -161,6 +163,7 @@ class HandleCodeBlocks(SphinxTransform):
"""
Several code block related transformations.
"""
default_priority = 210
def apply(self, **kwargs: Any) -> None:
@ -185,6 +188,7 @@ class AutoNumbering(SphinxTransform):
"""
Register IDs of tables, figures and literal_blocks to assign numbers.
"""
default_priority = 210
def apply(self, **kwargs: Any) -> None:
@ -201,6 +205,7 @@ class SortIds(SphinxTransform):
"""
Sort section IDs so that the "id[0-9]+" one comes last.
"""
default_priority = 261
def apply(self, **kwargs: Any) -> None:
@ -222,6 +227,7 @@ class ApplySourceWorkaround(SphinxTransform):
"""
Update source and rawsource attributes
"""
default_priority = 10
def apply(self, **kwargs: Any) -> None:
@ -234,6 +240,7 @@ class AutoIndexUpgrader(SphinxTransform):
"""
Detect old style (4 column based indices) and automatically upgrade to new style.
"""
default_priority = 210
def apply(self, **kwargs: Any) -> None:
@ -251,6 +258,7 @@ class ExtraTranslatableNodes(SphinxTransform):
"""
Make nodes translatable
"""
default_priority = 10
def apply(self, **kwargs: Any) -> None:
@ -270,6 +278,7 @@ class UnreferencedFootnotesDetector(SphinxTransform):
"""
Detect unreferenced footnotes and emit warnings
"""
default_priority = 200
def apply(self, **kwargs: Any) -> None:
@ -291,6 +300,7 @@ class UnreferencedFootnotesDetector(SphinxTransform):
class DoctestTransform(SphinxTransform):
"""Set "doctest" style to each doctest_block node"""
default_priority = 500
def apply(self, **kwargs: Any) -> None:
@ -300,6 +310,7 @@ class DoctestTransform(SphinxTransform):
class FilterSystemMessages(SphinxTransform):
"""Filter system messages from a doctree."""
default_priority = 999
def apply(self, **kwargs: Any) -> None:
@ -315,6 +326,7 @@ class SphinxContentsFilter(ContentsFilter):
Used with BuildEnvironment.add_toc_from() to discard cross-file links
within table-of-contents link nodes.
"""
visit_pending_xref = ContentsFilter.ignore_node_but_process_children
def visit_image(self, node: nodes.image) -> None:
@ -327,6 +339,7 @@ class SphinxSmartQuotes(SmartQuotes, SphinxTransform):
refs: sphinx.parsers.RSTParser
"""
default_priority = 750
def apply(self, **kwargs: Any) -> None:
@ -377,6 +390,7 @@ class SphinxSmartQuotes(SmartQuotes, SphinxTransform):
class DoctreeReadEvent(SphinxTransform):
"""Emit :event:`doctree-read` event."""
default_priority = 880
def apply(self, **kwargs: Any) -> None:
@ -385,6 +399,7 @@ class DoctreeReadEvent(SphinxTransform):
class ManpageLink(SphinxTransform):
"""Find manpage section numbers and names"""
default_priority = 999
def apply(self, **kwargs: Any) -> None:
@ -403,6 +418,7 @@ class ManpageLink(SphinxTransform):
class GlossarySorter(SphinxTransform):
"""Sort glossaries that have the ``sorted`` flag."""
# This must be done after i18n, therefore not right
# away in the glossary directive.
default_priority = 500


@ -53,6 +53,7 @@ class RefOnlyBulletListTransform(SphinxTransform):
Specifically implemented for 'Indices and Tables' section, which looks
odd when html_compact_lists is false.
"""
default_priority = 100
def apply(self, **kwargs: Any) -> None:


@ -95,6 +95,7 @@ class PreserveTranslatableMessages(SphinxTransform):
"""
Preserve original translatable messages before translation
"""
default_priority = 10 # this MUST be invoked before Locale transform
def apply(self, **kwargs: Any) -> None:
@ -339,6 +340,7 @@ class Locale(SphinxTransform):
"""
Replace translatable nodes with their translated doctree.
"""
default_priority = 20
def apply(self, **kwargs: Any) -> None:
@ -538,6 +540,7 @@ class TranslationProgressTotaliser(SphinxTransform):
"""
Calculate the number of translated and untranslated nodes.
"""
default_priority = 25 # MUST happen after Locale
def apply(self, **kwargs: Any) -> None:
@ -561,6 +564,7 @@ class AddTranslationClasses(SphinxTransform):
"""
Add ``translated`` or ``untranslated`` classes to indicate translation status.
"""
default_priority = 950
def apply(self, **kwargs: Any) -> None:
@ -597,6 +601,7 @@ class RemoveTranslatableInline(SphinxTransform):
"""
Remove inline nodes used for translation as placeholders.
"""
default_priority = 999
def apply(self, **kwargs: Any) -> None:


@ -33,6 +33,7 @@ class SphinxPostTransform(SphinxTransform):
They resolve references, convert images, do special transformation for each output
formats and so on. This class helps to implement these post transforms.
"""
builders: tuple[str, ...] = ()
formats: tuple[str, ...] = ()
@ -234,6 +235,7 @@ class OnlyNodeTransform(SphinxPostTransform):
class SigElementFallbackTransform(SphinxPostTransform):
"""Fallback various desc_* nodes to inline if translator does not support them."""
default_priority = 200
def run(self, **kwargs: Any) -> None:
@ -276,6 +278,7 @@ class SigElementFallbackTransform(SphinxPostTransform):
class PropagateDescDomain(SphinxPostTransform):
"""Add the domain name of the parent node as a class in each desc_signature node."""
default_priority = 200
def run(self, **kwargs: Any) -> None:


@ -32,6 +32,7 @@ class HighlightLanguageTransform(SphinxTransform):
:rst:dir:`highlight` directive. After processing, this transform
removes ``highlightlang`` node from doctree.
"""
default_priority = 400
def apply(self, **kwargs: Any) -> None:
@ -88,6 +89,7 @@ class TrimDoctestFlagsTransform(SphinxTransform):
see :confval:`trim_doctest_flags` for more information.
"""
default_priority = HighlightLanguageTransform.default_priority + 1
def apply(self, **kwargs: Any) -> None:


@ -173,6 +173,7 @@ class ImageConverter(BaseImageConverter):
3. Register your image converter to Sphinx using
:py:meth:`.Sphinx.add_post_transform`
"""
default_priority = 200
#: The converter is available or not. Will be filled at the first call of


@ -29,6 +29,7 @@ class SphinxDanglingReferences(DanglingReferences):
class SphinxDomains(SphinxTransform):
"""Collect objects to Sphinx domains for cross references."""
default_priority = 850
def apply(self, **kwargs: Any) -> None:


@ -68,6 +68,7 @@ class FilenameUniqDict(dict):
interpreted as filenames, and keeps track of a set of docnames they
appear in. Used for images and downloadable files in the environment.
"""
def __init__(self) -> None:
self._existing: set[str] = set()
@ -178,6 +179,7 @@ class Tee:
"""
File-like object writing to two streams.
"""
def __init__(self, stream1: IO, stream2: IO) -> None:
self.stream1 = stream1
self.stream2 = stream2


@ -14,6 +14,7 @@ if TYPE_CHECKING:
class TeeStripANSI:
"""File-like object writing to two streams."""
def __init__(
self,
stream_term: SupportsWrite,


@ -5,6 +5,7 @@ from enum import IntEnum
class BuildPhase(IntEnum):
"""Build phase of Sphinx application."""
INITIALIZATION = 1
READING = 2
CONSISTENCY_CHECK = 3


@ -52,6 +52,7 @@ class Field:
:returns: description of the return value
:rtype: description of the return type
"""
is_grouped = False
is_typed = False
@ -152,6 +153,7 @@ class GroupedField(Field):
:raises ErrorClass: description when it is raised
"""
is_grouped = True
list_type = nodes.bullet_list
@ -208,6 +210,7 @@ class TypedField(GroupedField):
:param SomeClass foo: description of parameter foo
"""
is_typed = True
def __init__(
@ -272,6 +275,7 @@ class DocFieldTransformer:
Transforms field lists in "doc field" syntax into better-looking
equivalents, using the field type definitions given on a domain.
"""
typemap: dict[str, tuple[Field, bool]]
def __init__(self, directive: ObjectDescription) -> None:


@ -290,6 +290,7 @@ class sphinx_domains(CustomReSTDispatcher):
"""Monkey-patch directive and role dispatch, so that domain-specific
markup takes precedence.
"""
def __init__(self, env: BuildEnvironment) -> None:
self.env = env
super().__init__()
@ -451,6 +452,7 @@ class SphinxRole:
.. note:: The subclasses of this class might not work with docutils.
This class is strongly coupled with Sphinx.
"""
name: str #: The role name actually used in the document.
rawtext: str #: A string containing the entire interpreted text input.
text: str #: The interpreted text content.
@ -519,6 +521,7 @@ class ReferenceRole(SphinxRole):
the role. The parsed result; link title and target will be stored to
``self.title`` and ``self.target``.
"""
has_explicit_title: bool #: A boolean indicates the role has explicit title or not.
disabled: bool #: A boolean indicates the reference is disabled.
title: str #: The link title for the interpreted text.


@ -453,6 +453,7 @@ class TypeAliasForwardRef:
This avoids the error on evaluating the type inside `get_type_hints()`.
"""
def __init__(self, name: str) -> None:
self.name = name


@ -85,6 +85,7 @@ def convert_serializable(records: list[logging.LogRecord]) -> None:
class SphinxLogRecord(logging.LogRecord):
"""Log record class supporting location"""
prefix = ''
location: Any = None
@ -101,11 +102,13 @@ class SphinxLogRecord(logging.LogRecord):
class SphinxInfoLogRecord(SphinxLogRecord):
"""Info log record class supporting location"""
prefix = '' # do not show any prefix for INFO messages
class SphinxWarningLogRecord(SphinxLogRecord):
"""Warning log record class supporting location"""
@property
def prefix(self) -> str: # type: ignore[override]
if self.levelno >= logging.CRITICAL:
@ -118,6 +121,7 @@ class SphinxWarningLogRecord(SphinxLogRecord):
class SphinxLoggerAdapter(logging.LoggerAdapter):
"""LoggerAdapter allowing ``type`` and ``subtype`` keywords."""
KEYWORDS = ['type', 'subtype', 'location', 'nonl', 'color', 'once']
def log( # type: ignore[override]
@ -146,6 +150,7 @@ class SphinxLoggerAdapter(logging.LoggerAdapter):
class WarningStreamHandler(logging.StreamHandler):
"""StreamHandler for warnings."""
pass
@ -476,6 +481,7 @@ class SphinxLogRecordTranslator(logging.Filter):
* Make a instance of SphinxLogRecord
* docname to path if location given
"""
LogRecordClass: type[logging.LogRecord]
def __init__(self, app: Sphinx) -> None:
@ -507,11 +513,13 @@ class SphinxLogRecordTranslator(logging.Filter):
class InfoLogRecordTranslator(SphinxLogRecordTranslator):
"""LogRecordTranslator for INFO level log records."""
LogRecordClass = SphinxInfoLogRecord
class WarningLogRecordTranslator(SphinxLogRecordTranslator):
"""LogRecordTranslator for WARNING level log records."""
LogRecordClass = SphinxWarningLogRecord
@ -543,6 +551,7 @@ class ColorizeFormatter(logging.Formatter):
class SafeEncodingWriter:
"""Stream writer which ignores UnicodeEncodeError silently"""
def __init__(self, stream: IO) -> None:
self.stream = stream
self.encoding = getattr(stream, 'encoding', 'ascii') or 'ascii'
@ -562,6 +571,7 @@ class SafeEncodingWriter:
class LastMessagesWriter:
"""Stream writer storing last 10 messages in memory to save trackback"""
def __init__(self, app: Sphinx, stream: IO) -> None:
self.app = app


@ -91,7 +91,8 @@ _pat_cache: dict[str, re.Pattern[str]] = {}
def patmatch(name: str, pat: str) -> re.Match[str] | None:
"""Return if name matches the regular expression (pattern)
``pat```. Adapted from fnmatch module."""
``pat```. Adapted from fnmatch module.
"""
if pat not in _pat_cache:
_pat_cache[pat] = re.compile(_translate_pattern(pat))
return _pat_cache[pat].match(name)


@ -50,7 +50,7 @@ class NodeMatcher:
following example searches ``reference`` node having ``refdomain`` attributes::
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from typing import TYPE_CHECKING, Any
matcher = NodeMatcher(nodes.reference, refdomain=Any)
doctree.findall(matcher)
# => [<reference ...>, <reference ...>, ...]
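A minimal sketch of the matcher that docstring describes, assuming ``typing.Any`` is used as a sentinel meaning "the attribute only needs to be present" (the real class lives in ``sphinx.util.nodes``; this is an illustration, not its exact implementation):

    from typing import Any

    from docutils import nodes


    class NodeMatcher:
        """Callable filter for ``doctree.findall()``."""

        def __init__(self, *node_classes: type[nodes.Node], **attrs: Any) -> None:
            self.classes = node_classes
            self.attrs = attrs

        def __call__(self, node: nodes.Node) -> bool:
            if self.classes and not isinstance(node, self.classes):
                return False
            if self.attrs and not isinstance(node, nodes.Element):
                return False
            for key, value in self.attrs.items():
                if key not in node:
                    return False
                # ``Any`` only requires the attribute to exist; otherwise compare values.
                if value is not Any and node[key] != value:
                    return False
            return True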

View File

@ -37,7 +37,7 @@ def canon_path(native_path: str | os.PathLike[str], /) -> str:
def path_stabilize(filepath: str | os.PathLike[str], /) -> str:
"Normalize path separator and unicode string"
"""Normalize path separator and unicode string"""
new_path = canon_path(filepath)
return unicodedata.normalize('NFC', new_path)
@ -89,7 +89,8 @@ def copytimes(source: str | os.PathLike[str], dest: str | os.PathLike[str]) -> N
def copyfile(source: str | os.PathLike[str], dest: str | os.PathLike[str]) -> None:
"""Copy a file and its modification times, if possible.
Note: ``copyfile`` skips copying if the file has not been changed"""
Note: ``copyfile`` skips copying if the file has not been changed
"""
if not path.exists(dest) or not filecmp.cmp(source, dest):
shutil.copyfile(source, dest)
with contextlib.suppress(OSError):
@ -132,6 +133,7 @@ abspath = path.abspath
class _chdir:
"""Remove this fall-back once support for Python 3.10 is removed."""
def __init__(self, target_dir: str, /) -> None:
self.path = target_dir
self._dirs: list[str] = []
@ -170,6 +172,7 @@ class FileAvoidWrite:
Objects can be used as context managers.
"""
def __init__(self, path: str) -> None:
self._path = path
self._io: StringIO | None = None

View File

@ -30,17 +30,19 @@ def _get_tls_cacert(url: str, certs: str | dict[str, str] | None) -> str | bool:
def get(url: str, **kwargs: Any) -> requests.Response:
"""Sends a GET request like requests.get().
"""Sends a GET request like ``requests.get()``.
This sets up User-Agent header and TLS verification automatically."""
This sets up User-Agent header and TLS verification automatically.
"""
with _Session() as session:
return session.get(url, **kwargs)
def head(url: str, **kwargs: Any) -> requests.Response:
"""Sends a HEAD request like requests.head().
"""Sends a HEAD request like ``requests.head()``.
This sets up User-Agent header and TLS verification automatically."""
This sets up User-Agent header and TLS verification automatically.
"""
with _Session() as session:
return session.head(url, **kwargs)
@ -54,7 +56,8 @@ class _Session(requests.Session):
) -> requests.Response:
"""Sends a request with an HTTP verb and url.
This sets up User-Agent header and TLS verification automatically."""
This sets up User-Agent header and TLS verification automatically.
"""
headers = kwargs.setdefault('headers', {})
headers.setdefault('User-Agent', _user_agent or _USER_AGENT)
if _tls_info:
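The wrappers documented above mostly exist to inject a default ``User-Agent`` header (and, not shown here, TLS settings) before delegating to ``requests``. A stripped-down sketch, with a placeholder agent string, might look like:

    from typing import Any

    import requests

    _USER_AGENT = 'Sphinx (illustrative placeholder)'


    def get(url: str, **kwargs: Any) -> requests.Response:
        """Send a GET request like ``requests.get()``, with a default User-Agent."""
        headers = kwargs.setdefault('headers', {})
        headers.setdefault('User-Agent', _USER_AGENT)
        with requests.Session() as session:
            return session.get(url, **kwargs)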

View File

@ -144,6 +144,7 @@ def levenshtein_distance(a: str, b: str) -> int:
class UIDTransform(SphinxTransform):
"""Add UIDs to doctree for versioning."""
default_priority = 880
def apply(self, **kwargs: Any) -> None:

View File

@ -158,7 +158,7 @@ class Table:
return 'tabulary'
def get_colspec(self) -> str:
"""Returns a column spec of table.
r"""Returns a column spec of table.
This is what LaTeX calls the 'preamble argument' of the used table environment.

View File

@ -49,6 +49,7 @@ class NestedInlineTransform:
<strong>foo=</strong><emphasis>var</emphasis>
<strong>&bar=</strong><emphasis>2</emphasis>
"""
def __init__(self, document: nodes.document) -> None:
self.document = document

View File

@ -96,7 +96,8 @@ def find_subsections(section: Element) -> list[nodes.section]:
def smart_capwords(s: str, sep: str | None = None) -> str:
"""Like string.capwords() but does not capitalize words that already
contain a capital letter."""
contain a capital letter.
"""
words = s.split(sep)
for i, word in enumerate(words):
if all(x.islower() for x in word):
@ -106,6 +107,7 @@ def smart_capwords(s: str, sep: str | None = None) -> str:
class TexinfoWriter(writers.Writer):
"""Texinfo writer for generating Texinfo documents."""
supported = ('texinfo', 'texi')
settings_spec: tuple[str, Any, tuple[tuple[str, list[str], dict[str, str]], ...]] = (
@ -255,7 +257,8 @@ class TexinfoTranslator(SphinxTranslator):
def collect_node_names(self) -> None:
"""Generates a unique id for each section.
Assigns the attribute ``node_name`` to each section."""
Assigns the attribute ``node_name`` to each section.
"""
def add_node_name(name: str) -> str:
node_id = self.escape_id(name)
@ -352,7 +355,8 @@ class TexinfoTranslator(SphinxTranslator):
def escape_arg(self, s: str) -> str:
"""Return an escaped string suitable for use as an argument
to a Texinfo command."""
to a Texinfo command.
"""
s = self.escape(s)
# commas are the argument delimiters
s = s.replace(',', '@comma{}')
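The ``smart_capwords`` hunk above is cut off after the lowercase check; a plausible completion, capitalising only all-lowercase words and rejoining with the original separator, would be:

    def smart_capwords(s: str, sep: str | None = None) -> str:
        """Like ``string.capwords()``, but leave words with a capital letter untouched."""
        words = s.split(sep)
        for i, word in enumerate(words):
            if all(x.islower() for x in word):
                words[i] = word.capitalize()
        return (sep or ' ').join(words)


    smart_capwords('the GNU texinfo manual')  # -> 'The GNU Texinfo Manual'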

View File

@ -26,6 +26,7 @@ class Cell:
"""Represents a cell in a table.
It can span multiple columns or multiple lines.
"""
def __init__(self, text: str = "", rowspan: int = 1, colspan: int = 1) -> None:
self.text = text
self.wrapped: list[str] = []
@ -93,6 +94,7 @@ class Table:
+--------+--------+
"""
def __init__(self, colwidth: list[int] | None = None) -> None:
self.lines: list[list[Cell]] = []
self.separator = 0
@ -262,9 +264,8 @@ class TextWrapper(textwrap.TextWrapper):
r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash
def _wrap_chunks(self, chunks: list[str]) -> list[str]:
"""_wrap_chunks(chunks : [string]) -> [string]
"""The original _wrap_chunks uses len() to calculate width.
The original _wrap_chunks uses len() to calculate width.
This method respects wide/fullwidth characters for width adjustment.
"""
lines: list[str] = []
@ -309,10 +310,7 @@ class TextWrapper(textwrap.TextWrapper):
return lines
def _break_word(self, word: str, space_left: int) -> tuple[str, str]:
"""_break_word(word : string, space_left : int) -> (string, string)
Break line by unicode width instead of len(word).
"""
"""Break line by unicode width instead of len(word)."""
total = 0
for i, c in enumerate(word):
total += column_width(c)
@ -321,9 +319,8 @@ class TextWrapper(textwrap.TextWrapper):
return word, ''
def _split(self, text: str) -> list[str]:
"""_split(text : string) -> [string]
"""Override original method that only split by 'wordsep_re'.
Override original method that only split by 'wordsep_re'.
This '_split' splits wide-characters into chunks by one character.
"""
def split(t: str) -> list[str]:
@ -339,12 +336,7 @@ class TextWrapper(textwrap.TextWrapper):
def _handle_long_word(self, reversed_chunks: list[str], cur_line: list[str],
cur_len: int, width: int) -> None:
"""_handle_long_word(chunks : [string],
cur_line : [string],
cur_len : int, width : int)
Override original method for using self._break_word() instead of slice.
"""
"""Override original method for using self._break_word() instead of slice."""
space_left = max(width - cur_len, 1)
if self.break_long_words:
l, r = self._break_word(reversed_chunks[-1], space_left)
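The overrides above all hinge on measuring display columns rather than ``len()``. A self-contained sketch of that idea follows; ``column_width`` here is a simplified stand-in based on ``unicodedata.east_asian_width``, not Sphinx's helper.

    import unicodedata


    def column_width(text: str) -> int:
        """Count wide/fullwidth characters as two terminal columns."""
        return sum(2 if unicodedata.east_asian_width(c) in 'WF' else 1 for c in text)


    def break_word(word: str, space_left: int) -> tuple[str, str]:
        """Split *word* so the first part fits into *space_left* display columns."""
        total = 0
        for i, c in enumerate(word):
            total += column_width(c)
            if total > space_left:
                return word[:i], word[i:]
        return word, ''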

View File

@ -48,5 +48,5 @@ class PseudoXMLWriter(BaseXMLWriter):
self.output = self.document.pformat()
def supports(self, format: str) -> bool:
"""This writer supports all format-specific elements."""
"""All format-specific elements are supported."""
return True

View File

@ -5,7 +5,7 @@ module_level_var: int = 99
class PEP526GoogleClass:
"""Sample class with PEP 526 annotations and google docstring
"""Sample class with PEP 526 annotations and google docstring.
Attributes:
attr1: Attr1 description.

View File

@ -16,5 +16,6 @@ class PEP526NumpyClass:
attr2:
Attr2 description
"""
attr1: int
attr2: str

View File

@ -17,7 +17,6 @@ def sig_elements() -> set[type[addnodes.desc_sig_element]]:
def test_desc_sig_element_nodes(sig_elements):
"""Test the registration of ``desc_sig_element`` subclasses."""
# expected desc_sig_* node classes (must be declared *after* reloading
# the module since otherwise the objects are not the correct ones)
EXPECTED_SIG_ELEMENTS = {

View File

@ -22,6 +22,7 @@ def runnable(command):
class EPUBElementTree:
"""Test helper for content.opf and toc.ncx"""
namespaces = {
'idpf': 'http://www.idpf.org/2007/opf',
'dc': 'http://purl.org/dc/elements/1.1/',

View File

@ -432,7 +432,6 @@ def test_nitpick_ignore_regex_fullmatch(app, status, warning):
def test_conf_py_language_none(tmp_path):
"""Regression test for #10474."""
# Given a conf.py file with language = None
(tmp_path / 'conf.py').write_text("language = None", encoding='utf-8')
@ -446,7 +445,6 @@ def test_conf_py_language_none(tmp_path):
@mock.patch("sphinx.config.logger")
def test_conf_py_language_none_warning(logger, tmp_path):
"""Regression test for #10474."""
# Given a conf.py file with language = None
(tmp_path / 'conf.py').write_text("language = None", encoding='utf-8')
@ -463,7 +461,6 @@ def test_conf_py_language_none_warning(logger, tmp_path):
def test_conf_py_no_language(tmp_path):
"""Regression test for #10474."""
# Given a conf.py file with no language attribute
(tmp_path / 'conf.py').write_text("", encoding='utf-8')
@ -476,7 +473,6 @@ def test_conf_py_no_language(tmp_path):
def test_conf_py_nitpick_ignore_list(tmp_path):
"""Regression test for #11355."""
# Given a conf.py file with no language attribute
(tmp_path / 'conf.py').write_text("", encoding='utf-8')

View File

@ -301,9 +301,9 @@ def test_extension_parsed(make_app, apidoc):
)
def test_toc_all_references_should_exist_pep420_enabled(make_app, apidoc):
"""All references in toc should exist. This test doesn't say if
directories with empty __init__.py and nothing else should be
skipped, just ensures consistency between what's referenced in the toc
and what is created. This is the variant with pep420 enabled.
directories with empty __init__.py and nothing else should be
skipped, just ensures consistency between what's referenced in the toc
and what is created. This is the variant with pep420 enabled.
"""
outdir = apidoc.outdir
assert (outdir / 'conf.py').is_file()
@ -331,9 +331,9 @@ def test_toc_all_references_should_exist_pep420_enabled(make_app, apidoc):
)
def test_toc_all_references_should_exist_pep420_disabled(make_app, apidoc):
"""All references in toc should exist. This test doesn't say if
directories with empty __init__.py and nothing else should be
skipped, just ensures consistency between what's referenced in the toc
and what is created. This is the variant with pep420 disabled.
directories with empty __init__.py and nothing else should be
skipped, just ensures consistency between what's referenced in the toc
and what is created. This is the variant with pep420 disabled.
"""
outdir = apidoc.outdir
assert (outdir / 'conf.py').is_file()
@ -378,7 +378,7 @@ def extract_toc(path):
)
def test_subpackage_in_toc(make_app, apidoc):
"""Make sure that empty subpackages with non-empty subpackages in them
are not skipped (issue #4520)
are not skipped (issue #4520)
"""
outdir = apidoc.outdir
assert (outdir / 'conf.py').is_file()
@ -642,7 +642,6 @@ def test_no_duplicates(rootdir, tmp_path):
We can't use pytest.mark.apidoc here as we use a different set of arguments
to apidoc_main
"""
original_suffixes = sphinx.ext.apidoc.PY_SUFFIXES
try:
# Ensure test works on Windows

View File

@ -228,6 +228,7 @@ def test_format_signature(app):
class F2:
"""some docstring for F2."""
def __init__(self, *args, **kw):
"""
__init__(a1, a2, kw1=True, kw2=False)

View File

@ -429,23 +429,25 @@ def test_load_mappings_fallback(tmp_path, app, status, warning):
class TestStripBasicAuth:
"""Tests for sphinx.ext.intersphinx._strip_basic_auth()"""
def test_auth_stripped(self):
"""basic auth creds stripped from URL containing creds"""
"""Basic auth creds stripped from URL containing creds"""
url = 'https://user:12345@domain.com/project/objects.inv'
expected = 'https://domain.com/project/objects.inv'
actual = _strip_basic_auth(url)
assert expected == actual
def test_no_auth(self):
"""url unchanged if param doesn't contain basic auth creds"""
"""Url unchanged if param doesn't contain basic auth creds"""
url = 'https://domain.com/project/objects.inv'
expected = 'https://domain.com/project/objects.inv'
actual = _strip_basic_auth(url)
assert expected == actual
def test_having_port(self):
"""basic auth creds correctly stripped from URL containing creds even if URL
contains port"""
"""Basic auth creds correctly stripped from URL containing creds even if URL
contains port
"""
url = 'https://user:12345@domain.com:8080/project/objects.inv'
expected = 'https://domain.com:8080/project/objects.inv'
actual = _strip_basic_auth(url)
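These tests exercise stripping of ``user:password@`` credentials from inventory URLs. For orientation, equivalent behaviour can be sketched with ``urllib.parse``; this is not the implementation in ``sphinx.ext.intersphinx``, just an illustration of what the assertions expect.

    from urllib.parse import urlsplit, urlunsplit


    def strip_basic_auth(url: str) -> str:
        """Return *url* with any ``user:password@`` part removed, keeping the port."""
        parts = urlsplit(url)
        if parts.username is None:
            return url
        host = parts.hostname or ''
        if parts.port is not None:
            host = f'{host}:{parts.port}'
        return urlunsplit((parts.scheme, host, parts.path, parts.query, parts.fragment))


    strip_basic_auth('https://user:12345@domain.com:8080/project/objects.inv')
    # -> 'https://domain.com:8080/project/objects.inv'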

View File

@ -55,7 +55,7 @@ class SampleClass:
@simple_decorator
def __decorated_func__(self):
"""doc"""
"""Doc"""
pass

View File

@ -36,6 +36,7 @@ class NamedtupleSubclass(namedtuple('NamedtupleSubclass', ('attr1', 'attr2'))):
Adds a newline after the type
"""
# To avoid creating a dict, as a namedtuple doesn't have it:
__slots__ = ()
@ -1186,7 +1187,7 @@ Do as you please
actual = str(GoogleDocstring(cleandoc(PEP526GoogleClass.__doc__), config, app=None, what="class",
obj=PEP526GoogleClass))
expected = """\
Sample class with PEP 526 annotations and google docstring
Sample class with PEP 526 annotations and google docstring.
.. attribute:: attr1

View File

@ -86,7 +86,6 @@ def test_todo_valid_link(app, status, warning):
that exists in the LaTeX output. The target was previously incorrectly
omitted (GitHub issue #1020).
"""
# Ensure the LaTeX output is built.
app.builder.build_all()

View File

@ -79,6 +79,7 @@ def test_keyboard_hyphen_spaces(app):
class TestSigElementFallbackTransform:
"""Integration test for :class:`sphinx.transforms.post_transforms.SigElementFallbackTransform`."""
# safe copy of the "built-in" desc_sig_* nodes (during the test, instances of such nodes
# will be created sequentially, so we fix a possible order at the beginning using a tuple)
_builtin_sig_elements: tuple[type[addnodes.desc_sig_element], ...] = tuple(SIG_ELEMENTS)

View File

@ -237,8 +237,8 @@ def test_split_explicit_target(title, expected):
def test_apply_source_workaround_literal_block_no_source():
"""Regression test for #11091.
Test that apply_source_workaround doesn't raise.
"""
Test that apply_source_workaround doesn't raise.
"""
literal_block = nodes.literal_block('', '')
list_item = nodes.list_item('', literal_block)
bullet_list = nodes.bullet_list('', list_item)

View File

@ -117,7 +117,6 @@ def run_extract() -> None:
def run_update() -> None:
"""Catalog merging command."""
log = _get_logger()
domain = 'sphinx'
@ -160,7 +159,6 @@ def run_compile() -> None:
Unfortunately, babel's setup command isn't built to be very extensible, so
most of the run() code is duplicated here.
"""
log = _get_logger()
directory = os.path.join('sphinx', 'locale')