Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
Fix PGH003 (type: ignore comment must have parameters)
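The change is mechanical: ruff's pygrep-hooks rule PGH003 rejects blanket `# type: ignore` comments, so the first hunk below stops ignoring PGH003 in pyproject.toml and every remaining suppression gains an explicit mypy error code such as [attr-defined], [arg-type], [assignment], [misc], [override], [call-arg], [index], [return-value], [no-redef] or [list-item]. A minimal sketch of the difference (the classes below are illustrative only, not code from this diff):

    class Base:
        def category(self) -> int:
            return 0

    class Loud(Base):
        # A bare "# type: ignore" would silence every mypy error on this line and trip PGH003.
        # Naming the code suppresses only the intended complaint (the incompatible override),
        # so unrelated errors on the same line still surface.
        def category(self) -> str:  # type: ignore[override]
            return 'loud'

Where adding the error code pushes a line past the length limit, the hunks below also reflow the statement (for example the multi-name `from sphinx.util.console import ...` lines become parenthesised imports), presumably to stay within the project's line-length rule.
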
@@ -208,8 +208,6 @@ ignore = [
"NPY", # numpy is not used in Sphinx
# pandas-vet
"PD", # pandas is not used in Sphinx
-# pygrep-hooks
-"PGH003",
# flake8-pie
"PIE790", # unnecessary 'pass' statement
# pylint

@@ -42,7 +42,7 @@ class document(nodes.document):

def set_id(self, node: Element, msgnode: Element | None = None,
suggested_prefix: str = '') -> str:
-return super().set_id(node, msgnode, suggested_prefix) # type: ignore
+return super().set_id(node, msgnode, suggested_prefix) # type: ignore[call-arg]


class translatable(nodes.Node):

@@ -38,7 +38,7 @@ from sphinx.roles import XRefRole
from sphinx.theming import Theme
from sphinx.util import docutils, logging
from sphinx.util.build_phase import BuildPhase
-from sphinx.util.console import bold # type: ignore
+from sphinx.util.console import bold # type: ignore[attr-defined]
from sphinx.util.display import progress_message
from sphinx.util.i18n import CatalogRepository
from sphinx.util.logging import prefixed_warnings

@@ -20,7 +20,7 @@ from sphinx.events import EventManager
from sphinx.locale import __
from sphinx.util import UnicodeDecodeErrorHandler, get_filetype, import_object, logging, rst
from sphinx.util.build_phase import BuildPhase
-from sphinx.util.console import bold # type: ignore
+from sphinx.util.console import bold # type: ignore[attr-defined]
from sphinx.util.display import progress_message, status_iterator
from sphinx.util.docutils import sphinx_domains
from sphinx.util.i18n import CatalogInfo, CatalogRepository, docname_to_domain

@@ -490,7 +490,8 @@ class Builder:
publisher.settings.record_dependencies = DependencyList()
with sphinx_domains(self.env), rst.default_role(docname, self.config.default_role):
# set up error_handler for the target document
-codecs.register_error('sphinx', UnicodeDecodeErrorHandler(docname)) # type: ignore
+codecs.register_error('sphinx',
+UnicodeDecodeErrorHandler(docname)) # type: ignore[arg-type]

publisher.set_source(source_path=filename)
publisher.publish()

@@ -13,7 +13,7 @@ from sphinx.domains.changeset import ChangeSetDomain
from sphinx.locale import _, __
from sphinx.theming import HTMLThemeFactory
from sphinx.util import logging
-from sphinx.util.console import bold # type: ignore
+from sphinx.util.console import bold # type: ignore[attr-defined]
from sphinx.util.fileutil import copy_asset_file
from sphinx.util.osutil import ensuredir, os_path

@@ -251,7 +251,7 @@ def convert_epub_css_files(app: Sphinx, config: Config) -> None:
logger.warning(__('invalid css_file: %r, ignored'), entry)
continue

-config.epub_css_files = epub_css_files # type: ignore
+config.epub_css_files = epub_css_files # type: ignore[attr-defined]


def setup(app: Sphinx) -> dict[str, Any]:

@@ -19,7 +19,7 @@ from sphinx.builders import Builder
from sphinx.errors import ThemeError
from sphinx.locale import __
from sphinx.util import logging
-from sphinx.util.console import bold # type: ignore
+from sphinx.util.console import bold # type: ignore[attr-defined]
from sphinx.util.display import status_iterator
from sphinx.util.i18n import CatalogInfo, docname_to_domain
from sphinx.util.index_entries import split_index_msg

@@ -153,7 +153,7 @@ class I18nBuilder(Builder):

for toctree in self.env.tocs[docname].findall(addnodes.toctree):
for node, msg in extract_messages(toctree):
-node.uid = '' # type: ignore # Hack UUID model
+node.uid = '' # type: ignore[attr-defined] # Hack UUID model
catalog.add(msg, node)

for node, msg in extract_messages(doctree):

@@ -134,7 +134,7 @@ class BuildInfo:
if tags:
self.tags_hash = get_stable_hash(sorted(tags))

-def __eq__(self, other: BuildInfo) -> bool: # type: ignore
+def __eq__(self, other: BuildInfo) -> bool: # type: ignore[override]
return (self.config_hash == other.config_hash and
self.tags_hash == other.tags_hash)

@@ -1189,7 +1189,7 @@ def convert_html_css_files(app: Sphinx, config: Config) -> None:
logger.warning(__('invalid css_file: %r, ignored'), entry)
continue

-config.html_css_files = html_css_files # type: ignore
+config.html_css_files = html_css_files # type: ignore[attr-defined]


def convert_html_js_files(app: Sphinx, config: Config) -> None:

@@ -1206,7 +1206,7 @@ def convert_html_js_files(app: Sphinx, config: Config) -> None:
logger.warning(__('invalid js_file: %r, ignored'), entry)
continue

-config.html_js_files = html_js_files # type: ignore
+config.html_js_files = html_js_files # type: ignore[attr-defined]


def setup_resource_paths(app: Sphinx, pagename: str, templatename: str,

@@ -1229,7 +1229,7 @@ def validate_math_renderer(app: Sphinx) -> None:
if app.builder.format != 'html':
return

-name = app.builder.math_renderer_name # type: ignore
+name = app.builder.math_renderer_name # type: ignore[attr-defined]
if name is None:
raise ConfigError(__('Many math_renderers are registered. '
'But no math_renderer is selected.'))

@@ -1269,7 +1269,7 @@ def validate_html_logo(app: Sphinx, config: Config) -> None:
not path.isfile(path.join(app.confdir, config.html_logo)) and
not isurl(config.html_logo)):
logger.warning(__('logo file %r does not exist'), config.html_logo)
-config.html_logo = None # type: ignore
+config.html_logo = None # type: ignore[attr-defined]


def validate_html_favicon(app: Sphinx, config: Config) -> None:

@@ -1278,7 +1278,7 @@ def validate_html_favicon(app: Sphinx, config: Config) -> None:
not path.isfile(path.join(app.confdir, config.html_favicon)) and
not isurl(config.html_favicon)):
logger.warning(__('favicon file %r does not exist'), config.html_favicon)
-config.html_favicon = None # type: ignore
+config.html_favicon = None # type: ignore[attr-defined]


def error_on_html_4(_app: Sphinx, config: Config) -> None:

@@ -22,7 +22,7 @@ from sphinx.environment.adapters.asset import ImageAdapter
from sphinx.errors import NoUri, SphinxError
from sphinx.locale import _, __
from sphinx.util import logging, texescape
-from sphinx.util.console import bold, darkgreen # type: ignore
+from sphinx.util.console import bold, darkgreen # type: ignore[attr-defined]
from sphinx.util.display import progress_message, status_iterator
from sphinx.util.docutils import SphinxFileOutput, new_document
from sphinx.util.fileutil import copy_asset_file

@@ -156,7 +156,7 @@ class LaTeXBuilder(Builder):
logger.warning(__('"latex_documents" config value references unknown '
'document %s'), docname)
continue
-self.document_data.append(entry) # type: ignore
+self.document_data.append(entry) # type: ignore[arg-type]
if docname.endswith(SEP + 'index'):
docname = docname[:-5]
self.titles.append((docname, entry[2]))

@@ -22,7 +22,13 @@ from sphinx.builders.dummy import DummyBuilder
from sphinx.locale import __
from sphinx.transforms.post_transforms import SphinxPostTransform
from sphinx.util import encode_uri, logging, requests
-from sphinx.util.console import darkgray, darkgreen, purple, red, turquoise # type: ignore
+from sphinx.util.console import ( # type: ignore[attr-defined]
+darkgray,
+darkgreen,
+purple,
+red,
+turquoise,
+)
from sphinx.util.nodes import get_node_line

if TYPE_CHECKING:

@@ -15,7 +15,7 @@ from sphinx.builders import Builder
from sphinx.config import Config
from sphinx.locale import __
from sphinx.util import logging
-from sphinx.util.console import darkgreen # type: ignore
+from sphinx.util.console import darkgreen # type: ignore[attr-defined]
from sphinx.util.display import progress_message
from sphinx.util.nodes import inline_all_toctrees
from sphinx.util.osutil import ensuredir, make_filename_from_project

@@ -13,7 +13,7 @@ from sphinx.builders.html import StandaloneHTMLBuilder
from sphinx.environment.adapters.toctree import global_toctree_for_doc
from sphinx.locale import __
from sphinx.util import logging
-from sphinx.util.console import darkgreen # type: ignore
+from sphinx.util.console import darkgreen # type: ignore[attr-defined]
from sphinx.util.display import progress_message
from sphinx.util.nodes import inline_all_toctrees

@@ -150,7 +150,7 @@ class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
docnames = self.env.all_docs

with progress_message(__('preparing documents')):
-self.prepare_writing(docnames) # type: ignore
+self.prepare_writing(docnames) # type: ignore[arg-type]

with progress_message(__('assembling single document')):
doctree = self.assemble_doctree()

@@ -20,7 +20,7 @@ from sphinx.environment.adapters.asset import ImageAdapter
from sphinx.errors import NoUri
from sphinx.locale import _, __
from sphinx.util import logging
-from sphinx.util.console import darkgreen # type: ignore
+from sphinx.util.console import darkgreen # type: ignore[attr-defined]
from sphinx.util.display import progress_message, status_iterator
from sphinx.util.docutils import new_document
from sphinx.util.fileutil import copy_asset_file

@@ -81,7 +81,7 @@ class TexinfoBuilder(Builder):
logger.warning(__('"texinfo_documents" config value references unknown '
'document %s'), docname)
continue
-self.document_data.append(entry) # type: ignore
+self.document_data.append(entry) # type: ignore[arg-type]
if docname.endswith(SEP + 'index'):
docname = docname[:-5]
self.titles.append((docname, entry[2]))

@@ -71,7 +71,7 @@ class XMLBuilder(Builder):
doctree = doctree.deepcopy()
for domain in self.env.domains.values():
xmlns = "xmlns:" + domain.name
-doctree[xmlns] = "https://www.sphinx-doc.org/" # type: ignore
+doctree[xmlns] = "https://www.sphinx-doc.org/" # type: ignore[index]
for node in doctree.findall(nodes.Element):
for att, value in node.attributes.items():
if isinstance(value, tuple):

@@ -21,7 +21,12 @@ from sphinx.application import Sphinx
from sphinx.errors import SphinxError, SphinxParallelError
from sphinx.locale import __
from sphinx.util import Tee
-from sphinx.util.console import color_terminal, nocolor, red, terminal_safe # type: ignore
+from sphinx.util.console import ( # type: ignore[attr-defined]
+color_terminal,
+nocolor,
+red,
+terminal_safe,
+)
from sphinx.util.docutils import docutils_namespace, patch_docutils
from sphinx.util.exceptions import format_exception_cut_frames, save_traceback
from sphinx.util.osutil import ensuredir

@@ -240,7 +245,7 @@ def _parse_arguments(argv: list[str] = sys.argv[1:]) -> argparse.Namespace:
except Exception as exc:
parser.error(__('cannot open warning file %r: %s') % (
args.warnfile, exc))
-warning = Tee(warning, warnfp) # type: ignore
+warning = Tee(warning, warnfp) # type: ignore[assignment]
error = warning

args.status = status

@@ -16,7 +16,12 @@ from os import path

import sphinx
from sphinx.cmd.build import build_main
-from sphinx.util.console import blue, bold, color_terminal, nocolor # type: ignore
+from sphinx.util.console import ( # type: ignore[attr-defined]
+blue,
+bold,
+color_terminal,
+nocolor,
+)
from sphinx.util.osutil import rmtree

try:

@@ -31,7 +31,13 @@ from docutils.utils import column_width
import sphinx.locale
from sphinx import __display_version__, package_dir
from sphinx.locale import __
-from sphinx.util.console import bold, color_terminal, colorize, nocolor, red # type: ignore
+from sphinx.util.console import ( # type: ignore[attr-defined]
+bold,
+color_terminal,
+colorize,
+nocolor,
+red,
+)
from sphinx.util.osutil import ensuredir
from sphinx.util.template import SphinxRenderer

@@ -402,7 +402,8 @@ def convert_highlight_options(app: Sphinx, config: Config) -> None:
options = config.highlight_options
if options and not all(isinstance(v, dict) for v in options.values()):
# old styled option detected because all values are not dictionary.
-config.highlight_options = {config.highlight_language: options} # type: ignore
+config.highlight_options = {config.highlight_language: # type: ignore[attr-defined]
+options}


def init_numfig_format(app: Sphinx, config: Config) -> None:

@@ -414,7 +415,7 @@ def init_numfig_format(app: Sphinx, config: Config) -> None:

# override default labels by configuration
numfig_format.update(config.numfig_format)
-config.numfig_format = numfig_format # type: ignore
+config.numfig_format = numfig_format # type: ignore[attr-defined]


def correct_copyright_year(_app: Sphinx, config: Config) -> None:

@@ -523,7 +524,7 @@ def check_primary_domain(app: Sphinx, config: Config) -> None:
primary_domain = config.primary_domain
if primary_domain and not app.registry.has_domain(primary_domain):
logger.warning(__('primary_domain %r not found, ignored.'), primary_domain)
-config.primary_domain = None # type: ignore
+config.primary_domain = None # type: ignore[attr-defined]


def check_root_doc(app: Sphinx, env: BuildEnvironment, added: set[str],

@@ -536,7 +537,7 @@ def check_root_doc(app: Sphinx, env: BuildEnvironment, added: set[str],
'contents' in app.project.docnames):
logger.warning(__('Since v2.0, Sphinx uses "index" as root_doc by default. '
'Please add "root_doc = \'contents\'" to your conf.py.'))
-app.config.root_doc = "contents" # type: ignore
+app.config.root_doc = "contents" # type: ignore[attr-defined]

return changed

@@ -1843,7 +1843,7 @@ class Symbol:
recurseInAnon=recurseInAnon,
searchInSiblings=searchInSiblings)
if Symbol.debug_lookup:
-symbols = list(symbols) # type: ignore
+symbols = list(symbols) # type: ignore[assignment]
Symbol.debug_indent -= 2
return SymbolLookupResult(symbols, parentSymbol, ident)

@@ -4652,7 +4652,7 @@ class Symbol:
recurseInAnon=recurseInAnon, correctPrimaryTemplateArgs=False,
searchInSiblings=searchInSiblings)
if Symbol.debug_lookup:
-symbols = list(symbols) # type: ignore
+symbols = list(symbols) # type: ignore[assignment]
Symbol.debug_indent -= 2
return SymbolLookupResult(symbols, parentSymbol,
identOrOp, templateParams, templateArgs)

@@ -7145,7 +7145,7 @@ class DefinitionParser(BaseParser):
templatePrefix = self._check_template_consistency(name, templatePrefix,
fullSpecShorthand=False)
res = ASTNamespace(name, templatePrefix)
-res.objectType = 'namespace' # type: ignore
+res.objectType = 'namespace' # type: ignore[attr-defined]
return res

def parse_xref_object(self) -> tuple[ASTNamespace | ASTDeclaration, bool]:

@@ -7160,7 +7160,7 @@ class DefinitionParser(BaseParser):
templatePrefix = self._check_template_consistency(name, templatePrefix,
fullSpecShorthand=True)
res1 = ASTNamespace(name, templatePrefix)
-res1.objectType = 'xref' # type: ignore
+res1.objectType = 'xref' # type: ignore[attr-defined]
return res1, True
except DefinitionError as e1:
try:

@@ -329,7 +329,7 @@ class _TypeParameterListParser(TokenProcessor):
yield a, b, c

idents: list[str] = []
-tokens: Iterable[Token] = iter(tokens) # type: ignore
+tokens: Iterable[Token] = iter(tokens) # type: ignore[no-redef]
# do not format opening brackets
for tok in tokens:
if not tok.match([token.OP, '('], [token.OP, '['], [token.OP, '{']):

@@ -487,7 +487,7 @@ def _parse_arglist(
children = _parse_annotation(param.annotation, env)
node += addnodes.desc_sig_punctuation('', ':')
node += addnodes.desc_sig_space()
-node += addnodes.desc_sig_name('', '', *children) # type: ignore
+node += addnodes.desc_sig_name('', '', *children) # type: ignore[arg-type]
if param.default is not param.empty:
if param.annotation is not param.empty:
node += addnodes.desc_sig_space()

@@ -576,7 +576,7 @@ class PyXrefMixin:
) -> Node:
# we use inliner=None to make sure we get the old behaviour with a single
# pending_xref node
-result = super().make_xref(rolename, domain, target, # type: ignore
+result = super().make_xref(rolename, domain, target, # type: ignore[misc]
innernode, contnode,
env, inliner=None, location=None)
if isinstance(result, pending_xref):

@@ -1102,7 +1102,7 @@ def warn_missing_reference(app: Sphinx, domain: Domain, node: pending_xref,
return None
else:
target = node['reftarget']
-if target not in domain.anonlabels: # type: ignore
+if target not in domain.anonlabels: # type: ignore[attr-defined]
msg = __('undefined label: %r')
else:
msg = __('Failed to create a cross reference. A title or caption not found: %r')

@@ -377,7 +377,7 @@ class BuildEnvironment:

This possibly comes from a parallel build process.
"""
-docnames = set(docnames) # type: ignore
+docnames = set(docnames) # type: ignore[assignment]
for docname in docnames:
self.all_docs[docname] = other.all_docs[docname]
self.included[docname] = other.included[docname]

@@ -231,7 +231,7 @@ def _entries_from_toctree(
# nodes with length 1 don't have any children anyway
if len(top_level) > 1:
if subtrees := list(top_level.findall(addnodes.toctree)):
-top_level[1][:] = subtrees # type: ignore
+top_level[1][:] = subtrees # type: ignore[index]
else:
top_level.pop(1)
# resolve all sub-toctrees

@@ -32,7 +32,7 @@ class MetadataCollector(EnvironmentCollector):
return
elif isinstance(doctree[index], nodes.docinfo):
md = app.env.metadata[app.env.docname]
-for node in doctree[index]: # type: ignore
+for node in doctree[index]: # type: ignore[attr-defined]
# nodes are multiply inherited...
if isinstance(node, nodes.authors):
authors = cast(list[nodes.author], node)

@@ -50,7 +50,7 @@ class ExtensionError(SphinxError):
self.modname = modname

@property
-def category(self) -> str: # type: ignore
+def category(self) -> str: # type: ignore[override]
if self.modname:
return 'Extension error (%s)' % self.modname
else:

@@ -266,7 +266,7 @@ class ObjectMember(tuple):
"""

def __new__(cls, name: str, obj: Any, **kwargs: Any) -> Any:
-return super().__new__(cls, (name, obj)) # type: ignore
+return super().__new__(cls, (name, obj)) # type: ignore[arg-type]

def __init__(self, name: str, obj: Any, docstring: str | None = None,
class_: Any = None, skipped: bool = False) -> None:

@@ -338,7 +338,7 @@ class Documenter:
# extra signature items (arguments and return annotation,
# also set after resolve_name succeeds)
self.args: str | None = None
-self.retann = ''
+self.retann: str = ''
# the object to document (set after import_object succeeds)
self.object: Any = None
self.object_name = ''

@@ -1172,7 +1172,7 @@ class DocstringSignatureMixin:

def _find_signature(self) -> tuple[str | None, str | None] | None:
# candidates of the object name
-valid_names = [self.objpath[-1]] # type: ignore
+valid_names = [self.objpath[-1]] # type: ignore[attr-defined]
if isinstance(self, ClassDocumenter):
valid_names.append('__init__')
if hasattr(self.object, '__mro__'):

@@ -1204,7 +1204,8 @@ class DocstringSignatureMixin:
break

# re-prepare docstring to ignore more leading indentation
-tab_width = self.directive.state.document.settings.tab_width # type: ignore
+directive = self.directive # type: ignore[attr-defined]
+tab_width = directive.state.document.settings.tab_width
self._new_docstrings[i] = prepare_docstring('\n'.join(doclines[j + 1:]),
tab_width)

@@ -1227,13 +1228,15 @@ class DocstringSignatureMixin:
return super().get_doc() # type: ignore[misc]

def format_signature(self, **kwargs: Any) -> str:
-if self.args is None and self.config.autodoc_docstring_signature: # type: ignore
+self.args: str | None
+if (self.args is None
+and self.config.autodoc_docstring_signature): # type: ignore[attr-defined]
# only act if a signature is not explicitly given already, and if
# the feature is enabled
result = self._find_signature()
if result is not None:
self.args, self.retann = result
-sig = super().format_signature(**kwargs) # type: ignore
+sig = super().format_signature(**kwargs) # type: ignore[misc]
if self._signatures:
return "\n".join([sig] + self._signatures)
else:

@@ -1261,7 +1264,7 @@ class DocstringStripSignatureMixin(DocstringSignatureMixin):
return super().format_signature(**kwargs)


-class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore
+class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore[misc]
"""
Specialized Documenter subclass for functions.
"""

@@ -1379,7 +1382,8 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
if params[0].annotation is Parameter.empty:
params[0] = params[0].replace(annotation=typ)
try:
-dummy.__signature__ = sig.replace(parameters=params) # type: ignore
+dummy.__signature__ = sig.replace( # type: ignore[attr-defined]
+parameters=params)
return dummy
except (AttributeError, TypeError):
# failed to update signature (ex. built-in or extension types)

@@ -1419,7 +1423,7 @@ _CLASS_NEW_BLACKLIST = [
]


-class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore
+class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore[misc]
"""
Specialized Documenter subclass for classes.
"""

@@ -1707,7 +1711,7 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
self.config.autodoc_inherit_docstrings)
if not want_all:
if not self.options.members:
-return False, [] # type: ignore
+return False, [] # type: ignore[return-value]
# specific members given
selected = []
for name in self.options.members:

@@ -1947,7 +1951,7 @@ class UninitializedGlobalVariableMixin(DataDocumenterMixinBase):

def import_object(self, raiseerror: bool = False) -> bool:
try:
-return super().import_object(raiseerror=True) # type: ignore
+return super().import_object(raiseerror=True) # type: ignore[misc]
except ImportError as exc:
# annotation only instance variable (PEP-526)
try:

@@ -1976,7 +1980,7 @@ class UninitializedGlobalVariableMixin(DataDocumenterMixinBase):
if self.object is UNINITIALIZED_ATTR:
return []
else:
-return super().get_doc() # type: ignore
+return super().get_doc() # type: ignore[misc]


class DataDocumenter(GenericAliasMixin,

@@ -2099,7 +2103,7 @@ class DataDocumenter(GenericAliasMixin,
super().add_content(more_content)


-class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type: ignore
+class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type: ignore[misc]
"""
Specialized Documenter subclass for methods (normal, static and class).
"""

@@ -2267,7 +2271,8 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
if params[1].annotation is Parameter.empty:
params[1] = params[1].replace(annotation=typ)
try:
-dummy.__signature__ = sig.replace(parameters=params) # type: ignore
+dummy.__signature__ = sig.replace( # type: ignore[attr-defined]
+parameters=params)
return dummy
except (AttributeError, TypeError):
# failed to update signature (ex. built-in or extension types)

@@ -2322,7 +2327,7 @@ class NonDataDescriptorMixin(DataDocumenterMixinBase):
"""

def import_object(self, raiseerror: bool = False) -> bool:
-ret = super().import_object(raiseerror) # type: ignore
+ret = super().import_object(raiseerror) # type: ignore[misc]
if ret and not inspect.isattributedescriptor(self.object):
self.non_data_descriptor = True
else:

@@ -2340,7 +2345,7 @@ class NonDataDescriptorMixin(DataDocumenterMixinBase):
# to display
return None
else:
-return super().get_doc() # type: ignore
+return super().get_doc() # type: ignore[misc]


class SlotsMixin(DataDocumenterMixinBase):

@@ -2359,7 +2364,7 @@ class SlotsMixin(DataDocumenterMixinBase):
return False

def import_object(self, raiseerror: bool = False) -> bool:
-ret = super().import_object(raiseerror) # type: ignore
+ret = super().import_object(raiseerror) # type: ignore[misc]
if self.isslotsattribute():
self.object = SLOTSATTR

@@ -2385,7 +2390,7 @@ class SlotsMixin(DataDocumenterMixinBase):
(self.parent.__qualname__, exc), type='autodoc')
return []
else:
-return super().get_doc() # type: ignore
+return super().get_doc() # type: ignore[misc]


class RuntimeInstanceAttributeMixin(DataDocumenterMixinBase):

@@ -2405,7 +2410,7 @@ class RuntimeInstanceAttributeMixin(DataDocumenterMixinBase):
def is_runtime_instance_attribute(self, parent: Any) -> bool:
"""Check the subject is an attribute defined in __init__()."""
# An instance variable defined in __init__().
-if self.get_attribute_comment(parent, self.objpath[-1]): # type: ignore
+if self.get_attribute_comment(parent, self.objpath[-1]): # type: ignore[attr-defined]
return True
if self.is_runtime_instance_attribute_not_commented(parent):
return True

@@ -2433,12 +2438,12 @@ class RuntimeInstanceAttributeMixin(DataDocumenterMixinBase):
"""Check the existence of runtime instance attribute after failing to import the
attribute."""
try:
-return super().import_object(raiseerror=True) # type: ignore
+return super().import_object(raiseerror=True) # type: ignore[misc]
except ImportError as exc:
try:
with mock(self.config.autodoc_mock_imports):
ret = import_object(self.modname, self.objpath[:-1], 'class',
-attrgetter=self.get_attr, # type: ignore
+attrgetter=self.get_attr, # type: ignore[attr-defined]
warningiserror=self.config.autodoc_warningiserror)
parent = ret[3]
if self.is_runtime_instance_attribute(parent):

@@ -2463,7 +2468,7 @@ class RuntimeInstanceAttributeMixin(DataDocumenterMixinBase):
self.is_runtime_instance_attribute_not_commented(self.parent)):
return None
else:
-return super().get_doc() # type: ignore
+return super().get_doc() # type: ignore[misc]


class UninitializedInstanceAttributeMixin(DataDocumenterMixinBase):

@@ -2486,11 +2491,11 @@ class UninitializedInstanceAttributeMixin(DataDocumenterMixinBase):
"""Check the exisitence of uninitialized instance attribute when failed to import
the attribute."""
try:
-return super().import_object(raiseerror=True) # type: ignore
+return super().import_object(raiseerror=True) # type: ignore[misc]
except ImportError as exc:
try:
ret = import_object(self.modname, self.objpath[:-1], 'class',
-attrgetter=self.get_attr, # type: ignore
+attrgetter=self.get_attr, # type: ignore[attr-defined]
warningiserror=self.config.autodoc_warningiserror)
parent = ret[3]
if self.is_uninitialized_instance_attribute(parent):

@@ -2513,10 +2518,10 @@ class UninitializedInstanceAttributeMixin(DataDocumenterMixinBase):
def get_doc(self) -> list[list[str]] | None:
if self.object is UNINITIALIZED_ATTR:
return None
-return super().get_doc() # type: ignore
+return super().get_doc() # type: ignore[misc]


-class AttributeDocumenter(GenericAliasMixin, SlotsMixin, # type: ignore
+class AttributeDocumenter(GenericAliasMixin, SlotsMixin, # type: ignore[misc]
RuntimeInstanceAttributeMixin,
UninitializedInstanceAttributeMixin, NonDataDescriptorMixin,
DocstringStripSignatureMixin, ClassLevelDocumenter):

@@ -2657,10 +2662,10 @@ class AttributeDocumenter(GenericAliasMixin, SlotsMixin, # type: ignore
# a docstring from the value which descriptor returns unexpectedly.
# ref: https://github.com/sphinx-doc/sphinx/issues/7805
orig = self.config.autodoc_inherit_docstrings
-self.config.autodoc_inherit_docstrings = False # type: ignore
+self.config.autodoc_inherit_docstrings = False # type: ignore[attr-defined]
return super().get_doc()
finally:
-self.config.autodoc_inherit_docstrings = orig # type: ignore
+self.config.autodoc_inherit_docstrings = orig # type: ignore[attr-defined]

def add_content(self, more_content: StringList | None) -> None:
# Disable analyzing attribute comment on Documenter.add_content() to control it on

@@ -2673,7 +2678,8 @@ class AttributeDocumenter(GenericAliasMixin, SlotsMixin, # type: ignore
super().add_content(more_content)


-class PropertyDocumenter(DocstringStripSignatureMixin, ClassLevelDocumenter): # type: ignore
+class PropertyDocumenter(DocstringStripSignatureMixin, # type: ignore[misc]
+ClassLevelDocumenter):
"""
Specialized Documenter subclass for properties.
"""

@@ -111,7 +111,8 @@ class AutodocDirective(SphinxDirective):
reporter = self.state.document.reporter

try:
-source, lineno = reporter.get_source_and_line(self.lineno) # type: ignore
+source, lineno = reporter.get_source_and_line( # type: ignore[attr-defined]
+self.lineno)
except AttributeError:
source, lineno = (None, None)
logger.debug('[autodoc] %s:%s: input:\n%s', source, lineno, self.block_text)

@@ -38,10 +38,10 @@ def get_function_def(obj: Any) -> ast.FunctionDef | None:
# subject is placed inside class or block. To read its docstring,
# this adds if-block before the declaration.
module = ast.parse('if True:\n' + source)
-return module.body[0].body[0] # type: ignore
+return module.body[0].body[0] # type: ignore[attr-defined]
else:
module = ast.parse(source)
-return module.body[0] # type: ignore
+return module.body[0] # type: ignore[return-value]
except (OSError, TypeError): # failed to load source code
return None

@@ -66,9 +66,10 @@ def signature_from_ast(node: ast.FunctionDef, bound_method: bool,
params.pop(0)

# merge type_comment into signature
-if not_suppressed(type_comment.argtypes): # type: ignore
+if not_suppressed(type_comment.argtypes): # type: ignore[attr-defined]
for i, param in enumerate(params):
-params[i] = param.replace(annotation=type_comment.argtypes[i]) # type: ignore
+params[i] = param.replace(
+annotation=type_comment.argtypes[i]) # type: ignore[attr-defined]

if node.returns:
return Signature(params, return_annotation=node.returns)

@@ -161,7 +161,7 @@ class FakeDirective(DocumenterBridge):
document = Struct(settings=settings)
app = FakeApplication()
app.config.add('autodoc_class_signature', 'mixed', True, None)
-env = BuildEnvironment(app) # type: ignore
+env = BuildEnvironment(app) # type: ignore[arg-type]
state = Struct(document=document)
super().__init__(env, None, Options(), 0, state)

@@ -731,13 +731,15 @@ def main(argv: list[str] = sys.argv[1:]) -> None:
sphinx.locale.init_console()

app = DummyApplication(sphinx.locale.get_translator())
-logging.setup(app, sys.stdout, sys.stderr) # type: ignore
+logging.setup(app, sys.stdout, sys.stderr) # type: ignore[arg-type]
setup_documenters(app)
args = get_parser().parse_args(argv)

if args.templates:
app.config.templates_path.append(path.abspath(args.templates))
-app.config.autosummary_ignore_module_all = not args.respect_module_all # type: ignore
+app.config.autosummary_ignore_module_all = ( # type: ignore[attr-defined]
+not args.respect_module_all
+)

generate_autosummary_docs(args.source_file, args.output_dir,
'.' + args.suffix,

@@ -20,7 +20,7 @@ from sphinx.application import Sphinx
from sphinx.builders import Builder
from sphinx.locale import __
from sphinx.util import logging
-from sphinx.util.console import red # type: ignore
+from sphinx.util.console import red # type: ignore[attr-defined]
from sphinx.util.inspect import safe_getattr

if TYPE_CHECKING:

@@ -23,7 +23,7 @@ import sphinx
from sphinx.builders import Builder
from sphinx.locale import __
from sphinx.util import logging
-from sphinx.util.console import bold # type: ignore
+from sphinx.util.console import bold # type: ignore[attr-defined]
from sphinx.util.docutils import SphinxDirective
from sphinx.util.osutil import relpath
from sphinx.util.typing import OptionSpec

@@ -238,7 +238,7 @@ class TestCode:


class SphinxDocTestRunner(doctest.DocTestRunner):
-def summarize(self, out: Callable, verbose: bool | None = None, # type: ignore
+def summarize(self, out: Callable, verbose: bool | None = None, # type: ignore[override]
) -> tuple[int, int]:
string_io = StringIO()
old_stdout = sys.stdout

@@ -253,7 +253,8 @@ class SphinxDocTestRunner(doctest.DocTestRunner):
def _DocTestRunner__patched_linecache_getlines(self, filename: str,
module_globals: Any = None) -> Any:
# this is overridden from DocTestRunner adding the try-except below
-m = self._DocTestRunner__LINECACHE_FILENAME_RE.match(filename) # type: ignore
+m = self._DocTestRunner__LINECACHE_FILENAME_RE.match( # type: ignore[attr-defined]
+filename)
if m and m.group('name') == self.test.name:
try:
example = self.test.examples[int(m.group('examplenum'))]

@@ -264,7 +265,8 @@ class SphinxDocTestRunner(doctest.DocTestRunner):
pass
else:
return example.source.splitlines(True)
-return self.save_linecache_getlines(filename, module_globals) # type: ignore
+return self.save_linecache_getlines( # type: ignore[attr-defined]
+filename, module_globals)


# the new builder -- use sphinx-build.py -b doctest to run

@@ -286,7 +288,7 @@ class DocTestBuilder(Builder):
# for doctest examples but unusable for multi-statement code such
# as setup code -- to be able to use doctest error reporting with
# that code nevertheless, we monkey-patch the "compile" it uses.
-doctest.compile = self.compile # type: ignore
+doctest.compile = self.compile # type: ignore[attr-defined]

sys.path[0:0] = self.config.doctest_path

@@ -404,8 +406,8 @@ Doctest summary
self.cleanup_runner = SphinxDocTestRunner(verbose=False,
optionflags=self.opt)

-self.test_runner._fakeout = self.setup_runner._fakeout # type: ignore
-self.cleanup_runner._fakeout = self.setup_runner._fakeout # type: ignore
+self.test_runner._fakeout = self.setup_runner._fakeout # type: ignore[attr-defined]
+self.cleanup_runner._fakeout = self.setup_runner._fakeout # type: ignore[attr-defined]

if self.config.doctest_test_doctest_blocks:
def condition(node: Node) -> bool:

@@ -532,7 +534,7 @@ Doctest summary
# disable <BLANKLINE> processing as it is not needed
options[doctest.DONT_ACCEPT_BLANKLINE] = True
# find out if we're testing an exception
-m = parser._EXCEPTION_RE.match(output) # type: ignore
+m = parser._EXCEPTION_RE.match(output) # type: ignore[attr-defined]
if m:
exc_msg = m.group('msg')
else:

@@ -293,8 +293,8 @@ def render_dot(self: SphinxTranslator, code: str, options: dict, format: str,
logger.warning(__('dot command %r cannot be run (needed for graphviz '
'output), check the graphviz_dot setting'), graphviz_dot)
if not hasattr(self.builder, '_graphviz_warned_dot'):
-self.builder._graphviz_warned_dot = {} # type: ignore
-self.builder._graphviz_warned_dot[graphviz_dot] = True # type: ignore
+self.builder._graphviz_warned_dot = {} # type: ignore[attr-defined]
+self.builder._graphviz_warned_dot[graphviz_dot] = True # type: ignore[attr-defined]
return None, None
except CalledProcessError as exc:
raise GraphvizError(__('dot exited with error:\n[stderr]\n%r\n'

@@ -119,9 +119,9 @@ def ensure_tempdir(builder: Builder) -> str:
just removing the whole directory (see cleanup_tempdir)
"""
if not hasattr(builder, '_imgmath_tempdir'):
-builder._imgmath_tempdir = tempfile.mkdtemp() # type: ignore
+builder._imgmath_tempdir = tempfile.mkdtemp() # type: ignore[attr-defined]

-return builder._imgmath_tempdir # type: ignore
+return builder._imgmath_tempdir # type: ignore[attr-defined]


def compile_math(latex: str, builder: Builder) -> str:

@@ -262,7 +262,7 @@ def render_math(
try:
dvipath = compile_math(latex, self.builder)
except InvokeError:
-self.builder._imgmath_warned_latex = True # type: ignore
+self.builder._imgmath_warned_latex = True # type: ignore[attr-defined]
return None, None

# .dvi -> .png/.svg

@@ -272,7 +272,7 @@ def render_math(
elif image_format == 'svg':
depth = convert_dvi_to_svg(dvipath, self.builder, generated_path)
except InvokeError:
-self.builder._imgmath_warned_image_translator = True # type: ignore
+self.builder._imgmath_warned_image_translator = True # type: ignore[attr-defined]
return None, None

return generated_path, depth

@@ -374,8 +374,8 @@ class InheritanceDiagram(SphinxDirective):
# references to real URLs later. These nodes will eventually be
# removed from the doctree after we're done with them.
for name in graph.get_all_class_names():
-refnodes, x = class_role( # type: ignore
-'class', ':class:`%s`' % name, name, 0, self.state) # type: ignore
+refnodes, x = class_role( # type: ignore[call-arg,misc]
+'class', ':class:`%s`' % name, name, 0, self.state) # type: ignore[arg-type]
node.extend(refnodes)
# Store the graph object so we can use it to generate the
# dot file later

@@ -68,10 +68,10 @@ class InventoryAdapter:

if not hasattr(env, 'intersphinx_cache'):
# initial storage when fetching inventories before processing
-self.env.intersphinx_cache = {} # type: ignore
+self.env.intersphinx_cache = {} # type: ignore[attr-defined]

-self.env.intersphinx_inventory = {} # type: ignore
-self.env.intersphinx_named_inventory = {} # type: ignore
+self.env.intersphinx_inventory = {} # type: ignore[attr-defined]
+self.env.intersphinx_named_inventory = {} # type: ignore[attr-defined]

@property
def cache(self) -> dict[str, InventoryCacheEntry]:

@@ -83,19 +83,19 @@ class InventoryAdapter:
- Element two is a time value for cache invalidation, a float
- Element three is the loaded remote inventory, type Inventory
"""
-return self.env.intersphinx_cache # type: ignore
+return self.env.intersphinx_cache # type: ignore[attr-defined]

@property
def main_inventory(self) -> Inventory:
-return self.env.intersphinx_inventory # type: ignore
+return self.env.intersphinx_inventory # type: ignore[attr-defined]

@property
def named_inventory(self) -> dict[str, Inventory]:
-return self.env.intersphinx_named_inventory # type: ignore
+return self.env.intersphinx_named_inventory # type: ignore[attr-defined]

def clear(self) -> None:
-self.env.intersphinx_inventory.clear() # type: ignore
-self.env.intersphinx_named_inventory.clear() # type: ignore
+self.env.intersphinx_inventory.clear() # type: ignore[attr-defined]
+self.env.intersphinx_named_inventory.clear() # type: ignore[attr-defined]


def _strip_basic_auth(url: str) -> str:

@@ -720,7 +720,7 @@ def inspect_main(argv: list[str]) -> None:

try:
filename = argv[0]
-invdata = fetch_inventory(MockApp(), '', filename) # type: ignore
+invdata = fetch_inventory(MockApp(), '', filename) # type: ignore[arg-type]
for key in sorted(invdata or {}):
print(key)
for entry, einfo in sorted(invdata[key].items()):

@@ -453,7 +453,7 @@ def _skip_member(app: Sphinx, what: str, name: str, obj: Any,
except Exception:
cls_is_owner = False
else:
-cls_is_owner = (cls and hasattr(cls, name) and # type: ignore
+cls_is_owner = (cls and hasattr(cls, name) and # type: ignore[assignment]
name in cls.__dict__)
else:
cls_is_owner = False

@@ -166,7 +166,7 @@ class GoogleDocstring:
else:
from sphinx.ext.napoleon import Config

-self._config = Config() # type: ignore
+self._config = Config() # type: ignore[assignment]

if not what:
if inspect.isclass(obj):

@@ -86,10 +86,10 @@ def is_supported_builder(builder: Builder) -> bool:
def doctree_read(app: Sphinx, doctree: Node) -> None:
env = app.builder.env
if not hasattr(env, '_viewcode_modules'):
-env._viewcode_modules = {} # type: ignore
+env._viewcode_modules = {} # type: ignore[attr-defined]

def has_tag(modname: str, fullname: str, docname: str, refname: str) -> bool:
-entry = env._viewcode_modules.get(modname, None) # type: ignore
+entry = env._viewcode_modules.get(modname, None) # type: ignore[attr-defined]
if entry is False:
return False

@@ -99,7 +99,7 @@ def doctree_read(app: Sphinx, doctree: Node) -> None:
analyzer = ModuleAnalyzer.for_module(modname)
analyzer.find_tags()
except Exception:
-env._viewcode_modules[modname] = False # type: ignore
+env._viewcode_modules[modname] = False # type: ignore[attr-defined]
return False

code = analyzer.code

@@ -109,7 +109,7 @@ def doctree_read(app: Sphinx, doctree: Node) -> None:

if entry is None or entry[0] != code:
entry = code, tags, {}, refname
-env._viewcode_modules[modname] = entry # type: ignore
+env._viewcode_modules[modname] = entry # type: ignore[attr-defined]
_, tags, used, _ = entry
if fullname in tags:
used[fullname] = docname

@@ -153,14 +153,14 @@ def env_merge_info(app: Sphinx, env: BuildEnvironment, docnames: Iterable[str],
return
# create a _viewcode_modules dict on the main environment
if not hasattr(env, '_viewcode_modules'):
-env._viewcode_modules = {} # type: ignore
+env._viewcode_modules = {} # type: ignore[attr-defined]
# now merge in the information from the subprocess
for modname, entry in other._viewcode_modules.items():
-if modname not in env._viewcode_modules: # type: ignore
-env._viewcode_modules[modname] = entry # type: ignore
+if modname not in env._viewcode_modules: # type: ignore[attr-defined]
+env._viewcode_modules[modname] = entry # type: ignore[attr-defined]
else:
-if env._viewcode_modules[modname]: # type: ignore
-used = env._viewcode_modules[modname][2] # type: ignore
+if env._viewcode_modules[modname]: # type: ignore[attr-defined]
+used = env._viewcode_modules[modname][2] # type: ignore[attr-defined]
for fullname, docname in entry[2].items():
if fullname not in used:
used[fullname] = docname

@@ -244,7 +244,7 @@ def collect_pages(app: Sphinx) -> Generator[tuple[str, dict[str, Any], str], Non
return
if not is_supported_builder(app.builder):
return
-highlighter = app.builder.highlighter # type: ignore
+highlighter = app.builder.highlighter # type: ignore[attr-defined]
urito = app.builder.get_relative_uri

modnames = set(env._viewcode_modules)

@@ -56,7 +56,7 @@ def _todim(val: int | str) -> str:
return 'initial'
elif str(val).isdigit():
return '0' if int(val) == 0 else '%spx' % val
-return val # type: ignore
+return val # type: ignore[return-value]


def _slice_index(values: list, slices: int) -> Iterator[list]:

@@ -194,7 +194,7 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
if use_i18n:
self.environment.install_gettext_translations(builder.app.translator)

-def render(self, template: str, context: dict) -> str: # type: ignore
+def render(self, template: str, context: dict) -> str: # type: ignore[override]
return self.environment.get_template(template).render(context)

def render_string(self, source: str, context: dict) -> str:

@@ -60,7 +60,7 @@ class RSTParser(docutils.parsers.rst.Parser, Parser):

def parse(self, inputstring: str | StringList, document: nodes.document) -> None:
"""Parse text and generate a document tree."""
-self.setup_parse(inputstring, document) # type: ignore
+self.setup_parse(inputstring, document) # type: ignore[arg-type]
self.statemachine = states.RSTStateMachine(
state_classes=self.state_classes,
initial_state=self.initial_state,

@@ -164,7 +164,8 @@ class _UnparseVisitor(ast.NodeVisitor):
)

if is_simple_tuple(node.slice):
-elts = ", ".join(self.visit(e) for e in node.slice.elts) # type: ignore
+elts = ", ".join(self.visit(e)
+for e in node.slice.elts) # type: ignore[attr-defined]
return f"{self.visit(node.value)}[{elts}]"
return f"{self.visit(node.value)}[{self.visit(node.slice)}]"

@@ -28,7 +28,7 @@ def get_assign_targets(node: ast.AST) -> list[ast.expr]:
if isinstance(node, ast.Assign):
return node.targets
else:
-return [node.target] # type: ignore
+return [node.target] # type: ignore[attr-defined]


def get_lvar_names(node: ast.AST, self: ast.arg | None = None) -> list[str]:

@@ -47,13 +47,13 @@ def get_lvar_names(node: ast.AST, self: ast.arg | None = None) -> list[str]:
if node_name in ('Constant', 'Index', 'Slice', 'Subscript'):
raise TypeError('%r does not create new variable' % node)
if node_name == 'Name':
-if self is None or node.id == self_id: # type: ignore
-return [node.id] # type: ignore
+if self is None or node.id == self_id: # type: ignore[attr-defined]
+return [node.id] # type: ignore[attr-defined]
else:
raise TypeError('The assignment %r is not instance variable' % node)
elif node_name in ('Tuple', 'List'):
members = []
-for elt in node.elts: # type: ignore
+for elt in node.elts: # type: ignore[attr-defined]
try:
members.extend(get_lvar_names(elt, self))
except TypeError:

@@ -65,13 +65,13 @@ def get_lvar_names(node: ast.AST, self: ast.arg | None = None) -> list[str]:
self and node.value.id == self_id # type: ignore[attr-defined]
):
# instance variable
-return ["%s" % get_lvar_names(node.attr, self)[0]] # type: ignore
+return ["%s" % get_lvar_names(node.attr, self)[0]] # type: ignore[attr-defined]
else:
raise TypeError('The assignment %r is not instance variable' % node)
elif node_name == 'str':
-return [node] # type: ignore
+return [node] # type: ignore[list-item]
elif node_name == 'Starred':
-return get_lvar_names(node.value, self) # type: ignore
+return get_lvar_names(node.value, self) # type: ignore[attr-defined]
else:
raise NotImplementedError('Unexpected node name %r' % node_name)

@@ -363,7 +363,8 @@ class VariableCommentPicker(ast.NodeVisitor):
self.add_variable_annotation(varname, node.annotation)
elif hasattr(node, 'type_comment') and node.type_comment:
for varname in varnames:
-self.add_variable_annotation(varname, node.type_comment) # type: ignore
+self.add_variable_annotation(
+varname, node.type_comment) # type: ignore[arg-type]

# check comments after assignment
parser = AfterCommentParser([current_line[node.col_offset:]] +

@@ -398,7 +399,7 @@ class VariableCommentPicker(ast.NodeVisitor):

def visit_AnnAssign(self, node: ast.AnnAssign) -> None:
"""Handles AnnAssign node and pick up a variable comment."""
-self.visit_Assign(node) # type: ignore
+self.visit_Assign(node) # type: ignore[arg-type]

def visit_Expr(self, node: ast.Expr) -> None:
"""Handles Expr node and pick up a comment if string."""

@@ -458,7 +459,7 @@ class VariableCommentPicker(ast.NodeVisitor):

def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> None:
"""Handles AsyncFunctionDef node and set context."""
-self.visit_FunctionDef(node) # type: ignore
+self.visit_FunctionDef(node) # type: ignore[arg-type]


class DefinitionFinder(TokenProcessor):

@@ -384,7 +384,7 @@ def code_role(name: str, rawtext: str, text: str, lineno: int,
return [node], []


-code_role.options = { # type: ignore
+code_role.options = { # type: ignore[attr-defined]
'class': docutils.parsers.rst.directives.class_option,
'language': docutils.parsers.rst.directives.unchanged,
}

@@ -61,7 +61,7 @@ class SharedResult:

@pytest.fixture()
def app_params(request: Any, test_params: dict, shared_result: SharedResult,
-sphinx_test_tempdir: str, rootdir: str) -> tuple[dict, dict]:
+sphinx_test_tempdir: str, rootdir: str) -> _app_params:
"""
Parameters that are specified by 'pytest.mark.sphinx' for
sphinx.application.Sphinx initialization

@@ -99,7 +99,10 @@ def app_params(request: Any, test_params: dict, shared_result: SharedResult,
testroot_path = rootdir / ('test-' + testroot)
shutil.copytree(testroot_path, srcdir)

-return namedtuple('app_params', 'args,kwargs')(args, kwargs) # type: ignore
+return _app_params(args, kwargs)


+_app_params = namedtuple('_app_params', 'args,kwargs')


@pytest.fixture()

@@ -108,8 +108,8 @@ class SphinxTestApp(application.Sphinx):
warningiserror = False

self._saved_path = sys.path[:]
-self._saved_directives = directives._directives.copy() # type: ignore
-self._saved_roles = roles._roles.copy() # type: ignore
+self._saved_directives = directives._directives.copy() # type: ignore[attr-defined]
+self._saved_roles = roles._roles.copy() # type: ignore[attr-defined]

self._saved_nodeclasses = {v for v in dir(nodes.GenericNodeVisitor)
if v.startswith('visit_')}

@@ -127,8 +127,8 @@ class SphinxTestApp(application.Sphinx):
locale.translators.clear()
sys.path[:] = self._saved_path
sys.modules.pop('autodoc_fodder', None)
-directives._directives = self._saved_directives # type: ignore
-roles._roles = self._saved_roles # type: ignore
+directives._directives = self._saved_directives # type: ignore[attr-defined]
+roles._roles = self._saved_roles # type: ignore[attr-defined]
for method in dir(nodes.GenericNodeVisitor):
if method.startswith('visit_') and \
method not in self._saved_nodeclasses:

@@ -5,7 +5,7 @@ from typing import Any, Callable, TypeVar

from sphinx.locale import __
from sphinx.util import logging
-from sphinx.util.console import bold # type: ignore
+from sphinx.util.console import bold # type: ignore[attr-defined]

if False:
from collections.abc import Iterable, Iterator

@@ -61,13 +61,13 @@ additional_nodes: set[type[Element]] = set()
def docutils_namespace() -> Generator[None, None, None]:
"""Create namespace for reST parsers."""
try:
-_directives = copy(directives._directives) # type: ignore
-_roles = copy(roles._roles) # type: ignore
+_directives = copy(directives._directives) # type: ignore[attr-defined]
+_roles = copy(roles._roles) # type: ignore[attr-defined]

yield
finally:
-directives._directives = _directives # type: ignore
-roles._roles = _roles # type: ignore
+directives._directives = _directives # type: ignore[attr-defined]
+roles._roles = _roles # type: ignore[attr-defined]

for node in list(additional_nodes):
unregister_node(node)

@@ -76,7 +76,7 @@ def docutils_namespace() -> Generator[None, None, None]:

def is_directive_registered(name: str) -> bool:
"""Check the *name* directive is already registered."""
-return name in directives._directives # type: ignore
+return name in directives._directives # type: ignore[attr-defined]


def register_directive(name: str, directive: type[Directive]) -> None:

@@ -90,7 +90,7 @@ def register_directive(name: str, directive: type[Directive]) -> None:

def is_role_registered(name: str) -> bool:
"""Check the *name* role is already registered."""
-return name in roles._roles # type: ignore
+return name in roles._roles # type: ignore[attr-defined]


def register_role(name: str, role: RoleFunction) -> None:

@@ -104,7 +104,7 @@ def register_role(name: str, role: RoleFunction) -> None:

def unregister_role(name: str) -> None:
"""Unregister a role from docutils."""
-roles._roles.pop(name, None) # type: ignore
+roles._roles.pop(name, None) # type: ignore[attr-defined]


def is_node_registered(node: type[Element]) -> bool:

@@ -119,7 +119,7 @@ def register_node(node: type[Element]) -> None:
|
||||
inside ``docutils_namespace()`` to prevent side-effects.
|
||||
"""
|
||||
if not hasattr(nodes.GenericNodeVisitor, 'visit_' + node.__name__):
|
||||
nodes._add_node_class_names([node.__name__]) # type: ignore
|
||||
nodes._add_node_class_names([node.__name__]) # type: ignore[attr-defined]
|
||||
additional_nodes.add(node)
|
||||
|
||||
|
||||
@@ -376,17 +376,17 @@ def switch_source_input(state: State, content: StringList) -> Generator[None, No
    """Switch current source input of state temporarily."""
    try:
        # remember the original ``get_source_and_line()`` method
        get_source_and_line = state.memo.reporter.get_source_and_line  # type: ignore
        gsal = state.memo.reporter.get_source_and_line  # type: ignore[attr-defined]

        # replace it by new one
        state_machine = StateMachine([], None)  # type: ignore[arg-type]
        state_machine.input_lines = content
        state.memo.reporter.get_source_and_line = state_machine.get_source_and_line  # type: ignore  # noqa: E501
        state.memo.reporter.get_source_and_line = state_machine.get_source_and_line  # type: ignore[attr-defined]  # noqa: E501

        yield
    finally:
        # restore the method
        state.memo.reporter.get_source_and_line = get_source_and_line  # type: ignore
        state.memo.reporter.get_source_and_line = gsal  # type: ignore[attr-defined]


class SphinxFileOutput(FileOutput):
@@ -496,7 +496,7 @@ class SphinxRole:
    def get_source_info(self, lineno: int | None = None) -> tuple[str, int]:
        if lineno is None:
            lineno = self.lineno
        return self.inliner.reporter.get_source_and_line(lineno)  # type: ignore
        return self.inliner.reporter.get_source_and_line(lineno)  # type: ignore[attr-defined]

    def set_source_info(self, node: Node, lineno: int | None = None) -> None:
        node.source, node.line = self.get_source_info(lineno)

@@ -320,7 +320,8 @@ def isproperty(obj: Any) -> bool:

def isgenericalias(obj: Any) -> bool:
    """Check if the object is GenericAlias."""
    return isinstance(obj, (types.GenericAlias, typing._BaseGenericAlias))  # type: ignore
    return isinstance(
        obj, (types.GenericAlias, typing._BaseGenericAlias))  # type: ignore[attr-defined]


def safe_getattr(obj: Any, name: str, *defargs: Any) -> Any:
@@ -721,14 +722,15 @@ def signature_from_ast(node: ast.FunctionDef, code: str = '') -> inspect.Signatu
        positionals = len(args.args)

    for _ in range(len(defaults), positionals):
        defaults.insert(0, Parameter.empty)  # type: ignore
        defaults.insert(0, Parameter.empty)  # type: ignore[arg-type]

    if hasattr(args, "posonlyargs"):
        for i, arg in enumerate(args.posonlyargs):
            if defaults[i] is Parameter.empty:
                default = Parameter.empty
            else:
                default = DefaultValue(ast_unparse(defaults[i], code))  # type: ignore
                default = DefaultValue(
                    ast_unparse(defaults[i], code))  # type: ignore[assignment]

            annotation = ast_unparse(arg.annotation, code) or Parameter.empty
            params.append(Parameter(arg.arg, Parameter.POSITIONAL_ONLY,
@@ -739,7 +741,7 @@ def signature_from_ast(node: ast.FunctionDef, code: str = '') -> inspect.Signatu
            default = Parameter.empty
        else:
            default = DefaultValue(
                ast_unparse(defaults[i + posonlyargs], code),  # type: ignore
                ast_unparse(defaults[i + posonlyargs], code),  # type: ignore[assignment]
            )

        annotation = ast_unparse(arg.annotation, code) or Parameter.empty
@@ -755,7 +757,8 @@ def signature_from_ast(node: ast.FunctionDef, code: str = '') -> inspect.Signatu
        if args.kw_defaults[i] is None:
            default = Parameter.empty
        else:
            default = DefaultValue(ast_unparse(args.kw_defaults[i], code))  # type: ignore
            default = DefaultValue(
                ast_unparse(args.kw_defaults[i], code))  # type: ignore[arg-type,assignment]
        annotation = ast_unparse(arg.annotation, code) or Parameter.empty
        params.append(Parameter(arg.arg, Parameter.KEYWORD_ONLY, default=default,
                                annotation=annotation))

@@ -106,7 +106,7 @@ class SphinxInfoLogRecord(SphinxLogRecord):
class SphinxWarningLogRecord(SphinxLogRecord):
    """Warning log record class supporting location"""
    @property
    def prefix(self) -> str:  # type: ignore
    def prefix(self) -> str:  # type: ignore[override]
        if self.levelno >= logging.CRITICAL:
            return 'CRITICAL: '
        elif self.levelno >= logging.ERROR:
@@ -131,7 +131,7 @@ class SphinxLoggerAdapter(logging.LoggerAdapter):
    def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None:
        self.log(VERBOSE, msg, *args, **kwargs)

    def process(self, msg: str, kwargs: dict) -> tuple[str, dict]:  # type: ignore
    def process(self, msg: str, kwargs: dict) -> tuple[str, dict]:  # type: ignore[override]
        extra = kwargs.setdefault('extra', {})
        for keyword in self.KEYWORDS:
            if keyword in kwargs:
@@ -481,10 +481,10 @@ class SphinxLogRecordTranslator(logging.Filter):
        self.app = app
        super().__init__()

    def filter(self, record: SphinxWarningLogRecord) -> bool:  # type: ignore
    def filter(self, record: SphinxWarningLogRecord) -> bool:  # type: ignore[override]
        if isinstance(record, logging.LogRecord):
            # force subclassing to handle location
            record.__class__ = self.LogRecordClass  # type: ignore
            record.__class__ = self.LogRecordClass  # type: ignore[assignment]

        location = getattr(record, 'location', None)
        if isinstance(location, tuple):

@@ -198,7 +198,7 @@ def is_translatable(node: Node) -> bool:
    if isinstance(node, nodes.image) and (node.get('translatable') or node.get('alt')):
        return True

    if isinstance(node, nodes.Inline) and 'translatable' not in node:  # type: ignore
    if isinstance(node, nodes.Inline) and 'translatable' not in node:  # type: ignore[operator]
        # inline node must not be translated if 'translatable' is not set
        return False

@@ -225,7 +225,7 @@ def is_translatable(node: Node) -> bool:
            return False
        return True

    if isinstance(node, nodes.meta):  # type: ignore
    if isinstance(node, nodes.meta):  # type: ignore[attr-defined]
        return True

    return False
@@ -261,7 +261,7 @@ def extract_messages(doctree: Element) -> Iterable[tuple[Element, str]]:
                msg = f'.. image:: {image_uri}'
            else:
                msg = ''
        elif isinstance(node, nodes.meta):  # type: ignore
        elif isinstance(node, nodes.meta):  # type: ignore[attr-defined]
            msg = node["content"]
        else:
            msg = node.rawsource.replace('\n', ' ').strip()
@@ -567,7 +567,8 @@ def set_source_info(directive: Directive, node: Node) -> None:


def set_role_source_info(inliner: Inliner, lineno: int, node: Node) -> None:
    node.source, node.line = inliner.reporter.get_source_and_line(lineno)  # type: ignore
    gsal = inliner.reporter.get_source_and_line  # type: ignore[attr-defined]
    node.source, node.line = gsal(lineno)


def copy_source_info(src: Element, dst: Element) -> None:
@@ -647,7 +648,7 @@ def _copy_except__document(el: Element) -> Element:
    return newnode


nodes.Element.copy = _copy_except__document  # type: ignore
nodes.Element.copy = _copy_except__document  # type: ignore[assignment]


def _deepcopy(el: Element) -> Element:
@@ -665,4 +666,4 @@ def _deepcopy(el: Element) -> Element:
    return newnode


nodes.Element.deepcopy = _deepcopy  # type: ignore
nodes.Element.deepcopy = _deepcopy  # type: ignore[assignment]

@@ -12,7 +12,7 @@ from docutils import nodes
from docutils.parsers.rst.states import Inliner

try:
    from types import UnionType  # type: ignore  # python 3.10 or above
    from types import UnionType  # type: ignore[attr-defined]  # python 3.10 or above
except ImportError:
    UnionType = None

@@ -161,7 +161,7 @@ def restify(cls: type | None, mode: str = 'fully-qualified-except-typing') -> st
                return ':py:obj:`~typing.Union`\\ [%s]' % args
        elif inspect.isgenericalias(cls):
            if isinstance(cls.__origin__, typing._SpecialForm):  # type: ignore[attr-defined]
                text = restify(cls.__origin__, mode)  # type: ignore
                text = restify(cls.__origin__, mode)  # type: ignore[attr-defined,arg-type]
            elif getattr(cls, '_name', None):
                cls_name = cls._name  # type: ignore[attr-defined]
                if cls.__module__ == 'typing':

@@ -91,7 +91,7 @@ def merge_doctrees(old: Node, new: Node, condition: Any) -> Iterator[Node]:
    # choose the old node with the best ratio for each new node and set the uid
    # as long as the ratio is under a certain value, in which case we consider
    # them not changed but different
    ratios = sorted(ratios.items(), key=itemgetter(1))  # type: ignore
    ratios = sorted(ratios.items(), key=itemgetter(1))  # type: ignore[assignment]
    for (old_node, new_node), ratio in ratios:
        if new_node in seen:
            continue

@@ -337,7 +337,7 @@ class HTML5Translator(SphinxTranslator, BaseTranslator):
        self.depart_reference(node)

    # overwritten -- we don't want source comments to show up in the HTML
    def visit_comment(self, node: Element) -> None:  # type: ignore
    def visit_comment(self, node: Element) -> None:  # type: ignore[override]
        raise nodes.SkipNode

    # overwritten
@@ -881,7 +881,7 @@ class HTML5Translator(SphinxTranslator, BaseTranslator):
        else:
            node['classes'].append('row-odd')
        self.body.append(self.starttag(node, 'tr', ''))
        node.column = 0  # type: ignore
        node.column = 0  # type: ignore[attr-defined]

    def visit_field_list(self, node: Element) -> None:
        self._fieldlist_row_indices.append(0)

@@ -31,7 +31,7 @@ try:
    from docutils.utils.roman import toRoman
except ImportError:
    # In Debian/Ubuntu, roman package is provided as roman, not as docutils.utils.roman
    from roman import toRoman  # type: ignore
    from roman import toRoman  # type: ignore[no-redef]

if TYPE_CHECKING:
    from sphinx.builders.latex import LaTeXBuilder
@@ -1643,7 +1643,7 @@ class LaTeXTranslator(SphinxTranslator):
        if has_dup_label(prev):
            ids = node['ids'][:]  # copy to avoid side-effects
            while has_dup_label(prev):
                ids.remove(prev['refid'])  # type: ignore
                ids.remove(prev['refid'])  # type: ignore[index]
                prev = get_prev_node(prev)  # type: ignore[arg-type]
        else:
            ids = iter(node['ids'])  # read-only iterator

@@ -241,7 +241,7 @@ class ManualPageTranslator(SphinxTranslator, BaseTranslator):
        super().visit_term(node)

    # overwritten -- we don't want source comments to show up
    def visit_comment(self, node: Element) -> None:  # type: ignore
    def visit_comment(self, node: Element) -> None:  # type: ignore[override]
        raise nodes.SkipNode

    # overwritten -- added ensure_eol()
@@ -309,7 +309,7 @@ class ManualPageTranslator(SphinxTranslator, BaseTranslator):
            self.body.append(self.defs['reference'][0])
            # avoid repeating escaping code... fine since
            # visit_Text calls astext() and only works on that afterwards
            self.visit_Text(node)  # type: ignore
            self.visit_Text(node)  # type: ignore[arg-type]
            self.body.append(self.defs['reference'][1])

        uri = node.get('refuri', '')

@@ -233,9 +233,9 @@ class TexinfoTranslator(SphinxTranslator):
        # filename
        if not elements['filename']:
            elements['filename'] = self.document.get('source') or 'untitled'
            if elements['filename'][-4:] in ('.txt', '.rst'):  # type: ignore
                elements['filename'] = elements['filename'][:-4]  # type: ignore
            elements['filename'] += '.info'  # type: ignore
            if elements['filename'][-4:] in ('.txt', '.rst'):  # type: ignore[index]
                elements['filename'] = elements['filename'][:-4]  # type: ignore[index]
            elements['filename'] += '.info'  # type: ignore[operator]
        # direntry
        if self.settings.texinfo_dir_entry:
            entry = self.format_menu_entry(
@@ -863,7 +863,7 @@ class TexinfoTranslator(SphinxTranslator):
        except (KeyError, IndexError) as exc:
            raise nodes.SkipNode from exc
        # footnotes are repeated for each reference
        footnode.walkabout(self)  # type: ignore
        footnode.walkabout(self)  # type: ignore[union-attr]
        raise nodes.SkipChildren

    def visit_citation(self, node: Element) -> None:
@@ -1214,7 +1214,7 @@ class TexinfoTranslator(SphinxTranslator):
        width = self.tex_image_length(node.get('width', ''))
        height = self.tex_image_length(node.get('height', ''))
        alt = self.escape_arg(node.get('alt', ''))
        filename = f"{self.elements['filename'][:-5]}-figures/{name}"  # type: ignore
        filename = f"{self.elements['filename'][:-5]}-figures/{name}"  # type: ignore[index]
        self.body.append('\n@image{%s,%s,%s,%s,%s}\n' %
                         (filename, width, height, alt, ext[1:]))


@@ -436,10 +436,10 @@ class TextTranslator(SphinxTranslator):
            result.append((indent, res))
        for itemindent, item in content:
            if itemindent == -1:
                toformat.append(item)  # type: ignore
                toformat.append(item)  # type: ignore[arg-type]
            else:
                do_format()
                result.append((indent + itemindent, item))  # type: ignore
                result.append((indent + itemindent, item))  # type: ignore[arg-type]
                toformat = []
        do_format()
        if first is not None and result:
@@ -521,7 +521,7 @@ class TextTranslator(SphinxTranslator):
        else:
            char = '^'
        text = ''
        text = ''.join(x[1] for x in self.states.pop() if x[0] == -1)  # type: ignore
        text = ''.join(x[1] for x in self.states.pop() if x[0] == -1)  # type: ignore[misc]
        if self.add_secnumbers:
            text = self.get_section_number_string(node) + text
        self.stateindent.pop()

@@ -177,29 +177,29 @@ def test_restify_type_hints_alias():


def test_restify_type_ForwardRef():
    from typing import ForwardRef  # type: ignore
    from typing import ForwardRef  # type: ignore[attr-defined]
    assert restify(ForwardRef("myint")) == ":py:class:`myint`"


def test_restify_type_Literal():
    from typing import Literal  # type: ignore
    from typing import Literal  # type: ignore[attr-defined]
    assert restify(Literal[1, "2", "\r"]) == ":py:obj:`~typing.Literal`\\ [1, '2', '\\r']"


def test_restify_pep_585():
    assert restify(list[str]) == ":py:class:`list`\\ [:py:class:`str`]"  # type: ignore
    assert restify(dict[str, str]) == (":py:class:`dict`\\ "  # type: ignore
    assert restify(list[str]) == ":py:class:`list`\\ [:py:class:`str`]"  # type: ignore[attr-defined]
    assert restify(dict[str, str]) == (":py:class:`dict`\\ "  # type: ignore[attr-defined]
                                       "[:py:class:`str`, :py:class:`str`]")
    assert restify(dict[str, tuple[int, ...]]) == (":py:class:`dict`\\ "  # type: ignore
    assert restify(dict[str, tuple[int, ...]]) == (":py:class:`dict`\\ "  # type: ignore[attr-defined]
                                                   "[:py:class:`str`, :py:class:`tuple`\\ "
                                                   "[:py:class:`int`, ...]]")


@pytest.mark.skipif(sys.version_info[:2] <= (3, 9), reason='python 3.10+ is required.')
def test_restify_type_union_operator():
    assert restify(int | None) == ":py:class:`int` | :py:obj:`None`"  # type: ignore
    assert restify(int | str) == ":py:class:`int` | :py:class:`str`"  # type: ignore
    assert restify(int | str | None) == (":py:class:`int` | :py:class:`str` | "  # type: ignore
    assert restify(int | None) == ":py:class:`int` | :py:obj:`None`"  # type: ignore[attr-defined]
    assert restify(int | str) == ":py:class:`int` | :py:class:`str`"  # type: ignore[attr-defined]
    assert restify(int | str | None) == (":py:class:`int` | :py:class:`str` | "  # type: ignore[attr-defined]
                                         ":py:obj:`None`")


@@ -318,7 +318,7 @@ def test_stringify_type_hints_pep_585():


def test_stringify_Annotated():
    from typing import Annotated  # type: ignore
    from typing import Annotated  # type: ignore[attr-defined]
    assert stringify_annotation(Annotated[str, "foo", "bar"], 'fully-qualified-except-typing') == "str"
    assert stringify_annotation(Annotated[str, "foo", "bar"], "smart") == "str"

@@ -424,12 +424,12 @@ def test_stringify_type_hints_alias():
    assert stringify_annotation(MyStr, 'fully-qualified-except-typing') == "str"
    assert stringify_annotation(MyStr, "smart") == "str"

    assert stringify_annotation(MyTuple) == "Tuple[str, str]"  # type: ignore
    assert stringify_annotation(MyTuple, "smart") == "~typing.Tuple[str, str]"  # type: ignore
    assert stringify_annotation(MyTuple) == "Tuple[str, str]"  # type: ignore[attr-defined]
    assert stringify_annotation(MyTuple, "smart") == "~typing.Tuple[str, str]"  # type: ignore[attr-defined]


def test_stringify_type_Literal():
    from typing import Literal  # type: ignore
    from typing import Literal  # type: ignore[attr-defined]
    assert stringify_annotation(Literal[1, "2", "\r"], 'fully-qualified-except-typing') == "Literal[1, '2', '\\r']"
    assert stringify_annotation(Literal[1, "2", "\r"], "fully-qualified") == "typing.Literal[1, '2', '\\r']"
    assert stringify_annotation(Literal[1, "2", "\r"], "smart") == "~typing.Literal[1, '2', '\\r']"
@@ -437,17 +437,17 @@ def test_stringify_type_Literal():

@pytest.mark.skipif(sys.version_info[:2] <= (3, 9), reason='python 3.10+ is required.')
def test_stringify_type_union_operator():
    assert stringify_annotation(int | None) == "int | None"  # type: ignore
    assert stringify_annotation(int | None, "smart") == "int | None"  # type: ignore
    assert stringify_annotation(int | None) == "int | None"  # type: ignore[attr-defined]
    assert stringify_annotation(int | None, "smart") == "int | None"  # type: ignore[attr-defined]

    assert stringify_annotation(int | str) == "int | str"  # type: ignore
    assert stringify_annotation(int | str, "smart") == "int | str"  # type: ignore
    assert stringify_annotation(int | str) == "int | str"  # type: ignore[attr-defined]
    assert stringify_annotation(int | str, "smart") == "int | str"  # type: ignore[attr-defined]

    assert stringify_annotation(int | str | None) == "int | str | None"  # type: ignore
    assert stringify_annotation(int | str | None, "smart") == "int | str | None"  # type: ignore
    assert stringify_annotation(int | str | None) == "int | str | None"  # type: ignore[attr-defined]
    assert stringify_annotation(int | str | None, "smart") == "int | str | None"  # type: ignore[attr-defined]

    assert stringify_annotation(int | Struct) == "int | struct.Struct"  # type: ignore
    assert stringify_annotation(int | Struct, "smart") == "int | ~struct.Struct"  # type: ignore
    assert stringify_annotation(int | Struct) == "int | struct.Struct"  # type: ignore[attr-defined]
    assert stringify_annotation(int | Struct, "smart") == "int | ~struct.Struct"  # type: ignore[attr-defined]


def test_stringify_broken_type_hints():

@@ -77,7 +77,7 @@ def f14() -> Any:
    pass


def f15(x: "Unknown", y: "int") -> Any:  # noqa: F821  # type: ignore
def f15(x: "Unknown", y: "int") -> Any:  # noqa: F821  # type: ignore[attr-defined]
    pass