Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
Merge pull request #10078 from danieleades/refactor/unused-loop-control-variables
address some unused loop control variables
Commit d82d370739
@@ -87,7 +87,7 @@ class RecipeIndex(Index):
         # first letter of the recipe as a key to group thing
         #
         # name, subtype, docname, anchor, extra, qualifier, description
-        for name, dispname, typ, docname, anchor, _ in recipes:
+        for _name, dispname, typ, docname, anchor, _priority in recipes:
             content[dispname[0].lower()].append(
                 (dispname, 0, docname, anchor, docname, '', typ))

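A note on the pattern used throughout this PR: an unpacked loop variable that is never read gets an underscore prefix, the convention that flake8-bugbear's B007 check treats as "intentionally unused". A minimal sketch of the idea, using invented recipe tuples rather than the real index data:

```python
# Invented sample data shaped like the (name, dispname, typ, docname,
# anchor, priority) tuples above; for illustration only.
recipes = [
    ('recipe-gumbo', 'Gumbo', 'recipe', 'recipes/gumbo', 'gumbo', 0),
    ('recipe-pie', 'Pie', 'recipe', 'recipes/pie', 'pie', 0),
]

# The underscore prefix documents that 'name' and 'priority' are
# deliberately ignored in this loop body.
for _name, dispname, typ, docname, anchor, _priority in recipes:
    print(dispname[0].lower(), dispname, docname, anchor, typ)
```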
@@ -92,7 +92,7 @@ class toctree(nodes.General, nodes.Element, translatable):
     def preserve_original_messages(self) -> None:
         # toctree entries
         rawentries = self.setdefault('rawentries', [])
-        for title, docname in self['entries']:
+        for title, _docname in self['entries']:
             if title:
                 rawentries.append(title)

@@ -562,7 +562,7 @@ class Builder:
         for chunk in status_iterator(chunks, __('writing output... '), "darkgreen",
                                      len(chunks), self.app.verbosity):
             arg = []
-            for i, docname in enumerate(chunk):
+            for docname in chunk:
                 doctree = self.env.get_and_resolve_doctree(docname, self)
                 self.write_doc_serialized(docname, doctree)
                 arg.append((docname, doctree))
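When the unused variable is an enumerate() counter, the hunks drop enumerate() entirely instead of renaming, since the index served no purpose. A sketch of the before/after, with a made-up chunk of docnames:

```python
chunk = ['intro', 'usage', 'api']  # hypothetical docnames

# Before: the counter is requested but never read.
for i, docname in enumerate(chunk):
    print(docname)

# After: iterate directly; identical behavior, one less binding.
for docname in chunk:
    print(docname)
```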
@@ -377,14 +377,14 @@ class EpubBuilder(StandaloneHTMLBuilder):
         """Fix href attributes for genindex pages."""
         # XXX: modifies tree inline
         # Logic modeled from themes/basic/genindex.html
-        for key, columns in tree:
-            for entryname, (links, subitems, key_) in columns:
+        for _key, columns in tree:
+            for _entryname, (links, subitems, _key) in columns:
                 for (i, (ismain, link)) in enumerate(links):
                     m = self.refuri_re.match(link)
                     if m:
                         links[i] = (ismain,
                                     self.fix_fragment(m.group(1), m.group(2)))
-                for subentryname, subentrylinks in subitems:
+                for _subentryname, subentrylinks in subitems:
                     for (i, (ismain, link)) in enumerate(subentrylinks):
                         m = self.refuri_re.match(link)
                         if m:
@@ -157,7 +157,7 @@ class I18nBuilder(Builder):
         if 'index' in self.env.config.gettext_additional_targets:
             # Extract translatable messages from index entries.
             for node, entries in traverse_translatable_index(doctree):
-                for typ, msg, tid, main, key_ in entries:
+                for typ, msg, _tid, _main, _key in entries:
                     for m in split_index_msg(typ, msg):
                         if typ == 'pair' and m in pairindextypes.values():
                             # avoid built-in translated message was incorporated
@@ -227,7 +227,7 @@ class MessageCatalogBuilder(I18nBuilder):
         template_files = set()
         for template_path in self.config.templates_path:
             tmpl_abs_path = path.join(self.app.srcdir, template_path)
-            for dirpath, dirs, files in walk(tmpl_abs_path):
+            for dirpath, _dirs, files in walk(tmpl_abs_path):
                 for fn in files:
                     if fn.endswith('.html'):
                         filename = canon_path(path.join(dirpath, fn))
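os.walk() is a recurring source of these warnings: the (dirpath, dirnames, filenames) triple must be unpacked in full even when only the files matter. A self-contained sketch of the renamed form (keeping the real name is only necessary when you mutate dirnames in place to prune the walk):

```python
import os

def find_html_templates(root: str) -> list:
    """Collect .html files under root; subdirectory names are irrelevant."""
    found = []
    for dirpath, _dirs, files in os.walk(root):
        for fn in files:
            if fn.endswith('.html'):
                found.append(os.path.join(dirpath, fn))
    return found
```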
@@ -247,7 +247,7 @@ class MessageCatalogBuilder(I18nBuilder):
             try:
                 with open(template, encoding='utf-8') as f:
                     context = f.read()
-                for line, meth, msg in extract_translations(context):
+                for line, _meth, msg in extract_translations(context):
                     origin = MsgOrigin(template, line)
                     self.catalogs['sphinx'].add(msg, origin)
             except Exception as exc:
@@ -486,7 +486,7 @@ class StandaloneHTMLBuilder(Builder):
         rellinks: List[Tuple[str, str, str, str]] = []
         if self.use_index:
             rellinks.append(('genindex', _('General Index'), 'I', _('index')))
-        for indexname, indexcls, content, collapse in self.domain_indices:
+        for indexname, indexcls, _content, _collapse in self.domain_indices:
             # if it has a short name
             if indexcls.shortname:
                 rellinks.append((indexname, indexcls.localname,
@@ -339,7 +339,7 @@ class HyperlinkAvailabilityChecker:
         self.wqueue = PriorityQueue()

     def invoke_threads(self) -> None:
-        for i in range(self.config.linkcheck_workers):
+        for _i in range(self.config.linkcheck_workers):
             thread = HyperlinkAvailabilityCheckWorker(self.env, self.config,
                                                       self.rqueue, self.wqueue,
                                                       self.rate_limits, self.builder)
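For pure repetition, range() provides a counter that nothing reads, so it becomes `_i` (or a bare `_`). A minimal sketch:

```python
def make_buckets(n: int) -> list:
    """Return n independent empty lists; the loop index itself is meaningless."""
    buckets = []
    for _i in range(n):  # repeat n times; the counter is never read
        buckets.append([])
    return buckets
```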
@@ -348,7 +348,7 @@ class HyperlinkAvailabilityChecker:

     def shutdown_threads(self) -> None:
         self.wqueue.join()
-        for worker in self.workers:
+        for _worker in self.workers:
             self.wqueue.put(CheckRequest(CHECK_IMMEDIATELY, None), False)

     def check(self, hyperlinks: Dict[str, Hyperlink]) -> Generator[CheckResult, None, None]:
@@ -176,7 +176,7 @@ class ObjectDescription(SphinxDirective, Generic[T]):

         self.names: List[T] = []
         signatures = self.get_signatures()
-        for i, sig in enumerate(signatures):
+        for sig in signatures:
             # add a signature node for each signature in the current unit
             # and add a reference target for it
             signode = addnodes.desc_signature(sig, '')
@@ -342,7 +342,7 @@ class Only(SphinxDirective):
             # be placed in the doctree.
             n_sects_to_raise = current_depth - nested_depth + 1
             parent = cast(nodes.Element, self.state.parent)
-            for i in range(n_sects_to_raise):
+            for _i in range(n_sects_to_raise):
                 if parent.parent:
                     parent = parent.parent
             parent.append(node)
@@ -3654,8 +3654,7 @@ class CAliasObject(ObjectDescription):
                            " When skipping the root declaration,"
                            " need 'maxdepth' 0 for infinite or at least 2.",
                            location=self.get_location())
-        signatures = self.get_signatures()
-        for i, sig in enumerate(signatures):
+        for sig in self.get_signatures():
             node.append(AliasNode(sig, aliasOptions, self.state.document, env=self.env))
         return [node]

@@ -130,7 +130,7 @@ class ChangeSetDomain(Domain):
         self.changesets.setdefault(version, []).append(changeset)

     def clear_doc(self, docname: str) -> None:
-        for version, changes in self.changesets.items():
+        for changes in self.changesets.values():
             for changeset in changes[:]:
                 if changeset.docname == docname:
                     changes.remove(changeset)
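Dict iterations get a stronger fix than a rename where possible: if the key is unused, .items() becomes .values(). A sketch with an invented changesets mapping; note the `changes[:]` copy, which the original also uses so that removal during iteration stays safe:

```python
# Hypothetical mapping of version -> recorded docnames, for illustration.
changesets = {'1.0': ['old-doc', 'kept-doc'], '2.0': ['kept-doc']}

def clear_doc(docname: str) -> None:
    # Before: for version, changes in changesets.items():  (version unused)
    for changes in changesets.values():
        for entry in changes[:]:  # iterate over a copy; we mutate the list
            if entry == docname:
                changes.remove(entry)

clear_doc('old-doc')
assert changesets == {'1.0': ['kept-doc'], '2.0': ['kept-doc']}
```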
@@ -48,7 +48,7 @@ class CitationDomain(Domain):
         return self.data.setdefault('citation_refs', {})

     def clear_doc(self, docname: str) -> None:
-        for key, (fn, _l, lineno) in list(self.citations.items()):
+        for key, (fn, _l, _lineno) in list(self.citations.items()):
             if fn == docname:
                 del self.citations[key]
         for key, docnames in list(self.citation_refs.items()):
@@ -81,7 +81,7 @@ class CitationDomain(Domain):
         docnames.add(self.env.docname)

     def check_consistency(self) -> None:
-        for name, (docname, labelid, lineno) in self.citations.items():
+        for name, (docname, _labelid, lineno) in self.citations.items():
             if name not in self.citation_refs:
                 logger.warning(__('Citation [%s] is not referenced.'), name,
                                type='ref', subtype='citation', location=(docname, lineno))
@@ -6873,7 +6873,7 @@ class DefinitionParser(BaseParser):
             self.warn(msg)

         newTemplates: List[Union[ASTTemplateParams, ASTTemplateIntroduction]] = []
-        for i in range(numExtra):
+        for _i in range(numExtra):
             newTemplates.append(ASTTemplateParams([]))
         if templatePrefix and not isMemberInstantiation:
             newTemplates.extend(templatePrefix.templates)
@@ -7579,7 +7579,7 @@ class CPPAliasObject(ObjectDescription):
                            " need 'maxdepth' 0 for infinite or at least 2.",
                            location=self.get_location())
         signatures = self.get_signatures()
-        for i, sig in enumerate(signatures):
+        for sig in signatures:
             node.append(AliasNode(sig, aliasOptions, env=self.env))

         contentnode = addnodes.desc_content()
@@ -385,10 +385,10 @@ class JavaScriptDomain(Domain):
         self.modules[modname] = (self.env.docname, node_id)

     def clear_doc(self, docname: str) -> None:
-        for fullname, (pkg_docname, node_id, _l) in list(self.objects.items()):
+        for fullname, (pkg_docname, _node_id, _l) in list(self.objects.items()):
             if pkg_docname == docname:
                 del self.objects[fullname]
-        for modname, (pkg_docname, node_id) in list(self.modules.items()):
+        for modname, (pkg_docname, _node_id) in list(self.modules.items()):
             if pkg_docname == docname:
                 del self.modules[modname]

@@ -81,7 +81,7 @@ class MathDomain(Domain):
         self.data['has_equations'][docname] = any(document.findall(math_node))

     def clear_doc(self, docname: str) -> None:
-        for equation_id, (doc, eqno) in list(self.equations.items()):
+        for equation_id, (doc, _eqno) in list(self.equations.items()):
             if doc == docname:
                 del self.equations[equation_id]

@@ -235,7 +235,7 @@ class ReSTDomain(Domain):
         self.objects[objtype, name] = (self.env.docname, node_id)

     def clear_doc(self, docname: str) -> None:
-        for (typ, name), (doc, node_id) in list(self.objects.items()):
+        for (typ, name), (doc, _node_id) in list(self.objects.items()):
             if doc == docname:
                 del self.objects[typ, name]

@@ -55,7 +55,7 @@ class IndexEntries:
         domain = cast(IndexDomain, self.env.get_domain('index'))
         for fn, entries in domain.entries.items():
             # new entry types must be listed in directives/other.py!
-            for type, value, tid, main, index_key in entries:
+            for type, value, tid, main, index_key in entries:  # noqa: B007
                 try:
                     if type == 'single':
                         try:
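Here the variables stay and the warning is suppressed instead, presumably because the five-field shape of index entries is itself documentation (the comment above points to directives/other.py for the schema). A sketch of the suppression style, with invented entries:

```python
entries = [('single', 'spam', 'id-1', True, None)]  # invented index entries

# Renaming tid/main/index_key would obscure the documented tuple shape,
# so the B007 warning is silenced for this line instead.
for type, value, tid, main, index_key in entries:  # noqa: B007
    print(type, value)
```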
@@ -177,7 +177,7 @@ class TocTreeCollector(EnvironmentCollector):
         def _walk_toctree(toctreenode: addnodes.toctree, depth: int) -> None:
             if depth == 0:
                 return
-            for (title, ref) in toctreenode['entries']:
+            for (_title, ref) in toctreenode['entries']:
                 if url_re.match(ref) or ref == 'self':
                     # don't mess with those
                     continue
@@ -261,7 +261,7 @@ class TocTreeCollector(EnvironmentCollector):
                 else:
                     _walk_doctree(docname, subnode, secnum)
             elif isinstance(subnode, addnodes.toctree):
-                for title, subdocname in subnode['entries']:
+                for _title, subdocname in subnode['entries']:
                     if url_re.match(subdocname) or subdocname == 'self':
                         # don't mess with those
                         continue
@@ -227,7 +227,7 @@ def walk(rootpath: str, excludes: List[str], opts: Any

 def has_child_module(rootpath: str, excludes: List[str], opts: Any) -> bool:
     """Check the given directory contains child module/s (at least one)."""
-    for root, subs, files in walk(rootpath, excludes, opts):
+    for _root, _subs, files in walk(rootpath, excludes, opts):
         if files:
             return True

@@ -119,7 +119,7 @@ def process_autosummary_toc(app: Sphinx, doctree: nodes.document) -> None:

     def crawl_toc(node: Element, depth: int = 1) -> None:
         crawled[node] = True
-        for j, subnode in enumerate(node):
+        for subnode in node:
             try:
                 if (isinstance(subnode, autosummary_toc) and
                         isinstance(subnode[0], addnodes.toctree)):
@@ -271,7 +271,7 @@ class Autosummary(SphinxDirective):
         docnames = []
         excluded = Matcher(self.config.exclude_patterns)
         filename_map = self.config.autosummary_filename_map
-        for name, sig, summary, real_name in items:
+        for _name, _sig, _summary, real_name in items:
             real_name = filename_map.get(real_name, real_name)
             docname = posixpath.join(tree_prefix, real_name)
             docname = posixpath.normpath(posixpath.join(dirname, docname))
@@ -610,7 +610,7 @@ def limited_join(sep: str, items: List[str], max_chars: int = 30,

     n_chars = 0
     n_items = 0
-    for j, item in enumerate(items):
+    for item in items:
         n_chars += len(item) + len(sep)
         if n_chars < max_chars - len(overflow_marker):
             n_items += 1
@@ -318,7 +318,7 @@ def generate_autosummary_content(name: str, obj: Any, parent: Any,

     def get_modules(obj: Any) -> Tuple[List[str], List[str]]:
         items: List[str] = []
-        for _, modname, ispkg in pkgutil.iter_modules(obj.__path__):
+        for _, modname, _ispkg in pkgutil.iter_modules(obj.__path__):
             fullname = name + '.' + modname
             try:
                 module = import_module(fullname)
@@ -68,7 +68,7 @@ class ExternalLinksChecker(SphinxPostTransform):

         uri = refnode['refuri']

-        for alias, (base_uri, caption) in self.app.config.extlinks.items():
+        for alias, (base_uri, _caption) in self.app.config.extlinks.items():
             uri_pattern = re.compile(base_uri.replace('%s', '(?P<value>.+)'))
             match = uri_pattern.match(uri)
             if match and match.groupdict().get('value'):
@@ -65,7 +65,7 @@ def read_svg_depth(filename: str) -> int:
     """Read the depth from comment at last line of SVG file
     """
     with open(filename) as f:
-        for line in f:
+        for line in f:  # noqa: B007
             pass
         # Only last line is checked
         matched = depthsvgcomment_re.match(line)
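This is the genuinely unavoidable case: the loop variable is read after the loop ends (the idiom spins through the file so that `line` holds the last line), so it cannot be underscore-renamed, and only a noqa fits. A hedged alternative that sidesteps the lint entirely is collections.deque with maxlen=1:

```python
from collections import deque

def last_line(filename: str) -> str:
    """Return the final line of a text file, or '' if the file is empty."""
    with open(filename) as f:
        tail = deque(f, maxlen=1)  # the deque keeps only the most recent line
    return tail[0] if tail else ''
```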
@@ -288,7 +288,7 @@ class Config:
     }

     def __init__(self, **settings: Any) -> None:
-        for name, (default, rebuild) in self._config_values.items():
+        for name, (default, _rebuild) in self._config_values.items():
             setattr(self, name, default)
         for name, value in settings.items():
             setattr(self, name, value)
@@ -112,7 +112,7 @@ class BuildDoc(Command):
         for guess in ('doc', 'docs'):
             if not os.path.isdir(guess):
                 continue
-            for root, dirnames, filenames in os.walk(guess):
+            for root, _dirnames, filenames in os.walk(guess):
                 if 'conf.py' in filenames:
                     return root
         return os.curdir
@@ -115,7 +115,7 @@ class path(str):
         # as well. To avoid failures when adding additional files/directories
         # to the destination tree, ensure destination directories are not marked
         # read-only.
-        for root, dirs, files in os.walk(destination):
+        for root, _dirs, files in os.walk(destination):
             os.chmod(root, 0o755 & ~UMASK)
             for name in files:
                 os.chmod(os.path.join(root, name), 0o644 & ~UMASK)
@@ -178,7 +178,7 @@ _unicode_literals_re = re.compile(r'u(".*?")|u(\'.*?\')')


 def find_files(root: str, suffix: bool = None) -> Generator[str, None, None]:
-    for dirpath, dirs, files in os.walk(root, followlinks=True):
+    for dirpath, _dirs, files in os.walk(root, followlinks=True):
         dirpath = path(dirpath)
         for f in [f for f in files if not suffix or f.endswith(suffix)]:  # type: ignore
             fpath = dirpath / f
@@ -462,7 +462,7 @@ class Locale(SphinxTransform):
         # Extract and translate messages for index entries.
         for node, entries in traverse_translatable_index(self.document):
             new_entries: List[Tuple[str, str, str, str, str]] = []
-            for type, msg, tid, main, key_ in entries:
+            for type, msg, tid, main, _key in entries:
                 msg_parts = split_index_msg(type, msg)
                 msgstr_parts = []
                 for part in msg_parts:
@@ -88,7 +88,7 @@ def get_matching_files(dirname: str,

         dirs[:] = sorted(dirs[i] for (i, _) in qdirs)

-        for i, filename in sorted(qfiles):
+        for _i, filename in sorted(qfiles):
             yield filename


@@ -132,7 +132,7 @@ class FilenameUniqDict(dict):
         self._existing.discard(unique)

     def merge_other(self, docnames: Set[str], other: Dict[str, Tuple[Set[str], Any]]) -> None:
-        for filename, (docs, unique) in other.items():
+        for filename, (docs, _unique) in other.items():
             for doc in docs & set(docnames):
                 self.add_file(doc, filename)

@@ -190,13 +190,13 @@ class DownloadFiles(dict):
         return self[filename][1]

     def purge_doc(self, docname: str) -> None:
-        for filename, (docs, dest) in list(self.items()):
+        for filename, (docs, _dest) in list(self.items()):
             docs.discard(docname)
             if not docs:
                 del self[filename]

     def merge_other(self, docnames: Set[str], other: Dict[str, Tuple[Set[str], Any]]) -> None:
-        for filename, (docs, dest) in other.items():
+        for filename, (docs, _dest) in other.items():
             for docname in docs & set(docnames):
                 self.add_file(docname, filename)

@@ -441,7 +441,7 @@ def split_full_qualified_name(name: str) -> Tuple[Optional[str], str]:
     calling this function.
     """
     parts = name.split('.')
-    for i, part in enumerate(parts, 1):
+    for i, _part in enumerate(parts, 1):
         try:
             modname = ".".join(parts[:i])
             import_module(modname)
@@ -75,7 +75,7 @@ def ensuredir(path: str) -> None:

 def mtimes_of_files(dirnames: List[str], suffix: str) -> Iterator[float]:
     for dirname in dirnames:
-        for root, dirs, files in os.walk(dirname):
+        for root, _dirs, files in os.walk(dirname):
             for sfile in files:
                 if sfile.endswith(suffix):
                     try:
@@ -1508,7 +1508,7 @@ class LaTeXTranslator(SphinxTranslator):
         if not node.get('inline', True):
             self.body.append(CR)
         entries = node['entries']
-        for type, string, tid, ismain, key_ in entries:
+        for type, string, _tid, ismain, _key in entries:
             m = ''
             if ismain:
                 m = '|spxpagem'
@@ -1976,7 +1976,7 @@ class LaTeXTranslator(SphinxTranslator):

     def depart_container(self, node: Element) -> None:
         classes = node.get('classes', [])
-        for c in classes:
+        for _c in classes:
             self.body.append('\n\\end{sphinxuseclass}')

     def visit_decoration(self, node: Element) -> None:
@@ -312,7 +312,7 @@ class TexinfoTranslator(SphinxTranslator):
         del node_menus[top['node_name']]
         top['node_name'] = 'Top'
         # handle the indices
-        for name, content in self.indices:
+        for name, _content in self.indices:
             node_menus[name] = []
             node_menus['Top'].append(name)

@@ -320,7 +320,7 @@ class TexinfoTranslator(SphinxTranslator):
         """Collect the relative links (next, previous, up) for each "node"."""
         rellinks = self.rellinks
         node_menus = self.node_menus
-        for id, entries in node_menus.items():
+        for id in node_menus:
             rellinks[id] = ['', '', '']
         # up's
         for id, entries in node_menus.items():
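The keys-only counterpart of the .values() change: iterating the dict directly replaces .items() when the value is unused. A sketch with an invented menu mapping (the real code's `id` name also shadows the builtin, which the sketch avoids):

```python
node_menus = {'Top': [], 'Index': []}  # hypothetical node -> menu entries
rellinks = {}

# Before: for id, entries in node_menus.items():  ('entries' unused)
for node_name in node_menus:
    rellinks[node_name] = ['', '', '']
```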
@@ -466,7 +466,7 @@ class TexinfoTranslator(SphinxTranslator):
     def collect_indices(self) -> None:
         def generate(content: List[Tuple[str, List[IndexEntry]]], collapsed: bool) -> str:
             ret = ['\n@menu\n']
-            for letter, entries in content:
+            for _letter, entries in content:
                 for entry in entries:
                     if not entry[3]:
                         continue
@@ -1018,7 +1018,7 @@ class TexinfoTranslator(SphinxTranslator):
         if len(self.colwidths) != self.n_cols:
             return
         self.body.append('\n\n@multitable ')
-        for i, n in enumerate(self.colwidths):
+        for n in self.colwidths:
             self.body.append('{%s} ' % ('x' * (n + 2)))

     def depart_colspec(self, node: Element) -> None:
@@ -1054,7 +1054,7 @@ class TexinfoTranslator(SphinxTranslator):
         self.entry_sep = '@tab'

     def depart_entry(self, node: Element) -> None:
-        for i in range(node.get('morecols', 0)):
+        for _i in range(node.get('morecols', 0)):
             self.body.append('\n@tab\n')

     # -- Field Lists
@@ -167,8 +167,8 @@ class Table:
     @property
     def cells(self) -> Generator[Cell, None, None]:
         seen: Set[Cell] = set()
-        for lineno, line in enumerate(self.lines):
-            for colno, cell in enumerate(line):
+        for line in self.lines:
+            for cell in line:
                 if cell and cell not in seen:
                     yield cell
                     seen.add(cell)
@@ -716,7 +716,7 @@ def test_domain_c_build_field_role(app, status, warning):

 def _get_obj(app, queryName):
     domain = app.env.get_domain('c')
-    for name, dispname, objectType, docname, anchor, prio in domain.get_objects():
+    for name, _dispname, objectType, docname, anchor, _prio in domain.get_objects():
         if name == queryName:
             return (docname, anchor, objectType)
     return (queryName, "not", "found")
@@ -101,9 +101,9 @@ class SetupTest(TestCase):
     def test_add_config_values(self):
         app = mock.Mock(Sphinx)
         setup(app)
-        for name, (default, rebuild) in Config._config_values.items():
+        for name in Config._config_values:
             has_config = False
-            for method_name, args, kwargs in app.method_calls:
+            for method_name, args, _kwargs in app.method_calls:
                 if(method_name == 'add_config_value' and
                         args[0] == name):
                     has_config = True
@@ -112,7 +112,7 @@ class SetupTest(TestCase):

         has_process_docstring = False
         has_skip_member = False
-        for method_name, args, kwargs in app.method_calls:
+        for method_name, args, _kwargs in app.method_calls:
             if method_name == 'connect':
                 if(args[0] == 'autodoc-process-docstring' and
                         args[1] == _process_docstring):
@@ -47,7 +47,7 @@ def write_mo(pathname, po):
 @pytest.fixture(autouse=True)
 def setup_intl(app_params):
     srcdir = path(app_params.kwargs['srcdir'])
-    for dirpath, dirs, files in os.walk(srcdir):
+    for dirpath, _dirs, files in os.walk(srcdir):
         dirpath = path(dirpath)
         for f in [f for f in files if f.endswith('.po')]:
             po = dirpath / f
@@ -69,7 +69,7 @@ def main(args: List[str]) -> int:
     if os.path.isfile(path):
         errors += lint(path)
     elif os.path.isdir(path):
-        for root, dirs, files in os.walk(path):
+        for root, _dirs, files in os.walk(path):
             for filename in files:
                 if filename.endswith('.rst'):
                     path = os.path.join(root, filename)