Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
Enable the G002 check
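G002 is the flake8-logging-format rule that flags logging calls whose message is pre-formatted with `%` instead of being passed as a format string plus arguments. The short sketch below (names and values are made up for illustration, not taken from Sphinx) shows the pattern this commit enables the check for and then applies across the code base:

    import logging

    logger = logging.getLogger(__name__)
    n_warnings = 3

    # Flagged by G002: the message is interpolated eagerly with `%`,
    # even if the record is later discarded because of the log level.
    logger.info('build finished with %s warnings.' % n_warnings)

    # Preferred form: pass the arguments to the logging call, so
    # interpolation is deferred until the record is actually emitted.
    logger.info('build finished with %s warnings.', n_warnings)

The deferred form avoids formatting work for suppressed records and keeps the raw format string and its arguments available separately on the log record for handlers and filters.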
@@ -133,7 +133,7 @@ select = [
     "FURB",
     # flake8-logging-format ('G')
     "G001",  # Logging statement uses `str.format`
-    # "G002",  # Logging statement uses `%`
+    "G002",  # Logging statement uses `%`
     # "G003",  # Logging statement uses `+`
     "G004",  # Logging statement uses f-string
     "G010",  # Logging statement uses `warn` instead of `warning`
@@ -194,7 +194,7 @@ class Sphinx:
         self.messagelog: deque = deque(maxlen=10)

         # say hello to the world
-        logger.info(bold(__('Running Sphinx v%s') % sphinx.__display_version__))
+        logger.info(bold(__('Running Sphinx v%s')), sphinx.__display_version__)

         # status code for command-line application
         self.statuscode = 0
@@ -274,7 +274,7 @@ class Sphinx:
         if self.config.language == 'en':
             self.translator, _ = locale.init([], None)
         else:
-            logger.info(bold(__('loading translations [%s]... ') % self.config.language),
+            logger.info(bold(__('loading translations [%s]... ')), self.config.language,
                         nonl=True)

             # compile mo files if sphinx.po file in user locale directories are updated
@@ -376,13 +376,13 @@ class Sphinx:
             else:
                 msg = __('build %s, %s warnings.')

-            logger.info(bold(msg % (status, self._warncount)))
+            logger.info(bold(msg), status, self._warncount)
         else:
-            logger.info(bold(__('build %s.') % status))
+            logger.info(bold(__('build %s.')), status)

         if self.statuscode == 0 and self.builder.epilog:
             logger.info('')
-            logger.info(self.builder.epilog % {
+            logger.info(self.builder.epilog, {
                 'outdir': relpath(self.outdir),
                 'project': self.config.project,
             })
@@ -306,7 +306,7 @@ class Builder:
         :meth:`!write`.
         """
         if summary:
-            logger.info(bold(__('building [%s]: ') % self.name) + summary)
+            logger.info(bold(__('building [%s]: ')) + summary, self.name)

         # while reading, collect all warnings from docutils
         with logging.pending_warnings():
@@ -56,7 +56,7 @@ class ChangesBuilder(Builder):

         changesets = domain.get_changesets_for(version)
         if not changesets:
-            logger.info(bold(__('no changes in version %s.') % version))
+            logger.info(bold(__('no changes in version %s.')), version)
             return
         logger.info(bold(__('writing summary file...')))
         for changeset in changesets:
@@ -240,7 +240,7 @@ class MessageCatalogBuilder(I18nBuilder):
     def _extract_from_template(self) -> None:
         files = list(self._collect_templates())
         files.sort()
-        logger.info(bold(__('building [%s]: ') % self.name), nonl=True)
+        logger.info(bold(__('building [%s]: ')), self.name, nonl=True)
         logger.info(__('targets for %d template files'), len(files))

         extract_translations = self.templates.environment.extract_translations
@@ -464,7 +464,7 @@ def validate_config_values(app: Sphinx, config: Config) -> None:
     for key in list(config.latex_elements):
         if key not in DEFAULT_SETTINGS:
             msg = __("Unknown configure key: latex_elements[%r], ignored.")
-            logger.warning(msg % (key,))
+            logger.warning(msg, key)
             config.latex_elements.pop(key)


@@ -472,7 +472,7 @@ def validate_latex_theme_options(app: Sphinx, config: Config) -> None:
     for key in list(config.latex_theme_options):
         if key not in Theme.UPDATABLE_KEYS:
             msg = __("Unknown theme option: latex_theme_options[%r], ignored.")
-            logger.warning(msg % (key,))
+            logger.warning(msg, key)
             config.latex_theme_options.pop(key)


@@ -124,8 +124,8 @@ class CodeBlock(SphinxDirective):
                 nlines = len(self.content)
                 hl_lines = parselinenos(linespec, nlines)
                 if any(i >= nlines for i in hl_lines):
-                    logger.warning(__('line number spec is out of range(1-%d): %r') %
-                                   (nlines, self.options['emphasize-lines']),
+                    logger.warning(__('line number spec is out of range(1-%d): %r'),
+                                   nlines, self.options['emphasize-lines'],
                                    location=location)

                 hl_lines = [x + 1 for x in hl_lines if x < nlines]
@@ -274,8 +274,8 @@ class LiteralIncludeReader:
         if linespec:
             linelist = parselinenos(linespec, len(lines))
             if any(i >= len(lines) for i in linelist):
-                logger.warning(__('line number spec is out of range(1-%d): %r') %
-                               (len(lines), linespec), location=location)
+                logger.warning(__('line number spec is out of range(1-%d): %r'),
+                               len(lines), linespec, location=location)

             if 'lineno-match' in self.options:
                 # make sure the line list is not "disjoint".
@@ -450,8 +450,8 @@ class LiteralInclude(SphinxDirective):
             if 'emphasize-lines' in self.options:
                 hl_lines = parselinenos(self.options['emphasize-lines'], lines)
                 if any(i >= lines for i in hl_lines):
-                    logger.warning(__('line number spec is out of range(1-%d): %r') %
-                                   (lines, self.options['emphasize-lines']),
+                    logger.warning(__('line number spec is out of range(1-%d): %r'),
+                                   lines, self.options['emphasize-lines'],
                                    location=location)
                 extra_args['hl_lines'] = [x + 1 for x in hl_lines if x < lines]
             extra_args['linenostart'] = reader.lineno_start
@@ -543,7 +543,7 @@ class AliasTransform(SphinxTransform):
                 signode.clear()
                 signode += addnodes.desc_name(sig, sig)

-                logger.warning("Could not find C declaration for alias '%s'." % name,
+                logger.warning("Could not find C declaration for alias '%s'.", name,
                                location=node)
                 node.replace_self(signode)
                 continue
@@ -557,7 +557,7 @@ class AliasTransform(SphinxTransform):
                 signode += addnodes.desc_name(sig, sig)

                 logger.warning(
-                    "Can not render C declaration for alias '%s'. No such declaration." % name,
+                    "Can not render C declaration for alias '%s'. No such declaration.", name,
                     location=node)
                 node.replace_self(signode)
                 continue
@@ -705,7 +705,7 @@ class AliasTransform(SphinxTransform):
                 signode.clear()
                 signode += addnodes.desc_name(sig, sig)

-                logger.warning("Can not find C++ declaration for alias '%s'." % ast,
+                logger.warning("Can not find C++ declaration for alias '%s'.", ast,
                                location=node)
                 node.replace_self(signode)
             else:
@@ -60,8 +60,8 @@ class MathDomain(Domain):
     def note_equation(self, docname: str, labelid: str, location: Any = None) -> None:
         if labelid in self.equations:
             other = self.equations[labelid][0]
-            logger.warning(__('duplicate label of equation %s, other instance in %s') %
-                           (labelid, other), location=location)
+            logger.warning(__('duplicate label of equation %s, other instance in %s'),
+                           labelid, other, location=location)

         self.equations[labelid] = (docname, self.env.new_serialno('eqno') + 1)

@@ -244,8 +244,8 @@ class ReSTDomain(Domain):
     def note_object(self, objtype: str, name: str, node_id: str, location: Any = None) -> None:
         if (objtype, name) in self.objects:
             docname, node_id = self.objects[objtype, name]
-            logger.warning(__('duplicate description of %s %s, other instance in %s') %
-                           (objtype, name, docname), location=location)
+            logger.warning(__('duplicate description of %s %s, other instance in %s'),
+                           objtype, name, docname, location=location)

         self.objects[objtype, name] = (self.env.docname, node_id)

@@ -1110,7 +1110,7 @@ def warn_missing_reference(app: Sphinx, domain: Domain, node: pending_xref,
     else:
         msg = __('Failed to create a cross reference. A title or caption not found: %r')

-    logger.warning(msg % target, location=node, type='ref', subtype=node['reftype'])
+    logger.warning(msg, target, location=node, type='ref', subtype=node['reftype'])
     return True


@@ -86,7 +86,7 @@ class ImageCollector(EnvironmentCollector):
             for imgpath in candidates.values():
                 app.env.dependencies[docname].add(imgpath)
                 if not os.access(path.join(app.srcdir, imgpath), os.R_OK):
-                    logger.warning(__('image file not readable: %s') % imgpath,
+                    logger.warning(__('image file not readable: %s'), imgpath,
                                    location=node, type='image', subtype='not_readable')
                     continue
                 app.env.images.add_file(docname, imgpath)
@@ -105,7 +105,7 @@ class ImageCollector(EnvironmentCollector):
                 if mimetype not in candidates:
                     globbed.setdefault(mimetype, []).append(new_imgpath)
             except OSError as err:
-                logger.warning(__('image file %s not readable: %s') % (filename, err),
+                logger.warning(__('image file %s not readable: %s'), filename, err,
                                location=node, type='image', subtype='not_readable')
         for key, files in globbed.items():
             candidates[key] = sorted(files, key=len)[0]  # select by similarity
@@ -131,7 +131,7 @@ class DownloadFileCollector(EnvironmentCollector):
                 rel_filename, filename = app.env.relfn2path(targetname, app.env.docname)
                 app.env.dependencies[app.env.docname].add(rel_filename)
                 if not os.access(filename, os.R_OK):
-                    logger.warning(__('download file not readable: %s') % filename,
+                    logger.warning(__('download file not readable: %s'), filename,
                                    location=node, type='download', subtype='not_readable')
                     continue
                 node['filename'] = app.env.dlfiles.add_file(app.env.docname, rel_filename)
@@ -396,7 +396,7 @@ class Documenter:
         # an autogenerated one
         matched = py_ext_sig_re.match(self.name)
         if matched is None:
-            logger.warning(__('invalid signature for auto%s (%r)') % (self.objtype, self.name),
+            logger.warning(__('invalid signature for auto%s (%r)'), self.objtype, self.name,
                            type='autodoc')
             return False
         explicit_modname, path, base, tp_list, args, retann = matched.groups()
@@ -892,7 +892,7 @@ class Documenter:
             logger.warning(
                 __("don't know which module to import for autodocumenting "
                    '%r (try placing a "module" or "currentmodule" directive '
-                   'in the document, or giving an explicit module name)') %
+                   'in the document, or giving an explicit module name)'),
                 self.name, type='autodoc')
             return

@@ -1024,7 +1024,7 @@ class ModuleDocumenter(Documenter):
         ret = super().parse_name()
         if self.args or self.retann:
             logger.warning(__('signature arguments or return annotation '
-                              'given for automodule %s') % self.fullname,
+                              'given for automodule %s'), self.fullname,
                            type='autodoc')
         return ret

@@ -1037,8 +1037,8 @@ class ModuleDocumenter(Documenter):
         except ValueError as exc:
             # invalid __all__ found.
             logger.warning(__('__all__ should be a list of strings, not %r '
-                              '(in module %s) -- ignoring __all__') %
-                           (exc.args[0], self.fullname), type='autodoc')
+                              '(in module %s) -- ignoring __all__'),
+                           exc.args[0], self.fullname, type='autodoc')

         return ret

@@ -1103,8 +1103,8 @@ class ModuleDocumenter(Documenter):
                     ret.append(members[name])
                 else:
                     logger.warning(__('missing attribute mentioned in :members: option: '
-                                      'module %s, attribute %s') %
-                                   (safe_getattr(self.object, '__name__', '???'), name),
+                                      'module %s, attribute %s'),
+                                   safe_getattr(self.object, '__name__', '???'), name,
                                    type='autodoc')
             return False, ret

@@ -1756,8 +1756,8 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # type:
                 if name in members:
                     selected.append(members[name])
                 else:
-                    logger.warning(__('missing attribute %s in object %s') %
-                                   (name, self.fullname), type='autodoc')
+                    logger.warning(__('missing attribute %s in object %s'),
+                                   name, self.fullname, type='autodoc')
             return False, selected
         elif self.options.inherited_members:
             return False, list(members.values())
@@ -130,8 +130,8 @@ class AutodocDirective(SphinxDirective):
             documenter_options = process_documenter_options(doccls, self.config, self.options)
         except (KeyError, ValueError, TypeError) as exc:
             # an option is either unknown or has a wrong type
-            logger.error('An option to %s is either unknown or has an invalid value: %s' %
-                         (self.name, exc), location=(self.env.docname, lineno))
+            logger.error('An option to %s is either unknown or has an invalid value: %s',
+                         self.name, exc, location=(self.env.docname, lineno))
             return []

         # generate the output
@@ -467,11 +467,11 @@ def generate_autosummary_docs(sources: list[str],
     showed_sources = sorted(sources)
     if len(showed_sources) > 20:
         showed_sources = showed_sources[:10] + ['...'] + showed_sources[-10:]
-    logger.info(__('[autosummary] generating autosummary for: %s') %
+    logger.info(__('[autosummary] generating autosummary for: %s'),
                 ', '.join(showed_sources))

     if output_dir:
-        logger.info(__('[autosummary] writing to %s') % output_dir)
+        logger.info(__('[autosummary] writing to %s'), output_dir)

     if base_path is not None:
         sources = [os.path.join(base_path, filename) for filename in sources]
@@ -130,7 +130,7 @@ class Theme:
                 'are not supported, returning the default value instead '
                 '(tried to get a value from %r)'
             )
-            logger.info(msg % section)
+            logger.info(msg, section)
             value = default
         if value is _NO_DEFAULT:
             msg = __('setting %s.%s occurs in none of the searched theme configs') % (
@@ -148,7 +148,7 @@ class Theme:
         options = self._options.copy()
         for option, value in overrides.items():
             if option not in options:
-                logger.warning(__('unsupported theme option %r given') % option)
+                logger.warning(__('unsupported theme option %r given'), option)
             else:
                 options[option] = value

@@ -80,8 +80,8 @@ class ImageDownloader(BaseImageConverter):
                 _tls_info=(config.tls_verify, config.tls_cacerts),
             )
             if r.status_code >= 400:
-                logger.warning(__('Could not fetch remote image: %s [%d]') %
-                               (node['uri'], r.status_code))
+                logger.warning(__('Could not fetch remote image: %s [%d]'),
+                               node['uri'], r.status_code)
             else:
                 self.app.env.original_image_uri[path] = node['uri']

@@ -108,7 +108,7 @@ class ImageDownloader(BaseImageConverter):
             node['uri'] = path
             self.app.env.images.add_file(self.env.docname, path)
         except Exception as exc:
-            logger.warning(__('Could not fetch remote image: %s [%s]') % (node['uri'], exc))
+            logger.warning(__('Could not fetch remote image: %s [%s]'), node['uri'], exc)


 class DataURIExtractor(BaseImageConverter):
@@ -332,8 +332,8 @@ class LaTeXTranslator(SphinxTranslator):
                 self.top_sectionlevel = \
                     self.sectionnames.index(self.config.latex_toplevel_sectioning)
             except ValueError:
-                logger.warning(__('unknown %r toplevel_sectioning for class %r') %
-                               (self.config.latex_toplevel_sectioning, self.theme.docclass))
+                logger.warning(__('unknown %r toplevel_sectioning for class %r'),
+                               self.config.latex_toplevel_sectioning, self.theme.docclass)

         if self.config.numfig:
             self.numfig_secnum_depth = self.config.numfig_secnum_depth