Enable the PLR6201 lint in Ruff

This commit is contained in:
Adam Turner 2024-10-19 03:07:28 +01:00
parent 903da2046e
commit e58dd58f35
52 changed files with 219 additions and 207 deletions

View File

@ -51,7 +51,6 @@ ignore = [
"PLR2004", # Magic value used in comparison, consider replacing `{value}` with a constant variable
"PLR5501", # Use `elif` instead of `else` then `if`, to reduce indentation
"PLR6104", # Use `{operator}` to perform an augmented assignment directly
"PLR6201", # Use a set literal when testing for membership
"PLR6301", # Method `{method_name}` could be a function, class method, or static method
# pylint ('PLW')
"PLW2901", # Outer {outer_kind} variable `{name}` overwritten by inner {inner_kind} target

View File

@ -1673,7 +1673,7 @@ class Sphinx:
.. versionadded:: 4.1
"""
if policy not in ('always', 'per_page'):
if policy not in {'always', 'per_page'}:
raise ValueError('policy %s is not supported' % policy)
self.registry.html_assets_policy = policy

View File

@ -580,7 +580,7 @@ class EpubBuilder(StandaloneHTMLBuilder):
if ext not in self.media_types:
# we always have JS and potentially OpenSearch files, don't
# always warn about them
if ext not in ('.js', '.xml'):
if ext not in {'.js', '.xml'}:
logger.warning(
__('unknown mimetype for %s, ignoring'),
filename,

View File

@ -555,7 +555,7 @@ def validate_latex_theme_options(app: Sphinx, config: Config) -> None:
def install_packages_for_ja(app: Sphinx) -> None:
"""Install packages for Japanese."""
if app.config.language == 'ja' and app.config.latex_engine in ('platex', 'uplatex'):
if app.config.language == 'ja' and app.config.latex_engine in {'platex', 'uplatex'}:
app.add_latex_package('pxjahyper', after_hyperref=True)

View File

@ -55,7 +55,7 @@ class BuiltInTheme(Theme):
else:
self.docclass = config.latex_docclass.get('manual', 'report')
if name in ('manual', 'howto'):
if name in {'manual', 'howto'}:
self.wrapperclass = 'sphinx' + name
else:
self.wrapperclass = name

View File

@ -566,7 +566,7 @@ class MathReferenceTransform(SphinxPostTransform):
def run(self, **kwargs: Any) -> None:
equations = self.env.domains.math_domain.data['objects']
for node in self.document.findall(addnodes.pending_xref):
if node['refdomain'] == 'math' and node['reftype'] in ('eq', 'numref'):
if node['refdomain'] == 'math' and node['reftype'] in {'eq', 'numref'}:
docname, _ = equations.get(node['reftarget'], (None, None))
if docname:
refnode = math_reference(

View File

@ -681,7 +681,7 @@ class AnchorCheckParser(HTMLParser):
def handle_starttag(self, tag: Any, attrs: Any) -> None:
for key, value in attrs:
if key in ('id', 'name') and value == self.search_anchor:
if key in {'id', 'name'} and value == self.search_anchor:
self.found = True
break

View File

@ -121,9 +121,9 @@ def choice(*l: str) -> Callable[[str], str]:
def boolean(x: str) -> bool:
if x.upper() not in ('Y', 'YES', 'N', 'NO'):
if x.upper() not in {'Y', 'YES', 'N', 'NO'}:
raise ValidationError(__("Please enter either 'y' or 'n'."))
return x.upper() in ('Y', 'YES')
return x.upper() in {'Y', 'YES'}
def suffix(x: str) -> str:

View File

@ -290,7 +290,7 @@ class CMemberObject(CObject):
@property
def display_object_type(self) -> str:
# the distinction between var and member is only cosmetic
assert self.objtype in ('member', 'var')
assert self.objtype in {'member', 'var'}
return self.objtype
@ -354,7 +354,7 @@ class CNamespaceObject(SphinxDirective):
def run(self) -> list[Node]:
rootSymbol = self.env.domaindata['c']['root_symbol']
if self.arguments[0].strip() in ('NULL', '0', 'nullptr'):
if self.arguments[0].strip() in {'NULL', '0', 'nullptr'}:
symbol = rootSymbol
stack: list[Symbol] = []
else:
@ -383,7 +383,7 @@ class CNamespacePushObject(SphinxDirective):
option_spec: ClassVar[OptionSpec] = {}
def run(self) -> list[Node]:
if self.arguments[0].strip() in ('NULL', '0', 'nullptr'):
if self.arguments[0].strip() in {'NULL', '0', 'nullptr'}:
return []
parser = DefinitionParser(self.arguments[0],
location=self.get_location(),

View File

@ -151,7 +151,7 @@ class ASTNestedName(ASTBase):
assert not self.rooted, str(self)
assert len(self.names) == 1
self.names[0].describe_signature(signode, 'noneIsName', env, '', symbol)
elif mode in ('markType', 'lastIsName', 'markName'):
elif mode in {'markType', 'lastIsName', 'markName'}:
# Each element should be a pending xref targeting the complete
# prefix.
prefix = ''

View File

@ -608,7 +608,7 @@ class DefinitionParser(BaseParser):
if self.skip_word('register'):
storage = 'register'
continue
if outer in ('member', 'function'):
if outer in {'member', 'function'}:
if self.skip_word('static'):
storage = 'static'
continue
@ -649,7 +649,7 @@ class DefinitionParser(BaseParser):
def _parse_decl_specs(self, outer: str | None, typed: bool = True) -> ASTDeclSpecs:
if outer:
if outer not in ('type', 'member', 'function'):
if outer not in {'type', 'member', 'function'}:
raise Exception('Internal error, unknown outer "%s".' % outer)
leftSpecs = self._parse_decl_specs_simple(outer, typed)
rightSpecs = None
@ -664,7 +664,7 @@ class DefinitionParser(BaseParser):
def _parse_declarator_name_suffix(
self, named: bool | str, paramMode: str, typed: bool,
) -> ASTDeclarator:
assert named in (True, False, 'single')
assert named in {True, False, 'single'}
# now we should parse the name, and then suffixes
if named == 'single':
if self.match(identifier_re):
@ -747,7 +747,7 @@ class DefinitionParser(BaseParser):
def _parse_declarator(self, named: bool | str, paramMode: str,
typed: bool = True) -> ASTDeclarator:
# 'typed' here means 'parse return type stuff'
if paramMode not in ('type', 'function'):
if paramMode not in {'type', 'function'}:
raise Exception(
"Internal error, unknown paramMode '%s'." % paramMode)
prevErrors = []
@ -860,7 +860,7 @@ class DefinitionParser(BaseParser):
doesn't need to name the arguments, but otherwise is a single name
"""
if outer: # always named
if outer not in ('type', 'member', 'function'):
if outer not in {'type', 'member', 'function'}:
raise Exception('Internal error, unknown outer "%s".' % outer)
assert named
@ -915,7 +915,7 @@ class DefinitionParser(BaseParser):
def _parse_type_with_init(self, named: bool | str, outer: str | None) -> ASTTypeWithInit:
if outer:
assert outer in ('type', 'member', 'function')
assert outer in {'type', 'member', 'function'}
type = self._parse_type(outer=outer, named=named)
init = self._parse_initializer(outer=outer)
return ASTTypeWithInit(type, init)
@ -987,11 +987,11 @@ class DefinitionParser(BaseParser):
return ASTEnumerator(name, init, attrs)
def parse_declaration(self, objectType: str, directiveType: str) -> ASTDeclaration:
if objectType not in ('function', 'member',
'macro', 'struct', 'union', 'enum', 'enumerator', 'type'):
if objectType not in {'function', 'member',
'macro', 'struct', 'union', 'enum', 'enumerator', 'type'}:
raise Exception('Internal error, unknown objectType "%s".' % objectType)
if directiveType not in ('function', 'member', 'var',
'macro', 'struct', 'union', 'enum', 'enumerator', 'type'):
if directiveType not in {'function', 'member', 'var',
'macro', 'struct', 'union', 'enum', 'enumerator', 'type'}:
raise Exception('Internal error, unknown directiveType "%s".' % directiveType)
declaration: DeclarationType | None = None

View File

@ -460,7 +460,7 @@ class CPPClassObject(CPPObject):
@property
def display_object_type(self) -> str:
# the distinction between class and struct is only cosmetic
assert self.objtype in ('class', 'struct')
assert self.objtype in {'class', 'struct'}
return self.objtype
@ -490,7 +490,7 @@ class CPPNamespaceObject(SphinxDirective):
def run(self) -> list[Node]:
rootSymbol = self.env.domaindata['cpp']['root_symbol']
if self.arguments[0].strip() in ('NULL', '0', 'nullptr'):
if self.arguments[0].strip() in {'NULL', '0', 'nullptr'}:
symbol = rootSymbol
stack: list[Symbol] = []
else:
@ -520,7 +520,7 @@ class CPPNamespacePushObject(SphinxDirective):
option_spec: ClassVar[OptionSpec] = {}
def run(self) -> list[Node]:
if self.arguments[0].strip() in ('NULL', '0', 'nullptr'):
if self.arguments[0].strip() in {'NULL', '0', 'nullptr'}:
return []
parser = DefinitionParser(self.arguments[0],
location=self.get_location(),
@ -628,7 +628,7 @@ class AliasTransform(SphinxTransform):
for sChild in s._children:
if sChild.declaration is None:
continue
if sChild.declaration.objectType in ("templateParam", "functionParam"):
if sChild.declaration.objectType in {"templateParam", "functionParam"}:
continue
childNodes = self._render_symbol(
sChild, maxdepth=maxdepth, skipThis=False,
@ -961,7 +961,7 @@ class CPPDomain(Domain):
typ: str, target: str, node: pending_xref,
contnode: Element) -> tuple[Element | None, str | None]:
# add parens again for those that could be functions
if typ in ('any', 'func'):
if typ in {'any', 'func'}:
target += '()'
parser = DefinitionParser(target, location=node, config=env.config)
try:
@ -1081,7 +1081,7 @@ class CPPDomain(Domain):
if (env.config.add_function_parentheses and typ == 'func' and
title.endswith('operator()')):
addParen += 1
if (typ in ('any', 'func') and
if (typ in {'any', 'func'} and
title.endswith('operator') and
displayName.endswith('operator()')):
addParen += 1

View File

@ -270,7 +270,7 @@ class ASTNestedName(ASTBase):
assert len(self.names) == 1
assert not self.templates[0]
self.names[0].describe_signature(signode, 'param', env, '', symbol)
elif mode in ('markType', 'lastIsName', 'markName'):
elif mode in {'markType', 'lastIsName', 'markName'}:
# Each element should be a pending xref targeting the complete
# prefix. however, only the identifier part should be a link, such
# that template args can be a link as well.
@ -1541,7 +1541,7 @@ class ASTOperatorBuildIn(ASTOperator):
return ids[self.op]
def _stringify(self, transform: StringifyTransform) -> str:
if self.op in ('new', 'new[]', 'delete', 'delete[]') or self.op[0] in "abcnox":
if self.op in {'new', 'new[]', 'delete', 'delete[]'} or self.op[0] in "abcnox":
return 'operator ' + self.op
else:
return 'operator' + self.op
@ -1549,7 +1549,7 @@ class ASTOperatorBuildIn(ASTOperator):
def _describe_identifier(self, signode: TextElement, identnode: TextElement,
env: BuildEnvironment, symbol: Symbol) -> None:
signode += addnodes.desc_sig_keyword('operator', 'operator')
if self.op in ('new', 'new[]', 'delete', 'delete[]') or self.op[0] in "abcnox":
if self.op in {'new', 'new[]', 'delete', 'delete[]'} or self.op[0] in "abcnox":
signode += addnodes.desc_sig_space()
identnode += addnodes.desc_sig_operator(self.op, self.op)
@ -2099,7 +2099,7 @@ class ASTParametersQualifiers(ASTBase):
signode += addnodes.desc_sig_space()
signode += addnodes.desc_sig_punctuation('=', '=')
signode += addnodes.desc_sig_space()
assert self.initializer in ('0', 'delete', 'default')
assert self.initializer in {'0', 'delete', 'default'}
if self.initializer == '0':
signode += addnodes.desc_sig_literal_number('0', '0')
else:
@ -4390,7 +4390,7 @@ class ASTDeclaration(ASTBase):
elif self.objectType in {'member', 'function'}:
pass
elif self.objectType == 'class':
assert self.directiveType in ('class', 'struct')
assert self.directiveType in {'class', 'struct'}
mainDeclNode += addnodes.desc_sig_keyword(self.directiveType, self.directiveType)
mainDeclNode += addnodes.desc_sig_space()
elif self.objectType == 'union':

View File

@ -483,7 +483,7 @@ class DefinitionParser(BaseParser):
postFixes: list[ASTPostfixOp] = []
while True:
self.skip_ws()
if prefixType in ('expr', 'cast', 'typeid'):
if prefixType in {'expr', 'cast', 'typeid'}:
if self.skip_string_and_ws('['):
expr = self._parse_expression()
self.skip_ws()
@ -967,15 +967,15 @@ class DefinitionParser(BaseParser):
while self.match(_simple_type_specifiers_re):
t = self.matched_text
names.append(t)
if t in ('auto', 'void', 'bool',
if t in {'auto', 'void', 'bool',
'char', 'wchar_t', 'char8_t', 'char16_t', 'char32_t',
'int', '__int64', '__int128',
'float', 'double',
'__float80', '_Float64x', '__float128', '_Float128'):
'__float80', '_Float64x', '__float128', '_Float128'}:
if typ is not None:
self.fail(f"Can not have both {t} and {typ}.")
typ = t
elif t in ('signed', 'unsigned'):
elif t in {'signed', 'unsigned'}:
if signedness is not None:
self.fail(f"Can not have both {t} and {signedness}.")
signedness = t
@ -987,7 +987,7 @@ class DefinitionParser(BaseParser):
if len(width) != 0 and width[0] != 'long':
self.fail(f"Can not have both {t} and {width[0]}.")
width.append(t)
elif t in ('_Imaginary', '_Complex'):
elif t in {'_Imaginary', '_Complex'}:
if modifier is not None:
self.fail(f"Can not have both {t} and {modifier}.")
modifier = t
@ -995,9 +995,9 @@ class DefinitionParser(BaseParser):
if len(names) == 0:
return None
if typ in ('auto', 'void', 'bool',
if typ in {'auto', 'void', 'bool',
'wchar_t', 'char8_t', 'char16_t', 'char32_t',
'__float80', '_Float64x', '__float128', '_Float128'):
'__float80', '_Float64x', '__float128', '_Float128'}:
if modifier is not None:
self.fail(f"Can not have both {typ} and {modifier}.")
if signedness is not None:
@ -1012,7 +1012,7 @@ class DefinitionParser(BaseParser):
elif typ == 'int':
if modifier is not None:
self.fail(f"Can not have both {typ} and {modifier}.")
elif typ in ('__int64', '__int128'):
elif typ in {'__int64', '__int128'}:
if modifier is not None:
self.fail(f"Can not have both {typ} and {modifier}.")
if len(width) != 0:
@ -1211,7 +1211,7 @@ class DefinitionParser(BaseParser):
if volatile:
continue
if not storage:
if outer in ('member', 'function'):
if outer in {'member', 'function'}:
if self.skip_word('static'):
storage = 'static'
continue
@ -1225,11 +1225,11 @@ class DefinitionParser(BaseParser):
if self.skip_word('register'):
storage = 'register'
continue
if not inline and outer in ('function', 'member'):
if not inline and outer in {'function', 'member'}:
inline = self.skip_word('inline')
if inline:
continue
if not constexpr and outer in ('member', 'function'):
if not constexpr and outer in {'member', 'function'}:
constexpr = self.skip_word("constexpr")
if constexpr:
continue
@ -1281,7 +1281,7 @@ class DefinitionParser(BaseParser):
def _parse_decl_specs(self, outer: str, typed: bool = True) -> ASTDeclSpecs:
if outer:
if outer not in ('type', 'member', 'function', 'templateParam'):
if outer not in {'type', 'member', 'function', 'templateParam'}:
raise Exception('Internal error, unknown outer "%s".' % outer)
"""
storage-class-specifier function-specifier "constexpr"
@ -1364,7 +1364,7 @@ class DefinitionParser(BaseParser):
typed: bool = True,
) -> ASTDeclarator:
# 'typed' here means 'parse return type stuff'
if paramMode not in ('type', 'function', 'operatorCast', 'new'):
if paramMode not in {'type', 'function', 'operatorCast', 'new'}:
raise Exception(
"Internal error, unknown paramMode '%s'." % paramMode)
prevErrors = []
@ -1532,12 +1532,12 @@ class DefinitionParser(BaseParser):
outer == operatorCast: annoying case, we should not take the params
"""
if outer: # always named
if outer not in ('type', 'member', 'function',
'operatorCast', 'templateParam'):
if outer not in {'type', 'member', 'function',
'operatorCast', 'templateParam'}:
raise Exception('Internal error, unknown outer "%s".' % outer)
if outer != 'operatorCast':
assert named
if outer in ('type', 'function'):
if outer in {'type', 'function'}:
# We allow type objects to just be a name.
# Some functions don't have normal return types: constructors,
# destructors, cast operators
@ -1616,7 +1616,7 @@ class DefinitionParser(BaseParser):
self, named: bool | str,
outer: str) -> ASTTypeWithInit | ASTTemplateParamConstrainedTypeWithInit:
if outer:
assert outer in ('type', 'member', 'function', 'templateParam')
assert outer in {'type', 'member', 'function', 'templateParam'}
type = self._parse_type(outer=outer, named=named)
if outer != 'templateParam':
init = self._parse_initializer(outer=outer)
@ -1993,12 +1993,12 @@ class DefinitionParser(BaseParser):
return templatePrefix
def parse_declaration(self, objectType: str, directiveType: str) -> ASTDeclaration:
if objectType not in ('class', 'union', 'function', 'member', 'type',
'concept', 'enum', 'enumerator'):
if objectType not in {'class', 'union', 'function', 'member', 'type',
'concept', 'enum', 'enumerator'}:
raise Exception('Internal error, unknown objectType "%s".' % objectType)
if directiveType not in ('class', 'struct', 'union', 'function', 'member', 'var',
if directiveType not in {'class', 'struct', 'union', 'function', 'member', 'var',
'type', 'concept',
'enum', 'enum-struct', 'enum-class', 'enumerator'):
'enum', 'enum-struct', 'enum-class', 'enumerator'}:
raise Exception('Internal error, unknown directiveType "%s".' % directiveType)
visibility = None
templatePrefix = None
@ -2009,7 +2009,7 @@ class DefinitionParser(BaseParser):
if self.match(_visibility_re):
visibility = self.matched_text
if objectType in ('type', 'concept', 'member', 'function', 'class', 'union'):
if objectType in {'type', 'concept', 'member', 'function', 'class', 'union'}:
templatePrefix = self._parse_template_declaration_prefix(objectType)
if objectType == 'type':

View File

@ -799,7 +799,7 @@ class Symbol:
if (otherChild.declaration.objectType ==
ourChild.declaration.objectType and
otherChild.declaration.objectType in
('templateParam', 'functionParam') and
{'templateParam', 'functionParam'} and
ourChild.parent.declaration == otherChild.parent.declaration):
# `ourChild` was just created during merging by the call
# to `_fill_empty` on the parent and can be ignored.

View File

@ -96,7 +96,7 @@ class MathDomain(Domain):
def resolve_xref(self, env: BuildEnvironment, fromdocname: str, builder: Builder,
typ: str, target: str, node: pending_xref, contnode: Element,
) -> Element | None:
assert typ in ('eq', 'numref')
assert typ in {'eq', 'numref'}
result = self.equations.get(target)
if result:
docname, number = result

View File

@ -913,9 +913,9 @@ def builtin_resolver(app: Sphinx, env: BuildEnvironment,
if node.get('refdomain') != 'py':
return None
elif node.get('reftype') in ('class', 'obj') and node.get('reftarget') == 'None':
elif node.get('reftype') in {'class', 'obj'} and node.get('reftarget') == 'None':
return contnode
elif node.get('reftype') in ('class', 'obj', 'exc'):
elif node.get('reftype') in {'class', 'obj', 'exc'}:
reftarget = node.get('reftarget')
if inspect.isclass(getattr(builtins, reftarget, None)):
# built-in class

View File

@ -140,7 +140,7 @@ def _parse_annotation(annotation: str, env: BuildEnvironment) -> list[Node]:
result.append(addnodes.desc_sig_punctuation('', ']'))
# Wrap the Text nodes inside brackets by literal node if the subscript is a Literal
if result[0] in ('Literal', 'typing.Literal'):
if result[0] in {'Literal', 'typing.Literal'}:
for i, subnode in enumerate(result[1:], start=1):
if isinstance(subnode, nodes.Text):
result[i] = nodes.literal('', '', subnode)
@ -436,9 +436,9 @@ def _parse_arglist(
if param.kind != param.POSITIONAL_ONLY and last_kind == param.POSITIONAL_ONLY:
# PEP-570: Separator for Positional Only Parameter: /
params += addnodes.desc_parameter('', '', addnodes.desc_sig_operator('', '/'))
if param.kind == param.KEYWORD_ONLY and last_kind in (param.POSITIONAL_OR_KEYWORD,
if param.kind == param.KEYWORD_ONLY and last_kind in {param.POSITIONAL_OR_KEYWORD,
param.POSITIONAL_ONLY,
None):
None}:
# PEP-3102: Separator for Keyword Only Parameter: *
params += addnodes.desc_parameter('', '', addnodes.desc_sig_operator('', '*'))

View File

@ -107,7 +107,7 @@ SLOTSATTR = object()
def members_option(arg: Any) -> object | list[str]:
"""Used to convert the :members: option to auto directives."""
if arg in (None, True):
if arg in {None, True}:
return ALL
elif arg is False:
return None
@ -117,14 +117,14 @@ def members_option(arg: Any) -> object | list[str]:
def exclude_members_option(arg: Any) -> object | set[str]:
"""Used to convert the :exclude-members: option."""
if arg in (None, True):
if arg in {None, True}:
return EMPTY
return {x.strip() for x in arg.split(',') if x.strip()}
def inherited_members_option(arg: Any) -> set[str]:
"""Used to convert the :inherited-members: option to auto directives."""
if arg in (None, True):
if arg in {None, True}:
return {'object'}
elif arg:
return {x.strip() for x in arg.split(',')}
@ -134,9 +134,9 @@ def inherited_members_option(arg: Any) -> set[str]:
def member_order_option(arg: Any) -> str | None:
"""Used to convert the :member-order: option to auto directives."""
if arg in (None, True):
if arg in {None, True}:
return None
elif arg in ('alphabetical', 'bysource', 'groupwise'):
elif arg in {'alphabetical', 'bysource', 'groupwise'}:
return arg
else:
raise ValueError(__('invalid value for member-order option: %s') % arg)
@ -144,7 +144,7 @@ def member_order_option(arg: Any) -> str | None:
def class_doc_from_option(arg: Any) -> str | None:
"""Used to convert the :class-doc-from: option to autoclass directives."""
if arg in ('both', 'class', 'init'):
if arg in {'both', 'class', 'init'}:
return arg
else:
raise ValueError(__('invalid value for class-doc-from option: %s') % arg)
@ -154,7 +154,7 @@ SUPPRESS = object()
def annotation_option(arg: Any) -> Any:
if arg in (None, True):
if arg in {None, True}:
# suppress showing the representation of the object
return SUPPRESS
else:
@ -178,8 +178,9 @@ def merge_members_option(options: dict) -> None:
members = options.setdefault('members', [])
for key in ('private-members', 'special-members'):
if key in options and options[key] not in (ALL, None):
for member in options[key]:
other_members = options.get(key)
if other_members is not None and other_members is not ALL:
for member in other_members:
if member not in members:
members.append(member)
@ -1318,7 +1319,7 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # typ
(inspect.isroutine(member) and isinstance(parent, ModuleDocumenter)))
def format_args(self, **kwargs: Any) -> str:
if self.config.autodoc_typehints in ('none', 'description'):
if self.config.autodoc_typehints in {'none', 'description'}:
kwargs.setdefault('show_annotation', False)
if self.config.autodoc_typehints_format == "short":
kwargs.setdefault('unqualified_typehints', True)
@ -1615,7 +1616,7 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
return None, None, None
def format_args(self, **kwargs: Any) -> str:
if self.config.autodoc_typehints in ('none', 'description'):
if self.config.autodoc_typehints in {'none', 'description'}:
kwargs.setdefault('show_annotation', False)
if self.config.autodoc_typehints_format == "short":
kwargs.setdefault('unqualified_typehints', True)
@ -1797,7 +1798,7 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
# for classes, what the "docstring" is can be controlled via a
# config value; the default is only the class docstring
if classdoc_from in ('both', 'init'):
if classdoc_from in {'both', 'init'}:
__init__ = self.get_attr(self.object, '__init__', None)
initdocstring = getdoc(__init__, self.get_attr,
self.config.autodoc_inherit_docstrings,
@ -2183,7 +2184,7 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
return ret
def format_args(self, **kwargs: Any) -> str:
if self.config.autodoc_typehints in ('none', 'description'):
if self.config.autodoc_typehints in {'none', 'description'}:
kwargs.setdefault('show_annotation', False)
if self.config.autodoc_typehints_format == "short":
kwargs.setdefault('unqualified_typehints', True)

View File

@ -161,7 +161,7 @@ def update_defvalue(app: Sphinx, obj: Any, bound_method: bool) -> None:
# Consume kw_defaults for kwonly args
kw_defaults.pop(0)
else:
if param.kind in (param.POSITIONAL_ONLY, param.POSITIONAL_OR_KEYWORD):
if param.kind in {param.POSITIONAL_ONLY, param.POSITIONAL_OR_KEYWORD}:
default = defaults.pop(0)
value = get_default_value(lines, default)
if value is None:

View File

@ -45,7 +45,7 @@ def record_typehints(app: Sphinx, objtype: str, name: str, obj: Any,
def merge_typehints(app: Sphinx, domain: str, objtype: str, contentnode: Element) -> None:
if domain != 'py':
return
if app.config.autodoc_typehints not in ('both', 'description'):
if app.config.autodoc_typehints not in {'both', 'description'}:
return
try:
@ -177,14 +177,14 @@ def augment_descriptions_with_types(
elif parts[0] == 'type':
name = ' '.join(parts[1:])
has_type.add(name)
elif parts[0] in ('return', 'returns'):
elif parts[0] in {'return', 'returns'}:
has_description.add('return')
elif parts[0] == 'rtype':
has_type.add('return')
# Add 'type' for parameters with a description but no declared type.
for name, annotation in annotations.items():
if name in ('return', 'returns'):
if name in {'return', 'returns'}:
continue
if '*' + name in has_description:

View File

@ -358,7 +358,7 @@ def generate_autosummary_content(
if modname is None or qualname is None:
modname, qualname = _split_full_qualified_name(name)
if doc.objtype in ('method', 'attribute', 'property'):
if doc.objtype in {'method', 'attribute', 'property'}:
ns['class'] = qualname.rsplit('.', 1)[0]
if doc.objtype == 'class':

View File

@ -86,7 +86,7 @@ class TestDirective(SphinxDirective):
test = code
code = doctestopt_re.sub('', code)
nodetype: type[TextElement] = nodes.literal_block
if self.name in ('testsetup', 'testcleanup') or 'hide' in self.options:
if self.name in {'testsetup', 'testcleanup'} or 'hide' in self.options:
nodetype = nodes.comment
if self.arguments:
groups = [x.strip() for x in self.arguments[0].split(',')]
@ -105,7 +105,7 @@ class TestDirective(SphinxDirective):
# don't try to highlight output
node['language'] = 'none'
node['options'] = {}
if self.name in ('doctest', 'testoutput') and 'options' in self.options:
if self.name in {'doctest', 'testoutput'} and 'options' in self.options:
# parse doctest-like output comparison flags
option_strings = self.options['options'].replace(',', ' ').split()
for option in option_strings:

View File

@ -325,7 +325,7 @@ def render_dot_html(self: HTML5Translator, node: graphviz, code: str, options: d
) -> tuple[str, str]:
format = self.builder.config.graphviz_output_format
try:
if format not in ('png', 'svg'):
if format not in {'png', 'svg'}:
raise GraphvizError(__("graphviz_output_format must be one of 'png', "
"'svg', but is %r") % format)
fname, outfn = render_dot(self, code, options, format, prefix, filename)

View File

@ -96,7 +96,7 @@ def generate_latex_macro(image_format: str,
'fontsize': config.imgmath_font_size,
'baselineskip': int(round(config.imgmath_font_size * 1.2)),
'preamble': config.imgmath_latex_preamble,
# the dvips option is important when imgmath_latex in ["xelatex", "tectonic"],
# the dvips option is important when imgmath_latex in {"xelatex", "tectonic"},
# it has no impact when imgmath_latex="latex"
'tightpage': '' if image_format == 'png' else ',dvips,tightpage',
'math': math,

View File

@ -166,7 +166,7 @@ class InheritanceGraph:
def _class_info(self, classes: list[Any], show_builtins: bool, private_bases: bool,
parts: int, aliases: dict[str, str] | None, top_classes: Sequence[Any],
) -> list[tuple[str, str, list[str], str]]:
) -> list[tuple[str, str, Sequence[str], str | None]]:
"""Return name and bases for all classes that are ancestors of
*classes*.
@ -221,7 +221,11 @@ class InheritanceGraph:
for cls in classes:
recurse(cls)
return list(all_classes.values()) # type: ignore[arg-type]
return [
(cls_name, fullname, tuple(bases), tooltip)
for (cls_name, fullname, bases, tooltip)
in all_classes.values()
]
def class_name(
self, cls: Any, parts: int = 0, aliases: dict[str, str] | None = None,
@ -232,7 +236,7 @@ class InheritanceGraph:
completely general.
"""
module = cls.__module__
if module in ('__builtin__', 'builtins'):
if module in {'__builtin__', 'builtins'}:
fullname = cls.__name__
else:
fullname = f'{module}.{cls.__qualname__}'

View File

@ -448,10 +448,10 @@ def _skip_member(
"""
has_doc = getattr(obj, '__doc__', False)
is_member = what in ('class', 'exception', 'module')
is_member = what in {'class', 'exception', 'module'}
if name != '__weakref__' and has_doc and is_member:
cls_is_owner = False
if what in ('class', 'exception'):
if what in {'class', 'exception'}:
qualname = getattr(obj, '__qualname__', '')
cls_path, _, _ = qualname.rpartition('.')
if cls_path:

View File

@ -635,7 +635,7 @@ class GoogleDocstring:
def _parse(self) -> None:
self._parsed_lines = self._consume_empty()
if self._name and self._what in ('attribute', 'data', 'property'):
if self._name and self._what in {'attribute', 'data', 'property'}:
res: list[str] = []
with contextlib.suppress(StopIteration):
res = self._parse_attribute_docstring()
@ -893,7 +893,7 @@ class GoogleDocstring:
def _lookup_annotation(self, _name: str) -> str:
if self._config.napoleon_attr_annotations:
if self._what in ('module', 'class', 'exception') and self._obj:
if self._what in {'module', 'class', 'exception'} and self._obj:
# cache the class annotations
if not hasattr(self, '_annotations'):
localns = getattr(self._config, 'autodoc_type_aliases', {})
@ -1038,7 +1038,7 @@ def _token_type(token: str, location: str | None = None) -> str:
location=location,
)
type_ = 'literal'
elif token in ('optional', 'default'):
elif token in {'optional', 'default'}:
# default is not an official keyword (yet) but supported by the
# reference implementation (numpydoc) and widely used
type_ = 'control'

View File

@ -26,7 +26,7 @@ if TYPE_CHECKING:
def _tobool(val: str) -> bool:
if isinstance(val, str):
return val.lower() in ('true', '1', 'yes', 'on')
return val.lower() in {'true', '1', 'yes', 'on'}
return bool(val)

View File

@ -47,14 +47,14 @@ def get_lvar_names(node: ast.AST, self: ast.arg | None = None) -> list[str]:
self_id = self.arg
node_name = node.__class__.__name__
if node_name in ('Constant', 'Index', 'Slice', 'Subscript'):
if node_name in {'Constant', 'Index', 'Slice', 'Subscript'}:
raise TypeError('%r does not create new variable' % node)
if node_name == 'Name':
if self is None or node.id == self_id: # type: ignore[attr-defined]
return [node.id] # type: ignore[attr-defined]
else:
raise TypeError('The assignment %r is not instance variable' % node)
elif node_name in ('Tuple', 'List'):
elif node_name in {'Tuple', 'List'}:
members = []
for elt in node.elts: # type: ignore[attr-defined]
with contextlib.suppress(TypeError):

View File

@ -246,7 +246,7 @@ class ReferencesResolver(SphinxPostTransform):
return
elif domain and typ in domain.dangling_warnings:
msg = domain.dangling_warnings[typ] % {'target': target}
elif node.get('refdomain', 'std') not in ('', 'std'):
elif node.get('refdomain', 'std') not in {'', 'std'}:
msg = __('%s:%s reference target not found: %s') % (
node['refdomain'],
typ,

View File

@ -90,7 +90,7 @@ char_literal_re = re.compile(
def verify_description_mode(mode: str) -> None:
if mode not in ('lastIsName', 'noneIsName', 'markType', 'markName', 'param', 'udl'):
if mode not in {'lastIsName', 'noneIsName', 'markType', 'markName', 'param', 'udl'}:
raise Exception("Description mode '%s' is invalid." % mode)

View File

@ -106,7 +106,7 @@ def color_terminal() -> bool:
if 'COLORTERM' in os.environ:
return True
term = os.environ.get('TERM', 'dumb').lower()
return term in ('xterm', 'linux') or 'color' in term
return term in {'xterm', 'linux'} or 'color' in term
def nocolor() -> None:

View File

@ -142,7 +142,8 @@ class CatalogRepository:
basedir = path.join(locale_dir, self.language, 'LC_MESSAGES')
for root, dirnames, filenames in os.walk(basedir):
# skip dot-directories
for dirname in [d for d in dirnames if d.startswith('.')]:
dot_directories = [d for d in dirnames if d.startswith('.')]
for dirname in dot_directories:
dirnames.remove(dirname)
for filename in filenames:

View File

@ -799,11 +799,11 @@ def stringify_signature(
):
# PEP-570: Separator for Positional Only Parameter: /
args.append('/')
if param.kind == Parameter.KEYWORD_ONLY and last_kind in (
if param.kind == Parameter.KEYWORD_ONLY and last_kind in {
Parameter.POSITIONAL_OR_KEYWORD,
Parameter.POSITIONAL_ONLY,
None,
):
}:
# PEP-3102: Separator for Keyword Only Parameter: *
args.append('*')

View File

@ -103,7 +103,7 @@ _tex_hlescape_map_without_unicode: dict[int, str] = {}
def escape(s: str, latex_engine: str | None = None) -> str:
"""Escape text for LaTeX output."""
if latex_engine in ('lualatex', 'xelatex'):
if latex_engine in {'lualatex', 'xelatex'}:
# unicode based LaTeX engine
return s.translate(_tex_escape_map_without_unicode)
else:
@ -112,7 +112,7 @@ def escape(s: str, latex_engine: str | None = None) -> str:
def hlescape(s: str, latex_engine: str | None = None) -> str:
"""Escape text for LaTeX highlighter."""
if latex_engine in ('lualatex', 'xelatex'):
if latex_engine in {'lualatex', 'xelatex'}:
# unicode based LaTeX engine
return s.translate(_tex_hlescape_map_without_unicode)
else:

View File

@ -302,7 +302,7 @@ def restify(cls: Any, mode: _RestifyMode = 'fully-qualified-except-typing') -> s
# are printed natively and ``None``-like types are kept as is.
# *cls* is defined in ``typing``, and thus ``__args__`` must exist
return ' | '.join(restify(a, mode) for a in cls.__args__)
elif cls.__module__ in ('__builtin__', 'builtins'):
elif cls.__module__ in {'__builtin__', 'builtins'}:
if hasattr(cls, '__args__'):
if not cls.__args__: # Empty tuple, list, ...
return rf':py:class:`{cls.__name__}`\ [{cls.__args__!r}]'

View File

@ -291,7 +291,7 @@ def rstdim_to_latexdim(width_str: str, scale: int = 100) -> str:
amount, unit = match.groups()[:2]
if scale == 100:
float(amount) # validate amount is float
if unit in ('', 'px'):
if unit in {'', 'px'}:
res = r'%s\sphinxpxdimen' % amount
elif unit == 'pt':
res = '%sbp' % amount # convert to 'bp'
@ -299,7 +299,7 @@ def rstdim_to_latexdim(width_str: str, scale: int = 100) -> str:
res = r'%.3f\linewidth' % (float(amount) / 100.0)
else:
amount_float = float(amount) * scale / 100.0
if unit in ('', 'px'):
if unit in {'', 'px'}:
res = r'%.5f\sphinxpxdimen' % amount_float
elif unit == 'pt':
res = '%.5fbp' % amount_float
@ -1090,7 +1090,7 @@ class LaTeXTranslator(SphinxTranslator):
self.no_latex_floats -= 1
def visit_rubric(self, node: nodes.rubric) -> None:
if len(node) == 1 and node.astext() in ('Footnotes', _('Footnotes')):
if len(node) == 1 and node.astext() in {'Footnotes', _('Footnotes')}:
raise nodes.SkipNode
tag = 'subsubsection'
if 'heading-level' in node:
@ -1684,7 +1684,7 @@ class LaTeXTranslator(SphinxTranslator):
if any(isinstance(child, nodes.caption) for child in node):
self.body.append(r'\capstart')
self.context.append(r'\end{sphinxfigure-in-table}\relax' + CR)
elif node.get('align', '') in ('left', 'right'):
elif node.get('align', '') in {'left', 'right'}:
length = None
if 'width' in node:
length = self.latex_image_length(node['width'])

View File

@ -259,7 +259,7 @@ class ManualPageTranslator(SphinxTranslator, BaseTranslator): # type: ignore[mi
# overwritten -- handle footnotes rubric
def visit_rubric(self, node: Element) -> None:
self.ensure_eol()
if len(node) == 1 and node.astext() in ('Footnotes', _('Footnotes')):
if len(node) == 1 and node.astext() in {'Footnotes', _('Footnotes')}:
self.body.append('.SH ' + self.deunicode(node.astext()).upper() + '\n')
raise nodes.SkipNode
self.body.append('.sp\n')

View File

@ -240,7 +240,7 @@ class TexinfoTranslator(SphinxTranslator):
# filename
if not elements['filename']:
elements['filename'] = self.document.get('source') or 'untitled'
if elements['filename'][-4:] in ('.txt', '.rst'): # type: ignore[index]
if elements['filename'][-4:] in {'.txt', '.rst'}: # type: ignore[index]
elements['filename'] = elements['filename'][:-4] # type: ignore[index]
elements['filename'] += '.info' # type: ignore[operator]
# direntry
@ -657,7 +657,7 @@ class TexinfoTranslator(SphinxTranslator):
self.body.append('\n\n')
def visit_rubric(self, node: Element) -> None:
if len(node) == 1 and node.astext() in ('Footnotes', _('Footnotes')):
if len(node) == 1 and node.astext() in {'Footnotes', _('Footnotes')}:
raise nodes.SkipNode
try:
rubric = self.rubrics[self.section_level]

View File

@ -2189,7 +2189,7 @@ def test_duplicated_labels_before_module(app):
tested_labels = set()
# iterate over the (explicit) labels in the corresponding index.rst
for rst_label_name in [
for rst_label_name in (
'label_1a',
'label_1b',
'label_2',
@ -2198,7 +2198,7 @@ def test_duplicated_labels_before_module(app):
'label_auto_1b',
'label_auto_2',
'label_auto_3',
]:
):
tex_label_name = 'index:' + rst_label_name.replace('_', '-')
tex_label_code = r'\phantomsection\label{\detokenize{%s}}' % tex_label_name
assert (

View File

@ -37,10 +37,7 @@ def test_sectioning(app):
doctree = app.env.get_doctree('only')
app.env.apply_post_transforms(doctree, 'only')
parts = [
getsects(n)
for n in [_n for _n in doctree.children if isinstance(_n, nodes.section)]
]
parts = [getsects(n) for n in doctree.children if isinstance(n, nodes.section)]
for i, s in enumerate(parts):
testsects(str(i + 1) + '.', s, 4)
actual_headings = '\n'.join(p[0] for p in parts)

View File

@ -48,7 +48,7 @@ def _check(name, input, idDict, output, key, asTextOutput):
if key is None:
key = name
key += ' '
if name in ('function', 'member'):
if name in {'function', 'member'}:
inputActual = input
outputAst = output
outputAsText = output
@ -192,8 +192,8 @@ def test_domain_c_ast_expressions():
exprCheck(expr)
expr = i + l + u
exprCheck(expr)
for suffix in ['', 'f', 'F', 'l', 'L']:
for e in [
for suffix in ('', 'f', 'F', 'l', 'L'):
for e in (
'5e42',
'5e+42',
'5e-42',
@ -213,10 +213,10 @@ def test_domain_c_ast_expressions():
"1'2'3.e7'8'9",
".4'5'6e7'8'9",
"1'2'3.4'5'6e7'8'9",
]:
):
expr = e + suffix
exprCheck(expr)
for e in [
for e in (
'ApF',
'Ap+F',
'Ap-F',
@ -236,12 +236,12 @@ def test_domain_c_ast_expressions():
"A'B'C.p1'2'3",
".D'E'Fp1'2'3",
"A'B'C.D'E'Fp1'2'3",
]:
):
expr = '0x' + e + suffix
exprCheck(expr)
exprCheck('"abc\\"cba"') # string
# character literals
for p in ['', 'u8', 'u', 'U', 'L']:
for p in ('', 'u8', 'u', 'U', 'L'):
exprCheck(p + "'a'")
exprCheck(p + "'\\n'")
exprCheck(p + "'\\012'")

View File

@ -50,7 +50,7 @@ def _check(name, input, idDict, output, key, asTextOutput):
if key is None:
key = name
key += ' '
if name in ('function', 'member'):
if name in {'function', 'member'}:
inputActual = input
outputAst = output
outputAsText = output
@ -270,7 +270,7 @@ def test_domain_cpp_ast_expressions():
".D'E'Fp1'2'3",
"A'B'C.D'E'Fp1'2'3",
]
for suffix in ['', 'f', 'F', 'l', 'L']:
for suffix in ('', 'f', 'F', 'l', 'L'):
for e in decimalFloats:
expr = e + suffix
exprCheck(expr, 'L' + expr.replace("'", '') + 'E')

View File

@ -365,7 +365,7 @@ def test_toc_all_references_should_exist_pep420_enabled(apidoc):
found_refs = []
missing_files = []
for ref in refs:
if ref and ref[0] in (':', '#'):
if ref and ref[0] in {':', '#'}:
continue
found_refs.append(ref)
filename = f'{ref}.rst'
@ -396,7 +396,7 @@ def test_toc_all_references_should_exist_pep420_disabled(apidoc):
found_refs = []
missing_files = []
for ref in refs:
if ref and ref[0] in (':', '#'):
if ref and ref[0] in {':', '#'}:
continue
filename = f'{ref}.rst'
found_refs.append(ref)

View File

@ -133,7 +133,7 @@ def test_format_signature(app):
return None
def skip_member(app, what, name, obj, skip, options):
if name in ('__special1__', '__special2__'):
if name in {'__special1__', '__special2__'}:
return skip
if name.startswith('__'):
return True

View File

@ -337,7 +337,7 @@ def test_autosummary_generate_content_for_module_skipped(app):
template = Mock()
def skip_member(app, what, name, obj, skip, options):
if name in ('Foo', 'bar', 'Exc'):
if name in {'Foo', 'bar', 'Exc'}:
return True
return None

View File

@ -48,25 +48,25 @@ def test_inheritance_diagram(app):
# basic inheritance diagram showing all classes
for cls in graphs['basic_diagram'].class_info:
# use in b/c traversing order is different sometimes
assert cls in [
('dummy.test.A', 'dummy.test.A', [], None),
('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None),
('dummy.test.C', 'dummy.test.C', ['dummy.test.A'], None),
('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None),
('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None),
('dummy.test.B', 'dummy.test.B', ['dummy.test.A'], None),
]
assert cls in {
('dummy.test.A', 'dummy.test.A', (), None),
('dummy.test.F', 'dummy.test.F', ('dummy.test.C',), None),
('dummy.test.C', 'dummy.test.C', ('dummy.test.A',), None),
('dummy.test.E', 'dummy.test.E', ('dummy.test.B',), None),
('dummy.test.D', 'dummy.test.D', ('dummy.test.B', 'dummy.test.C'), None),
('dummy.test.B', 'dummy.test.B', ('dummy.test.A',), None),
}
# inheritance diagram using :parts: 1 option
for cls in graphs['diagram_w_parts'].class_info:
assert cls in [
('A', 'dummy.test.A', [], None),
('F', 'dummy.test.F', ['C'], None),
('C', 'dummy.test.C', ['A'], None),
('E', 'dummy.test.E', ['B'], None),
('D', 'dummy.test.D', ['B', 'C'], None),
('B', 'dummy.test.B', ['A'], None),
]
assert cls in {
('A', 'dummy.test.A', (), None),
('F', 'dummy.test.F', ('C',), None),
('C', 'dummy.test.C', ('A',), None),
('E', 'dummy.test.E', ('B',), None),
('D', 'dummy.test.D', ('B', 'C'), None),
('B', 'dummy.test.B', ('A',), None),
}
# inheritance diagram with 1 top class
# :top-classes: dummy.test.B
@ -78,14 +78,14 @@ def test_inheritance_diagram(app):
# E D F
#
for cls in graphs['diagram_w_1_top_class'].class_info:
assert cls in [
('dummy.test.A', 'dummy.test.A', [], None),
('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None),
('dummy.test.C', 'dummy.test.C', ['dummy.test.A'], None),
('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None),
('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None),
('dummy.test.B', 'dummy.test.B', [], None),
]
assert cls in {
('dummy.test.A', 'dummy.test.A', (), None),
('dummy.test.F', 'dummy.test.F', ('dummy.test.C',), None),
('dummy.test.C', 'dummy.test.C', ('dummy.test.A',), None),
('dummy.test.E', 'dummy.test.E', ('dummy.test.B',), None),
('dummy.test.D', 'dummy.test.D', ('dummy.test.B', 'dummy.test.C'), None),
('dummy.test.B', 'dummy.test.B', (), None),
}
# inheritance diagram with 2 top classes
# :top-classes: dummy.test.B, dummy.test.C
@ -97,13 +97,13 @@ def test_inheritance_diagram(app):
# E D F
#
for cls in graphs['diagram_w_2_top_classes'].class_info:
assert cls in [
('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None),
('dummy.test.C', 'dummy.test.C', [], None),
('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None),
('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None),
('dummy.test.B', 'dummy.test.B', [], None),
]
assert cls in {
('dummy.test.F', 'dummy.test.F', ('dummy.test.C',), None),
('dummy.test.C', 'dummy.test.C', (), None),
('dummy.test.E', 'dummy.test.E', ('dummy.test.B',), None),
('dummy.test.D', 'dummy.test.D', ('dummy.test.B', 'dummy.test.C'), None),
('dummy.test.B', 'dummy.test.B', (), None),
}
# inheritance diagram with 2 top classes and specifying the entire module
# rendering should be
@ -119,27 +119,27 @@ def test_inheritance_diagram(app):
# If you'd like to not show class A in the graph don't specify the entire module.
# this is a known issue.
for cls in graphs['diagram_module_w_2_top_classes'].class_info:
assert cls in [
('dummy.test.F', 'dummy.test.F', ['dummy.test.C'], None),
('dummy.test.C', 'dummy.test.C', [], None),
('dummy.test.E', 'dummy.test.E', ['dummy.test.B'], None),
('dummy.test.D', 'dummy.test.D', ['dummy.test.B', 'dummy.test.C'], None),
('dummy.test.B', 'dummy.test.B', [], None),
('dummy.test.A', 'dummy.test.A', [], None),
]
assert cls in {
('dummy.test.F', 'dummy.test.F', ('dummy.test.C',), None),
('dummy.test.C', 'dummy.test.C', (), None),
('dummy.test.E', 'dummy.test.E', ('dummy.test.B',), None),
('dummy.test.D', 'dummy.test.D', ('dummy.test.B', 'dummy.test.C'), None),
('dummy.test.B', 'dummy.test.B', (), None),
('dummy.test.A', 'dummy.test.A', (), None),
}
# inheritance diagram involving a base class nested within another class
for cls in graphs['diagram_w_nested_classes'].class_info:
assert cls in [
('dummy.test_nested.A', 'dummy.test_nested.A', [], None),
assert cls in {
('dummy.test_nested.A', 'dummy.test_nested.A', (), None),
(
'dummy.test_nested.C',
'dummy.test_nested.C',
['dummy.test_nested.A.B'],
('dummy.test_nested.A.B',),
None,
),
('dummy.test_nested.A.B', 'dummy.test_nested.A.B', [], None),
]
('dummy.test_nested.A.B', 'dummy.test_nested.A.B', (), None),
}
# An external inventory to test intersphinx links in inheritance diagrams
@ -293,17 +293,17 @@ def test_inheritance_diagram_latex_alias(app):
assert (
'test.DocSubDir2',
'test.DocSubDir2',
['test.DocSubDir1'],
('test.DocSubDir1',),
None,
) in aliased_graph
assert (
'test.DocSubDir1',
'test.DocSubDir1',
['test.DocHere'],
('test.DocHere',),
None,
) in aliased_graph
assert ('test.DocHere', 'test.DocHere', ['alias.Foo'], None) in aliased_graph
assert ('alias.Foo', 'alias.Foo', [], None) in aliased_graph
assert ('test.DocHere', 'test.DocHere', ('alias.Foo',), None) in aliased_graph
assert ('alias.Foo', 'alias.Foo', (), None) in aliased_graph
content = (app.outdir / 'index.html').read_text(encoding='utf8')

View File

@ -66,7 +66,7 @@ def _info(app):
def elem_gettexts(elem):
return [_f for _f in [s.strip() for s in elem.itertext()] if _f]
return list(filter(None, map(str.strip, elem.itertext())))
def elem_getref(elem):
@ -368,8 +368,9 @@ def test_gettext_section(app):
# --- section
expect = read_po(app.srcdir / _CATALOG_LOCALE / 'LC_MESSAGES' / 'section.po')
actual = read_po(app.outdir / 'section.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
actual_msg_ids = {msg.id for msg in actual if msg.id}
for expect_msg in (msg for msg in expect if msg.id):
assert expect_msg.id in actual_msg_ids
@sphinx_intl
@ -380,7 +381,7 @@ def test_text_section(app):
# --- section
result = (app.outdir / 'section.txt').read_text(encoding='utf8')
expect = read_po(app.srcdir / _CATALOG_LOCALE / 'LC_MESSAGES' / 'section.po')
for expect_msg in [m for m in expect if m.id]:
for expect_msg in (msg for msg in expect if msg.id):
assert expect_msg.string in result
@ -537,13 +538,15 @@ def test_gettext_toctree(app):
# --- toctree (index.rst)
expect = read_po(app.srcdir / _CATALOG_LOCALE / 'LC_MESSAGES' / 'index.po')
actual = read_po(app.outdir / 'index.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
actual_msg_ids = {msg.id for msg in actual if msg.id}
for expect_msg in (msg for msg in expect if msg.id):
assert expect_msg.id in actual_msg_ids
# --- toctree (toctree.rst)
expect = read_po(app.srcdir / _CATALOG_LOCALE / 'LC_MESSAGES' / 'toctree.po')
actual = read_po(app.outdir / 'toctree.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
actual_msg_ids = {msg.id for msg in actual if msg.id}
for expect_msg in (msg for msg in expect if msg.id):
assert expect_msg.id in actual_msg_ids
@sphinx_intl
@ -554,8 +557,9 @@ def test_gettext_table(app):
# --- toctree
expect = read_po(app.srcdir / _CATALOG_LOCALE / 'LC_MESSAGES' / 'table.po')
actual = read_po(app.outdir / 'table.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
actual_msg_ids = {msg.id for msg in actual if msg.id}
for expect_msg in (msg for msg in expect if msg.id):
assert expect_msg.id in actual_msg_ids
@sphinx_intl
@ -566,7 +570,7 @@ def test_text_table(app):
# --- toctree
result = (app.outdir / 'table.txt').read_text(encoding='utf8')
expect = read_po(app.srcdir / _CATALOG_LOCALE / 'LC_MESSAGES' / 'table.po')
for expect_msg in [m for m in expect if m.id]:
for expect_msg in (msg for msg in expect if msg.id):
assert expect_msg.string in result
@ -595,8 +599,9 @@ def test_gettext_topic(app):
# --- topic
expect = read_po(app.srcdir / _CATALOG_LOCALE / 'LC_MESSAGES' / 'topic.po')
actual = read_po(app.outdir / 'topic.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
actual_msg_ids = {msg.id for msg in actual if msg.id}
for expect_msg in (msg for msg in expect if msg.id):
assert expect_msg.id in actual_msg_ids
@sphinx_intl
@ -607,7 +612,7 @@ def test_text_topic(app):
# --- topic
result = (app.outdir / 'topic.txt').read_text(encoding='utf8')
expect = read_po(app.srcdir / _CATALOG_LOCALE / 'LC_MESSAGES' / 'topic.po')
for expect_msg in [m for m in expect if m.id]:
for expect_msg in (msg for msg in expect if msg.id):
assert expect_msg.string in result
@ -621,8 +626,9 @@ def test_gettext_definition_terms(app):
app.srcdir / _CATALOG_LOCALE / 'LC_MESSAGES' / 'definition_terms.po'
)
actual = read_po(app.outdir / 'definition_terms.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
actual_msg_ids = {msg.id for msg in actual if msg.id}
for expect_msg in (msg for msg in expect if msg.id):
assert expect_msg.id in actual_msg_ids
@sphinx_intl
@ -633,8 +639,9 @@ def test_gettext_glossary_terms(app):
# --- glossary terms: regression test for #1090
expect = read_po(app.srcdir / _CATALOG_LOCALE / 'LC_MESSAGES' / 'glossary_terms.po')
actual = read_po(app.outdir / 'glossary_terms.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
actual_msg_ids = {msg.id for msg in actual if msg.id}
for expect_msg in (msg for msg in expect if msg.id):
assert expect_msg.id in actual_msg_ids
warnings = app.warning.getvalue().replace(os.sep, '/')
assert 'term not in glossary' not in warnings
@ -649,8 +656,9 @@ def test_gettext_glossary_term_inconsistencies(app):
app.srcdir / _CATALOG_LOCALE / 'LC_MESSAGES' / 'glossary_terms_inconsistency.po'
)
actual = read_po(app.outdir / 'glossary_terms_inconsistency.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
actual_msg_ids = {msg.id for msg in actual if msg.id}
for expect_msg in (msg for msg in expect if msg.id):
assert expect_msg.id in actual_msg_ids
@sphinx_intl
@ -661,10 +669,11 @@ def test_gettext_literalblock(app):
# --- gettext builder always ignores ``only`` directive
expect = read_po(app.srcdir / _CATALOG_LOCALE / 'LC_MESSAGES' / 'literalblock.po')
actual = read_po(app.outdir / 'literalblock.pot')
for expect_msg in [m for m in expect if m.id]:
actual_msg_ids = {msg.id for msg in actual if msg.id}
for expect_msg in (msg for msg in expect if msg.id):
if len(expect_msg.id.splitlines()) == 1:
# compare translations only labels
assert expect_msg.id in [m.id for m in actual if m.id]
assert expect_msg.id in actual_msg_ids
else:
pass # skip code-blocks and literalblocks
@ -677,8 +686,9 @@ def test_gettext_buildr_ignores_only_directive(app):
# --- gettext builder always ignores ``only`` directive
expect = read_po(app.srcdir / _CATALOG_LOCALE / 'LC_MESSAGES' / 'only.po')
actual = read_po(app.outdir / 'only.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
actual_msg_ids = {msg.id for msg in actual if msg.id}
for expect_msg in (msg for msg in expect if msg.id):
assert expect_msg.id in actual_msg_ids
@sphinx_intl

View File

@ -40,7 +40,7 @@ def test_ModuleAnalyzer_for_file():
def test_ModuleAnalyzer_for_module(rootdir):
analyzer = ModuleAnalyzer.for_module('sphinx')
assert analyzer.modname == 'sphinx'
assert analyzer.srcname in (SPHINX_MODULE_PATH, os.path.abspath(SPHINX_MODULE_PATH))
assert analyzer.srcname in {SPHINX_MODULE_PATH, os.path.abspath(SPHINX_MODULE_PATH)}
saved_path = sys.path.copy()
sys.path.insert(0, str(rootdir / 'test-pycode'))

View File

@ -78,7 +78,7 @@ def test_strip_ansi_short_forms():
# some messages use '\x1b[0m' instead of ``reset(s)``, so we
# test whether this alternative form is supported or not.
for strip_function in [strip_colors, strip_escape_sequences]:
for strip_function in strip_colors, strip_escape_sequences:
# \x1b[m and \x1b[0m are equivalent to \x1b[00m
assert strip_function('\x1b[m') == ''
assert strip_function('\x1b[0m') == ''

View File

@ -299,10 +299,10 @@ def test_signature_annotations():
# optional union
sig = inspect.signature(mod.f20)
assert stringify_signature(sig) in (
assert stringify_signature(sig) in {
'() -> int | str | None',
'() -> str | int | None',
)
}
# Any
sig = inspect.signature(mod.f14)