Drop support for Python 3.9 (#12633)

Adam Turner 2024-07-22 15:05:15 +01:00 committed by GitHub
parent 8c6d234e96
commit 9e3f4521db
73 changed files with 221 additions and 297 deletions

View File

@ -32,7 +32,6 @@ jobs:
fail-fast: false
matrix:
python:
- "3.9"
- "3.10"
- "3.11"
- "3.12"

View File

@ -1,4 +1,4 @@
target-version = "py39" # Pin Ruff to Python 3.9
target-version = "py310" # Pin Ruff to Python 3.10
line-length = 95
output-format = "full"
@ -419,8 +419,8 @@ select = [
]
# these tests need old ``typing`` generic aliases
"tests/test_util/test_util_typing.py" = ["UP006", "UP035"]
"tests/test_util/typing_test_data.py" = ["FA100", "UP006", "UP035"]
"tests/test_util/test_util_typing.py" = ["UP006", "UP007", "UP035"]
"tests/test_util/typing_test_data.py" = ["FA100", "UP006", "UP007", "UP035"]
"utils/*" = [
"T201", # whitelist ``print`` for stdout messages

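With Ruff now targeting Python 3.10, the pyupgrade rule UP007 also applies, rewriting ``typing.Optional``/``typing.Union`` annotations into PEP 604 ``X | Y`` unions; the two typing test fixtures above opt out so they can keep exercising the legacy spellings. An illustrative sketch of the rewrite (not taken from the Sphinx sources)::

    from typing import Optional, Union

    # What UP007 flags under target-version = "py310" ...
    def parse_old(value: Optional[str], fallback: Union[int, float]) -> Union[int, float]:
        return float(value) if value is not None else fallback

    # ... and the PEP 604 spelling it rewrites the annotations to.
    def parse_new(value: str | None, fallback: int | float) -> int | float:
        return float(value) if value is not None else fallback

    print(parse_old(None, 1), parse_new("2.5", 0))  # 1 2.5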
View File

@ -4,6 +4,8 @@ Release 8.0.0 (in development)
Dependencies
------------
* #12633: Drop Python 3.9 support.
Incompatible changes
--------------------

View File

@ -174,19 +174,19 @@ of targets and allows testing against multiple different Python environments:
tox -av
* To run unit tests for a specific Python version, such as Python 3.10::
* To run unit tests for a specific Python version, such as Python 3.12::
tox -e py310
tox -e py312
* To run unit tests for a specific Python version and turn on deprecation
warnings so they're shown in the test output::
PYTHONWARNINGS=error tox -e py310
PYTHONWARNINGS=error tox -e py312
* Arguments to ``pytest`` can be passed via ``tox``, e.g., in order to run a
particular test::
tox -e py310 tests/test_module.py::test_new_feature
tox -e py312 tests/test_module.py::test_new_feature
You can also test by installing dependencies in your local environment::

View File

@ -246,7 +246,7 @@ After you have published your sources on GitLab, create a file named
pages:
stage: deploy
image: python:3.9-slim
image: python:3.12-slim
before_script:
- apt-get update && apt-get install make --no-install-recommends -y
- python -m pip install sphinx furo

View File

@ -79,10 +79,10 @@ a comma-separated list of group names.
* ``pyversion``, a string option, can be used to specify the required Python
version for the example to be tested. For instance, in the following case
the example will be tested only for Python versions greater than 3.10::
the example will be tested only for Python versions greater than 3.12::
.. doctest::
:pyversion: > 3.10
:pyversion: > 3.12
The following operands are supported:

View File

@ -152,18 +152,18 @@ Install either ``python3x-sphinx`` using :command:`port`:
::
$ sudo port install py39-sphinx
$ sudo port install py312-sphinx
To set up the executable paths, use the ``port select`` command:
::
$ sudo port select --set python python39
$ sudo port select --set sphinx py39-sphinx
$ sudo port select --set python python312
$ sudo port select --set sphinx py312-sphinx
For more information, refer to the `package overview`__.
__ https://www.macports.org/ports.php?by=library&substr=py39-sphinx
__ https://www.macports.org/ports.php?by=library&substr=py312-sphinx
Windows
~~~~~~~

View File

@ -13,7 +13,7 @@ urls.Download = "https://pypi.org/project/Sphinx/"
urls.Homepage = "https://www.sphinx-doc.org/"
urls."Issue tracker" = "https://github.com/sphinx-doc/sphinx/issues"
license.text = "BSD-2-Clause"
requires-python = ">=3.9"
requires-python = ">=3.10"
# Classifiers list: https://pypi.org/classifiers/
classifiers = [
@ -30,7 +30,6 @@ classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
@ -71,7 +70,6 @@ dependencies = [
"imagesize>=1.3",
"requests>=2.30.0",
"packaging>=23.0",
"importlib-metadata>=6.0; python_version < '3.10'",
"tomli>=2; python_version < '3.11'",
"colorama>=0.4.6; sys_platform == 'win32'",
]
@ -91,7 +89,6 @@ lint = [
"types-Pillow==10.2.0.20240520",
"types-Pygments==2.18.0.20240506",
"types-requests>=2.30.0", # align with requests
"importlib-metadata>=6.0", # for mypy (Python<=3.9)
"tomli>=2", # for mypy (Python<=3.10)
"pytest>=6.0",
]
@ -211,7 +208,7 @@ exclude = [
]
check_untyped_defs = true
disallow_incomplete_defs = true
python_version = "3.9"
python_version = "3.10"
show_column_numbers = true
show_error_context = true
strict_optional = true

View File

@ -79,7 +79,7 @@ class _RootArgumentParser(argparse.ArgumentParser):
]
if commands := list(_load_subcommand_descriptions()):
command_max_length = min(max(map(len, next(zip(*commands), ()))), 22)
command_max_length = min(max(map(len, next(zip(*commands, strict=True), ()))), 22)
help_fragments += [
'\n',
bold(underline(__('Commands:'))),

View File

@ -10,10 +10,10 @@ import os
import pickle
import sys
from collections import deque
from collections.abc import Collection, Sequence # NoQA: TCH003
from collections.abc import Callable, Collection, Sequence # NoQA: TCH003
from io import StringIO
from os import path
from typing import IO, TYPE_CHECKING, Any, Callable, Literal
from typing import IO, TYPE_CHECKING, Any, Literal
from docutils.nodes import TextElement # NoQA: TCH002
from docutils.parsers.rst import Directive, roles
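One recurring import change in this commit: ``Callable`` now comes from ``collections.abc`` rather than ``typing`` (the ``typing`` alias is deprecated), usually placed under ``TYPE_CHECKING`` when it is only needed for annotations. A minimal sketch of the pattern, not tied to any Sphinx module::

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Imported for annotations only; collections.abc.Callable is the
        # preferred spelling now that typing.Callable is a deprecated alias.
        from collections.abc import Callable


    def apply(func: Callable[[int], int], value: int) -> int:
        # hypothetical helper, used only to show the annotation style
        return func(value)


    print(apply(lambda x: x * 2, 21))  # 42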

View File

@ -87,9 +87,9 @@ def _stable_hash(obj: Any) -> str:
"""
if isinstance(obj, dict):
obj = sorted(map(_stable_hash, obj.items()))
if isinstance(obj, (list, tuple, set, frozenset)):
if isinstance(obj, list | tuple | set | frozenset):
obj = sorted(map(_stable_hash, obj))
elif isinstance(obj, (type, types.FunctionType)):
elif isinstance(obj, type | types.FunctionType):
# The default repr() of functions includes the ID, which is not ideal.
# We use the fully qualified name instead.
obj = f'{obj.__module__}.{obj.__qualname__}'
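Most of the mechanical churn here is switching ``isinstance`` checks from tuple arguments to PEP 604 unions, which ``isinstance``/``issubclass`` accept natively from Python 3.10. A standalone sketch::

    # isinstance(obj, (list, tuple, set, frozenset)) and the union form below
    # are equivalent on Python 3.10+; only the spelling changes.
    for obj in ([1, 2], (3,), {4}, frozenset({5}), "text"):
        matched = isinstance(obj, list | tuple | set | frozenset)
        print(type(obj).__name__, matched)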
@ -734,7 +734,7 @@ class StandaloneHTMLBuilder(Builder):
'genindex-split.html')
self.handle_page('genindex-all', genindexcontext,
'genindex.html')
for (key, entries), count in zip(genindex, indexcounts):
for (key, entries), count in zip(genindex, indexcounts, strict=True):
ctx = {'key': key, 'entries': entries, 'count': count,
'genindexentries': genindex}
self.handle_page('genindex-' + key, ctx,

View File

@ -417,7 +417,7 @@ class LaTeXFootnoteVisitor(nodes.NodeVisitor):
self.unrestrict(node)
def visit_title(self, node: nodes.title) -> None:
if isinstance(node.parent, (nodes.section, nodes.table)):
if isinstance(node.parent, nodes.section | nodes.table):
self.restrict(node)
def depart_title(self, node: nodes.title) -> None:

View File

@ -27,8 +27,8 @@ from sphinx.util.http_date import rfc1123_to_epoch
from sphinx.util.nodes import get_node_line
if TYPE_CHECKING:
from collections.abc import Iterator
from typing import Any, Callable
from collections.abc import Callable, Iterator
from typing import Any
from requests import Response

View File

@ -8,7 +8,7 @@ import os
import sys
import time
from os import path
from typing import TYPE_CHECKING, Any, Callable
from typing import TYPE_CHECKING, Any
# try to import readline, unix specific enhancement
try:
@ -36,7 +36,7 @@ from sphinx.util.osutil import ensuredir
from sphinx.util.template import SphinxRenderer
if TYPE_CHECKING:
from collections.abc import Sequence
from collections.abc import Callable, Sequence
EXTENSIONS = {
'autodoc': __('automatically insert docstrings from modules'),

View File

@ -8,7 +8,7 @@ import traceback
import types
import warnings
from os import getenv, path
from typing import TYPE_CHECKING, Any, Literal, NamedTuple, Union
from typing import TYPE_CHECKING, Any, Literal, NamedTuple
from sphinx.deprecation import RemovedInSphinx90Warning
from sphinx.errors import ConfigError, ExtensionError
@ -66,7 +66,7 @@ def is_serializable(obj: object, *, _seen: frozenset[int] = frozenset()) -> bool
is_serializable(key, _seen=seen) and is_serializable(value, _seen=seen)
for key, value in obj.items()
)
elif isinstance(obj, (list, tuple, set, frozenset)):
elif isinstance(obj, list | tuple | set | frozenset):
seen = _seen | {id(obj)}
return all(is_serializable(item, _seen=seen) for item in obj)
@ -87,13 +87,13 @@ class ENUM:
self.candidates = candidates
def match(self, value: str | list | tuple) -> bool:
if isinstance(value, (list, tuple)):
if isinstance(value, list | tuple):
return all(item in self.candidates for item in value)
else:
return value in self.candidates
_OptValidTypes = Union[tuple[()], tuple[type, ...], frozenset[type], ENUM]
_OptValidTypes = tuple[()] | tuple[type, ...] | frozenset[type] | ENUM
class _Opt:
@ -549,7 +549,7 @@ def _validate_valid_types(
) -> tuple[()] | tuple[type, ...] | frozenset[type] | ENUM:
if not valid_types:
return ()
if isinstance(valid_types, (frozenset, ENUM)):
if isinstance(valid_types, frozenset | ENUM):
return valid_types
if isinstance(valid_types, type):
return frozenset((valid_types,))
@ -584,7 +584,7 @@ def convert_source_suffix(app: Sphinx, config: Config) -> None:
config.source_suffix = {source_suffix: 'restructuredtext'}
logger.info(__("Converting `source_suffix = %r` to `source_suffix = %r`."),
source_suffix, config.source_suffix)
elif isinstance(source_suffix, (list, tuple)):
elif isinstance(source_suffix, list | tuple):
# if list, considers as all of them are default filetype
config.source_suffix = dict.fromkeys(source_suffix, 'restructuredtext')
logger.info(__("Converting `source_suffix = %r` to `source_suffix = %r`."),

View File

@ -8,7 +8,8 @@ from __future__ import annotations
import copy
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Optional, cast
from collections.abc import Callable
from typing import TYPE_CHECKING, Any, NamedTuple, cast
from docutils.nodes import Element, Node, system_message
@ -153,7 +154,7 @@ class Index(ABC):
raise NotImplementedError
TitleGetter = Callable[[Node], Optional[str]]
TitleGetter = Callable[[Node], str | None]
class Domain:

View File

@ -18,13 +18,12 @@ from sphinx.util.cfamily import (
)
if TYPE_CHECKING:
from docutils.nodes import Element, Node, TextElement
from sphinx.domains.c._symbol import Symbol
from sphinx.environment import BuildEnvironment
DeclarationType = Union[
DeclarationType = Union[ # NoQA: UP007
"ASTStruct", "ASTUnion", "ASTEnum", "ASTEnumerator",
"ASTType", "ASTTypeWithInit", "ASTMacro",
]

View File

@ -1,6 +1,6 @@
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Callable
from typing import TYPE_CHECKING, Any
from sphinx.domains.c._ast import (
ASTAlignofExpr,
@ -53,7 +53,6 @@ from sphinx.domains.c._ast import (
ASTTypeWithInit,
ASTUnaryOpExpr,
ASTUnion,
DeclarationType,
)
from sphinx.domains.c._ids import (
_expression_assignment_ops,
@ -80,7 +79,9 @@ from sphinx.util.cfamily import (
)
if TYPE_CHECKING:
from collections.abc import Sequence
from collections.abc import Callable, Sequence
from sphinx.domains.c._ast import DeclarationType
class DefinitionParser(BaseParser):

View File

@ -30,7 +30,6 @@ from sphinx.util.cfamily import (
)
if TYPE_CHECKING:
from docutils.nodes import Element, TextElement
from sphinx.addnodes import desc_signature

View File

@ -1,7 +1,7 @@
from __future__ import annotations
import re
from typing import TYPE_CHECKING, Any, Callable
from typing import TYPE_CHECKING, Any
from sphinx.domains.cpp._ast import (
ASTAlignofExpr,
@ -127,7 +127,7 @@ from sphinx.util.cfamily import (
)
if TYPE_CHECKING:
from collections.abc import Sequence
from collections.abc import Callable, Sequence
logger = logging.getLogger(__name__)

View File

@ -1,6 +1,6 @@
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Callable, NoReturn
from typing import TYPE_CHECKING, Any, NoReturn
from sphinx.domains.cpp._ast import (
ASTDeclaration,
@ -17,7 +17,7 @@ from sphinx.locale import __
from sphinx.util import logging
if TYPE_CHECKING:
from collections.abc import Iterator
from collections.abc import Callable, Iterator
from sphinx.environment import BuildEnvironment

View File

@ -74,7 +74,7 @@ class MathDomain(Domain):
def process_doc(self, env: BuildEnvironment, docname: str,
document: nodes.document) -> None:
def math_node(node: Node) -> bool:
return isinstance(node, (nodes.math, nodes.math_block))
return isinstance(node, nodes.math | nodes.math_block)
self.data['has_equations'][docname] = any(document.findall(math_node))

View File

@ -25,7 +25,6 @@ from sphinx.util.nodes import (
)
if TYPE_CHECKING:
from docutils.nodes import Node
from docutils.parsers.rst.states import Inliner

View File

@ -4,7 +4,7 @@ from __future__ import annotations
import re
from copy import copy
from typing import TYPE_CHECKING, Any, Callable, ClassVar, Final, cast
from typing import TYPE_CHECKING, Any, ClassVar, Final, cast
from docutils import nodes
from docutils.nodes import Element, Node, system_message
@ -23,7 +23,7 @@ from sphinx.util.nodes import clean_astext, make_id, make_refnode
from sphinx.util.parsing import nested_parse_to_nodes
if TYPE_CHECKING:
from collections.abc import Iterable, Iterator
from collections.abc import Callable, Iterable, Iterator
from sphinx.application import Sphinx
from sphinx.builders import Builder
@ -402,7 +402,7 @@ class Glossary(SphinxDirective):
in_comment = False
was_empty = True
messages: list[Node] = []
for line, (source, lineno) in zip(self.content, self.content.items):
for line, (source, lineno) in zip(self.content, self.content.items, strict=True):
# empty line -> add to last definition
if not line:
if in_definition and entries:
@ -814,13 +814,12 @@ class StandardDomain(Domain):
if not sectname:
continue
else:
if (isinstance(node, (nodes.definition_list,
nodes.field_list)) and
if (isinstance(node, nodes.definition_list | nodes.field_list) and
node.children):
node = cast(nodes.Element, node.children[0])
if isinstance(node, (nodes.field, nodes.definition_list_item)):
if isinstance(node, nodes.field | nodes.definition_list_item):
node = cast(nodes.Element, node.children[0])
if isinstance(node, (nodes.term, nodes.field_name)):
if isinstance(node, nodes.term | nodes.field_name):
sectname = clean_astext(node)
else:
toctree = next(node.findall(addnodes.toctree), None)
@ -1114,7 +1113,7 @@ class StandardDomain(Domain):
return title_getter(elem)
else:
for subnode in elem:
if isinstance(subnode, (nodes.caption, nodes.title)):
if isinstance(subnode, nodes.caption | nodes.title):
return clean_astext(subnode)
return None

View File

@ -9,7 +9,7 @@ import time
from collections import defaultdict
from copy import copy
from os import path
from typing import TYPE_CHECKING, Any, Callable, NoReturn
from typing import TYPE_CHECKING, Any, NoReturn
from sphinx import addnodes
from sphinx.environment.adapters import toctree as toctree_adapters
@ -23,7 +23,7 @@ from sphinx.util.nodes import is_translatable
from sphinx.util.osutil import canon_path, os_path
if TYPE_CHECKING:
from collections.abc import Iterator
from collections.abc import Callable, Iterator
from pathlib import Path
from docutils import nodes

View File

@ -13,16 +13,14 @@ from sphinx.util import logging
from sphinx.util.index_entries import _split_into
if TYPE_CHECKING:
from typing import Literal, Optional, Union
from typing_extensions import TypeAlias
from typing import Literal, TypeAlias
from sphinx.builders import Builder
from sphinx.environment import BuildEnvironment
_IndexEntryTarget: TypeAlias = tuple[Optional[str], Union[str, Literal[False]]]
_IndexEntryTarget: TypeAlias = tuple[str | None, str | Literal[False]]
_IndexEntryTargets: TypeAlias = list[_IndexEntryTarget]
_IndexEntryCategoryKey: TypeAlias = Optional[str]
_IndexEntryCategoryKey: TypeAlias = str | None
_IndexEntrySubItems: TypeAlias = dict[
str,
tuple[_IndexEntryTargets, _IndexEntryCategoryKey],

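``TypeAlias`` is importable from ``typing`` from Python 3.10, so the ``typing_extensions`` import disappears here, and the aliases themselves switch to PEP 604 unions, which also work inside subscripts at runtime. A sketch with hypothetical names::

    from typing import Literal, TypeAlias

    # str | None replaces Optional[str]; str | Literal[False] replaces a Union[...]
    EntryTarget: TypeAlias = tuple[str | None, str | Literal[False]]

    entry: EntryTarget = (None, False)
    print(entry)  # (None, False)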
View File

@ -404,7 +404,7 @@ def _toctree_standard_entry(
def _toctree_add_classes(node: Element, depth: int, docname: str) -> None:
"""Add 'toctree-l%d' and 'current' classes to the toctree."""
for subnode in node.children:
if isinstance(subnode, (addnodes.compact_paragraph, nodes.list_item)):
if isinstance(subnode, addnodes.compact_paragraph | nodes.list_item):
# for <p> and <li>, indicate the depth level and recurse
subnode['classes'].append(f'toctree-l{depth - 1}')
_toctree_add_classes(subnode, depth, docname)
@ -442,7 +442,7 @@ def _toctree_copy(node: ET, depth: int, maxdepth: int, collapse: bool, tags: Tag
copy = node.copy()
for subnode in node.children:
if isinstance(subnode, (addnodes.compact_paragraph, nodes.list_item)):
if isinstance(subnode, addnodes.compact_paragraph | nodes.list_item):
# for <p> and <li>, just recurse
copy.append(_toctree_copy(subnode, depth, maxdepth, collapse, tags))
elif isinstance(subnode, nodes.bullet_list):
@ -462,7 +462,7 @@ def _toctree_copy(node: ET, depth: int, maxdepth: int, collapse: bool, tags: Tag
copy.append(_toctree_copy(
child, depth, maxdepth, collapse, tags, # type: ignore[type-var]
))
elif isinstance(subnode, (nodes.reference, nodes.title)):
elif isinstance(subnode, nodes.reference | nodes.title):
# deep copy references and captions
sub_node_copy = subnode.copy()
sub_node_copy.children = [child.deepcopy() for child in subnode.children]

View File

@ -8,7 +8,7 @@ from __future__ import annotations
import contextlib
from collections import defaultdict
from operator import attrgetter
from typing import TYPE_CHECKING, Any, Callable, NamedTuple
from typing import TYPE_CHECKING, Any, NamedTuple
from sphinx.errors import ExtensionError, SphinxError
from sphinx.locale import __
@ -16,6 +16,8 @@ from sphinx.util import logging
from sphinx.util.inspect import safe_getattr
if TYPE_CHECKING:
from collections.abc import Callable
from sphinx.application import Sphinx

View File

@ -10,9 +10,8 @@ from __future__ import annotations
import functools
import operator
import re
import sys
from inspect import Parameter, Signature
from typing import TYPE_CHECKING, Any, Callable, ClassVar, TypeVar
from typing import TYPE_CHECKING, Any, ClassVar, TypeVar
from docutils.statemachine import StringList
@ -40,7 +39,7 @@ from sphinx.util.typing import (
)
if TYPE_CHECKING:
from collections.abc import Iterator, Sequence
from collections.abc import Callable, Iterator, Sequence
from types import ModuleType
from sphinx.application import Sphinx
@ -612,7 +611,7 @@ class Documenter:
# add additional content (e.g. from document), if present
if more_content:
for line, src in zip(more_content.data, more_content.items):
for line, src in zip(more_content.data, more_content.items, strict=True):
self.add_line(line, src[0], src[1])
def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
@ -975,7 +974,7 @@ class ModuleDocumenter(Documenter):
super().add_content(None)
self.indent = old_indent
if more_content:
for line, src in zip(more_content.data, more_content.items):
for line, src in zip(more_content.data, more_content.items, strict=True):
self.add_line(line, src[0], src[1])
@classmethod
@ -1450,7 +1449,7 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
# Must be higher than FunctionDocumenter, ClassDocumenter, and
# AttributeDocumenter as NewType can be an attribute and is a class
# after Python 3.10. Before 3.10 it is a kind of function object
# after Python 3.10.
priority = 15
_signature_class: Any = None
@ -1740,24 +1739,6 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
if isinstance(self.object, TypeVar):
if self.object.__doc__ == TypeVar.__doc__:
return []
if sys.version_info[:2] < (3, 10):
if inspect.isNewType(self.object) or isinstance(self.object, TypeVar):
parts = self.modname.strip('.').split('.')
orig_objpath = self.objpath
for i in range(len(parts)):
new_modname = '.'.join(parts[:len(parts) - i])
new_objpath = parts[len(parts) - i:] + orig_objpath
try:
analyzer = ModuleAnalyzer.for_module(new_modname)
analyzer.analyze()
key = ('', new_objpath[-1])
comment = list(analyzer.attr_docs.get(key, []))
if comment:
self.objpath = new_objpath
self.modname = new_modname
return [comment]
except PycodeError:
pass
if self.doc_as_attr:
# Don't show the docstring of the class when it is an alias.
if self.get_variable_comment():

View File

@ -1,6 +1,7 @@
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Callable
from collections.abc import Callable
from typing import TYPE_CHECKING, Any
from docutils import nodes
from docutils.statemachine import StringList

View File

@ -102,9 +102,9 @@ def _is_lambda(x: Any, /) -> bool:
def _get_arguments_inner(x: Any, /) -> ast.arguments | None:
if isinstance(x, (ast.AsyncFunctionDef, ast.FunctionDef, ast.Lambda)):
if isinstance(x, ast.AsyncFunctionDef | ast.FunctionDef | ast.Lambda):
return x.args
if isinstance(x, (ast.Assign, ast.AnnAssign)):
if isinstance(x, ast.Assign | ast.AnnAssign):
return _get_arguments_inner(x.value)
return None

View File

@ -11,7 +11,7 @@ import sys
import time
from io import StringIO
from os import path
from typing import TYPE_CHECKING, Any, Callable, ClassVar
from typing import TYPE_CHECKING, Any, ClassVar
from docutils import nodes
from docutils.parsers.rst import directives
@ -27,7 +27,7 @@ from sphinx.util.docutils import SphinxDirective
from sphinx.util.osutil import relpath
if TYPE_CHECKING:
from collections.abc import Iterable, Sequence
from collections.abc import Callable, Iterable, Sequence
from docutils.nodes import Element, Node, TextElement
@ -420,12 +420,12 @@ Doctest summary
if self.config.doctest_test_doctest_blocks:
def condition(node: Node) -> bool:
return (isinstance(node, (nodes.literal_block, nodes.comment)) and
return (isinstance(node, nodes.literal_block | nodes.comment) and
'testnodetype' in node) or \
isinstance(node, nodes.doctest_block)
else:
def condition(node: Node) -> bool:
return isinstance(node, (nodes.literal_block, nodes.comment)) \
return isinstance(node, nodes.literal_block | nodes.comment) \
and 'testnodetype' in node
for node in doctree.findall(condition):
if self.skipped(node): # type: ignore[arg-type]

View File

@ -63,7 +63,7 @@ def validate_intersphinx_mapping(app: Sphinx, config: Config) -> None:
continue
# ensure values are properly formatted
if not isinstance(value, (tuple, list)):
if not isinstance(value, (tuple | list)):
errors += 1
msg = __(
'Invalid value `%r` in intersphinx_mapping[%r]. '
@ -105,7 +105,7 @@ def validate_intersphinx_mapping(app: Sphinx, config: Config) -> None:
# ensure inventory locations are None or non-empty
targets: list[InventoryLocation] = []
for target in (inv if isinstance(inv, (tuple, list)) else (inv,)):
for target in (inv if isinstance(inv, (tuple | list)) else (inv,)):
if target is None or target and isinstance(target, str):
targets.append(target)
else:

View File

@ -7,8 +7,6 @@ from typing import TYPE_CHECKING, Final
from sphinx.util import logging
if TYPE_CHECKING:
from typing import Optional
from sphinx.environment import BuildEnvironment
from sphinx.util.typing import Inventory
@ -26,7 +24,7 @@ if TYPE_CHECKING:
#:
#: Empty strings are not expected and ``None`` indicates the default
#: inventory file name :data:`~sphinx.builder.html.INVENTORY_FILENAME`.
InventoryLocation = Optional[str]
InventoryLocation = str | None
#: Inventory cache entry. The integer field is the cache expiration time.
InventoryCacheEntry = tuple[InventoryName, int, Inventory]

View File

@ -8,14 +8,14 @@ import inspect
import re
from functools import partial
from itertools import starmap
from typing import TYPE_CHECKING, Any, Callable
from typing import TYPE_CHECKING, Any
from sphinx.locale import _, __
from sphinx.util import logging
from sphinx.util.typing import get_type_hints, stringify_annotation
if TYPE_CHECKING:
from collections.abc import Iterator
from collections.abc import Callable, Iterator
from sphinx.application import Sphinx
from sphinx.config import Config as SphinxConfig

View File

@ -4,7 +4,7 @@ from __future__ import annotations
from os import path
from pprint import pformat
from typing import TYPE_CHECKING, Any, Callable
from typing import TYPE_CHECKING, Any
from jinja2 import BaseLoader, FileSystemLoader, TemplateNotFound
from jinja2.sandbox import SandboxedEnvironment
@ -15,7 +15,7 @@ from sphinx.util import logging
from sphinx.util.osutil import mtimes_of_files
if TYPE_CHECKING:
from collections.abc import Iterator
from collections.abc import Callable, Iterator
from jinja2.environment import Environment

View File

@ -9,8 +9,8 @@ from os import path
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from collections.abc import Iterable
from typing import Any, Callable
from collections.abc import Callable, Iterable
from typing import Any
class _TranslationProxy:

View File

@ -130,7 +130,7 @@ class _UnparseVisitor(ast.NodeVisitor):
def visit_Constant(self, node: ast.Constant) -> str:
if node.value is Ellipsis:
return "..."
elif isinstance(node.value, (int, float, complex)):
elif isinstance(node.value, int | float | complex):
if self.code:
return ast.get_source_segment(self.code, node) or repr(node.value)
else:
@ -141,7 +141,7 @@ class _UnparseVisitor(ast.NodeVisitor):
def visit_Dict(self, node: ast.Dict) -> str:
keys = (self.visit(k) for k in node.keys if k is not None)
values = (self.visit(v) for v in node.values)
items = (k + ": " + v for k, v in zip(keys, values))
items = (k + ": " + v for k, v in zip(keys, values, strict=True))
return "{" + ", ".join(items) + "}"
def visit_Lambda(self, node: ast.Lambda) -> str:

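``zip()`` accepts a ``strict`` keyword from Python 3.10 (PEP 618): with ``strict=True`` a length mismatch raises ``ValueError`` instead of silently truncating, which is why most pairings in this commit opt in (and a few deliberately pass ``strict=False``). A quick sketch::

    keys = ["a", "b", "c"]
    values = [1, 2]

    print(dict(zip(keys, values)))  # {'a': 1, 'b': 2} -- mismatch silently dropped

    try:
        dict(zip(keys, values, strict=True))
    except ValueError as exc:
        print(exc)  # zip() argument 2 is shorter than argument 1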
View File

@ -108,7 +108,7 @@ class Token:
return self.kind == other
elif isinstance(other, str):
return self.value == other
elif isinstance(other, (list, tuple)):
elif isinstance(other, list | tuple):
return [self.kind, self.value] == list(other)
elif other is None:
return False
@ -404,7 +404,7 @@ class VariableCommentPicker(ast.NodeVisitor):
def visit_Expr(self, node: ast.Expr) -> None:
"""Handles Expr node and pick up a comment if string."""
if (isinstance(self.previous, (ast.Assign, ast.AnnAssign)) and
if (isinstance(self.previous, ast.Assign | ast.AnnAssign) and
isinstance(node.value, ast.Constant) and isinstance(node.value.value, str)):
try:
targets = get_assign_targets(self.previous)

View File

@ -2,16 +2,11 @@
from __future__ import annotations
import sys
import traceback
from importlib import import_module
from importlib.metadata import entry_points
from types import MethodType
from typing import TYPE_CHECKING, Any, Callable
if sys.version_info >= (3, 10):
from importlib.metadata import entry_points
else:
from importlib_metadata import entry_points
from typing import TYPE_CHECKING, Any
from sphinx.domains import Domain, Index, ObjType
from sphinx.domains.std import GenericObject, Target
@ -25,7 +20,7 @@ from sphinx.util import logging
from sphinx.util.logging import prefixed_warnings
if TYPE_CHECKING:
from collections.abc import Iterator, Sequence
from collections.abc import Callable, Iterator, Sequence
from docutils import nodes
from docutils.core import Publisher

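``importlib.metadata.entry_points()`` supports keyword selection from Python 3.10, so the ``importlib_metadata`` backport and its version guard are removed. A sketch of the selection API (the group name is illustrative)::

    from importlib.metadata import entry_points

    # Select one entry-point group directly; on Python 3.9 this needed the
    # importlib_metadata backport or manual filtering of the full mapping.
    for ep in entry_points(group="sphinx.builders"):
        print(ep.name, "->", ep.value)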
View File

@ -4,12 +4,13 @@ import os
import shutil
import sys
import warnings
from typing import IO, TYPE_CHECKING, Any, Callable
from typing import IO, TYPE_CHECKING, Any
from sphinx.deprecation import RemovedInSphinx90Warning
if TYPE_CHECKING:
import builtins
from collections.abc import Callable
warnings.warn("'sphinx.testing.path' is deprecated. "
"Use 'os.path' or 'pathlib' instead.",

View File

@ -43,7 +43,7 @@ def assert_node(node: Node, cls: Any = None, xpath: str = "", **kwargs: Any) ->
'The node%s has %d child nodes, not one' % (xpath, len(node))
assert_node(node[0], cls[1:], xpath=xpath + "[0]", **kwargs)
elif isinstance(cls, tuple):
assert isinstance(node, (list, nodes.Element)), \
assert isinstance(node, list | nodes.Element), \
'The node%s does not have any items' % xpath
assert len(node) == len(cls), \
'The node%s has %d child nodes, not %r' % (xpath, len(node), len(cls))

View File

@ -10,6 +10,7 @@ import os
import shutil
import sys
import tempfile
from importlib.metadata import entry_points
from os import path
from typing import TYPE_CHECKING, Any
from zipfile import ZipFile
@ -26,10 +27,6 @@ if sys.version_info >= (3, 11):
else:
import tomli as tomllib
if sys.version_info >= (3, 10):
from importlib.metadata import entry_points
else:
from importlib_metadata import entry_points
if TYPE_CHECKING:
from collections.abc import Callable

View File

@ -22,10 +22,10 @@ from sphinx.util.nodes import apply_source_workaround, is_smartquotable
if TYPE_CHECKING:
from collections.abc import Iterator
from typing import Literal
from typing import Literal, TypeAlias
from docutils.nodes import Node, Text
from typing_extensions import TypeAlias, TypeIs
from typing_extensions import TypeIs
from sphinx.application import Sphinx
from sphinx.config import Config
@ -247,7 +247,7 @@ class ApplySourceWorkaround(SphinxTransform):
def apply(self, **kwargs: Any) -> None:
for node in self.document.findall(): # type: Node
if isinstance(node, (nodes.TextElement, nodes.image, nodes.topic)):
if isinstance(node, nodes.TextElement | nodes.image | nodes.topic):
apply_source_workaround(node)
@ -477,7 +477,7 @@ def _reorder_index_target_nodes(start_node: nodes.target) -> None:
# as we want *consecutive* target & index nodes.
node: nodes.Node
for node in start_node.findall(descend=False, siblings=True):
if isinstance(node, (nodes.target, addnodes.index)):
if isinstance(node, nodes.target | addnodes.index):
nodes_to_reorder.append(node)
continue
break # must be a consecutive run of target or index nodes

View File

@ -3,8 +3,9 @@
from __future__ import annotations
import re
from collections.abc import Callable
from copy import deepcopy
from typing import TYPE_CHECKING, Any, Callable
from typing import TYPE_CHECKING, Any
from docutils import nodes

View File

@ -7,9 +7,9 @@ from sphinx.util import logging
from sphinx.util.console import bold, color_terminal
if False:
from collections.abc import Iterable, Iterator
from collections.abc import Callable, Iterable, Iterator
from types import TracebackType
from typing import Any, Callable, TypeVar
from typing import Any, TypeVar
from typing_extensions import ParamSpec
@ -21,7 +21,7 @@ logger = logging.getLogger(__name__)
def display_chunk(chunk: Any) -> str:
if isinstance(chunk, (list, tuple)):
if isinstance(chunk, list | tuple):
if len(chunk) == 1:
return str(chunk[0])
return f'{chunk[0]} .. {chunk[-1]}'

View File

@ -356,7 +356,7 @@ class DocFieldTransformer:
if is_typefield:
# filter out only inline nodes; others will result in invalid
# markup being written out
content = [n for n in content if isinstance(n, (nodes.Inline, nodes.Text))]
content = [n for n in content if isinstance(n, nodes.Inline | nodes.Text)]
if content:
types.setdefault(typename, {})[fieldarg] = content
continue

View File

@ -8,7 +8,7 @@ from collections.abc import Sequence # NoQA: TCH003
from contextlib import contextmanager
from copy import copy
from os import path
from typing import IO, TYPE_CHECKING, Any, Callable, cast
from typing import IO, TYPE_CHECKING, Any, cast
import docutils
from docutils import nodes
@ -27,7 +27,7 @@ logger = logging.getLogger(__name__)
report_re = re.compile('^(.+?:(?:\\d+)?): \\((DEBUG|INFO|WARNING|ERROR|SEVERE)/(\\d+)?\\) ')
if TYPE_CHECKING:
from collections.abc import Iterator
from collections.abc import Callable, Iterator # NoQA: TCH003
from types import ModuleType
from docutils.frontend import Values

View File

@ -4,7 +4,7 @@ from __future__ import annotations
import os
import posixpath
from typing import TYPE_CHECKING, Any, Callable
from typing import TYPE_CHECKING, Any
from docutils.utils import relative_path
@ -12,6 +12,8 @@ from sphinx.util import logging
from sphinx.util.osutil import copyfile, ensuredir
if TYPE_CHECKING:
from collections.abc import Callable
from sphinx.util.template import BaseRenderer
from sphinx.util.typing import PathMatcher

View File

@ -20,7 +20,7 @@ from sphinx.util.osutil import SEP, canon_path, relpath
if TYPE_CHECKING:
import datetime as dt
from collections.abc import Iterator
from typing import Protocol, Union
from typing import Protocol
from babel.core import Locale
@ -52,7 +52,7 @@ if TYPE_CHECKING:
locale: str | Locale | None = ...,
) -> str: ...
Formatter = Union[DateFormatter, TimeFormatter, DatetimeFormatter]
Formatter = DateFormatter | TimeFormatter | DatetimeFormatter
logger = logging.getLogger(__name__)

View File

@ -27,9 +27,9 @@ if TYPE_CHECKING:
from collections.abc import Callable, Sequence
from inspect import _ParameterKind
from types import MethodType, ModuleType
from typing import Final, Protocol, Union
from typing import Final, Protocol, TypeAlias
from typing_extensions import TypeAlias, TypeIs
from typing_extensions import TypeIs
class _SupportsGet(Protocol):
def __get__(self, __instance: Any, __owner: type | None = ...) -> Any: ... # NoQA: E704
@ -42,21 +42,21 @@ if TYPE_CHECKING:
# instance is contravariant but we do not need that precision
def __delete__(self, __instance: Any) -> None: ... # NoQA: E704
_RoutineType: TypeAlias = Union[
types.FunctionType,
types.LambdaType,
types.MethodType,
types.BuiltinFunctionType,
types.BuiltinMethodType,
types.WrapperDescriptorType,
types.MethodDescriptorType,
types.ClassMethodDescriptorType,
]
_SignatureType: TypeAlias = Union[
Callable[..., Any],
staticmethod,
classmethod,
]
_RoutineType: TypeAlias = (
types.FunctionType
| types.LambdaType
| types.MethodType
| types.BuiltinFunctionType
| types.BuiltinMethodType
| types.WrapperDescriptorType
| types.MethodDescriptorType
| types.ClassMethodDescriptorType
)
_SignatureType: TypeAlias = (
Callable[..., Any]
| staticmethod
| classmethod
)
logger = logging.getLogger(__name__)
@ -128,20 +128,14 @@ def getall(obj: Any) -> Sequence[str] | None:
__all__ = safe_getattr(obj, '__all__', None)
if __all__ is None:
return None
if isinstance(__all__, (list, tuple)) and all(isinstance(e, str) for e in __all__):
if isinstance(__all__, list | tuple) and all(isinstance(e, str) for e in __all__):
return __all__
raise ValueError(__all__)
def getannotations(obj: Any) -> Mapping[str, Any]:
"""Safely get the ``__annotations__`` attribute of an object."""
if sys.version_info >= (3, 10, 0) or not isinstance(obj, type):
__annotations__ = safe_getattr(obj, '__annotations__', None)
else:
# Workaround for bugfix not available until python 3.10 as recommended by docs
# https://docs.python.org/3.10/howto/annotations.html#accessing-the-annotations-dict-of-an-object-in-python-3-9-and-older
__dict__ = safe_getattr(obj, '__dict__', {})
__annotations__ = __dict__.get('__annotations__', None)
__annotations__ = safe_getattr(obj, '__annotations__', None)
if isinstance(__annotations__, Mapping):
return __annotations__
return {}
@ -198,7 +192,7 @@ def getslots(obj: Any) -> dict[str, Any] | dict[str, None] | None:
return __slots__
elif isinstance(__slots__, str):
return {__slots__: None}
elif isinstance(__slots__, (list, tuple)):
elif isinstance(__slots__, list | tuple):
return dict.fromkeys(__slots__)
else:
raise ValueError
@ -206,11 +200,7 @@ def getslots(obj: Any) -> dict[str, Any] | dict[str, None] | None:
def isNewType(obj: Any) -> bool:
"""Check the if object is a kind of :class:`~typing.NewType`."""
if sys.version_info[:2] >= (3, 10):
return isinstance(obj, typing.NewType)
__module__ = safe_getattr(obj, '__module__', None)
__qualname__ = safe_getattr(obj, '__qualname__', None)
return __module__ == 'typing' and __qualname__ == 'NewType.<locals>.new_type'
return isinstance(obj, typing.NewType)
def isenumclass(x: Any) -> TypeIs[type[enum.Enum]]:
@ -237,7 +227,7 @@ def unpartial(obj: Any) -> Any:
def ispartial(obj: Any) -> TypeIs[partial | partialmethod]:
"""Check if the object is a partial function or method."""
return isinstance(obj, (partial, partialmethod))
return isinstance(obj, partial | partialmethod)
def isclassmethod(
@ -397,12 +387,12 @@ def _is_wrapped_coroutine(obj: Any) -> bool:
def isproperty(obj: Any) -> TypeIs[property | cached_property]:
"""Check if the object is property (possibly cached)."""
return isinstance(obj, (property, cached_property))
return isinstance(obj, property | cached_property)
def isgenericalias(obj: Any) -> TypeIs[types.GenericAlias]:
"""Check if the object is a generic alias."""
return isinstance(obj, (types.GenericAlias, typing._BaseGenericAlias)) # type: ignore[attr-defined]
return isinstance(obj, types.GenericAlias | typing._BaseGenericAlias) # type: ignore[attr-defined]
def safe_getattr(obj: Any, name: str, *defargs: Any) -> Any:
@ -852,11 +842,11 @@ def signature_from_ast(node: ast.FunctionDef, code: str = '') -> Signature:
params: list[Parameter] = []
# positional-only arguments (introduced in Python 3.8)
for arg, defexpr in zip(args.posonlyargs, defaults):
for arg, defexpr in zip(args.posonlyargs, defaults, strict=False):
params.append(_define(Parameter.POSITIONAL_ONLY, arg, code, defexpr=defexpr))
# normal arguments
for arg, defexpr in zip(args.args, defaults[pos_only_offset:]):
for arg, defexpr in zip(args.args, defaults[pos_only_offset:], strict=False):
params.append(_define(Parameter.POSITIONAL_OR_KEYWORD, arg, code, defexpr=defexpr))
# variadic positional argument (no possible default expression)
@ -864,7 +854,7 @@ def signature_from_ast(node: ast.FunctionDef, code: str = '') -> Signature:
params.append(_define(Parameter.VAR_POSITIONAL, args.vararg, code, defexpr=None))
# keyword-only arguments
for arg, defexpr in zip(args.kwonlyargs, args.kw_defaults):
for arg, defexpr in zip(args.kwonlyargs, args.kw_defaults, strict=False):
params.append(_define(Parameter.KEYWORD_ONLY, arg, code, defexpr=defexpr))
# variadic keyword argument (no possible default expression)

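``typing.NewType`` became a class in Python 3.10, so ``isNewType`` above collapses to a single ``isinstance`` check; the ``__module__``/``__qualname__`` sniffing was only ever needed for 3.9. For context::

    import typing

    UserId = typing.NewType("UserId", int)  # hypothetical NewType, for illustration only

    # True on Python 3.10+; on 3.9 this very check raised TypeError,
    # because typing.NewType used to be a plain function.
    print(isinstance(UserId, typing.NewType))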
View File

@ -4,7 +4,7 @@ from __future__ import annotations
import os
import re
import zlib
from typing import IO, TYPE_CHECKING, Callable
from typing import IO, TYPE_CHECKING
from sphinx.locale import __
from sphinx.util import logging
@ -13,7 +13,7 @@ BUFSIZE = 16 * 1024
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from collections.abc import Iterator
from collections.abc import Callable, Iterator
from sphinx.builders import Builder
from sphinx.environment import BuildEnvironment

View File

@ -4,12 +4,12 @@ from __future__ import annotations
import os.path
import re
from typing import TYPE_CHECKING, Callable
from typing import TYPE_CHECKING
from sphinx.util.osutil import canon_path, path_stabilize
if TYPE_CHECKING:
from collections.abc import Iterable, Iterator
from collections.abc import Callable, Iterable, Iterator
def _translate_pattern(pat: str) -> str:

View File

@ -5,7 +5,7 @@ from __future__ import annotations
import contextlib
import re
import unicodedata
from typing import TYPE_CHECKING, Any, Callable, Generic, TypeVar, cast
from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast
from docutils import nodes
from docutils.nodes import Node
@ -16,7 +16,7 @@ from sphinx.util import logging
from sphinx.util.parsing import _fresh_title_style_context
if TYPE_CHECKING:
from collections.abc import Iterable, Iterator
from collections.abc import Callable, Iterable, Iterator
from docutils.nodes import Element
from docutils.parsers.rst import Directive
@ -178,12 +178,12 @@ def apply_source_workaround(node: Element) -> None:
return
# workaround: some docutils nodes doesn't have source, line.
if (isinstance(node, (
nodes.rubric, # #1305 rubric directive
nodes.line, # #1477 line node
nodes.image, # #3093 image directive in substitution
nodes.field_name, # #3335 field list syntax
))):
if isinstance(node, (
nodes.rubric # #1305 rubric directive
| nodes.line # #1477 line node
| nodes.image # #3093 image directive in substitution
| nodes.field_name # #3335 field list syntax
)):
logger.debug('[i18n] PATCH: %r to have source and line: %s',
get_full_module_name(node), repr_domxml(node))
try:

View File

@ -49,7 +49,7 @@ def relative_uri(base: str, to: str) -> str:
b2 = base.split('#')[0].split(SEP)
t2 = to.split('#')[0].split(SEP)
# remove common segments (except the last segment)
for x, y in zip(b2[:-1], t2[:-1]):
for x, y in zip(b2[:-1], t2[:-1], strict=False):
if x != y:
break
b2.pop(0)

View File

@ -6,7 +6,7 @@ import os
import time
import traceback
from math import sqrt
from typing import TYPE_CHECKING, Any, Callable
from typing import TYPE_CHECKING, Any
try:
import multiprocessing
@ -18,7 +18,7 @@ from sphinx.errors import SphinxParallelError
from sphinx.util import logging
if TYPE_CHECKING:
from collections.abc import Sequence
from collections.abc import Callable, Sequence
logger = logging.getLogger(__name__)

View File

@ -19,7 +19,7 @@ def _get_tls_cacert(url: str, certs: str | dict[str, str] | None) -> str | bool:
"""Get additional CA cert for a specific URL."""
if not certs:
return True
elif isinstance(certs, (str, tuple)):
elif isinstance(certs, str | tuple):
return certs
else:
hostname = urlsplit(url).netloc

View File

@ -5,7 +5,7 @@ from __future__ import annotations
import os
from functools import partial
from os import path
from typing import TYPE_CHECKING, Any, Callable
from typing import TYPE_CHECKING, Any
from jinja2 import TemplateNotFound
from jinja2.loaders import BaseLoader
@ -17,7 +17,7 @@ from sphinx.locale import get_translator
from sphinx.util import rst, texescape
if TYPE_CHECKING:
from collections.abc import Sequence
from collections.abc import Callable, Sequence
from jinja2.environment import Environment
@ -38,7 +38,7 @@ class BaseRenderer:
class FileRenderer(BaseRenderer):
def __init__(self, search_path: Sequence[str | os.PathLike[str]]) -> None:
if isinstance(search_path, (str, os.PathLike)):
if isinstance(search_path, str | os.PathLike):
search_path = [search_path]
else:
# filter "None" paths

View File

@ -6,14 +6,13 @@ import dataclasses
import sys
import types
import typing
from collections.abc import Sequence
from collections.abc import Callable, Sequence
from contextvars import Context, ContextVar, Token
from struct import Struct
from typing import (
TYPE_CHECKING,
Annotated,
Any,
Callable,
ForwardRef,
TypedDict,
TypeVar,
@ -25,9 +24,9 @@ from docutils.parsers.rst.states import Inliner
if TYPE_CHECKING:
from collections.abc import Mapping
from typing import Final, Literal, Protocol
from typing import Final, Literal, Protocol, TypeAlias
from typing_extensions import TypeAlias, TypeIs
from typing_extensions import TypeIs
from sphinx.application import Sphinx
@ -41,10 +40,6 @@ if TYPE_CHECKING:
'smart',
]
if sys.version_info >= (3, 10):
from types import UnionType
else:
UnionType = None
# classes that have an incorrect .__module__ attribute
_INVALID_BUILTIN_CLASSES: Final[Mapping[object, str]] = {
@ -85,7 +80,7 @@ def is_invalid_builtin_class(obj: Any) -> bool:
# Text like nodes which are initialized with text and rawsource
TextlikeNode = Union[nodes.Text, nodes.TextElement]
TextlikeNode = nodes.Text | nodes.TextElement
# type of None
NoneType = type(None)
@ -206,7 +201,7 @@ def _is_unpack_form(obj: Any) -> bool:
# that typing_extensions.Unpack should not be used in that case
return typing.get_origin(obj) is Unpack
# 3.9 and 3.10 require typing_extensions.Unpack
# Python 3.10 requires typing_extensions.Unpack
origin = typing.get_origin(obj)
return (
getattr(origin, '__module__', None) == 'typing_extensions'
@ -215,13 +210,11 @@ def _is_unpack_form(obj: Any) -> bool:
def _typing_internal_name(obj: Any) -> str | None:
if sys.version_info[:2] >= (3, 10):
try:
return obj.__name__
except AttributeError:
# e.g. ParamSpecArgs, ParamSpecKwargs
return ''
return getattr(obj, '_name', None)
try:
return obj.__name__
except AttributeError:
# e.g. ParamSpecArgs, ParamSpecKwargs
return ''
def restify(cls: Any, mode: _RestifyMode = 'fully-qualified-except-typing') -> str:
@ -291,11 +284,9 @@ def restify(cls: Any, mode: _RestifyMode = 'fully-qualified-except-typing') -> s
return (f':py:class:`{module_prefix}{cls.__module__}.{cls.__name__}`'
fr'\ [{args}, {meta}]')
elif inspect.isNewType(cls):
if sys.version_info[:2] >= (3, 10):
# newtypes have correct module info since Python 3.10+
return f':py:class:`{module_prefix}{cls.__module__}.{cls.__name__}`'
return f':py:class:`{cls.__name__}`'
elif UnionType and isinstance(cls, UnionType):
# newtypes have correct module info since Python 3.10+
return f':py:class:`{module_prefix}{cls.__module__}.{cls.__name__}`'
elif isinstance(cls, types.UnionType):
# Union types (PEP 585) retain their definition order when they
# are printed natively and ``None``-like types are kept as is.
return ' | '.join(restify(a, mode) for a in cls.__args__)
@ -436,17 +427,14 @@ def stringify_annotation(
return annotation_name
return module_prefix + f'{annotation_module}.{annotation_name}'
elif isNewType(annotation):
if sys.version_info[:2] >= (3, 10):
# newtypes have correct module info since Python 3.10+
return module_prefix + f'{annotation_module}.{annotation_name}'
return annotation_name
return module_prefix + f'{annotation_module}.{annotation_name}'
elif ismockmodule(annotation):
return module_prefix + annotation_name
elif ismock(annotation):
return module_prefix + f'{annotation_module}.{annotation_name}'
elif is_invalid_builtin_class(annotation):
return module_prefix + _INVALID_BUILTIN_CLASSES[annotation]
elif _is_annotated_form(annotation): # for py39+
elif _is_annotated_form(annotation): # for py310+
pass
elif annotation_module == 'builtins' and annotation_qualname:
args = getattr(annotation, '__args__', None)
@ -495,7 +483,7 @@ def stringify_annotation(
elif hasattr(annotation, '__origin__'):
# instantiated generic provided by a user
qualname = stringify_annotation(annotation.__origin__, mode)
elif UnionType and isinstance(annotation, UnionType): # types.UnionType (for py3.10+)
elif isinstance(annotation, types.UnionType):
qualname = 'types.UnionType'
else:
# we weren't able to extract the base type, appending arguments would
@ -505,7 +493,7 @@ def stringify_annotation(
# Process the generic arguments (if any).
# They must be a list or a tuple, otherwise they are considered 'broken'.
annotation_args = getattr(annotation, '__args__', ())
if annotation_args and isinstance(annotation_args, (list, tuple)):
if annotation_args and isinstance(annotation_args, list | tuple):
if (
qualname in {'Union', 'types.UnionType'}
and all(getattr(a, '__origin__', ...) is typing.Literal for a in annotation_args)
@ -525,7 +513,7 @@ def stringify_annotation(
args = ', '.join(_format_literal_arg_stringify(a, mode=mode)
for a in annotation_args)
return f'{module_prefix}Literal[{args}]'
elif _is_annotated_form(annotation): # for py39+
elif _is_annotated_form(annotation): # for py310+
args = stringify_annotation(annotation_args[0], mode)
meta_args = []
for m in annotation.__metadata__:
@ -541,11 +529,6 @@ def stringify_annotation(
else:
meta_args.append(repr(m))
meta = ', '.join(meta_args)
if sys.version_info[:2] <= (3, 9):
if mode == 'smart':
return f'~typing.Annotated[{args}, {meta}]'
if mode == 'fully-qualified':
return f'typing.Annotated[{args}, {meta}]'
if sys.version_info[:2] <= (3, 11):
if mode == 'fully-qualified-except-typing':
return f'Annotated[{args}, {meta}]'

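Several branches above existed only because ``types.UnionType`` and correct ``NewType`` module information arrived in Python 3.10. With 3.10 as the floor, ``X | Y`` can be assumed to produce a ``types.UnionType`` at runtime. A standalone sketch::

    import types

    alias = int | None                         # PEP 604 union built at runtime
    print(isinstance(alias, types.UnionType))  # True on Python 3.10+
    print(alias.__args__)                      # (<class 'int'>, <class 'NoneType'>)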
View File

@ -1369,7 +1369,7 @@ class LaTeXTranslator(SphinxTranslator):
not isinstance(node.parent[index - 1], nodes.compound)):
# insert blank line, if the paragraph follows a non-paragraph node in a compound
self.body.append(r'\noindent' + CR)
elif index == 1 and isinstance(node.parent, (nodes.footnote, footnotetext)):
elif index == 1 and isinstance(node.parent, nodes.footnote | footnotetext):
# don't insert blank line, if the paragraph is second child of a footnote
# (first one is label node)
pass
@ -2081,7 +2081,7 @@ class LaTeXTranslator(SphinxTranslator):
done = 0
if len(node.children) == 1:
child = node.children[0]
if isinstance(child, (nodes.bullet_list, nodes.enumerated_list)):
if isinstance(child, nodes.bullet_list | nodes.enumerated_list):
done = 1
if not done:
self.body.append(r'\begin{quote}' + CR)
@ -2092,7 +2092,7 @@ class LaTeXTranslator(SphinxTranslator):
done = 0
if len(node.children) == 1:
child = node.children[0]
if isinstance(child, (nodes.bullet_list, nodes.enumerated_list)):
if isinstance(child, nodes.bullet_list | nodes.enumerated_list):
done = 1
if not done:
self.body.append(r'\end{quote}' + CR)

View File

@ -297,7 +297,7 @@ class TexinfoTranslator(SphinxTranslator):
# try to find a suitable "Top" node
title = self.document.next_node(nodes.title)
top = title.parent if title else self.document
if not isinstance(top, (nodes.document, nodes.section)):
if not isinstance(top, nodes.document | nodes.section):
top = self.document
if top is not self.document:
entries = node_menus[top['node_name']]
@ -625,7 +625,7 @@ class TexinfoTranslator(SphinxTranslator):
parent = node.parent
if isinstance(parent, nodes.table):
return
if isinstance(parent, (nodes.Admonition, nodes.sidebar, nodes.topic)):
if isinstance(parent, nodes.Admonition | nodes.sidebar | nodes.topic):
raise nodes.SkipNode
if not isinstance(parent, nodes.section):
logger.warning(__('encountered title node not in section, topic, table, '
@ -694,7 +694,7 @@ class TexinfoTranslator(SphinxTranslator):
def visit_reference(self, node: Element) -> None:
# an xref's target is displayed in Info so we ignore a few
# cases for the sake of appearance
if isinstance(node.parent, (nodes.title, addnodes.desc_type)):
if isinstance(node.parent, nodes.title | addnodes.desc_type):
return
if len(node) != 0 and isinstance(node[0], nodes.image):
return
@ -987,7 +987,7 @@ class TexinfoTranslator(SphinxTranslator):
self.add_anchor(id, node)
# anchors and indexes need to go in front
for n in node[::]:
if isinstance(n, (addnodes.index, nodes.target)):
if isinstance(n, addnodes.index | nodes.target):
n.walkabout(self)
node.remove(n)
self.body.append('\n%s ' % self.at_item_x)

View File

@ -6,7 +6,7 @@ import os
import re
import textwrap
from collections.abc import Iterable, Iterator, Sequence
from itertools import chain, groupby
from itertools import chain, groupby, pairwise
from typing import TYPE_CHECKING, Any, cast
from docutils import nodes, writers
@ -221,10 +221,10 @@ class Table:
tail = "+" if out[-1][0] == "-" else "|"
glue = [
"+" if left[0] == "-" or right[0] == "-" else "|"
for left, right in zip(out, out[1:])
for left, right in pairwise(out)
]
glue.append(tail)
return head + "".join(chain.from_iterable(zip(out, glue)))
return head + "".join(chain.from_iterable(zip(out, glue, strict=False)))
for lineno, line in enumerate(self.lines):
if self.separator and lineno == self.separator:

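``itertools.pairwise`` (new in Python 3.10) replaces the ``zip(out, out[1:])`` idiom for consecutive pairs. A small sketch::

    from itertools import pairwise

    out = ["+--", "+==", "+--"]
    print(list(pairwise(out)))
    # [('+--', '+=='), ('+==', '+--')]  -- same pairs as zip(out, out[1:]),
    # without materialising the slice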
View File

@ -25,10 +25,9 @@ from sphinx.errors import ConfigError, ExtensionError, VersionRequirementError
if TYPE_CHECKING:
from collections.abc import Iterable
from typing import Union
CircularList = list[Union[int, 'CircularList']]
CircularDict = dict[str, Union[int, 'CircularDict']]
CircularList = list[int | 'CircularList']
CircularDict = dict[str, int | 'CircularDict']
def check_is_serializable(subject: object, *, circular: bool) -> None:
@ -209,9 +208,7 @@ def test_config_pickle_circular_reference_in_list():
assert isinstance(u, list)
assert v.__class__ is u.__class__
assert len(u) == len(v)
for u_i, v_i in zip(u, v):
for u_i, v_i in zip(u, v, strict=True):
counter[type(u)] += 1
check(u_i, v_i, counter=counter, guard=guard | {id(u), id(v)})
@ -275,9 +272,7 @@ def test_config_pickle_circular_reference_in_dict():
assert isinstance(u, dict)
assert v.__class__ is u.__class__
assert len(u) == len(v)
for u_i, v_i in zip(u, v):
for u_i, v_i in zip(u, v, strict=True):
counter[type(u)] += 1
check(u[u_i], v[v_i], counter=counter, guard=guard | {id(u), id(v)})
return counter
@ -573,8 +568,7 @@ def test_nitpick_base(app, status, warning):
app.build(force_all=True)
warning = warning.getvalue().strip().split('\n')
assert len(warning) == len(nitpick_warnings)
for actual, expected in zip(warning, nitpick_warnings):
for actual, expected in zip(warning, nitpick_warnings, strict=True):
assert expected in actual
@ -629,8 +623,7 @@ def test_nitpick_ignore_regex_fullmatch(app, status, warning):
app.build(force_all=True)
warning = warning.getvalue().strip().split('\n')
assert len(warning) == len(nitpick_warnings)
for actual, expected in zip(warning, nitpick_warnings):
for actual, expected in zip(warning, nitpick_warnings, strict=True):
assert expected in actual

View File

@ -7,7 +7,6 @@ source file translated by test_build.
from __future__ import annotations
import typing
from typing import Union
import pytest
@ -305,7 +304,7 @@ def test_autodoc_process_bases(app):
assert obj.__name__ == 'Quux'
assert options == {'show-inheritance': True,
'members': []}
assert bases == [typing.List[Union[int, float]]] # NoQA: UP006
assert bases == [typing.List[typing.Union[int, float]]] # NoQA: UP006, UP007
bases.pop()
bases.extend([int, str])

View File

@ -153,7 +153,7 @@ def test_get_items_summary(make_app, app_params):
def new_get_items(self, names, *args, **kwargs):
results = orig_get_items(self, names, *args, **kwargs)
for name, result in zip(names, results):
for name, result in zip(names, results, strict=True):
autosummary_items[name] = result # NoQA: PERF403
return results

View File

@ -2421,7 +2421,7 @@ definition_after_normal_text : int
[r"'with \'quotes\''"],
)
for spec, expected in zip(specs, tokens):
for spec, expected in zip(specs, tokens, strict=True):
actual = _tokenize_type_spec(spec)
assert expected == actual
@ -2440,7 +2440,7 @@ definition_after_normal_text : int
["{'F', 'C', 'N'}", ", ", "default", " ", "None"],
)
for tokens_, expected in zip(tokens, combined_tokens):
for tokens_, expected in zip(tokens, combined_tokens, strict=True):
actual = _recombine_set_tokens(tokens_)
assert expected == actual
@ -2456,7 +2456,7 @@ definition_after_normal_text : int
["{1, 2", ", ", "default", ": ", "None"],
)
for tokens_, expected in zip(tokens, combined_tokens):
for tokens_, expected in zip(tokens, combined_tokens, strict=True):
actual = _recombine_set_tokens(tokens_)
assert expected == actual
@ -2491,7 +2491,7 @@ definition_after_normal_text : int
":class:`pandas.DataFrame`, *optional*",
)
for spec, expected in zip(specs, converted):
for spec, expected in zip(specs, converted, strict=True):
actual = _convert_numpy_type_spec(spec, translations=translations)
assert expected == actual
@ -2569,7 +2569,7 @@ definition_after_normal_text : int
r".+: malformed string literal \(missing closing quote\):",
r".+: malformed string literal \(missing opening quote\):",
)
for token, error in zip(tokens, errors):
for token, error in zip(tokens, errors, strict=True):
try:
_token_type(token)
finally:
@ -2698,6 +2698,6 @@ int py:class 1 int.html -
a_ = list(li.findall('.//a[@class="reference external"]'))
assert len(a_) == 2
for a, uri in zip(a_, ('list.html', 'int.html')):
for a, uri in zip(a_, ('list.html', 'int.html'), strict=True):
assert a.attrib['href'] == f'127.0.0.1:5555/{uri}'
assert a.attrib['title'] == '(in Intersphinx Test v42)'

View File

@ -228,13 +228,13 @@ class TestSigElementFallbackTransform:
if ignore_sig_element_fallback_transform:
# desc_sig_element is implemented or desc_sig_* nodes are properly handled (and left untouched)
for node_type, node, mess in zip(self._builtin_sig_elements, document.children[:-1], stdout[:-1]):
for node_type, node, mess in zip(self._builtin_sig_elements, document.children[:-1], stdout[:-1], strict=True):
assert_node(node, node_type)
assert not node.hasattr('_sig_node_type')
assert mess == f'mark: {node_type.__name__!r}'
else:
# desc_sig_* nodes are converted into inline nodes
for node_type, node, mess in zip(self._builtin_sig_elements, document.children[:-1], stdout[:-1]):
for node_type, node, mess in zip(self._builtin_sig_elements, document.children[:-1], stdout[:-1], strict=True):
assert_node(node, nodes.inline, _sig_node_type=node_type.__name__)
assert mess == f'generic visit: {nodes.inline.__name__!r}'

View File

@ -53,7 +53,7 @@ def test_strip_ansi(
#
# For instance ``next_ansi_blocks(['a', 'b'], 3) == ['a', 'b', 'a']``.
stream = itertools.cycle(choices)
return list(map(operator.itemgetter(0), zip(stream, range(n))))
return list(map(operator.itemgetter(0), zip(stream, range(n), strict=False)))
# generate all permutations of length N
for sigma in itertools.permutations(range(N), N):

View File

@ -91,7 +91,7 @@ def test_TypeAliasForwardRef():
alias = TypeAliasForwardRef('example')
assert stringify_annotation(alias, 'fully-qualified-except-typing') == 'example'
alias = Optional[alias]
alias = Optional[alias] # NoQA: UP007
assert stringify_annotation(alias, 'fully-qualified-except-typing') == 'example | None'
@ -775,7 +775,7 @@ def test_isproperty():
def test_isgenericalias():
#: A list of int
T = List[int] # NoQA: UP006
S = list[Union[str, None]]
S = list[Union[str, None]] # NoQA: UP006, UP007
C = Callable[[int], None] # a generic alias not having a doccomment

View File

@ -44,8 +44,6 @@ from typing import (
Union,
)
import pytest
from sphinx.ext.autodoc import mock
from sphinx.util.typing import _INVALID_BUILTIN_CLASSES, restify, stringify_annotation
@ -274,12 +272,8 @@ def test_restify_type_hints_typevars():
assert restify(list[T]) == ":py:class:`list`\\ [:py:obj:`tests.test_util.test_util_typing.T`]"
assert restify(list[T], "smart") == ":py:class:`list`\\ [:py:obj:`~tests.test_util.test_util_typing.T`]"
if sys.version_info[:2] >= (3, 10):
assert restify(MyInt) == ":py:class:`tests.test_util.test_util_typing.MyInt`"
assert restify(MyInt, "smart") == ":py:class:`~tests.test_util.test_util_typing.MyInt`"
else:
assert restify(MyInt) == ":py:class:`MyInt`"
assert restify(MyInt, "smart") == ":py:class:`MyInt`"
assert restify(MyInt) == ":py:class:`tests.test_util.test_util_typing.MyInt`"
assert restify(MyInt, "smart") == ":py:class:`~tests.test_util.test_util_typing.MyInt`"
def test_restify_type_hints_custom_class():
@ -363,7 +357,6 @@ def test_restify_Unpack():
assert restify(t.Unpack['X'], 'smart') == expect
@pytest.mark.skipif(sys.version_info[:2] <= (3, 9), reason='python 3.10+ is required.')
def test_restify_type_union_operator():
assert restify(int | None) == ":py:class:`int` | :py:obj:`None`" # type: ignore[attr-defined]
assert restify(None | int) == ":py:obj:`None` | :py:class:`int`" # type: ignore[attr-defined]
@ -385,7 +378,6 @@ def test_restify_mock():
assert restify(unknown.secret.Class, "smart") == ':py:class:`~unknown.secret.Class`'
@pytest.mark.xfail(sys.version_info[:2] <= (3, 9), reason='ParamSpec not supported in Python 3.9.')
def test_restify_type_hints_paramspec():
from typing import ParamSpec
P = ParamSpec('P')
@ -658,12 +650,8 @@ def test_stringify_type_hints_typevars():
assert stringify_annotation(list[T], 'fully-qualified-except-typing') == "list[tests.test_util.test_util_typing.T]"
assert stringify_annotation(list[T], "smart") == "list[~tests.test_util.test_util_typing.T]"
if sys.version_info[:2] >= (3, 10):
assert stringify_annotation(MyInt, 'fully-qualified-except-typing') == "tests.test_util.test_util_typing.MyInt"
assert stringify_annotation(MyInt, "smart") == "~tests.test_util.test_util_typing.MyInt"
else:
assert stringify_annotation(MyInt, 'fully-qualified-except-typing') == "MyInt"
assert stringify_annotation(MyInt, "smart") == "MyInt"
assert stringify_annotation(MyInt, 'fully-qualified-except-typing') == "tests.test_util.test_util_typing.MyInt"
assert stringify_annotation(MyInt, "smart") == "~tests.test_util.test_util_typing.MyInt"
def test_stringify_type_hints_custom_class():
@ -695,7 +683,6 @@ def test_stringify_type_Literal():
assert stringify_annotation(Literal[MyEnum.a], 'smart') == '~typing.Literal[MyEnum.a]'
@pytest.mark.skipif(sys.version_info[:2] <= (3, 9), reason='python 3.10+ is required.')
def test_stringify_type_union_operator():
assert stringify_annotation(int | None) == "int | None" # type: ignore[attr-defined]
assert stringify_annotation(int | None, "smart") == "int | None" # type: ignore[attr-defined]
@ -738,7 +725,6 @@ def test_stringify_type_ForwardRef():
assert stringify_annotation(Tuple[dict[ForwardRef("MyInt"), str], list[List[int]]], 'smart') == "~typing.Tuple[dict[MyInt, str], list[~typing.List[int]]]" # type: ignore[attr-defined]
@pytest.mark.xfail(sys.version_info[:2] <= (3, 9), reason='ParamSpec not supported in Python 3.9.')
def test_stringify_type_hints_paramspec():
from typing import ParamSpec
P = ParamSpec('P')

View File

@ -1,6 +1,6 @@
[tox]
minversion = 4.2.0
envlist = py{39,310,311,312,313}
envlist = py{310,311,312,313}
[testenv]
usedevelop = True
@ -19,7 +19,7 @@ passenv =
BUILDER
READTHEDOCS
description =
py{39,310,311,312,313}: Run unit tests against {envname}.
py{310,311,312,313}: Run unit tests against {envname}.
extras =
test
setenv =

View File

@ -106,7 +106,10 @@ def run_extract() -> None:
options = opt_dict
with open(os.path.join(root, filename), 'rb') as fileobj:
for lineno, message, comments, context in extract(
method, fileobj, KEYWORDS, options=options
method, # type: ignore[arg-type]
fileobj,
KEYWORDS,
options=options,
):
filepath = os.path.join(input_path, relative_name)
catalogue.add(
@ -217,7 +220,7 @@ def run_compile() -> None:
for x in message.locations
):
msgid = message.id
if isinstance(msgid, (list, tuple)):
if isinstance(msgid, list | tuple):
msgid = msgid[0]
js_catalogue[msgid] = message.string

View File

@ -8,9 +8,7 @@ import sys
import time
from contextlib import contextmanager
from pathlib import Path
from typing import TYPE_CHECKING
from typing_extensions import TypeAlias
from typing import TYPE_CHECKING, TypeAlias
if TYPE_CHECKING:
from collections.abc import Iterator, Sequence