Drop support for Python 3.9 (#12633)

Adam Turner 2024-07-22 15:05:15 +01:00 committed by GitHub
parent 8c6d234e96
commit 9e3f4521db
73 changed files with 221 additions and 297 deletions

View File

@@ -32,7 +32,6 @@ jobs:
       fail-fast: false
       matrix:
         python:
-          - "3.9"
           - "3.10"
           - "3.11"
           - "3.12"

View File

@@ -1,4 +1,4 @@
-target-version = "py39"  # Pin Ruff to Python 3.9
+target-version = "py310"  # Pin Ruff to Python 3.10
 line-length = 95
 output-format = "full"
@@ -419,8 +419,8 @@ select = [
 ]
 # these tests need old ``typing`` generic aliases
-"tests/test_util/test_util_typing.py" = ["UP006", "UP035"]
+"tests/test_util/test_util_typing.py" = ["UP006", "UP007", "UP035"]
-"tests/test_util/typing_test_data.py" = ["FA100", "UP006", "UP035"]
+"tests/test_util/typing_test_data.py" = ["FA100", "UP006", "UP007", "UP035"]
 "utils/*" = [
     "T201",  # whitelist ``print`` for stdout messages

View File

@@ -4,6 +4,8 @@ Release 8.0.0 (in development)
 Dependencies
 ------------

+* #12633: Drop Python 3.9 support.
+
 Incompatible changes
 --------------------

View File

@@ -174,19 +174,19 @@ of targets and allows testing against multiple different Python environments:
      tox -av

-* To run unit tests for a specific Python version, such as Python 3.10::
+* To run unit tests for a specific Python version, such as Python 3.12::

-     tox -e py310
+     tox -e py312

 * To run unit tests for a specific Python version and turn on deprecation
   warnings so they're shown in the test output::

-     PYTHONWARNINGS=error tox -e py310
+     PYTHONWARNINGS=error tox -e py312

 * Arguments to ``pytest`` can be passed via ``tox``, e.g., in order to run a
   particular test::

-     tox -e py310 tests/test_module.py::test_new_feature
+     tox -e py312 tests/test_module.py::test_new_feature

 You can also test by installing dependencies in your local environment::

View File

@@ -246,7 +246,7 @@ After you have published your sources on GitLab, create a file named
    pages:
      stage: deploy
-     image: python:3.9-slim
+     image: python:3.12-slim
      before_script:
        - apt-get update && apt-get install make --no-install-recommends -y
        - python -m pip install sphinx furo

View File

@@ -79,10 +79,10 @@ a comma-separated list of group names.
 * ``pyversion``, a string option, can be used to specify the required Python
   version for the example to be tested. For instance, in the following case
-  the example will be tested only for Python versions greater than 3.10::
+  the example will be tested only for Python versions greater than 3.12::

    .. doctest::
-      :pyversion: > 3.10
+      :pyversion: > 3.12

   The following operands are supported:

View File

@@ -152,18 +152,18 @@ Install either ``python3x-sphinx`` using :command:`port`:
 ::

-   $ sudo port install py39-sphinx
+   $ sudo port install py312-sphinx

 To set up the executable paths, use the ``port select`` command:

 ::

-   $ sudo port select --set python python39
+   $ sudo port select --set python python312
-   $ sudo port select --set sphinx py39-sphinx
+   $ sudo port select --set sphinx py312-sphinx

 For more information, refer to the `package overview`__.

-__ https://www.macports.org/ports.php?by=library&substr=py39-sphinx
+__ https://www.macports.org/ports.php?by=library&substr=py312-sphinx

 Windows
 ~~~~~~~

View File

@@ -13,7 +13,7 @@ urls.Download = "https://pypi.org/project/Sphinx/"
 urls.Homepage = "https://www.sphinx-doc.org/"
 urls."Issue tracker" = "https://github.com/sphinx-doc/sphinx/issues"
 license.text = "BSD-2-Clause"
-requires-python = ">=3.9"
+requires-python = ">=3.10"

 # Classifiers list: https://pypi.org/classifiers/
 classifiers = [
@@ -30,7 +30,6 @@ classifiers = [
     "Programming Language :: Python",
     "Programming Language :: Python :: 3",
     "Programming Language :: Python :: 3 :: Only",
-    "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
@@ -71,7 +70,6 @@ dependencies = [
     "imagesize>=1.3",
     "requests>=2.30.0",
     "packaging>=23.0",
-    "importlib-metadata>=6.0; python_version < '3.10'",
     "tomli>=2; python_version < '3.11'",
     "colorama>=0.4.6; sys_platform == 'win32'",
 ]
@@ -91,7 +89,6 @@ lint = [
     "types-Pillow==10.2.0.20240520",
     "types-Pygments==2.18.0.20240506",
     "types-requests>=2.30.0",  # align with requests
-    "importlib-metadata>=6.0",  # for mypy (Python<=3.9)
     "tomli>=2",  # for mypy (Python<=3.10)
     "pytest>=6.0",
 ]
@@ -211,7 +208,7 @@ exclude = [
 ]
 check_untyped_defs = true
 disallow_incomplete_defs = true
-python_version = "3.9"
+python_version = "3.10"
 show_column_numbers = true
 show_error_context = true
 strict_optional = true

View File

@@ -79,7 +79,7 @@ class _RootArgumentParser(argparse.ArgumentParser):
         ]
         if commands := list(_load_subcommand_descriptions()):
-            command_max_length = min(max(map(len, next(zip(*commands), ()))), 22)
+            command_max_length = min(max(map(len, next(zip(*commands, strict=True), ()))), 22)
             help_fragments += [
                 '\n',
                 bold(underline(__('Commands:'))),
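The ``strict=True`` argument used here (and in the other ``zip()`` calls throughout this commit) is only available on Python 3.10 and later: it makes ``zip()`` raise ``ValueError`` when its iterables have different lengths instead of silently truncating. A minimal sketch with made-up values::

    names = ['build', 'clean']
    summaries = ['build the documentation', 'remove build artefacts']

    # Behaves like plain zip() while the lengths match.
    for name, summary in zip(names, summaries, strict=True):
        print(f'{name}: {summary}')

    # With mismatched lengths, strict=True raises instead of dropping items.
    try:
        list(zip(names, summaries[:1], strict=True))
    except ValueError as exc:
        print(exc)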

View File

@@ -10,10 +10,10 @@ import os
 import pickle
 import sys
 from collections import deque
-from collections.abc import Collection, Sequence  # NoQA: TCH003
+from collections.abc import Callable, Collection, Sequence  # NoQA: TCH003
 from io import StringIO
 from os import path
-from typing import IO, TYPE_CHECKING, Any, Callable, Literal
+from typing import IO, TYPE_CHECKING, Any, Literal

 from docutils.nodes import TextElement  # NoQA: TCH002
 from docutils.parsers.rst import Directive, roles

View File

@@ -87,9 +87,9 @@ def _stable_hash(obj: Any) -> str:
     """
     if isinstance(obj, dict):
         obj = sorted(map(_stable_hash, obj.items()))
-    if isinstance(obj, (list, tuple, set, frozenset)):
+    if isinstance(obj, list | tuple | set | frozenset):
         obj = sorted(map(_stable_hash, obj))
-    elif isinstance(obj, (type, types.FunctionType)):
+    elif isinstance(obj, type | types.FunctionType):
         # The default repr() of functions includes the ID, which is not ideal.
         # We use the fully qualified name instead.
         obj = f'{obj.__module__}.{obj.__qualname__}'
@@ -734,7 +734,7 @@ class StandaloneHTMLBuilder(Builder):
                                  'genindex-split.html')
                 self.handle_page('genindex-all', genindexcontext,
                                  'genindex.html')
-        for (key, entries), count in zip(genindex, indexcounts):
+        for (key, entries), count in zip(genindex, indexcounts, strict=True):
             ctx = {'key': key, 'entries': entries, 'count': count,
                    'genindexentries': genindex}
             self.handle_page('genindex-' + key, ctx,
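Rewrites such as ``isinstance(obj, list | tuple | set | frozenset)`` rely on PEP 604: from Python 3.10, ``isinstance()`` and ``issubclass()`` accept ``X | Y`` union objects directly, so the tuple-of-types form is no longer required. A small illustration with a hypothetical value::

    import types

    value = frozenset({1, 2, 3})

    # Equivalent checks; the second spelling needs Python 3.10 or later.
    assert isinstance(value, (list, tuple, set, frozenset))
    assert isinstance(value, list | tuple | set | frozenset)

    # The | operator on types builds a types.UnionType object.
    assert isinstance(list | tuple, types.UnionType)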

View File

@@ -417,7 +417,7 @@ class LaTeXFootnoteVisitor(nodes.NodeVisitor):
         self.unrestrict(node)

     def visit_title(self, node: nodes.title) -> None:
-        if isinstance(node.parent, (nodes.section, nodes.table)):
+        if isinstance(node.parent, nodes.section | nodes.table):
             self.restrict(node)

     def depart_title(self, node: nodes.title) -> None:

View File

@@ -27,8 +27,8 @@ from sphinx.util.http_date import rfc1123_to_epoch
 from sphinx.util.nodes import get_node_line

 if TYPE_CHECKING:
-    from collections.abc import Iterator
-    from typing import Any, Callable
+    from collections.abc import Callable, Iterator
+    from typing import Any

     from requests import Response

View File

@@ -8,7 +8,7 @@ import os
 import sys
 import time
 from os import path
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any

 # try to import readline, unix specific enhancement
 try:
@@ -36,7 +36,7 @@ from sphinx.util.osutil import ensuredir
 from sphinx.util.template import SphinxRenderer

 if TYPE_CHECKING:
-    from collections.abc import Sequence
+    from collections.abc import Callable, Sequence

 EXTENSIONS = {
     'autodoc': __('automatically insert docstrings from modules'),

View File

@@ -8,7 +8,7 @@ import traceback
 import types
 import warnings
 from os import getenv, path
-from typing import TYPE_CHECKING, Any, Literal, NamedTuple, Union
+from typing import TYPE_CHECKING, Any, Literal, NamedTuple

 from sphinx.deprecation import RemovedInSphinx90Warning
 from sphinx.errors import ConfigError, ExtensionError
@@ -66,7 +66,7 @@ def is_serializable(obj: object, *, _seen: frozenset[int] = frozenset()) -> bool
             is_serializable(key, _seen=seen) and is_serializable(value, _seen=seen)
             for key, value in obj.items()
         )
-    elif isinstance(obj, (list, tuple, set, frozenset)):
+    elif isinstance(obj, list | tuple | set | frozenset):
         seen = _seen | {id(obj)}
         return all(is_serializable(item, _seen=seen) for item in obj)
@@ -87,13 +87,13 @@ class ENUM:
         self.candidates = candidates

     def match(self, value: str | list | tuple) -> bool:
-        if isinstance(value, (list, tuple)):
+        if isinstance(value, list | tuple):
             return all(item in self.candidates for item in value)
         else:
             return value in self.candidates

-_OptValidTypes = Union[tuple[()], tuple[type, ...], frozenset[type], ENUM]
+_OptValidTypes = tuple[()] | tuple[type, ...] | frozenset[type] | ENUM

 class _Opt:
@@ -549,7 +549,7 @@ def _validate_valid_types(
 ) -> tuple[()] | tuple[type, ...] | frozenset[type] | ENUM:
     if not valid_types:
         return ()
-    if isinstance(valid_types, (frozenset, ENUM)):
+    if isinstance(valid_types, frozenset | ENUM):
         return valid_types
     if isinstance(valid_types, type):
         return frozenset((valid_types,))
@@ -584,7 +584,7 @@ def convert_source_suffix(app: Sphinx, config: Config) -> None:
         config.source_suffix = {source_suffix: 'restructuredtext'}
         logger.info(__("Converting `source_suffix = %r` to `source_suffix = %r`."),
                     source_suffix, config.source_suffix)
-    elif isinstance(source_suffix, (list, tuple)):
+    elif isinstance(source_suffix, list | tuple):
         # if list, considers as all of them are default filetype
         config.source_suffix = dict.fromkeys(source_suffix, 'restructuredtext')
         logger.info(__("Converting `source_suffix = %r` to `source_suffix = %r`."),

View File

@@ -8,7 +8,8 @@ from __future__ import annotations

 import copy
 from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Optional, cast
+from collections.abc import Callable
+from typing import TYPE_CHECKING, Any, NamedTuple, cast

 from docutils.nodes import Element, Node, system_message

@@ -153,7 +154,7 @@ class Index(ABC):
         raise NotImplementedError

-TitleGetter = Callable[[Node], Optional[str]]
+TitleGetter = Callable[[Node], str | None]

 class Domain:
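``TitleGetter`` no longer needs ``typing.Callable`` or ``typing.Optional``: ``collections.abc.Callable`` has been subscriptable since Python 3.9, and ``str | None`` is valid at runtime from 3.10. A hedged sketch with a stand-in node class (``Node`` here is only a placeholder, not the docutils class)::

    from collections.abc import Callable

    class Node:  # placeholder for docutils.nodes.Node
        pass

    # Equivalent to Callable[[Node], Optional[str]] with the old imports.
    TitleGetter = Callable[[Node], str | None]

    def default_title(node: Node) -> str | None:
        return None

    getter: TitleGetter = default_title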

View File

@@ -18,13 +18,12 @@ from sphinx.util.cfamily import (
 )

 if TYPE_CHECKING:
     from docutils.nodes import Element, Node, TextElement

     from sphinx.domains.c._symbol import Symbol
     from sphinx.environment import BuildEnvironment

-DeclarationType = Union[
+DeclarationType = Union[  # NoQA: UP007
     "ASTStruct", "ASTUnion", "ASTEnum", "ASTEnumerator",
     "ASTType", "ASTTypeWithInit", "ASTMacro",
 ]
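``DeclarationType`` keeps ``typing.Union`` (with the ``NoQA: UP007`` opt-out) rather than switching to ``|``, most likely because its members are string forward references and the ``|`` operator is evaluated at runtime, where joining two strings this way is a ``TypeError``. A rough sketch with stand-in names::

    from typing import Union

    # Works: typing.Union accepts string forward references.
    Declaration = Union['Struct', 'Enum']

    # Fails at runtime: str does not implement | for building union types.
    try:
        Declaration = 'Struct' | 'Enum'  # type: ignore[operator]
    except TypeError as exc:
        print(exc)

    class Struct: ...
    class Enum: ...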

View File

@@ -1,6 +1,6 @@
 from __future__ import annotations

-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any

 from sphinx.domains.c._ast import (
     ASTAlignofExpr,
@@ -53,7 +53,6 @@ from sphinx.domains.c._ast import (
     ASTTypeWithInit,
     ASTUnaryOpExpr,
     ASTUnion,
-    DeclarationType,
 )
 from sphinx.domains.c._ids import (
     _expression_assignment_ops,
@@ -80,7 +79,9 @@ from sphinx.util.cfamily import (
 )

 if TYPE_CHECKING:
-    from collections.abc import Sequence
+    from collections.abc import Callable, Sequence
+
+    from sphinx.domains.c._ast import DeclarationType

 class DefinitionParser(BaseParser):

View File

@@ -30,7 +30,6 @@ from sphinx.util.cfamily import (
 )

 if TYPE_CHECKING:
-    from docutils.nodes import Element, TextElement

     from sphinx.addnodes import desc_signature

View File

@@ -1,7 +1,7 @@
 from __future__ import annotations

 import re
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any

 from sphinx.domains.cpp._ast import (
     ASTAlignofExpr,
@@ -127,7 +127,7 @@ from sphinx.util.cfamily import (
 )

 if TYPE_CHECKING:
-    from collections.abc import Sequence
+    from collections.abc import Callable, Sequence

 logger = logging.getLogger(__name__)

View File

@@ -1,6 +1,6 @@
 from __future__ import annotations

-from typing import TYPE_CHECKING, Any, Callable, NoReturn
+from typing import TYPE_CHECKING, Any, NoReturn

 from sphinx.domains.cpp._ast import (
     ASTDeclaration,
@@ -17,7 +17,7 @@ from sphinx.locale import __
 from sphinx.util import logging

 if TYPE_CHECKING:
-    from collections.abc import Iterator
+    from collections.abc import Callable, Iterator

     from sphinx.environment import BuildEnvironment

View File

@@ -74,7 +74,7 @@ class MathDomain(Domain):
     def process_doc(self, env: BuildEnvironment, docname: str,
                     document: nodes.document) -> None:
         def math_node(node: Node) -> bool:
-            return isinstance(node, (nodes.math, nodes.math_block))
+            return isinstance(node, nodes.math | nodes.math_block)

         self.data['has_equations'][docname] = any(document.findall(math_node))

View File

@@ -25,7 +25,6 @@ from sphinx.util.nodes import (
 )

 if TYPE_CHECKING:
-    from docutils.nodes import Node
     from docutils.parsers.rst.states import Inliner

View File

@@ -4,7 +4,7 @@ from __future__ import annotations

 import re
 from copy import copy
-from typing import TYPE_CHECKING, Any, Callable, ClassVar, Final, cast
+from typing import TYPE_CHECKING, Any, ClassVar, Final, cast

 from docutils import nodes
 from docutils.nodes import Element, Node, system_message
@@ -23,7 +23,7 @@ from sphinx.util.nodes import clean_astext, make_id, make_refnode
 from sphinx.util.parsing import nested_parse_to_nodes

 if TYPE_CHECKING:
-    from collections.abc import Iterable, Iterator
+    from collections.abc import Callable, Iterable, Iterator

     from sphinx.application import Sphinx
     from sphinx.builders import Builder
@@ -402,7 +402,7 @@ class Glossary(SphinxDirective):
         in_comment = False
         was_empty = True
         messages: list[Node] = []
-        for line, (source, lineno) in zip(self.content, self.content.items):
+        for line, (source, lineno) in zip(self.content, self.content.items, strict=True):
             # empty line -> add to last definition
             if not line:
                 if in_definition and entries:
@@ -814,13 +814,12 @@ class StandardDomain(Domain):
                 if not sectname:
                     continue
             else:
-                if (isinstance(node, (nodes.definition_list,
-                                      nodes.field_list)) and
+                if (isinstance(node, nodes.definition_list | nodes.field_list) and
                         node.children):
                     node = cast(nodes.Element, node.children[0])
-                    if isinstance(node, (nodes.field, nodes.definition_list_item)):
+                    if isinstance(node, nodes.field | nodes.definition_list_item):
                         node = cast(nodes.Element, node.children[0])
-                    if isinstance(node, (nodes.term, nodes.field_name)):
+                    if isinstance(node, nodes.term | nodes.field_name):
                         sectname = clean_astext(node)
                 else:
                     toctree = next(node.findall(addnodes.toctree), None)
@@ -1114,7 +1113,7 @@ class StandardDomain(Domain):
             return title_getter(elem)
         else:
             for subnode in elem:
-                if isinstance(subnode, (nodes.caption, nodes.title)):
+                if isinstance(subnode, nodes.caption | nodes.title):
                     return clean_astext(subnode)
         return None

View File

@@ -9,7 +9,7 @@ import time
 from collections import defaultdict
 from copy import copy
 from os import path
-from typing import TYPE_CHECKING, Any, Callable, NoReturn
+from typing import TYPE_CHECKING, Any, NoReturn

 from sphinx import addnodes
 from sphinx.environment.adapters import toctree as toctree_adapters
@@ -23,7 +23,7 @@ from sphinx.util.nodes import is_translatable
 from sphinx.util.osutil import canon_path, os_path

 if TYPE_CHECKING:
-    from collections.abc import Iterator
+    from collections.abc import Callable, Iterator
     from pathlib import Path

     from docutils import nodes

View File

@@ -13,16 +13,14 @@ from sphinx.util import logging
 from sphinx.util.index_entries import _split_into

 if TYPE_CHECKING:
-    from typing import Literal, Optional, Union
-
-    from typing_extensions import TypeAlias
+    from typing import Literal, TypeAlias

     from sphinx.builders import Builder
     from sphinx.environment import BuildEnvironment

-    _IndexEntryTarget: TypeAlias = tuple[Optional[str], Union[str, Literal[False]]]
+    _IndexEntryTarget: TypeAlias = tuple[str | None, str | Literal[False]]
     _IndexEntryTargets: TypeAlias = list[_IndexEntryTarget]
-    _IndexEntryCategoryKey: TypeAlias = Optional[str]
+    _IndexEntryCategoryKey: TypeAlias = str | None
     _IndexEntrySubItems: TypeAlias = dict[
         str,
         tuple[_IndexEntryTargets, _IndexEntryCategoryKey],
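``TypeAlias`` can now come straight from ``typing`` because it was added to the standard library in Python 3.10 (it previously required ``typing_extensions``), and the ``Optional``/``Union`` spellings collapse into ``|`` unions. A hedged sketch of what the marker buys, with hypothetical names::

    from typing import TypeAlias

    # The explicit marker tells type checkers that this assignment is a
    # type alias even when the right-hand side is a forward-referencing string.
    MaybeWidget: TypeAlias = 'Widget | None'

    class Widget:
        pass

    def find_widget(name: str) -> MaybeWidget:
        return None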

View File

@@ -404,7 +404,7 @@ def _toctree_standard_entry(
 def _toctree_add_classes(node: Element, depth: int, docname: str) -> None:
     """Add 'toctree-l%d' and 'current' classes to the toctree."""
     for subnode in node.children:
-        if isinstance(subnode, (addnodes.compact_paragraph, nodes.list_item)):
+        if isinstance(subnode, addnodes.compact_paragraph | nodes.list_item):
             # for <p> and <li>, indicate the depth level and recurse
             subnode['classes'].append(f'toctree-l{depth - 1}')
             _toctree_add_classes(subnode, depth, docname)
@@ -442,7 +442,7 @@ def _toctree_copy(node: ET, depth: int, maxdepth: int, collapse: bool, tags: Tag
     copy = node.copy()
     for subnode in node.children:
-        if isinstance(subnode, (addnodes.compact_paragraph, nodes.list_item)):
+        if isinstance(subnode, addnodes.compact_paragraph | nodes.list_item):
             # for <p> and <li>, just recurse
             copy.append(_toctree_copy(subnode, depth, maxdepth, collapse, tags))
         elif isinstance(subnode, nodes.bullet_list):
@@ -462,7 +462,7 @@ def _toctree_copy(node: ET, depth: int, maxdepth: int, collapse: bool, tags: Tag
             copy.append(_toctree_copy(
                 child, depth, maxdepth, collapse, tags,  # type: ignore[type-var]
             ))
-        elif isinstance(subnode, (nodes.reference, nodes.title)):
+        elif isinstance(subnode, nodes.reference | nodes.title):
             # deep copy references and captions
             sub_node_copy = subnode.copy()
             sub_node_copy.children = [child.deepcopy() for child in subnode.children]

View File

@@ -8,7 +8,7 @@ from __future__ import annotations

 import contextlib
 from collections import defaultdict
 from operator import attrgetter
-from typing import TYPE_CHECKING, Any, Callable, NamedTuple
+from typing import TYPE_CHECKING, Any, NamedTuple

 from sphinx.errors import ExtensionError, SphinxError
 from sphinx.locale import __
@@ -16,6 +16,8 @@ from sphinx.util import logging
 from sphinx.util.inspect import safe_getattr

 if TYPE_CHECKING:
+    from collections.abc import Callable
+
     from sphinx.application import Sphinx

View File

@@ -10,9 +10,8 @@ from __future__ import annotations
 import functools
 import operator
 import re
-import sys
 from inspect import Parameter, Signature
-from typing import TYPE_CHECKING, Any, Callable, ClassVar, TypeVar
+from typing import TYPE_CHECKING, Any, ClassVar, TypeVar

 from docutils.statemachine import StringList

@@ -40,7 +39,7 @@ from sphinx.util.typing import (
 )

 if TYPE_CHECKING:
-    from collections.abc import Iterator, Sequence
+    from collections.abc import Callable, Iterator, Sequence
     from types import ModuleType

     from sphinx.application import Sphinx
@@ -612,7 +611,7 @@ class Documenter:
         # add additional content (e.g. from document), if present
         if more_content:
-            for line, src in zip(more_content.data, more_content.items):
+            for line, src in zip(more_content.data, more_content.items, strict=True):
                 self.add_line(line, src[0], src[1])

     def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
@@ -975,7 +974,7 @@ class ModuleDocumenter(Documenter):
             super().add_content(None)
             self.indent = old_indent
         if more_content:
-            for line, src in zip(more_content.data, more_content.items):
+            for line, src in zip(more_content.data, more_content.items, strict=True):
                 self.add_line(line, src[0], src[1])

     @classmethod
@@ -1450,7 +1449,7 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # type:
     # Must be higher than FunctionDocumenter, ClassDocumenter, and
     # AttributeDocumenter as NewType can be an attribute and is a class
-    # after Python 3.10. Before 3.10 it is a kind of function object
+    # after Python 3.10.
     priority = 15

     _signature_class: Any = None
@@ -1740,24 +1739,6 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # type:
         if isinstance(self.object, TypeVar):
             if self.object.__doc__ == TypeVar.__doc__:
                 return []
-        if sys.version_info[:2] < (3, 10):
-            if inspect.isNewType(self.object) or isinstance(self.object, TypeVar):
-                parts = self.modname.strip('.').split('.')
-                orig_objpath = self.objpath
-                for i in range(len(parts)):
-                    new_modname = '.'.join(parts[:len(parts) - i])
-                    new_objpath = parts[len(parts) - i:] + orig_objpath
-                    try:
-                        analyzer = ModuleAnalyzer.for_module(new_modname)
-                        analyzer.analyze()
-                        key = ('', new_objpath[-1])
-                        comment = list(analyzer.attr_docs.get(key, []))
-                        if comment:
-                            self.objpath = new_objpath
-                            self.modname = new_modname
-                            return [comment]
-                    except PycodeError:
-                        pass
         if self.doc_as_attr:
             # Don't show the docstring of the class when it is an alias.
             if self.get_variable_comment():

View File

@@ -1,6 +1,7 @@
 from __future__ import annotations

-from typing import TYPE_CHECKING, Any, Callable
+from collections.abc import Callable
+from typing import TYPE_CHECKING, Any

 from docutils import nodes
 from docutils.statemachine import StringList

View File

@@ -102,9 +102,9 @@ def _is_lambda(x: Any, /) -> bool:

 def _get_arguments_inner(x: Any, /) -> ast.arguments | None:
-    if isinstance(x, (ast.AsyncFunctionDef, ast.FunctionDef, ast.Lambda)):
+    if isinstance(x, ast.AsyncFunctionDef | ast.FunctionDef | ast.Lambda):
         return x.args
-    if isinstance(x, (ast.Assign, ast.AnnAssign)):
+    if isinstance(x, ast.Assign | ast.AnnAssign):
         return _get_arguments_inner(x.value)
     return None

View File

@@ -11,7 +11,7 @@ import sys
 import time
 from io import StringIO
 from os import path
-from typing import TYPE_CHECKING, Any, Callable, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar

 from docutils import nodes
 from docutils.parsers.rst import directives
@@ -27,7 +27,7 @@ from sphinx.util.docutils import SphinxDirective
 from sphinx.util.osutil import relpath

 if TYPE_CHECKING:
-    from collections.abc import Iterable, Sequence
+    from collections.abc import Callable, Iterable, Sequence

     from docutils.nodes import Element, Node, TextElement
@@ -420,12 +420,12 @@ Doctest summary
         if self.config.doctest_test_doctest_blocks:
             def condition(node: Node) -> bool:
-                return (isinstance(node, (nodes.literal_block, nodes.comment)) and
+                return (isinstance(node, nodes.literal_block | nodes.comment) and
                         'testnodetype' in node) or \
                     isinstance(node, nodes.doctest_block)
         else:
             def condition(node: Node) -> bool:
-                return isinstance(node, (nodes.literal_block, nodes.comment)) \
+                return isinstance(node, nodes.literal_block | nodes.comment) \
                     and 'testnodetype' in node
         for node in doctree.findall(condition):
             if self.skipped(node):  # type: ignore[arg-type]

View File

@@ -63,7 +63,7 @@ def validate_intersphinx_mapping(app: Sphinx, config: Config) -> None:
             continue

         # ensure values are properly formatted
-        if not isinstance(value, (tuple, list)):
+        if not isinstance(value, (tuple | list)):
             errors += 1
             msg = __(
                 'Invalid value `%r` in intersphinx_mapping[%r]. '
@@ -105,7 +105,7 @@ def validate_intersphinx_mapping(app: Sphinx, config: Config) -> None:
         # ensure inventory locations are None or non-empty
         targets: list[InventoryLocation] = []
-        for target in (inv if isinstance(inv, (tuple, list)) else (inv,)):
+        for target in (inv if isinstance(inv, (tuple | list)) else (inv,)):
             if target is None or target and isinstance(target, str):
                 targets.append(target)
             else:

View File

@@ -7,8 +7,6 @@ from typing import TYPE_CHECKING, Final
 from sphinx.util import logging

 if TYPE_CHECKING:
-    from typing import Optional
-
     from sphinx.environment import BuildEnvironment
     from sphinx.util.typing import Inventory
@@ -26,7 +24,7 @@ if TYPE_CHECKING:
     #:
     #: Empty strings are not expected and ``None`` indicates the default
     #: inventory file name :data:`~sphinx.builder.html.INVENTORY_FILENAME`.
-    InventoryLocation = Optional[str]
+    InventoryLocation = str | None

     #: Inventory cache entry. The integer field is the cache expiration time.
     InventoryCacheEntry = tuple[InventoryName, int, Inventory]

View File

@@ -8,14 +8,14 @@ import inspect
 import re
 from functools import partial
 from itertools import starmap
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any

 from sphinx.locale import _, __
 from sphinx.util import logging
 from sphinx.util.typing import get_type_hints, stringify_annotation

 if TYPE_CHECKING:
-    from collections.abc import Iterator
+    from collections.abc import Callable, Iterator

     from sphinx.application import Sphinx
     from sphinx.config import Config as SphinxConfig

View File

@@ -4,7 +4,7 @@ from __future__ import annotations

 from os import path
 from pprint import pformat
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any

 from jinja2 import BaseLoader, FileSystemLoader, TemplateNotFound
 from jinja2.sandbox import SandboxedEnvironment
@@ -15,7 +15,7 @@ from sphinx.util import logging
 from sphinx.util.osutil import mtimes_of_files

 if TYPE_CHECKING:
-    from collections.abc import Iterator
+    from collections.abc import Callable, Iterator

     from jinja2.environment import Environment

View File

@@ -9,8 +9,8 @@ from os import path
 from typing import TYPE_CHECKING

 if TYPE_CHECKING:
-    from collections.abc import Iterable
-    from typing import Any, Callable
+    from collections.abc import Callable, Iterable
+    from typing import Any

 class _TranslationProxy:

View File

@@ -130,7 +130,7 @@ class _UnparseVisitor(ast.NodeVisitor):
     def visit_Constant(self, node: ast.Constant) -> str:
         if node.value is Ellipsis:
             return "..."
-        elif isinstance(node.value, (int, float, complex)):
+        elif isinstance(node.value, int | float | complex):
             if self.code:
                 return ast.get_source_segment(self.code, node) or repr(node.value)
             else:
@@ -141,7 +141,7 @@ class _UnparseVisitor(ast.NodeVisitor):
     def visit_Dict(self, node: ast.Dict) -> str:
         keys = (self.visit(k) for k in node.keys if k is not None)
         values = (self.visit(v) for v in node.values)
-        items = (k + ": " + v for k, v in zip(keys, values))
+        items = (k + ": " + v for k, v in zip(keys, values, strict=True))
         return "{" + ", ".join(items) + "}"

     def visit_Lambda(self, node: ast.Lambda) -> str:

View File

@@ -108,7 +108,7 @@ class Token:
             return self.kind == other
         elif isinstance(other, str):
             return self.value == other
-        elif isinstance(other, (list, tuple)):
+        elif isinstance(other, list | tuple):
             return [self.kind, self.value] == list(other)
         elif other is None:
             return False
@@ -404,7 +404,7 @@ class VariableCommentPicker(ast.NodeVisitor):
     def visit_Expr(self, node: ast.Expr) -> None:
         """Handles Expr node and pick up a comment if string."""
-        if (isinstance(self.previous, (ast.Assign, ast.AnnAssign)) and
+        if (isinstance(self.previous, ast.Assign | ast.AnnAssign) and
                 isinstance(node.value, ast.Constant) and isinstance(node.value.value, str)):
             try:
                 targets = get_assign_targets(self.previous)

View File

@@ -2,16 +2,11 @@

 from __future__ import annotations

-import sys
 import traceback
 from importlib import import_module
+from importlib.metadata import entry_points
 from types import MethodType
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any

-if sys.version_info >= (3, 10):
-    from importlib.metadata import entry_points
-else:
-    from importlib_metadata import entry_points

 from sphinx.domains import Domain, Index, ObjType
 from sphinx.domains.std import GenericObject, Target
@@ -25,7 +20,7 @@ from sphinx.util import logging
 from sphinx.util.logging import prefixed_warnings

 if TYPE_CHECKING:
-    from collections.abc import Iterator, Sequence
+    from collections.abc import Callable, Iterator, Sequence

     from docutils import nodes
     from docutils.core import Publisher
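``entry_points`` can be imported from ``importlib.metadata`` unconditionally because the keyword-based selection API that Sphinx relies on (``entry_points(group=...)``) arrived in Python 3.10; the ``importlib_metadata`` backport was only kept around for 3.9. A hedged sketch, assuming the ``sphinx.builders`` entry-point group::

    from importlib.metadata import entry_points

    # Select a single group of entry points (Python 3.10+ keyword API).
    for entry_point in entry_points(group='sphinx.builders'):
        print(entry_point.name, '->', entry_point.value)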

View File

@@ -4,12 +4,13 @@ import os
 import shutil
 import sys
 import warnings
-from typing import IO, TYPE_CHECKING, Any, Callable
+from typing import IO, TYPE_CHECKING, Any

 from sphinx.deprecation import RemovedInSphinx90Warning

 if TYPE_CHECKING:
     import builtins
+    from collections.abc import Callable

 warnings.warn("'sphinx.testing.path' is deprecated. "
               "Use 'os.path' or 'pathlib' instead.",

View File

@@ -43,7 +43,7 @@ def assert_node(node: Node, cls: Any = None, xpath: str = "", **kwargs: Any) ->
             'The node%s has %d child nodes, not one' % (xpath, len(node))
         assert_node(node[0], cls[1:], xpath=xpath + "[0]", **kwargs)
     elif isinstance(cls, tuple):
-        assert isinstance(node, (list, nodes.Element)), \
+        assert isinstance(node, list | nodes.Element), \
             'The node%s does not have any items' % xpath
         assert len(node) == len(cls), \
             'The node%s has %d child nodes, not %r' % (xpath, len(node), len(cls))

View File

@@ -10,6 +10,7 @@ import os
 import shutil
 import sys
 import tempfile
+from importlib.metadata import entry_points
 from os import path
 from typing import TYPE_CHECKING, Any
 from zipfile import ZipFile
@@ -26,10 +27,6 @@ if sys.version_info >= (3, 11):
 else:
     import tomli as tomllib

-if sys.version_info >= (3, 10):
-    from importlib.metadata import entry_points
-else:
-    from importlib_metadata import entry_points

 if TYPE_CHECKING:
     from collections.abc import Callable

View File

@@ -22,10 +22,10 @@ from sphinx.util.nodes import apply_source_workaround, is_smartquotable
 if TYPE_CHECKING:
     from collections.abc import Iterator
-    from typing import Literal
+    from typing import Literal, TypeAlias

     from docutils.nodes import Node, Text
-    from typing_extensions import TypeAlias, TypeIs
+    from typing_extensions import TypeIs

     from sphinx.application import Sphinx
     from sphinx.config import Config
@@ -247,7 +247,7 @@ class ApplySourceWorkaround(SphinxTransform):
     def apply(self, **kwargs: Any) -> None:
         for node in self.document.findall():  # type: Node
-            if isinstance(node, (nodes.TextElement, nodes.image, nodes.topic)):
+            if isinstance(node, nodes.TextElement | nodes.image | nodes.topic):
                 apply_source_workaround(node)
@@ -477,7 +477,7 @@ def _reorder_index_target_nodes(start_node: nodes.target) -> None:
     # as we want *consecutive* target & index nodes.
     node: nodes.Node
     for node in start_node.findall(descend=False, siblings=True):
-        if isinstance(node, (nodes.target, addnodes.index)):
+        if isinstance(node, nodes.target | addnodes.index):
             nodes_to_reorder.append(node)
             continue
         break  # must be a consecutive run of target or index nodes

View File

@@ -3,8 +3,9 @@
 from __future__ import annotations

 import re
+from collections.abc import Callable
 from copy import deepcopy
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any

 from docutils import nodes

View File

@@ -7,9 +7,9 @@ from sphinx.util import logging
 from sphinx.util.console import bold, color_terminal

 if False:
-    from collections.abc import Iterable, Iterator
+    from collections.abc import Callable, Iterable, Iterator
     from types import TracebackType
-    from typing import Any, Callable, TypeVar
+    from typing import Any, TypeVar

     from typing_extensions import ParamSpec
@@ -21,7 +21,7 @@ logger = logging.getLogger(__name__)

 def display_chunk(chunk: Any) -> str:
-    if isinstance(chunk, (list, tuple)):
+    if isinstance(chunk, list | tuple):
         if len(chunk) == 1:
             return str(chunk[0])
         return f'{chunk[0]} .. {chunk[-1]}'

View File

@@ -356,7 +356,7 @@ class DocFieldTransformer:
             if is_typefield:
                 # filter out only inline nodes; others will result in invalid
                 # markup being written out
-                content = [n for n in content if isinstance(n, (nodes.Inline, nodes.Text))]
+                content = [n for n in content if isinstance(n, nodes.Inline | nodes.Text)]
                 if content:
                     types.setdefault(typename, {})[fieldarg] = content
                 continue

View File

@@ -8,7 +8,7 @@ from collections.abc import Sequence  # NoQA: TCH003
 from contextlib import contextmanager
 from copy import copy
 from os import path
-from typing import IO, TYPE_CHECKING, Any, Callable, cast
+from typing import IO, TYPE_CHECKING, Any, cast

 import docutils
 from docutils import nodes
@@ -27,7 +27,7 @@ logger = logging.getLogger(__name__)
 report_re = re.compile('^(.+?:(?:\\d+)?): \\((DEBUG|INFO|WARNING|ERROR|SEVERE)/(\\d+)?\\) ')

 if TYPE_CHECKING:
-    from collections.abc import Iterator
+    from collections.abc import Callable, Iterator  # NoQA: TCH003
     from types import ModuleType

     from docutils.frontend import Values

View File

@@ -4,7 +4,7 @@ from __future__ import annotations

 import os
 import posixpath
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any

 from docutils.utils import relative_path

@@ -12,6 +12,8 @@ from sphinx.util import logging
 from sphinx.util.osutil import copyfile, ensuredir

 if TYPE_CHECKING:
+    from collections.abc import Callable
+
     from sphinx.util.template import BaseRenderer
     from sphinx.util.typing import PathMatcher

View File

@@ -20,7 +20,7 @@ from sphinx.util.osutil import SEP, canon_path, relpath
 if TYPE_CHECKING:
     import datetime as dt
     from collections.abc import Iterator
-    from typing import Protocol, Union
+    from typing import Protocol

     from babel.core import Locale
@@ -52,7 +52,7 @@ if TYPE_CHECKING:
             locale: str | Locale | None = ...,
         ) -> str: ...

-    Formatter = Union[DateFormatter, TimeFormatter, DatetimeFormatter]
+    Formatter = DateFormatter | TimeFormatter | DatetimeFormatter

 logger = logging.getLogger(__name__)

View File

@@ -27,9 +27,9 @@ if TYPE_CHECKING:
     from collections.abc import Callable, Sequence
     from inspect import _ParameterKind
     from types import MethodType, ModuleType
-    from typing import Final, Protocol, Union
+    from typing import Final, Protocol, TypeAlias

-    from typing_extensions import TypeAlias, TypeIs
+    from typing_extensions import TypeIs

     class _SupportsGet(Protocol):
         def __get__(self, __instance: Any, __owner: type | None = ...) -> Any: ...  # NoQA: E704
@@ -42,21 +42,21 @@ if TYPE_CHECKING:
         # instance is contravariant but we do not need that precision
         def __delete__(self, __instance: Any) -> None: ...  # NoQA: E704

-    _RoutineType: TypeAlias = Union[
-        types.FunctionType,
-        types.LambdaType,
-        types.MethodType,
-        types.BuiltinFunctionType,
-        types.BuiltinMethodType,
-        types.WrapperDescriptorType,
-        types.MethodDescriptorType,
-        types.ClassMethodDescriptorType,
-    ]
+    _RoutineType: TypeAlias = (
+        types.FunctionType
+        | types.LambdaType
+        | types.MethodType
+        | types.BuiltinFunctionType
+        | types.BuiltinMethodType
+        | types.WrapperDescriptorType
+        | types.MethodDescriptorType
+        | types.ClassMethodDescriptorType
+    )
-    _SignatureType: TypeAlias = Union[
-        Callable[..., Any],
-        staticmethod,
-        classmethod,
-    ]
+    _SignatureType: TypeAlias = (
+        Callable[..., Any]
+        | staticmethod
+        | classmethod
+    )

 logger = logging.getLogger(__name__)
@@ -128,20 +128,14 @@ def getall(obj: Any) -> Sequence[str] | None:
     __all__ = safe_getattr(obj, '__all__', None)
     if __all__ is None:
         return None
-    if isinstance(__all__, (list, tuple)) and all(isinstance(e, str) for e in __all__):
+    if isinstance(__all__, list | tuple) and all(isinstance(e, str) for e in __all__):
         return __all__
     raise ValueError(__all__)

 def getannotations(obj: Any) -> Mapping[str, Any]:
     """Safely get the ``__annotations__`` attribute of an object."""
-    if sys.version_info >= (3, 10, 0) or not isinstance(obj, type):
-        __annotations__ = safe_getattr(obj, '__annotations__', None)
-    else:
-        # Workaround for bugfix not available until python 3.10 as recommended by docs
-        # https://docs.python.org/3.10/howto/annotations.html#accessing-the-annotations-dict-of-an-object-in-python-3-9-and-older
-        __dict__ = safe_getattr(obj, '__dict__', {})
-        __annotations__ = __dict__.get('__annotations__', None)
+    __annotations__ = safe_getattr(obj, '__annotations__', None)
     if isinstance(__annotations__, Mapping):
         return __annotations__
     return {}
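The removed branch in ``getannotations`` worked around a pre-3.10 pitfall: a class that defines no annotations of its own has no ``__annotations__`` entry in its ``__dict__``, so plain attribute access could silently return a base class's annotations. Since Python 3.10, classes lazily create their own (possibly empty) annotations dict, so the direct ``safe_getattr`` is enough. A small illustration with hypothetical classes::

    class Base:
        x: int

    class Child(Base):  # defines no annotations of its own
        pass

    # Python 3.10+: Child's own empty dict ({}).
    # Python 3.9: this would have shown Base's {'x': <class 'int'>}.
    print(Child.__annotations__)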
@@ -198,7 +192,7 @@ def getslots(obj: Any) -> dict[str, Any] | dict[str, None] | None:
         return __slots__
     elif isinstance(__slots__, str):
         return {__slots__: None}
-    elif isinstance(__slots__, (list, tuple)):
+    elif isinstance(__slots__, list | tuple):
         return dict.fromkeys(__slots__)
     else:
         raise ValueError
@@ -206,11 +200,7 @@ def getslots(obj: Any) -> dict[str, Any] | dict[str, None] | None:

 def isNewType(obj: Any) -> bool:
     """Check the if object is a kind of :class:`~typing.NewType`."""
-    if sys.version_info[:2] >= (3, 10):
-        return isinstance(obj, typing.NewType)
-    __module__ = safe_getattr(obj, '__module__', None)
-    __qualname__ = safe_getattr(obj, '__qualname__', None)
-    return __module__ == 'typing' and __qualname__ == 'NewType.<locals>.new_type'
+    return isinstance(obj, typing.NewType)

 def isenumclass(x: Any) -> TypeIs[type[enum.Enum]]:
@@ -237,7 +227,7 @@ def unpartial(obj: Any) -> Any:

 def ispartial(obj: Any) -> TypeIs[partial | partialmethod]:
     """Check if the object is a partial function or method."""
-    return isinstance(obj, (partial, partialmethod))
+    return isinstance(obj, partial | partialmethod)

 def isclassmethod(
@@ -397,12 +387,12 @@ def _is_wrapped_coroutine(obj: Any) -> bool:

 def isproperty(obj: Any) -> TypeIs[property | cached_property]:
     """Check if the object is property (possibly cached)."""
-    return isinstance(obj, (property, cached_property))
+    return isinstance(obj, property | cached_property)

 def isgenericalias(obj: Any) -> TypeIs[types.GenericAlias]:
     """Check if the object is a generic alias."""
-    return isinstance(obj, (types.GenericAlias, typing._BaseGenericAlias))  # type: ignore[attr-defined]
+    return isinstance(obj, types.GenericAlias | typing._BaseGenericAlias)  # type: ignore[attr-defined]

 def safe_getattr(obj: Any, name: str, *defargs: Any) -> Any:
@@ -852,11 +842,11 @@ def signature_from_ast(node: ast.FunctionDef, code: str = '') -> Signature:
     params: list[Parameter] = []
     # positional-only arguments (introduced in Python 3.8)
-    for arg, defexpr in zip(args.posonlyargs, defaults):
+    for arg, defexpr in zip(args.posonlyargs, defaults, strict=False):
         params.append(_define(Parameter.POSITIONAL_ONLY, arg, code, defexpr=defexpr))

     # normal arguments
-    for arg, defexpr in zip(args.args, defaults[pos_only_offset:]):
+    for arg, defexpr in zip(args.args, defaults[pos_only_offset:], strict=False):
         params.append(_define(Parameter.POSITIONAL_OR_KEYWORD, arg, code, defexpr=defexpr))

     # variadic positional argument (no possible default expression)
@@ -864,7 +854,7 @@ def signature_from_ast(node: ast.FunctionDef, code: str = '') -> Signature:
         params.append(_define(Parameter.VAR_POSITIONAL, args.vararg, code, defexpr=None))

     # keyword-only arguments
-    for arg, defexpr in zip(args.kwonlyargs, args.kw_defaults):
+    for arg, defexpr in zip(args.kwonlyargs, args.kw_defaults, strict=False):
         params.append(_define(Parameter.KEYWORD_ONLY, arg, code, defexpr=defexpr))

     # variadic keyword argument (no possible default expression)
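``isNewType`` collapses to a single ``isinstance`` check because ``typing.NewType`` became a proper class in Python 3.10; before that, ``NewType`` returned a plain closure, hence the removed ``__module__``/``__qualname__`` sniffing. Roughly::

    import typing

    UserId = typing.NewType('UserId', int)

    # Python 3.10+: NewType objects are instances of the NewType class.
    assert isinstance(UserId, typing.NewType)

    # At runtime the wrapped values are still plain ints.
    assert isinstance(UserId(42), int)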

View File

@@ -4,7 +4,7 @@ from __future__ import annotations

 import os
 import re
 import zlib
-from typing import IO, TYPE_CHECKING, Callable
+from typing import IO, TYPE_CHECKING

 from sphinx.locale import __
 from sphinx.util import logging
@@ -13,7 +13,7 @@ BUFSIZE = 16 * 1024
 logger = logging.getLogger(__name__)

 if TYPE_CHECKING:
-    from collections.abc import Iterator
+    from collections.abc import Callable, Iterator

     from sphinx.builders import Builder
     from sphinx.environment import BuildEnvironment

View File

@@ -4,12 +4,12 @@ from __future__ import annotations

 import os.path
 import re
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING

 from sphinx.util.osutil import canon_path, path_stabilize

 if TYPE_CHECKING:
-    from collections.abc import Iterable, Iterator
+    from collections.abc import Callable, Iterable, Iterator

 def _translate_pattern(pat: str) -> str:

View File

@@ -5,7 +5,7 @@ from __future__ import annotations

 import contextlib
 import re
 import unicodedata
-from typing import TYPE_CHECKING, Any, Callable, Generic, TypeVar, cast
+from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast

 from docutils import nodes
 from docutils.nodes import Node
@@ -16,7 +16,7 @@ from sphinx.util import logging
 from sphinx.util.parsing import _fresh_title_style_context

 if TYPE_CHECKING:
-    from collections.abc import Iterable, Iterator
+    from collections.abc import Callable, Iterable, Iterator

     from docutils.nodes import Element
     from docutils.parsers.rst import Directive
@@ -178,12 +178,12 @@ def apply_source_workaround(node: Element) -> None:
         return

     # workaround: some docutils nodes doesn't have source, line.
-    if (isinstance(node, (
-            nodes.rubric,  # #1305 rubric directive
-            nodes.line,  # #1477 line node
-            nodes.image,  # #3093 image directive in substitution
-            nodes.field_name,  # #3335 field list syntax
-    ))):
+    if isinstance(node, (
+        nodes.rubric  # #1305 rubric directive
+        | nodes.line  # #1477 line node
+        | nodes.image  # #3093 image directive in substitution
+        | nodes.field_name  # #3335 field list syntax
+    )):
         logger.debug('[i18n] PATCH: %r to have source and line: %s',
                      get_full_module_name(node), repr_domxml(node))
         try:

View File

@ -49,7 +49,7 @@ def relative_uri(base: str, to: str) -> str:
b2 = base.split('#')[0].split(SEP) b2 = base.split('#')[0].split(SEP)
t2 = to.split('#')[0].split(SEP) t2 = to.split('#')[0].split(SEP)
# remove common segments (except the last segment) # remove common segments (except the last segment)
for x, y in zip(b2[:-1], t2[:-1]): for x, y in zip(b2[:-1], t2[:-1], strict=False):
if x != y: if x != y:
break break
b2.pop(0) b2.pop(0)

View File

@ -6,7 +6,7 @@ import os
import time import time
import traceback import traceback
from math import sqrt from math import sqrt
from typing import TYPE_CHECKING, Any, Callable from typing import TYPE_CHECKING, Any
try: try:
import multiprocessing import multiprocessing
@ -18,7 +18,7 @@ from sphinx.errors import SphinxParallelError
from sphinx.util import logging from sphinx.util import logging
if TYPE_CHECKING: if TYPE_CHECKING:
from collections.abc import Sequence from collections.abc import Callable, Sequence
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@ -19,7 +19,7 @@ def _get_tls_cacert(url: str, certs: str | dict[str, str] | None) -> str | bool:
"""Get additional CA cert for a specific URL.""" """Get additional CA cert for a specific URL."""
if not certs: if not certs:
return True return True
elif isinstance(certs, (str, tuple)): elif isinstance(certs, str | tuple):
return certs return certs
else: else:
hostname = urlsplit(url).netloc hostname = urlsplit(url).netloc
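Passing a ``X | Y`` union object to ``isinstance()`` is new in Python 3.10 (PEP 604), which is why ``(str, tuple)`` can be rewritten as ``str | tuple`` here and in the similar hunks below. A small sketch, unrelated to the surrounding code::

    # Python 3.10+: isinstance() accepts union objects as the class argument.
    value = 'ca-bundle.crt'
    assert isinstance(value, str | tuple)
    assert isinstance(value, (str, tuple))    # the tuple form still works
    assert not isinstance(3.14, str | tuple)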

View File

@ -5,7 +5,7 @@ from __future__ import annotations
import os import os
from functools import partial from functools import partial
from os import path from os import path
from typing import TYPE_CHECKING, Any, Callable from typing import TYPE_CHECKING, Any
from jinja2 import TemplateNotFound from jinja2 import TemplateNotFound
from jinja2.loaders import BaseLoader from jinja2.loaders import BaseLoader
@ -17,7 +17,7 @@ from sphinx.locale import get_translator
from sphinx.util import rst, texescape from sphinx.util import rst, texescape
if TYPE_CHECKING: if TYPE_CHECKING:
from collections.abc import Sequence from collections.abc import Callable, Sequence
from jinja2.environment import Environment from jinja2.environment import Environment
@ -38,7 +38,7 @@ class BaseRenderer:
class FileRenderer(BaseRenderer): class FileRenderer(BaseRenderer):
def __init__(self, search_path: Sequence[str | os.PathLike[str]]) -> None: def __init__(self, search_path: Sequence[str | os.PathLike[str]]) -> None:
if isinstance(search_path, (str, os.PathLike)): if isinstance(search_path, str | os.PathLike):
search_path = [search_path] search_path = [search_path]
else: else:
# filter "None" paths # filter "None" paths

View File

@ -6,14 +6,13 @@ import dataclasses
import sys import sys
import types import types
import typing import typing
from collections.abc import Sequence from collections.abc import Callable, Sequence
from contextvars import Context, ContextVar, Token from contextvars import Context, ContextVar, Token
from struct import Struct from struct import Struct
from typing import ( from typing import (
TYPE_CHECKING, TYPE_CHECKING,
Annotated, Annotated,
Any, Any,
Callable,
ForwardRef, ForwardRef,
TypedDict, TypedDict,
TypeVar, TypeVar,
@ -25,9 +24,9 @@ from docutils.parsers.rst.states import Inliner
if TYPE_CHECKING: if TYPE_CHECKING:
from collections.abc import Mapping from collections.abc import Mapping
from typing import Final, Literal, Protocol from typing import Final, Literal, Protocol, TypeAlias
from typing_extensions import TypeAlias, TypeIs from typing_extensions import TypeIs
from sphinx.application import Sphinx from sphinx.application import Sphinx
@ -41,10 +40,6 @@ if TYPE_CHECKING:
'smart', 'smart',
] ]
if sys.version_info >= (3, 10):
from types import UnionType
else:
UnionType = None
# classes that have an incorrect .__module__ attribute # classes that have an incorrect .__module__ attribute
_INVALID_BUILTIN_CLASSES: Final[Mapping[object, str]] = { _INVALID_BUILTIN_CLASSES: Final[Mapping[object, str]] = {
@ -85,7 +80,7 @@ def is_invalid_builtin_class(obj: Any) -> bool:
# Text like nodes which are initialized with text and rawsource # Text like nodes which are initialized with text and rawsource
TextlikeNode = Union[nodes.Text, nodes.TextElement] TextlikeNode = nodes.Text | nodes.TextElement
# type of None # type of None
NoneType = type(None) NoneType = type(None)
@ -206,7 +201,7 @@ def _is_unpack_form(obj: Any) -> bool:
# that typing_extensions.Unpack should not be used in that case # that typing_extensions.Unpack should not be used in that case
return typing.get_origin(obj) is Unpack return typing.get_origin(obj) is Unpack
# 3.9 and 3.10 require typing_extensions.Unpack # Python 3.10 requires typing_extensions.Unpack
origin = typing.get_origin(obj) origin = typing.get_origin(obj)
return ( return (
getattr(origin, '__module__', None) == 'typing_extensions' getattr(origin, '__module__', None) == 'typing_extensions'
@ -215,13 +210,11 @@ def _is_unpack_form(obj: Any) -> bool:
def _typing_internal_name(obj: Any) -> str | None: def _typing_internal_name(obj: Any) -> str | None:
if sys.version_info[:2] >= (3, 10): try:
try: return obj.__name__
return obj.__name__ except AttributeError:
except AttributeError: # e.g. ParamSpecArgs, ParamSpecKwargs
# e.g. ParamSpecArgs, ParamSpecKwargs return ''
return ''
return getattr(obj, '_name', None)
def restify(cls: Any, mode: _RestifyMode = 'fully-qualified-except-typing') -> str: def restify(cls: Any, mode: _RestifyMode = 'fully-qualified-except-typing') -> str:
@ -291,11 +284,9 @@ def restify(cls: Any, mode: _RestifyMode = 'fully-qualified-except-typing') -> s
return (f':py:class:`{module_prefix}{cls.__module__}.{cls.__name__}`' return (f':py:class:`{module_prefix}{cls.__module__}.{cls.__name__}`'
fr'\ [{args}, {meta}]') fr'\ [{args}, {meta}]')
elif inspect.isNewType(cls): elif inspect.isNewType(cls):
if sys.version_info[:2] >= (3, 10): # newtypes have correct module info since Python 3.10+
# newtypes have correct module info since Python 3.10+ return f':py:class:`{module_prefix}{cls.__module__}.{cls.__name__}`'
return f':py:class:`{module_prefix}{cls.__module__}.{cls.__name__}`' elif isinstance(cls, types.UnionType):
return f':py:class:`{cls.__name__}`'
elif UnionType and isinstance(cls, UnionType):
# Union types (PEP 585) retain their definition order when they # Union types (PEP 585) retain their definition order when they
# are printed natively and ``None``-like types are kept as is. # are printed natively and ``None``-like types are kept as is.
return ' | '.join(restify(a, mode) for a in cls.__args__) return ' | '.join(restify(a, mode) for a in cls.__args__)
@ -436,17 +427,14 @@ def stringify_annotation(
return annotation_name return annotation_name
return module_prefix + f'{annotation_module}.{annotation_name}' return module_prefix + f'{annotation_module}.{annotation_name}'
elif isNewType(annotation): elif isNewType(annotation):
if sys.version_info[:2] >= (3, 10): return module_prefix + f'{annotation_module}.{annotation_name}'
# newtypes have correct module info since Python 3.10+
return module_prefix + f'{annotation_module}.{annotation_name}'
return annotation_name
elif ismockmodule(annotation): elif ismockmodule(annotation):
return module_prefix + annotation_name return module_prefix + annotation_name
elif ismock(annotation): elif ismock(annotation):
return module_prefix + f'{annotation_module}.{annotation_name}' return module_prefix + f'{annotation_module}.{annotation_name}'
elif is_invalid_builtin_class(annotation): elif is_invalid_builtin_class(annotation):
return module_prefix + _INVALID_BUILTIN_CLASSES[annotation] return module_prefix + _INVALID_BUILTIN_CLASSES[annotation]
elif _is_annotated_form(annotation): # for py39+ elif _is_annotated_form(annotation): # for py310+
pass pass
elif annotation_module == 'builtins' and annotation_qualname: elif annotation_module == 'builtins' and annotation_qualname:
args = getattr(annotation, '__args__', None) args = getattr(annotation, '__args__', None)
@ -495,7 +483,7 @@ def stringify_annotation(
elif hasattr(annotation, '__origin__'): elif hasattr(annotation, '__origin__'):
# instantiated generic provided by a user # instantiated generic provided by a user
qualname = stringify_annotation(annotation.__origin__, mode) qualname = stringify_annotation(annotation.__origin__, mode)
elif UnionType and isinstance(annotation, UnionType): # types.UnionType (for py3.10+) elif isinstance(annotation, types.UnionType):
qualname = 'types.UnionType' qualname = 'types.UnionType'
else: else:
# we weren't able to extract the base type, appending arguments would # we weren't able to extract the base type, appending arguments would
@ -505,7 +493,7 @@ def stringify_annotation(
# Process the generic arguments (if any). # Process the generic arguments (if any).
# They must be a list or a tuple, otherwise they are considered 'broken'. # They must be a list or a tuple, otherwise they are considered 'broken'.
annotation_args = getattr(annotation, '__args__', ()) annotation_args = getattr(annotation, '__args__', ())
if annotation_args and isinstance(annotation_args, (list, tuple)): if annotation_args and isinstance(annotation_args, list | tuple):
if ( if (
qualname in {'Union', 'types.UnionType'} qualname in {'Union', 'types.UnionType'}
and all(getattr(a, '__origin__', ...) is typing.Literal for a in annotation_args) and all(getattr(a, '__origin__', ...) is typing.Literal for a in annotation_args)
@ -525,7 +513,7 @@ def stringify_annotation(
args = ', '.join(_format_literal_arg_stringify(a, mode=mode) args = ', '.join(_format_literal_arg_stringify(a, mode=mode)
for a in annotation_args) for a in annotation_args)
return f'{module_prefix}Literal[{args}]' return f'{module_prefix}Literal[{args}]'
elif _is_annotated_form(annotation): # for py39+ elif _is_annotated_form(annotation): # for py310+
args = stringify_annotation(annotation_args[0], mode) args = stringify_annotation(annotation_args[0], mode)
meta_args = [] meta_args = []
for m in annotation.__metadata__: for m in annotation.__metadata__:
@ -541,11 +529,6 @@ def stringify_annotation(
else: else:
meta_args.append(repr(m)) meta_args.append(repr(m))
meta = ', '.join(meta_args) meta = ', '.join(meta_args)
if sys.version_info[:2] <= (3, 9):
if mode == 'smart':
return f'~typing.Annotated[{args}, {meta}]'
if mode == 'fully-qualified':
return f'typing.Annotated[{args}, {meta}]'
if sys.version_info[:2] <= (3, 11): if sys.version_info[:2] <= (3, 11):
if mode == 'fully-qualified-except-typing': if mode == 'fully-qualified-except-typing':
return f'Annotated[{args}, {meta}]' return f'Annotated[{args}, {meta}]'
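Because ``types.UnionType`` is always available from Python 3.10, the conditional ``from types import UnionType`` / ``UnionType = None`` fallback and the ``UnionType and isinstance(...)`` guards above can be dropped. A minimal sketch of what such union objects expose, using only the standard library::

    import types

    alias = int | None                       # X | Y creates a types.UnionType object
    assert isinstance(alias, types.UnionType)
    print(alias.__args__)                    # (<class 'int'>, <class 'NoneType'>)
    print(' | '.join(a.__name__ for a in alias.__args__))  # int | NoneType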

View File

@ -1369,7 +1369,7 @@ class LaTeXTranslator(SphinxTranslator):
not isinstance(node.parent[index - 1], nodes.compound)): not isinstance(node.parent[index - 1], nodes.compound)):
# insert blank line, if the paragraph follows a non-paragraph node in a compound # insert blank line, if the paragraph follows a non-paragraph node in a compound
self.body.append(r'\noindent' + CR) self.body.append(r'\noindent' + CR)
elif index == 1 and isinstance(node.parent, (nodes.footnote, footnotetext)): elif index == 1 and isinstance(node.parent, nodes.footnote | footnotetext):
# don't insert blank line, if the paragraph is second child of a footnote # don't insert blank line, if the paragraph is second child of a footnote
# (first one is label node) # (first one is label node)
pass pass
@ -2081,7 +2081,7 @@ class LaTeXTranslator(SphinxTranslator):
done = 0 done = 0
if len(node.children) == 1: if len(node.children) == 1:
child = node.children[0] child = node.children[0]
if isinstance(child, (nodes.bullet_list, nodes.enumerated_list)): if isinstance(child, nodes.bullet_list | nodes.enumerated_list):
done = 1 done = 1
if not done: if not done:
self.body.append(r'\begin{quote}' + CR) self.body.append(r'\begin{quote}' + CR)
@ -2092,7 +2092,7 @@ class LaTeXTranslator(SphinxTranslator):
done = 0 done = 0
if len(node.children) == 1: if len(node.children) == 1:
child = node.children[0] child = node.children[0]
if isinstance(child, (nodes.bullet_list, nodes.enumerated_list)): if isinstance(child, nodes.bullet_list | nodes.enumerated_list):
done = 1 done = 1
if not done: if not done:
self.body.append(r'\end{quote}' + CR) self.body.append(r'\end{quote}' + CR)

View File

@ -297,7 +297,7 @@ class TexinfoTranslator(SphinxTranslator):
# try to find a suitable "Top" node # try to find a suitable "Top" node
title = self.document.next_node(nodes.title) title = self.document.next_node(nodes.title)
top = title.parent if title else self.document top = title.parent if title else self.document
if not isinstance(top, (nodes.document, nodes.section)): if not isinstance(top, nodes.document | nodes.section):
top = self.document top = self.document
if top is not self.document: if top is not self.document:
entries = node_menus[top['node_name']] entries = node_menus[top['node_name']]
@ -625,7 +625,7 @@ class TexinfoTranslator(SphinxTranslator):
parent = node.parent parent = node.parent
if isinstance(parent, nodes.table): if isinstance(parent, nodes.table):
return return
if isinstance(parent, (nodes.Admonition, nodes.sidebar, nodes.topic)): if isinstance(parent, nodes.Admonition | nodes.sidebar | nodes.topic):
raise nodes.SkipNode raise nodes.SkipNode
if not isinstance(parent, nodes.section): if not isinstance(parent, nodes.section):
logger.warning(__('encountered title node not in section, topic, table, ' logger.warning(__('encountered title node not in section, topic, table, '
@ -694,7 +694,7 @@ class TexinfoTranslator(SphinxTranslator):
def visit_reference(self, node: Element) -> None: def visit_reference(self, node: Element) -> None:
# an xref's target is displayed in Info so we ignore a few # an xref's target is displayed in Info so we ignore a few
# cases for the sake of appearance # cases for the sake of appearance
if isinstance(node.parent, (nodes.title, addnodes.desc_type)): if isinstance(node.parent, nodes.title | addnodes.desc_type):
return return
if len(node) != 0 and isinstance(node[0], nodes.image): if len(node) != 0 and isinstance(node[0], nodes.image):
return return
@ -987,7 +987,7 @@ class TexinfoTranslator(SphinxTranslator):
self.add_anchor(id, node) self.add_anchor(id, node)
# anchors and indexes need to go in front # anchors and indexes need to go in front
for n in node[::]: for n in node[::]:
if isinstance(n, (addnodes.index, nodes.target)): if isinstance(n, addnodes.index | nodes.target):
n.walkabout(self) n.walkabout(self)
node.remove(n) node.remove(n)
self.body.append('\n%s ' % self.at_item_x) self.body.append('\n%s ' % self.at_item_x)

View File

@ -6,7 +6,7 @@ import os
import re import re
import textwrap import textwrap
from collections.abc import Iterable, Iterator, Sequence from collections.abc import Iterable, Iterator, Sequence
from itertools import chain, groupby from itertools import chain, groupby, pairwise
from typing import TYPE_CHECKING, Any, cast from typing import TYPE_CHECKING, Any, cast
from docutils import nodes, writers from docutils import nodes, writers
@ -221,10 +221,10 @@ class Table:
tail = "+" if out[-1][0] == "-" else "|" tail = "+" if out[-1][0] == "-" else "|"
glue = [ glue = [
"+" if left[0] == "-" or right[0] == "-" else "|" "+" if left[0] == "-" or right[0] == "-" else "|"
for left, right in zip(out, out[1:]) for left, right in pairwise(out)
] ]
glue.append(tail) glue.append(tail)
return head + "".join(chain.from_iterable(zip(out, glue))) return head + "".join(chain.from_iterable(zip(out, glue, strict=False)))
for lineno, line in enumerate(self.lines): for lineno, line in enumerate(self.lines):
if self.separator and lineno == self.separator: if self.separator and lineno == self.separator:
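``itertools.pairwise()`` was added in Python 3.10 and is the idiomatic replacement for the ``zip(out, out[1:])`` pattern used before. A short sketch::

    from itertools import pairwise

    out = ['-', '|', '-', '|']
    # Overlapping pairs: ('-', '|'), ('|', '-'), ('-', '|')
    assert list(pairwise(out)) == list(zip(out, out[1:]))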

View File

@ -25,10 +25,9 @@ from sphinx.errors import ConfigError, ExtensionError, VersionRequirementError
if TYPE_CHECKING: if TYPE_CHECKING:
from collections.abc import Iterable from collections.abc import Iterable
from typing import Union
CircularList = list[Union[int, 'CircularList']] CircularList = list[int | 'CircularList']
CircularDict = dict[str, Union[int, 'CircularDict']] CircularDict = dict[str, int | 'CircularDict']
def check_is_serializable(subject: object, *, circular: bool) -> None: def check_is_serializable(subject: object, *, circular: bool) -> None:
@ -209,9 +208,7 @@ def test_config_pickle_circular_reference_in_list():
assert isinstance(u, list) assert isinstance(u, list)
assert v.__class__ is u.__class__ assert v.__class__ is u.__class__
assert len(u) == len(v) for u_i, v_i in zip(u, v, strict=True):
for u_i, v_i in zip(u, v):
counter[type(u)] += 1 counter[type(u)] += 1
check(u_i, v_i, counter=counter, guard=guard | {id(u), id(v)}) check(u_i, v_i, counter=counter, guard=guard | {id(u), id(v)})
@ -275,9 +272,7 @@ def test_config_pickle_circular_reference_in_dict():
assert isinstance(u, dict) assert isinstance(u, dict)
assert v.__class__ is u.__class__ assert v.__class__ is u.__class__
assert len(u) == len(v) for u_i, v_i in zip(u, v, strict=True):
for u_i, v_i in zip(u, v):
counter[type(u)] += 1 counter[type(u)] += 1
check(u[u_i], v[v_i], counter=counter, guard=guard | {id(u), id(v)}) check(u[u_i], v[v_i], counter=counter, guard=guard | {id(u), id(v)})
return counter return counter
@ -573,8 +568,7 @@ def test_nitpick_base(app, status, warning):
app.build(force_all=True) app.build(force_all=True)
warning = warning.getvalue().strip().split('\n') warning = warning.getvalue().strip().split('\n')
assert len(warning) == len(nitpick_warnings) for actual, expected in zip(warning, nitpick_warnings, strict=True):
for actual, expected in zip(warning, nitpick_warnings):
assert expected in actual assert expected in actual
@ -629,8 +623,7 @@ def test_nitpick_ignore_regex_fullmatch(app, status, warning):
app.build(force_all=True) app.build(force_all=True)
warning = warning.getvalue().strip().split('\n') warning = warning.getvalue().strip().split('\n')
assert len(warning) == len(nitpick_warnings) for actual, expected in zip(warning, nitpick_warnings, strict=True):
for actual, expected in zip(warning, nitpick_warnings):
assert expected in actual assert expected in actual

View File

@ -7,7 +7,6 @@ source file translated by test_build.
from __future__ import annotations from __future__ import annotations
import typing import typing
from typing import Union
import pytest import pytest
@ -305,7 +304,7 @@ def test_autodoc_process_bases(app):
assert obj.__name__ == 'Quux' assert obj.__name__ == 'Quux'
assert options == {'show-inheritance': True, assert options == {'show-inheritance': True,
'members': []} 'members': []}
assert bases == [typing.List[Union[int, float]]] # NoQA: UP006 assert bases == [typing.List[typing.Union[int, float]]] # NoQA: UP006, UP007
bases.pop() bases.pop()
bases.extend([int, str]) bases.extend([int, str])
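The extra ``UP007`` NoQA markers appearing in the test changes correspond to Ruff's "use ``X | Y`` instead of ``Union``" rule, which starts to apply once the Ruff target version is 3.10; these tests deliberately keep the old ``typing`` spellings. A sketch of the two spellings the rule distinguishes, with hypothetical variable names::

    from typing import List, Union

    old_style: List[Union[int, float]] = [1, 2.0]   # flagged by UP006/UP007
    new_style: list[int | float] = [1, 2.0]         # preferred on Python 3.10+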

View File

@ -153,7 +153,7 @@ def test_get_items_summary(make_app, app_params):
def new_get_items(self, names, *args, **kwargs): def new_get_items(self, names, *args, **kwargs):
results = orig_get_items(self, names, *args, **kwargs) results = orig_get_items(self, names, *args, **kwargs)
for name, result in zip(names, results): for name, result in zip(names, results, strict=True):
autosummary_items[name] = result # NoQA: PERF403 autosummary_items[name] = result # NoQA: PERF403
return results return results

View File

@ -2421,7 +2421,7 @@ definition_after_normal_text : int
[r"'with \'quotes\''"], [r"'with \'quotes\''"],
) )
for spec, expected in zip(specs, tokens): for spec, expected in zip(specs, tokens, strict=True):
actual = _tokenize_type_spec(spec) actual = _tokenize_type_spec(spec)
assert expected == actual assert expected == actual
@ -2440,7 +2440,7 @@ definition_after_normal_text : int
["{'F', 'C', 'N'}", ", ", "default", " ", "None"], ["{'F', 'C', 'N'}", ", ", "default", " ", "None"],
) )
for tokens_, expected in zip(tokens, combined_tokens): for tokens_, expected in zip(tokens, combined_tokens, strict=True):
actual = _recombine_set_tokens(tokens_) actual = _recombine_set_tokens(tokens_)
assert expected == actual assert expected == actual
@ -2456,7 +2456,7 @@ definition_after_normal_text : int
["{1, 2", ", ", "default", ": ", "None"], ["{1, 2", ", ", "default", ": ", "None"],
) )
for tokens_, expected in zip(tokens, combined_tokens): for tokens_, expected in zip(tokens, combined_tokens, strict=True):
actual = _recombine_set_tokens(tokens_) actual = _recombine_set_tokens(tokens_)
assert expected == actual assert expected == actual
@ -2491,7 +2491,7 @@ definition_after_normal_text : int
":class:`pandas.DataFrame`, *optional*", ":class:`pandas.DataFrame`, *optional*",
) )
for spec, expected in zip(specs, converted): for spec, expected in zip(specs, converted, strict=True):
actual = _convert_numpy_type_spec(spec, translations=translations) actual = _convert_numpy_type_spec(spec, translations=translations)
assert expected == actual assert expected == actual
@ -2569,7 +2569,7 @@ definition_after_normal_text : int
r".+: malformed string literal \(missing closing quote\):", r".+: malformed string literal \(missing closing quote\):",
r".+: malformed string literal \(missing opening quote\):", r".+: malformed string literal \(missing opening quote\):",
) )
for token, error in zip(tokens, errors): for token, error in zip(tokens, errors, strict=True):
try: try:
_token_type(token) _token_type(token)
finally: finally:
@ -2698,6 +2698,6 @@ int py:class 1 int.html -
a_ = list(li.findall('.//a[@class="reference external"]')) a_ = list(li.findall('.//a[@class="reference external"]'))
assert len(a_) == 2 assert len(a_) == 2
for a, uri in zip(a_, ('list.html', 'int.html')): for a, uri in zip(a_, ('list.html', 'int.html'), strict=True):
assert a.attrib['href'] == f'127.0.0.1:5555/{uri}' assert a.attrib['href'] == f'127.0.0.1:5555/{uri}'
assert a.attrib['title'] == '(in Intersphinx Test v42)' assert a.attrib['title'] == '(in Intersphinx Test v42)'

View File

@ -228,13 +228,13 @@ class TestSigElementFallbackTransform:
if ignore_sig_element_fallback_transform: if ignore_sig_element_fallback_transform:
# desc_sig_element is implemented or desc_sig_* nodes are properly handled (and left untouched) # desc_sig_element is implemented or desc_sig_* nodes are properly handled (and left untouched)
for node_type, node, mess in zip(self._builtin_sig_elements, document.children[:-1], stdout[:-1]): for node_type, node, mess in zip(self._builtin_sig_elements, document.children[:-1], stdout[:-1], strict=True):
assert_node(node, node_type) assert_node(node, node_type)
assert not node.hasattr('_sig_node_type') assert not node.hasattr('_sig_node_type')
assert mess == f'mark: {node_type.__name__!r}' assert mess == f'mark: {node_type.__name__!r}'
else: else:
# desc_sig_* nodes are converted into inline nodes # desc_sig_* nodes are converted into inline nodes
for node_type, node, mess in zip(self._builtin_sig_elements, document.children[:-1], stdout[:-1]): for node_type, node, mess in zip(self._builtin_sig_elements, document.children[:-1], stdout[:-1], strict=True):
assert_node(node, nodes.inline, _sig_node_type=node_type.__name__) assert_node(node, nodes.inline, _sig_node_type=node_type.__name__)
assert mess == f'generic visit: {nodes.inline.__name__!r}' assert mess == f'generic visit: {nodes.inline.__name__!r}'

View File

@ -53,7 +53,7 @@ def test_strip_ansi(
# #
# For instance ``next_ansi_blocks(['a', 'b'], 3) == ['a', 'b', 'a']``. # For instance ``next_ansi_blocks(['a', 'b'], 3) == ['a', 'b', 'a']``.
stream = itertools.cycle(choices) stream = itertools.cycle(choices)
return list(map(operator.itemgetter(0), zip(stream, range(n)))) return list(map(operator.itemgetter(0), zip(stream, range(n), strict=False)))
# generate all permutations of length N # generate all permutations of length N
for sigma in itertools.permutations(range(N), N): for sigma in itertools.permutations(range(N), N):

View File

@ -91,7 +91,7 @@ def test_TypeAliasForwardRef():
alias = TypeAliasForwardRef('example') alias = TypeAliasForwardRef('example')
assert stringify_annotation(alias, 'fully-qualified-except-typing') == 'example' assert stringify_annotation(alias, 'fully-qualified-except-typing') == 'example'
alias = Optional[alias] alias = Optional[alias] # NoQA: UP007
assert stringify_annotation(alias, 'fully-qualified-except-typing') == 'example | None' assert stringify_annotation(alias, 'fully-qualified-except-typing') == 'example | None'
@ -775,7 +775,7 @@ def test_isproperty():
def test_isgenericalias(): def test_isgenericalias():
#: A list of int #: A list of int
T = List[int] # NoQA: UP006 T = List[int] # NoQA: UP006
S = list[Union[str, None]] S = list[Union[str, None]] # NoQA: UP006, UP007
C = Callable[[int], None] # a generic alias not having a doccomment C = Callable[[int], None] # a generic alias not having a doccomment

View File

@ -44,8 +44,6 @@ from typing import (
Union, Union,
) )
import pytest
from sphinx.ext.autodoc import mock from sphinx.ext.autodoc import mock
from sphinx.util.typing import _INVALID_BUILTIN_CLASSES, restify, stringify_annotation from sphinx.util.typing import _INVALID_BUILTIN_CLASSES, restify, stringify_annotation
@ -274,12 +272,8 @@ def test_restify_type_hints_typevars():
assert restify(list[T]) == ":py:class:`list`\\ [:py:obj:`tests.test_util.test_util_typing.T`]" assert restify(list[T]) == ":py:class:`list`\\ [:py:obj:`tests.test_util.test_util_typing.T`]"
assert restify(list[T], "smart") == ":py:class:`list`\\ [:py:obj:`~tests.test_util.test_util_typing.T`]" assert restify(list[T], "smart") == ":py:class:`list`\\ [:py:obj:`~tests.test_util.test_util_typing.T`]"
if sys.version_info[:2] >= (3, 10): assert restify(MyInt) == ":py:class:`tests.test_util.test_util_typing.MyInt`"
assert restify(MyInt) == ":py:class:`tests.test_util.test_util_typing.MyInt`" assert restify(MyInt, "smart") == ":py:class:`~tests.test_util.test_util_typing.MyInt`"
assert restify(MyInt, "smart") == ":py:class:`~tests.test_util.test_util_typing.MyInt`"
else:
assert restify(MyInt) == ":py:class:`MyInt`"
assert restify(MyInt, "smart") == ":py:class:`MyInt`"
def test_restify_type_hints_custom_class(): def test_restify_type_hints_custom_class():
@ -363,7 +357,6 @@ def test_restify_Unpack():
assert restify(t.Unpack['X'], 'smart') == expect assert restify(t.Unpack['X'], 'smart') == expect
@pytest.mark.skipif(sys.version_info[:2] <= (3, 9), reason='python 3.10+ is required.')
def test_restify_type_union_operator(): def test_restify_type_union_operator():
assert restify(int | None) == ":py:class:`int` | :py:obj:`None`" # type: ignore[attr-defined] assert restify(int | None) == ":py:class:`int` | :py:obj:`None`" # type: ignore[attr-defined]
assert restify(None | int) == ":py:obj:`None` | :py:class:`int`" # type: ignore[attr-defined] assert restify(None | int) == ":py:obj:`None` | :py:class:`int`" # type: ignore[attr-defined]
@ -385,7 +378,6 @@ def test_restify_mock():
assert restify(unknown.secret.Class, "smart") == ':py:class:`~unknown.secret.Class`' assert restify(unknown.secret.Class, "smart") == ':py:class:`~unknown.secret.Class`'
@pytest.mark.xfail(sys.version_info[:2] <= (3, 9), reason='ParamSpec not supported in Python 3.9.')
def test_restify_type_hints_paramspec(): def test_restify_type_hints_paramspec():
from typing import ParamSpec from typing import ParamSpec
P = ParamSpec('P') P = ParamSpec('P')
@ -658,12 +650,8 @@ def test_stringify_type_hints_typevars():
assert stringify_annotation(list[T], 'fully-qualified-except-typing') == "list[tests.test_util.test_util_typing.T]" assert stringify_annotation(list[T], 'fully-qualified-except-typing') == "list[tests.test_util.test_util_typing.T]"
assert stringify_annotation(list[T], "smart") == "list[~tests.test_util.test_util_typing.T]" assert stringify_annotation(list[T], "smart") == "list[~tests.test_util.test_util_typing.T]"
if sys.version_info[:2] >= (3, 10): assert stringify_annotation(MyInt, 'fully-qualified-except-typing') == "tests.test_util.test_util_typing.MyInt"
assert stringify_annotation(MyInt, 'fully-qualified-except-typing') == "tests.test_util.test_util_typing.MyInt" assert stringify_annotation(MyInt, "smart") == "~tests.test_util.test_util_typing.MyInt"
assert stringify_annotation(MyInt, "smart") == "~tests.test_util.test_util_typing.MyInt"
else:
assert stringify_annotation(MyInt, 'fully-qualified-except-typing') == "MyInt"
assert stringify_annotation(MyInt, "smart") == "MyInt"
def test_stringify_type_hints_custom_class(): def test_stringify_type_hints_custom_class():
@ -695,7 +683,6 @@ def test_stringify_type_Literal():
assert stringify_annotation(Literal[MyEnum.a], 'smart') == '~typing.Literal[MyEnum.a]' assert stringify_annotation(Literal[MyEnum.a], 'smart') == '~typing.Literal[MyEnum.a]'
@pytest.mark.skipif(sys.version_info[:2] <= (3, 9), reason='python 3.10+ is required.')
def test_stringify_type_union_operator(): def test_stringify_type_union_operator():
assert stringify_annotation(int | None) == "int | None" # type: ignore[attr-defined] assert stringify_annotation(int | None) == "int | None" # type: ignore[attr-defined]
assert stringify_annotation(int | None, "smart") == "int | None" # type: ignore[attr-defined] assert stringify_annotation(int | None, "smart") == "int | None" # type: ignore[attr-defined]
@ -738,7 +725,6 @@ def test_stringify_type_ForwardRef():
assert stringify_annotation(Tuple[dict[ForwardRef("MyInt"), str], list[List[int]]], 'smart') == "~typing.Tuple[dict[MyInt, str], list[~typing.List[int]]]" # type: ignore[attr-defined] assert stringify_annotation(Tuple[dict[ForwardRef("MyInt"), str], list[List[int]]], 'smart') == "~typing.Tuple[dict[MyInt, str], list[~typing.List[int]]]" # type: ignore[attr-defined]
@pytest.mark.xfail(sys.version_info[:2] <= (3, 9), reason='ParamSpec not supported in Python 3.9.')
def test_stringify_type_hints_paramspec(): def test_stringify_type_hints_paramspec():
from typing import ParamSpec from typing import ParamSpec
P = ParamSpec('P') P = ParamSpec('P')

View File

@ -1,6 +1,6 @@
[tox] [tox]
minversion = 4.2.0 minversion = 4.2.0
envlist = py{39,310,311,312,313} envlist = py{310,311,312,313}
[testenv] [testenv]
usedevelop = True usedevelop = True
@ -19,7 +19,7 @@ passenv =
BUILDER BUILDER
READTHEDOCS READTHEDOCS
description = description =
py{39,310,311,312,313}: Run unit tests against {envname}. py{310,311,312,313}: Run unit tests against {envname}.
extras = extras =
test test
setenv = setenv =

View File

@ -106,7 +106,10 @@ def run_extract() -> None:
options = opt_dict options = opt_dict
with open(os.path.join(root, filename), 'rb') as fileobj: with open(os.path.join(root, filename), 'rb') as fileobj:
for lineno, message, comments, context in extract( for lineno, message, comments, context in extract(
method, fileobj, KEYWORDS, options=options method, # type: ignore[arg-type]
fileobj,
KEYWORDS,
options=options,
): ):
filepath = os.path.join(input_path, relative_name) filepath = os.path.join(input_path, relative_name)
catalogue.add( catalogue.add(
@ -217,7 +220,7 @@ def run_compile() -> None:
for x in message.locations for x in message.locations
): ):
msgid = message.id msgid = message.id
if isinstance(msgid, (list, tuple)): if isinstance(msgid, list | tuple):
msgid = msgid[0] msgid = msgid[0]
js_catalogue[msgid] = message.string js_catalogue[msgid] = message.string

View File

@ -8,9 +8,7 @@ import sys
import time import time
from contextlib import contextmanager from contextlib import contextmanager
from pathlib import Path from pathlib import Path
from typing import TYPE_CHECKING from typing import TYPE_CHECKING, TypeAlias
from typing_extensions import TypeAlias
if TYPE_CHECKING: if TYPE_CHECKING:
from collections.abc import Iterator, Sequence from collections.abc import Iterator, Sequence
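``typing.TypeAlias`` has existed since Python 3.10 (PEP 613), so the ``typing_extensions`` import above is no longer needed. A minimal sketch of an explicit alias declaration, with hypothetical names::

    from typing import TypeAlias

    # PEP 613: mark the assignment as a type alias rather than a plain constant.
    PathArg: TypeAlias = str | None

    def normalise(path: PathArg) -> str:
        return path or '.'

    print(normalise(None))  # .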