Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
Convert set-like containers to `frozenset`

This commit is contained in:
parent b852618c1b
commit b3035c7d22
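The change is mechanical across the touched modules: module-level constants that exist only to be tested with `in` become `frozenset` literals, ordered operator tables keep their outer list but switch the inner groups to tuples, and the new annotations use the read-only `collections.abc` ABCs (`Set`, `Sequence`) rather than the concrete container types. A minimal sketch of the core pattern, using an illustrative name rather than one from the diff:

```python
from collections.abc import Set  # the diff defers this behind TYPE_CHECKING; imported directly here so the sketch runs

# Before: a mutable list; membership tests are O(n).
_reserved = ['auto', 'break', 'case']

# After: immutable and hashable, with average O(1) membership tests,
# annotated with the read-only Set ABC rather than frozenset itself.
_reserved: Set[str] = frozenset({'auto', 'break', 'case'})

assert 'auto' in _reserved  # call sites that only test membership are unchanged
```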
@@ -102,7 +102,7 @@ XINDY_LANG_OPTIONS = {
     'el-polyton': '-L greek-polytonic -C utf8 ',
 } # fmt: skip
 
-XINDY_CYRILLIC_SCRIPTS = ['be', 'bg', 'mk', 'mn', 'ru', 'sr', 'sh', 'uk']
+XINDY_CYRILLIC_SCRIPTS = frozenset({'be', 'bg', 'mk', 'mn', 'ru', 'sr', 'sh', 'uk'})
 
 logger = logging.getLogger(__name__)
 
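For a constant like `XINDY_CYRILLIC_SCRIPTS` the conversion is behaviour-preserving at the call sites that matter: `in` reads exactly as before (and becomes an average O(1) hash lookup), while accidental mutation of the shared constant now fails loudly instead of silently altering shared state. A quick sketch:

```python
XINDY_CYRILLIC_SCRIPTS = frozenset({'be', 'bg', 'mk', 'mn', 'ru', 'sr', 'sh', 'uk'})

print('ru' in XINDY_CYRILLIC_SCRIPTS)   # True, same result as with the old list

try:
    XINDY_CYRILLIC_SCRIPTS.append('xx')  # the old list allowed this
except AttributeError as exc:
    print(exc)                           # frozenset has no mutating methods
```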
@@ -1,9 +1,13 @@
 from __future__ import annotations
 
 import re
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence, Set
 
 # https://en.cppreference.com/w/c/keyword
-_keywords = [
+_keywords: Set[str] = frozenset({
     'auto',
     'break',
     'case', 'char', 'const', 'continue',
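`Sequence` and `Set` are imported from `collections.abc` under `if TYPE_CHECKING:` only. Because the module already starts with `from __future__ import annotations`, every annotation is kept as a string and never evaluated at runtime, so the guarded import adds no import-time cost and no risk of a `NameError`, while type checkers still see the full types. A self-contained illustration of that arrangement (not code from the diff):

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only type checkers execute this import.
    from collections.abc import Set

_keywords: Set[str] = frozenset({'auto', 'break', 'case'})

# At runtime the annotation is just the string 'Set[str]'; nothing tries
# to resolve the name, so the guarded import is never needed here.
print(__annotations__['_keywords'])
```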
@@ -28,10 +32,10 @@ _keywords = [
     '_Noreturn',
     '_Static_assert',
     '_Thread_local',
-] # fmt: skip
+}) # fmt: skip
 # These are only keyword'y when the corresponding headers are included.
 # They are used as default value for c_extra_keywords.
-_macro_keywords = [
+_macro_keywords: Set[str] = frozenset({
     'alignas',
     'alignof',
     'bool',
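As the comment notes, `_macro_keywords` doubles as the default value of the `c_extra_keywords` option. An immutable default is the safer shape for that role: every consumer shares one object and none of them can grow or shrink it in place. A simplified sketch of the idea, not Sphinx's actual config handling:

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from collections.abc import Set

_macro_keywords: Set[str] = frozenset({'alignas', 'alignof', 'bool'})


def is_extra_keyword(name: str, extra_keywords: Set[str] = _macro_keywords) -> bool:
    # A frozenset default sidesteps the classic mutable-default pitfall:
    # callers cannot append to the shared default between calls.
    return name in extra_keywords


print(is_extra_keyword('bool'))               # True, via the shared default
print(is_extra_keyword('bool', frozenset()))  # False, caller overrode the default
```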
@@ -40,23 +44,23 @@ _macro_keywords = [
     'noreturn',
     'static_assert',
     'thread_local',
-]
+})
 
 # these are ordered by precedence
-_expression_bin_ops = [
-    ['||', 'or'],
-    ['&&', 'and'],
-    ['|', 'bitor'],
-    ['^', 'xor'],
-    ['&', 'bitand'],
-    ['==', '!=', 'not_eq'],
-    ['<=', '>=', '<', '>'],
-    ['<<', '>>'],
-    ['+', '-'],
-    ['*', '/', '%'],
-    ['.*', '->*'],
+_expression_bin_ops: Sequence[tuple[str, ...]] = [
+    ('||', 'or'),
+    ('&&', 'and'),
+    ('|', 'bitor'),
+    ('^', 'xor'),
+    ('&', 'bitand'),
+    ('==', '!=', 'not_eq'),
+    ('<=', '>=', '<', '>'),
+    ('<<', '>>'),
+    ('+', '-'),
+    ('*', '/', '%'),
+    ('.*', '->*'),
 ]
-_expression_unary_ops = [
+_expression_unary_ops: Sequence[str] = [
     '++',
     '--',
     '*',
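The precedence table is treated differently from the keyword sets: the outer container stays a list because its order is meaningful (hence the `Sequence[tuple[str, ...]]` annotation), while the inner groups become tuples, since each precedence level is a fixed collection of operator spellings. Code that iterates the table is unaffected; a trimmed sketch:

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from collections.abc import Sequence

# Outer sequence: ordered by precedence. Inner tuples: the operator
# spellings that share one precedence level (trimmed to three levels).
_expression_bin_ops: Sequence[tuple[str, ...]] = [
    ('||', 'or'),
    ('&&', 'and'),
    ('==', '!=', 'not_eq'),
]

for level, ops in enumerate(_expression_bin_ops):
    print(level, ops)  # iterates exactly as the old list-of-lists did
```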
@@ -68,7 +72,7 @@ _expression_unary_ops = [
     '~',
     'compl',
 ]
-_expression_assignment_ops = [
+_expression_assignment_ops: Sequence[str] = [
     '=',
     '*=',
     '/=',
@@ -86,7 +90,7 @@ _expression_assignment_ops = [
 ]
 
 _max_id = 1
-_id_prefix = [None, 'c.', 'Cv2.']
+_id_prefix: Sequence[str] = ('', 'c.', 'Cv2.')
 # Ids are used in lookup keys which are used across pickled files,
 # so when _max_id changes, make sure to update the ENV_VERSION.
 
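Besides the list → tuple conversion, the first element of `_id_prefix` changes from `None` to `''`, which is what allows the plain `Sequence[str]` annotation instead of `Sequence[str | None]`. On the assumption that the prefixes are indexed by id version and concatenated into identifier strings (a reading of the surrounding code, not shown in this hunk), an empty string composes without a `None` guard:

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from collections.abc import Sequence

_id_prefix: Sequence[str] = ('', 'c.', 'Cv2.')

# Hypothetical use: prefix a symbol id per version. With '' in slot 0,
# plain concatenation works for every index; None would need a check.
for version, prefix in enumerate(_id_prefix):
    print(version, prefix + 'some_symbol')
```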
@@ -253,6 +253,10 @@ namespace_object:
 from __future__ import annotations
 
 import re
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence, Set
 
 udl_identifier_re = re.compile(
     r'[a-zA-Z_][a-zA-Z0-9_]*\b' # note, no word boundary in the beginning
@@ -286,7 +290,7 @@ _fold_operator_re = re.compile(
     re.VERBOSE,
 )
 # see https://en.cppreference.com/w/cpp/keyword
-_keywords = [
+_keywords: Set[str] = frozenset({
     'alignas', 'alignof', 'and', 'and_eq', 'asm', 'auto',
     'bitand', 'bitor', 'bool', 'break',
     'case', 'catch', 'class', 'compl', 'concept', 'continue',
@@ -311,7 +315,7 @@ _keywords = [
     'virtual', 'void', 'volatile',
     'wchar_t', 'while',
     'xor', 'xor_eq',
-] # fmt: skip
+}) # fmt: skip
 
 
 _simple_type_specifiers_re = re.compile(
@@ -332,7 +336,7 @@ _simple_type_specifiers_re = re.compile(
 )
 
 _max_id = 4
-_id_prefix = [None, '', '_CPPv2', '_CPPv3', '_CPPv4']
+_id_prefix: Sequence[str] = ('', '', '_CPPv2', '_CPPv3', '_CPPv4')
 # Ids are used in lookup keys which are used across pickled files,
 # so when _max_id changes, make sure to update the ENV_VERSION.
 
@@ -553,20 +557,20 @@ _id_char_from_prefix: dict[str | None, str] = {
     'L': 'w',
 }
 # these are ordered by preceedence
-_expression_bin_ops = [
-    ['||', 'or'],
-    ['&&', 'and'],
-    ['|', 'bitor'],
-    ['^', 'xor'],
-    ['&', 'bitand'],
-    ['==', '!=', 'not_eq'],
-    ['<=>', '<=', '>=', '<', '>'],
-    ['<<', '>>'],
-    ['+', '-'],
-    ['*', '/', '%'],
-    ['.*', '->*'],
+_expression_bin_ops: Sequence[tuple[str, ...]] = [
+    ('||', 'or'),
+    ('&&', 'and'),
+    ('|', 'bitor'),
+    ('^', 'xor'),
+    ('&', 'bitand'),
+    ('==', '!=', 'not_eq'),
+    ('<=>', '<=', '>=', '<', '>'),
+    ('<<', '>>'),
+    ('+', '-'),
+    ('*', '/', '%'),
+    ('.*', '->*'),
 ]
-_expression_unary_ops = [
+_expression_unary_ops: Sequence[str] = [
     '++',
     '--',
     '*',
@@ -578,7 +582,7 @@ _expression_unary_ops = [
     '~',
     'compl',
 ]
-_expression_assignment_ops = [
+_expression_assignment_ops: Sequence[str] = [
     '=',
     '*=',
     '/=',
@@ -38,12 +38,12 @@ AUTODOC_DEFAULT_OPTIONS = [
     'no-value',
 ]
 
-AUTODOC_EXTENDABLE_OPTIONS = [
+AUTODOC_EXTENDABLE_OPTIONS = frozenset({
     'members',
     'private-members',
     'special-members',
     'exclude-members',
-]
+})
 
 
 class DummyOptionSpec(dict[str, Callable[[str], str]]):
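`AUTODOC_EXTENDABLE_OPTIONS` gets the same treatment: the option names form an unordered collection, presumably consulted only with `in` checks, and a `frozenset` states that intent directly while protecting the constant from in-place edits. A brief sketch of the kind of lookup this supports; the helper is illustrative, not part of the diff:

```python
AUTODOC_EXTENDABLE_OPTIONS = frozenset({
    'members',
    'private-members',
    'special-members',
    'exclude-members',
})


def is_extendable(option_name: str) -> bool:
    # Illustrative helper: average O(1) membership test against an
    # immutable constant.
    return option_name in AUTODOC_EXTENDABLE_OPTIONS


print(is_extendable('members'))        # True
print(is_extendable('undoc-members'))  # False
```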