Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
Merge branch '4.0.x' into 9322_KeyError_from_PropageteDescDomain
commit 072cdf2110
@@ -13,10 +13,20 @@ Deprecated
Features added
--------------

* C, add C23 keywords ``_Decimal32``, ``_Decimal64``, and ``_Decimal128``.
* #9354: C, add :confval:`c_extra_keywords` to allow user-defined keywords
  during parsing.

Bugs fixed
----------

* #9330: changeset domain: :rst:dir:`versionchanged` with contents being a list
  will cause error during pdf build
* #9313: LaTeX: complex table with merged cells broken since 4.0
* #9305: LaTeX: backslash may cause Improper discretionary list pdf build error
  with Japanese engines
* #9354: C, remove special macro names from the keyword list.
  See also :confval:`c_extra_keywords`.
* #9322: KeyError is raised on PropagateDescDomain transform

Testing

@@ -2670,6 +2670,14 @@ Options for the C domain

   .. versionadded:: 3.0

.. confval:: c_extra_keywords

   A list of identifiers to be recognized as keywords by the C parser.
   It defaults to ``['alignas', 'alignof', 'bool', 'complex', 'imaginary',
   'noreturn', 'static_assert', 'thread_local']``.

   .. versionadded:: 4.0.3

.. confval:: c_allow_pre_v3

   A boolean (default ``False``) controlling whether to parse and try to

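A minimal ``conf.py`` sketch (not part of the diff, values hypothetical) of how a project might override this new option — here assuming the project needs ``complex`` as an ordinary identifier in its C signatures:

    # conf.py -- hypothetical project configuration
    # Start from the documented default and drop 'complex' so the C domain
    # parser accepts it as a plain identifier instead of a keyword.
    c_extra_keywords = ['alignas', 'alignof', 'bool', 'imaginary',
                        'noreturn', 'static_assert', 'thread_local']
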
@@ -54,10 +54,15 @@ _keywords = [
    'else', 'enum', 'extern', 'float', 'for', 'goto', 'if', 'inline', 'int', 'long',
    'register', 'restrict', 'return', 'short', 'signed', 'sizeof', 'static', 'struct',
    'switch', 'typedef', 'union', 'unsigned', 'void', 'volatile', 'while',
    '_Alignas', 'alignas', '_Alignof', 'alignof', '_Atomic', '_Bool', 'bool',
    '_Complex', 'complex', '_Generic', '_Imaginary', 'imaginary',
    '_Noreturn', 'noreturn', '_Static_assert', 'static_assert',
    '_Thread_local', 'thread_local',
    '_Alignas', '_Alignof', '_Atomic', '_Bool', '_Complex',
    '_Decimal32', '_Decimal64', '_Decimal128',
    '_Generic', '_Imaginary', '_Noreturn', '_Static_assert', '_Thread_local',
]
# These are only keyword'y when the corresponding headers are included.
# They are used as default value for c_extra_keywords.
_macroKeywords = [
    'alignas', 'alignof', 'bool', 'complex', 'imaginary', 'noreturn', 'static_assert',
    'thread_local',
]

# these are ordered by preceedence

@@ -2535,6 +2540,12 @@ class DefinitionParser(BaseParser):
            if identifier in _keywords:
                self.fail("Expected identifier in nested name, "
                          "got keyword: %s" % identifier)
            if self.matched_text in self.config.c_extra_keywords:
                msg = "Expected identifier, got user-defined keyword: %s." \
                    + " Remove it from c_extra_keywords to allow it as identifier.\n" \
                    + "Currently c_extra_keywords is %s."
                self.fail(msg % (self.matched_text,
                                 str(self.config.c_extra_keywords)))
            ident = ASTIdentifier(identifier)
            names.append(ident)

@@ -2711,6 +2722,12 @@ class DefinitionParser(BaseParser):
                if self.matched_text in _keywords:
                    self.fail("Expected identifier, "
                              "got keyword: %s" % self.matched_text)
                if self.matched_text in self.config.c_extra_keywords:
                    msg = "Expected identifier, got user-defined keyword: %s." \
                        + " Remove it from c_extra_keywords to allow it as identifier.\n" \
                        + "Currently c_extra_keywords is %s."
                    self.fail(msg % (self.matched_text,
                                     str(self.config.c_extra_keywords)))
                identifier = ASTIdentifier(self.matched_text)
                declId = ASTNestedName([identifier], rooted=False)
            else:

@@ -3877,6 +3894,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:
    app.add_domain(CDomain)
    app.add_config_value("c_id_attributes", [], 'env')
    app.add_config_value("c_paren_attributes", [], 'env')
    app.add_config_value("c_extra_keywords", _macroKeywords, 'env')
    app.add_post_transform(AliasTransform)

    app.add_config_value("c_allow_pre_v3", False, 'env')

@@ -74,8 +74,10 @@ class VersionChange(SphinxDirective):
        if self.content:
            self.state.nested_parse(self.content, self.content_offset, node)
        classes = ['versionmodified', versionlabel_classes[self.name]]
        if len(node):
            if isinstance(node[0], nodes.paragraph) and node[0].rawsource:
        if len(node) > 0 and isinstance(node[0], nodes.paragraph):
            # the contents start with a paragraph
            if node[0].rawsource:
                # make the first paragraph translatable
                content = nodes.inline(node[0].rawsource, translatable=True)
                content.source = node[0].source
                content.line = node[0].line

@@ -84,10 +86,16 @@ class VersionChange(SphinxDirective):

            para = cast(nodes.paragraph, node[0])
            para.insert(0, nodes.inline('', '%s: ' % text, classes=classes))
        else:
        elif len(node) > 0:
            # the contents do not starts with a paragraph
            para = nodes.paragraph('', '',
                                   nodes.inline('', '%s.' % text,
                                                classes=classes),
                                   nodes.inline('', '%s: ' % text, classes=classes),
                                   translatable=False)
            node.insert(0, para)
        else:
            # the contents are empty
            para = nodes.paragraph('', '',
                                   nodes.inline('', '%s.' % text, classes=classes),
                                   translatable=False)
            node.append(para)

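For context, a hedged pytest-style sketch (not taken from the commit) of input that exercises the new ``elif len(node) > 0`` branch above: a :rst:dir:`versionchanged` whose body starts with a bullet list rather than a paragraph. It assumes the Sphinx test suite's ``app`` fixture and ``basic`` test root; the test name and option names are hypothetical.

    import pytest
    from docutils import nodes

    from sphinx import addnodes
    from sphinx.testing import restructuredtext


    @pytest.mark.sphinx(testroot='basic')
    def test_versionchanged_with_list_contents(app):
        # directive body is a bullet list, the case reported in #9330
        text = (".. versionchanged:: 4.0\n"
                "\n"
                "   * option ``foo`` was removed\n"
                "   * option ``bar`` now defaults to ``True``\n")
        doctree = restructuredtext.parse(app, text)
        ver = doctree.next_node(addnodes.versionmodified)
        # the label is prepended as its own paragraph; the list follows untouched
        assert isinstance(ver[0], nodes.paragraph)
        assert isinstance(ver[1], nodes.bullet_list)
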
@@ -765,7 +765,8 @@
    % break at . , ; ? ! /
    \sphinxbreaksviaactive
    % break also at \
    \let\sphinx@textbackslash\textbackslash
    \setbox8=\hbox{\textbackslash}%
    \def\sphinx@textbackslash{\copy8}%
    \let\textbackslash\sphinxtextbackslash
    % by default, no continuation symbol on next line but may be added
    \let\sphinxafterbreak\sphinxafterbreakofinlineliteral

@@ -15,16 +15,20 @@ import pytest

from sphinx import addnodes
from sphinx.addnodes import desc
from sphinx.domains.c import DefinitionError, DefinitionParser, Symbol, _id_prefix, _max_id
from sphinx.domains.c import (DefinitionError, DefinitionParser, Symbol, _id_prefix,
                              _macroKeywords, _max_id)
from sphinx.ext.intersphinx import load_mappings, normalize_intersphinx_mapping
from sphinx.testing import restructuredtext
from sphinx.testing.util import assert_node


class Config:
    c_id_attributes = ["id_attr", 'LIGHTGBM_C_EXPORT']
    c_paren_attributes = ["paren_attr"]
    c_extra_keywords = _macroKeywords


def parse(name, string):
    class Config:
        c_id_attributes = ["id_attr", 'LIGHTGBM_C_EXPORT']
        c_paren_attributes = ["paren_attr"]
    parser = DefinitionParser(string, location=None, config=Config())
    parser.allowFallbackExpressionParsing = False
    ast = parser.parse_declaration(name, name)

@@ -114,9 +118,6 @@ def check(name, input, idDict, output=None, key=None, asTextOutput=None):

def test_expressions():
    def exprCheck(expr, output=None):
        class Config:
            c_id_attributes = ["id_attr"]
            c_paren_attributes = ["paren_attr"]
        parser = DefinitionParser(expr, location=None, config=Config())
        parser.allowFallbackExpressionParsing = False
        ast = parser.parse_expression()

@@ -522,6 +523,16 @@ def test_attributes():
    check('function', 'LIGHTGBM_C_EXPORT int LGBM_BoosterFree(int handle)',
          {1: 'LGBM_BoosterFree'})


def test_extra_keywords():
    with pytest.raises(DefinitionError,
                       match='Expected identifier, got user-defined keyword: complex.'):
        parse('function', 'void f(int complex)')
    with pytest.raises(DefinitionError,
                       match='Expected identifier, got user-defined keyword: complex.'):
        parse('function', 'void complex(void)')


# def test_print():
#     # used for getting all the ids out for checking
#     for a in ids: