Improve string concatenation (#12758)
Use implicit concatenation instead of ``+``, and combine some implicit concatenations into a single string literal.
This commit is contained in:
parent 18fbced7e0
commit 620e434f65
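
A minimal sketch of the pattern applied throughout the diff below (the strings and names here are made up for illustration, not taken from the Sphinx sources): adjacent string literals are concatenated at compile time, so the explicit ``+`` operators and backslash continuations can be dropped, and short adjacent pieces can be merged into one literal.

    # Before: explicit concatenation with + and line continuations (illustrative only).
    msg = "Too many items: %d." \
        + " Remove some items to continue.\n" \
        + "The current limit is %d."

    # After: adjacent literals inside parentheses are joined implicitly,
    # so no + or backslash continuation is needed.
    msg = (
        'Too many items: %d.'
        ' Remove some items to continue.\n'
        'The current limit is %d.'
    )

    print(msg % (12, 10))
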
@@ -495,9 +495,11 @@ class DefinitionParser(BaseParser):
                 self.fail("Expected identifier in nested name, "
                           "got keyword: %s" % identifier)
             if self.matched_text in self.config.c_extra_keywords:
-                msg = "Expected identifier, got user-defined keyword: %s." \
-                    + " Remove it from c_extra_keywords to allow it as identifier.\n" \
-                    + "Currently c_extra_keywords is %s."
+                msg = (
+                    'Expected identifier, got user-defined keyword: %s.'
+                    ' Remove it from c_extra_keywords to allow it as identifier.\n'
+                    'Currently c_extra_keywords is %s.'
+                )
                 self.fail(msg % (self.matched_text,
                                  str(self.config.c_extra_keywords)))
             ident = ASTIdentifier(identifier)
@@ -670,9 +672,11 @@ class DefinitionParser(BaseParser):
                 self.fail("Expected identifier, "
                           "got keyword: %s" % self.matched_text)
             if self.matched_text in self.config.c_extra_keywords:
-                msg = "Expected identifier, got user-defined keyword: %s." \
-                    + " Remove it from c_extra_keywords to allow it as identifier.\n" \
-                    + "Currently c_extra_keywords is %s."
+                msg = (
+                    'Expected identifier, got user-defined keyword: %s. '
+                    'Remove it from c_extra_keywords to allow it as identifier.\n'
+                    'Currently c_extra_keywords is %s.'
+                )
                 self.fail(msg % (self.matched_text,
                                  str(self.config.c_extra_keywords)))
             identifier = ASTIdentifier(self.matched_text)
@@ -1263,7 +1263,7 @@ class DefinitionParser(BaseParser):
         if self.skip_string('('):
             expr = self._parse_constant_expression(inTemplate=False)
             if not expr:
-                self.fail("Expected constant expression after '('" +
+                self.fail("Expected constant expression after '('"
                           " in explicit specifier.")
             self.skip_ws()
             if not self.skip_string(')'):
@@ -1972,13 +1972,14 @@ class DefinitionParser(BaseParser):
         if numArgs > numParams:
             numExtra = numArgs - numParams
             if not fullSpecShorthand and not isMemberInstantiation:
-                msg = "Too many template argument lists compared to parameter" \
-                    " lists. Argument lists: %d, Parameter lists: %d," \
-                    " Extra empty parameters lists prepended: %d." \
-                    % (numArgs, numParams, numExtra)
-                msg += " Declaration:\n\t"
+                msg = (
+                    f'Too many template argument lists compared to parameter lists. '
+                    f'Argument lists: {numArgs:d}, Parameter lists: {numParams:d}, '
+                    f'Extra empty parameters lists prepended: {numExtra:d}. '
+                    'Declaration:\n\t'
+                )
                 if templatePrefix:
-                    msg += "%s\n\t" % templatePrefix
+                    msg += f"{templatePrefix}\n\t"
                 msg += str(nestedName)
                 self.warn(msg)
@@ -394,7 +394,7 @@ Note: By default this script will not overwrite already created files."""),
         'exclude_pattern',
         nargs='*',
         help=__(
-            'fnmatch-style file and/or directory patterns ' 'to exclude from generation'
+            'fnmatch-style file and/or directory patterns to exclude from generation'
         ),
     )
@@ -419,7 +419,7 @@ Note: By default this script will not overwrite already created files."""),
         dest='maxdepth',
         type=int,
         default=4,
-        help=__('maximum depth of submodules to show in the TOC ' '(default: 4)'),
+        help=__('maximum depth of submodules to show in the TOC (default: 4)'),
     )
     parser.add_argument(
         '-f',
@@ -435,8 +435,7 @@ Note: By default this script will not overwrite already created files."""),
         dest='followlinks',
         default=False,
         help=__(
-            'follow symbolic links. Powerful when combined '
-            'with collective.recipe.omelette.'
+            'follow symbolic links. Powerful when combined with collective.recipe.omelette.'
         ),
     )
     parser.add_argument(
@@ -490,15 +489,14 @@ Note: By default this script will not overwrite already created files."""),
         '--module-first',
         action='store_true',
         dest='modulefirst',
-        help=__('put module documentation before submodule ' 'documentation'),
+        help=__('put module documentation before submodule documentation'),
     )
     parser.add_argument(
         '--implicit-namespaces',
         action='store_true',
         dest='implicit_namespaces',
         help=__(
-            'interpret module paths according to PEP-0420 '
-            'implicit namespaces specification'
+            'interpret module paths according to PEP-0420 implicit namespaces specification'
         ),
     )
     parser.add_argument(
@@ -559,7 +557,7 @@ Note: By default this script will not overwrite already created files."""),
         action='store',
         dest='release',
         help=__(
-            'project release, used when --full is given, ' 'defaults to --doc-version'
+            'project release, used when --full is given, defaults to --doc-version'
         ),
     )
@@ -803,7 +803,7 @@ The format of the autosummary directive is documented in the
         action='store',
         dest='suffix',
         default='rst',
-        help=__('default suffix for files (default: ' '%(default)s)'),
+        help=__('default suffix for files (default: %(default)s)'),
     )
     parser.add_argument(
         '-t',
@@ -811,7 +811,7 @@ The format of the autosummary directive is documented in the
         action='store',
         dest='templates',
         default=None,
-        help=__('custom template directory (default: ' '%(default)s)'),
+        help=__('custom template directory (default: %(default)s)'),
     )
     parser.add_argument(
         '-i',
@@ -819,7 +819,7 @@ The format of the autosummary directive is documented in the
         action='store_true',
         dest='imported_members',
         default=False,
-        help=__('document imported members (default: ' '%(default)s)'),
+        help=__('document imported members (default: %(default)s)'),
     )
     parser.add_argument(
         '-a',
@@ -455,7 +455,7 @@ class CoverageBuilder(Builder):
             if self.app.quiet or self.app.warningiserror:
                 for meth in methods:
                     logger.warning(
-                        __('undocumented python method:' +
+                        __('undocumented python method:'
                            ' %s :: %s :: %s'),
                         name, class_name, meth)
             else:
@@ -202,7 +202,7 @@ class _NodeUpdater:
         old_foot_refs = list(is_autofootnote_ref.findall(self.node))
         new_foot_refs = list(is_autofootnote_ref.findall(self.patch))
         self.compare_references(old_foot_refs, new_foot_refs,
-                                __('inconsistent footnote references in translated message.' +
+                                __('inconsistent footnote references in translated message.'
                                    ' original: {0}, translated: {1}'))
         old_foot_namerefs: dict[str, list[nodes.footnote_reference]] = {}
         for r in old_foot_refs:
@@ -242,7 +242,7 @@ class _NodeUpdater:
         old_refs = list(is_refnamed_ref.findall(self.node))
         new_refs = list(is_refnamed_ref.findall(self.patch))
         self.compare_references(old_refs, new_refs,
-                                __('inconsistent references in translated message.' +
+                                __('inconsistent references in translated message.'
                                    ' original: {0}, translated: {1}'))
         old_ref_names = [r['refname'] for r in old_refs]
         new_ref_names = [r['refname'] for r in new_refs]
@@ -267,7 +267,7 @@ class _NodeUpdater:
         new_foot_refs = list(is_refnamed_footnote_ref.findall(self.patch))
         refname_ids_map: dict[str, list[str]] = {}
         self.compare_references(old_foot_refs, new_foot_refs,
-                                __('inconsistent footnote references in translated message.' +
+                                __('inconsistent footnote references in translated message.'
                                    ' original: {0}, translated: {1}'))
         for oldf in old_foot_refs:
             refname_ids_map.setdefault(oldf["refname"], []).append(oldf["ids"])
@@ -282,7 +282,7 @@ class _NodeUpdater:
         old_cite_refs = list(is_citation_ref.findall(self.node))
         new_cite_refs = list(is_citation_ref.findall(self.patch))
         self.compare_references(old_cite_refs, new_cite_refs,
-                                __('inconsistent citation references in translated message.' +
+                                __('inconsistent citation references in translated message.'
                                    ' original: {0}, translated: {1}'))
         refname_ids_map: dict[str, list[str]] = {}
         for oldc in old_cite_refs:
@@ -299,7 +299,7 @@ class _NodeUpdater:
         old_xrefs = [*self.node.findall(addnodes.pending_xref)]
         new_xrefs = [*self.patch.findall(addnodes.pending_xref)]
         self.compare_references(old_xrefs, new_xrefs,
-                                __('inconsistent term references in translated message.' +
+                                __('inconsistent term references in translated message.'
                                    ' original: {0}, translated: {1}'))

         xref_reftarget_map: dict[tuple[str, str, str] | None, dict[str, Any]] = {}
@@ -41,6 +41,6 @@ def test_html_code_role(app):
             '<span class="k">pass</span>')
     assert ('<p>Inline <code class="code highlight python docutils literal highlight-python">' +
             common_content + '</code> code block</p>') in content
-    assert ('<div class="highlight-python notranslate">' +
+    assert ('<div class="highlight-python notranslate">'
             '<div class="highlight"><pre><span></span>' +
             common_content) in content
@@ -1698,7 +1698,7 @@ def test_latex_code_role(app):
             r'\PYG{k}{pass}')
     assert (r'Inline \sphinxcode{\sphinxupquote{%' + '\n' +
             common_content + '%\n}} code block') in content
-    assert (r'\begin{sphinxVerbatim}[commandchars=\\\{\}]' +
+    assert (r'\begin{sphinxVerbatim}[commandchars=\\\{\}]'
             '\n' + common_content + '\n' + r'\end{sphinxVerbatim}') in content
@@ -45,8 +45,8 @@ def _check_warnings(expected_warnings: str, warning: str) -> None:
     warnings = strip_colors(re.sub(re.escape(os.sep) + '{1,2}', '/', warning))
     assert re.match(f'{expected_warnings}$', warnings), (
         "Warnings don't match:\n"
-        + f'--- Expected (regex):\n{expected_warnings}\n'
-        + f'--- Got:\n{warnings}'
+        f'--- Expected (regex):\n{expected_warnings}\n'
+        f'--- Got:\n{warnings}'
     )
     sys.modules.pop('autodoc_fodder', None)
@@ -302,8 +302,8 @@ def test_code_block(app):
     assert len(code_block) > 0
     actual = code_block[0].text
     expect = (
-        " def ruby?\n" +
-        " false\n" +
+        " def ruby?\n"
+        " false\n"
         " end"
     )
     assert actual == expect
@@ -333,8 +333,8 @@ def test_code_block_caption_latex(app):
     latex = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
     caption = '\\sphinxSetupCaptionForVerbatim{caption \\sphinxstyleemphasis{test} rb}'
     label = '\\def\\sphinxLiteralBlockLabel{\\label{\\detokenize{caption:id1}}}'
-    link = '\\hyperref[\\detokenize{caption:name-test-rb}]' \
-           '{Listing \\ref{\\detokenize{caption:name-test-rb}}}'
+    link = ('\\hyperref[\\detokenize{caption:name-test-rb}]'
+            '{Listing \\ref{\\detokenize{caption:name-test-rb}}}')
     assert caption in latex
     assert label in latex
     assert link in latex
@@ -345,12 +345,12 @@ def test_code_block_namedlink_latex(app):
     app.build(force_all=True)
     latex = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
     label1 = '\\def\\sphinxLiteralBlockLabel{\\label{\\detokenize{caption:name-test-rb}}}'
-    link1 = '\\hyperref[\\detokenize{caption:name-test-rb}]'\
-            '{\\sphinxcrossref{\\DUrole{std,std-ref}{Ruby}}'
+    link1 = ('\\hyperref[\\detokenize{caption:name-test-rb}]'
+             '{\\sphinxcrossref{\\DUrole{std,std-ref}{Ruby}}')
     label2 = ('\\def\\sphinxLiteralBlockLabel'
               '{\\label{\\detokenize{namedblocks:some-ruby-code}}}')
-    link2 = '\\hyperref[\\detokenize{namedblocks:some-ruby-code}]'\
-            '{\\sphinxcrossref{\\DUrole{std,std-ref}{the ruby code}}}'
+    link2 = ('\\hyperref[\\detokenize{namedblocks:some-ruby-code}]'
+             '{\\sphinxcrossref{\\DUrole{std,std-ref}{the ruby code}}}')
     assert label1 in latex
     assert link1 in latex
     assert label2 in latex
@@ -453,8 +453,8 @@ def test_literalinclude_caption_latex(app):
     latex = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
     caption = '\\sphinxSetupCaptionForVerbatim{caption \\sphinxstylestrong{test} py}'
     label = '\\def\\sphinxLiteralBlockLabel{\\label{\\detokenize{caption:id2}}}'
-    link = '\\hyperref[\\detokenize{caption:name-test-py}]' \
-           '{Listing \\ref{\\detokenize{caption:name-test-py}}}'
+    link = ('\\hyperref[\\detokenize{caption:name-test-py}]'
+            '{Listing \\ref{\\detokenize{caption:name-test-py}}}')
     assert caption in latex
     assert label in latex
     assert link in latex
@@ -465,12 +465,12 @@ def test_literalinclude_namedlink_latex(app):
     app.build(filenames='index')
     latex = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
     label1 = '\\def\\sphinxLiteralBlockLabel{\\label{\\detokenize{caption:name-test-py}}}'
-    link1 = '\\hyperref[\\detokenize{caption:name-test-py}]'\
-            '{\\sphinxcrossref{\\DUrole{std,std-ref}{Python}}'
+    link1 = ('\\hyperref[\\detokenize{caption:name-test-py}]'
+             '{\\sphinxcrossref{\\DUrole{std,std-ref}{Python}}')
     label2 = ('\\def\\sphinxLiteralBlockLabel'
               '{\\label{\\detokenize{namedblocks:some-python-code}}}')
-    link2 = '\\hyperref[\\detokenize{namedblocks:some-python-code}]'\
-            '{\\sphinxcrossref{\\DUrole{std,std-ref}{the python code}}}'
+    link2 = ('\\hyperref[\\detokenize{namedblocks:some-python-code}]'
+             '{\\sphinxcrossref{\\DUrole{std,std-ref}{the python code}}}')
     assert label1 in latex
     assert link1 in latex
     assert label2 in latex
@@ -55,8 +55,8 @@ def test_object_description_sections(app):


 def test_object_description_content_line_number(app):
-    text = (".. py:function:: foo(bar)\n" +
-            "\n" +
+    text = (".. py:function:: foo(bar)\n"
+            "\n"
             " Some link here: :ref:`abc`\n")
     doc = restructuredtext.parse(app, text)
     xrefs = list(doc.findall(condition=addnodes.pending_xref))
@@ -485,8 +485,8 @@ def test_domain_cpp_ast_function_definitions():
     check('function', 'bool namespaced::theclass::method(arg1, arg2)',
           {1: "namespaced::theclass::method__arg1.arg2",
            2: "N10namespaced8theclass6methodE4arg14arg2"})
-    x = 'std::vector<std::pair<std::string, int>> &module::test(register int ' \
-        'foo, bar, std::string baz = "foobar, blah, bleh") const = 0'
+    x = ('std::vector<std::pair<std::string, int>> &module::test(register int '
+         'foo, bar, std::string baz = "foobar, blah, bleh") const = 0')
     check('function', x, {1: "module::test__i.bar.ssC",
                           2: "NK6module4testEi3barNSt6stringE"})
     check('function', 'void f(std::pair<A, B>)',
@@ -557,8 +557,8 @@ def test_domain_cpp_ast_function_definitions():
     check('function', 'MyClass::pointer MyClass::operator->()',
           {1: "MyClass::pointer-operator", 2: "N7MyClassptEv"})

-    x = 'std::vector<std::pair<std::string, int>> &module::test(register int ' \
-        'foo, bar[n], std::string baz = "foobar, blah, bleh") const = 0'
+    x = ('std::vector<std::pair<std::string, int>> &module::test(register int '
+         'foo, bar[n], std::string baz = "foobar, blah, bleh") const = 0')
     check('function', x, {1: "module::test__i.barA.ssC",
                           2: "NK6module4testEiAn_3barNSt6stringE",
                           3: "NK6module4testEiA1n_3barNSt6stringE"})
@@ -920,12 +920,12 @@ def test_domain_cpp_ast_requires_clauses():
     check('function', 'template<typename T> requires A && B || C and D void f()',
           {4: 'I0EIQooaa1A1Baa1C1DE1fvv'})
     check('function',
-          'template<typename T> requires R<T> ' +
-          'template<typename U> requires S<T> ' +
+          'template<typename T> requires R<T> '
+          'template<typename U> requires S<T> '
           'void A<T>::f() requires B',
           {4: 'I0EIQ1RI1TEEI0EIQaa1SI1TE1BEN1A1fEvv'})
     check('function',
-          'template<template<typename T> requires R<T> typename X> ' +
+          'template<template<typename T> requires R<T> typename X> '
          'void f()',
          {2: 'II0EIQ1RI1TEE0E1fv', 4: 'II0EIQ1RI1TEE0E1fvv'})
     check('type',
@@ -234,8 +234,8 @@ def test_no_index_entry(app):


 def test_module_content_line_number(app):
-    text = (".. js:module:: foo\n" +
-            "\n" +
+    text = (".. js:module:: foo\n"
+            "\n"
             " Some link here: :ref:`abc`\n")
     doc = restructuredtext.parse(app, text)
     xrefs = list(doc.findall(condition=addnodes.pending_xref))
@@ -691,8 +691,8 @@ def test_domain_py_python_maximum_signature_line_length_in_text(app):


 def test_module_content_line_number(app):
-    text = (".. py:module:: foo\n" +
-            "\n" +
+    text = (".. py:module:: foo\n"
+            "\n"
             " Some link here: :ref:`abc`\n")
     doc = restructuredtext.parse(app, text)
     xrefs = list(doc.findall(condition=addnodes.pending_xref))
@@ -322,9 +322,10 @@ def test_toc_all_references_should_exist_pep420_enabled(make_app, apidoc):
         if not (outdir / filename).is_file():
             missing_files.append(filename)

-    assert len(missing_files) == 0, \
-        'File(s) referenced in TOC not found: {}\n' \
-        'TOC:\n{}'.format(", ".join(missing_files), toc)
+    all_missing = ', '.join(missing_files)
+    assert len(missing_files) == 0, (
+        f'File(s) referenced in TOC not found: {all_missing}\nTOC:\n{toc}'
+    )


 @pytest.mark.apidoc(
@@ -352,9 +353,10 @@ def test_toc_all_references_should_exist_pep420_disabled(make_app, apidoc):
         if not (outdir / filename).is_file():
             missing_files.append(filename)

-    assert len(missing_files) == 0, \
-        'File(s) referenced in TOC not found: {}\n' \
-        'TOC:\n{}'.format(", ".join(missing_files), toc)
+    all_missing = ', '.join(missing_files)
+    assert len(missing_files) == 0, (
+        f'File(s) referenced in TOC not found: {all_missing}\nTOC:\n{toc}'
+    )


 def extract_toc(path):
@@ -186,8 +186,9 @@ def test_get_items_summary(make_app, app_params):
         'C.C2': 'This is a nested inner class docstring',
     }
     for key, expected in expected_values.items():
-        assert autosummary_items[key][2] == expected, 'Summary for %s was %r -'\
-            ' expected %r' % (key, autosummary_items[key], expected)
+        assert autosummary_items[key][2] == expected, (
+            f'Summary for {key} was {autosummary_items[key]!r} - expected {expected!r}'
+        )

     # check an item in detail
     assert 'func' in autosummary_items
@@ -17,8 +17,8 @@ def test_ifconfig(app):

 def test_ifconfig_content_line_number(app):
     app.setup_extension("sphinx.ext.ifconfig")
-    text = (".. ifconfig:: confval1\n" +
-            "\n" +
+    text = (".. ifconfig:: confval1\n"
+            "\n"
             " Some link here: :ref:`abc`\n")
     doc = restructuredtext.parse(app, text)
     xrefs = list(doc.findall(condition=addnodes.pending_xref))
@@ -15,7 +15,7 @@ if TYPE_CHECKING:
 def check_viewcode_output(app: SphinxTestApp) -> str:
     warnings = re.sub(r'\\+', '/', app.warning.getvalue())
     assert re.findall(
-        r"index.rst:\d+: WARNING: Object named 'func1' not found in include " +
+        r"index.rst:\d+: WARNING: Object named 'func1' not found in include "
         r"file .*/spam/__init__.py'",
         warnings,
     )
@@ -131,7 +131,7 @@ def test_local_source_files(app):

     warnings = re.sub(r'\\+', '/', app.warning.getvalue())
     assert re.findall(
-        r"index.rst:\d+: WARNING: Object named 'func1' not found in include " +
+        r"index.rst:\d+: WARNING: Object named 'func1' not found in include "
         r"file .*/not_a_package/__init__.py'",
         warnings,
     )
@@ -1307,8 +1307,7 @@ def test_xml_label_targets(app):
     assert_elem(
         para3[0],
         ['X', 'bridge label',
-         'IS NOT TRANSLATABLE BUT LINKED TO TRANSLATED ' +
-         'SECTION TITLE.'],
+         'IS NOT TRANSLATABLE BUT LINKED TO TRANSLATED SECTION TITLE.'],
         ['label-bridged-target-section'])
     assert_elem(
         para3[1],