Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

commit b20e04968e
Merge pull request #10107 from Jean-Abou-Samra/intl-warnings

    Implement #noqa for i18n
@@ -68,6 +68,24 @@ be translated you need to follow these instructions:
 * Run your desired build.
 
+In order to protect against mistakes, a warning is emitted if
+cross-references in the translated paragraph do not match those from the
+original. This can be turned off globally using the
+:confval:`suppress_warnings` configuration variable. Alternatively, to
+turn it off for one message only, end the message with ``#noqa`` like
+this::
+
+   Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse
+   risus tortor, luctus id ultrices at. #noqa
+
+(Write ``\#noqa`` in case you want to have "#noqa" literally in the
+text. This does not apply to code blocks, where ``#noqa`` is ignored
+because code blocks do not contain references anyway.)
+
+.. versionadded:: 4.5
+
+   The ``#noqa`` mechanism.
+
 Translating with sphinx-intl
 ----------------------------
 
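A note for readers: the global switch mentioned above is a plain conf.py setting. A minimal sketch, not part of this commit; the project values are placeholders, and the warning name ``i18n.inconsistent_references`` is taken from the configuration hunks below:

    # conf.py -- illustrative sketch only
    language = 'de'                # placeholder target language
    locale_dirs = ['locales/']     # where compiled message catalogs live
    gettext_compact = False

    # Silence the new consistency check for the whole project instead of
    # marking individual messages with #noqa.
    suppress_warnings = ['i18n.inconsistent_references']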
@@ -316,7 +316,11 @@ General configuration
    * ``app.add_role``
    * ``app.add_generic_role``
    * ``app.add_source_parser``
+   * ``autosectionlabel.*``
    * ``download.not_readable``
+   * ``epub.unknown_project_files``
+   * ``epub.duplicated_toc_entry``
+   * ``i18n.inconsistent_references``
    * ``image.not_readable``
    * ``ref.term``
    * ``ref.ref``
@@ -332,11 +336,9 @@ General configuration
    * ``toc.excluded``
    * ``toc.not_readable``
    * ``toc.secnum``
-   * ``epub.unknown_project_files``
-   * ``epub.duplicated_toc_entry``
-   * ``autosectionlabel.*``
 
-   You can choose from these types.
+   You can choose from these types. You can also give only the first
+   component to exclude all warnings attached to it.
 
    Now, this option should be considered *experimental*.
 
@@ -366,6 +368,10 @@ General configuration
 
       Added ``toc.excluded`` and ``toc.not_readable``
 
+   .. versionadded:: 4.5
+
+      Added ``i18n.inconsistent_references``
+
 .. confval:: needs_sphinx
 
    If set to a ``major.minor`` version string like ``'1.1'``, Sphinx will
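The new ``i18n.inconsistent_references`` entry follows the usual ``type.subtype`` naming, and the added sentence says the leading component alone covers every subtype. To make that rule concrete, an illustrative matcher, not Sphinx's internal implementation:

    # Illustrative only: mirrors the documented rule, not Sphinx internals.
    def is_suppressed(wtype: str, subtype: str, suppress_warnings: list) -> bool:
        # An entry may be the full 'type.subtype' pair or just 'type'.
        return any(entry in (wtype, f'{wtype}.{subtype}')
                   for entry in suppress_warnings)

    assert is_suppressed('i18n', 'inconsistent_references', ['i18n'])
    assert is_suppressed('i18n', 'inconsistent_references',
                         ['i18n.inconsistent_references'])
    assert not is_suppressed('i18n', 'inconsistent_references', ['ref.ref'])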
@@ -1,6 +1,7 @@
 """Docutils transforms used by Sphinx when reading documents."""
 
 from os import path
+from re import DOTALL, match
 from textwrap import indent
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, TypeVar
 
@@ -74,6 +75,14 @@ def publish_msgstr(app: "Sphinx", source: str, source_path: str, source_line: in
     config.rst_prolog = rst_prolog  # type: ignore
 
 
+def parse_noqa(source: str) -> Tuple[str, bool]:
+    m = match(r"(.*)(?<!\\)#\s*noqa\s*$", source, DOTALL)
+    if m:
+        return m.group(1), True
+    else:
+        return source, False
+
+
 class PreserveTranslatableMessages(SphinxTransform):
     """
     Preserve original translatable messages before translation
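The module above is presumably sphinx/transforms/i18n.py (the ``Locale`` transform changed below lives there). A behaviour sketch for ``parse_noqa``, reasoned from the regex; the import path is an assumption and the snippet needs Sphinx >= 4.5 to run:

    from sphinx.transforms.i18n import parse_noqa  # assumed import path

    # A trailing marker is stripped and the noqa flag is set.
    text, noqa = parse_noqa("TRANSLATED TEXT. #noqa")
    assert noqa and text.rstrip() == "TRANSLATED TEXT."

    # DOTALL lets whitespace, including newlines, separate '#' from 'noqa'.
    text, noqa = parse_noqa("LINE ONE\nLINE TWO #\n  noqa ")
    assert noqa and text == "LINE ONE\nLINE TWO "

    # The (?<!\\) lookbehind leaves a backslash-escaped marker untouched.
    escaped = r"KEEP \#noqa LITERALLY. \#noqa"
    assert parse_noqa(escaped) == (escaped, False)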
@@ -111,6 +120,14 @@ class Locale(SphinxTransform):
         # phase1: replace reference ids with translated names
         for node, msg in extract_messages(self.document):
             msgstr = catalog.gettext(msg)
+
+            # There is no point in having #noqa on literal blocks because
+            # they cannot contain references. Recognizing it would just
+            # completely prevent escaping the #noqa. Outside of literal
+            # blocks, one can always write \#noqa.
+            if not isinstance(node, LITERAL_TYPE_NODES):
+                msgstr, _ = parse_noqa(msgstr)
+
             # XXX add marker to untranslated parts
             if not msgstr or msgstr == msg or not msgstr.strip():
                 # as-of-yet untranslated
@@ -131,6 +148,7 @@ class Locale(SphinxTransform):
 
             patch = publish_msgstr(self.app, msgstr, source,
                                    node.line, self.config, settings)
+            # FIXME: no warnings about inconsistent references in this part
             # XXX doctest and other block markup
             if not isinstance(patch, nodes.paragraph):
                 continue  # skip for now
@@ -220,6 +238,11 @@ class Locale(SphinxTransform):
                 continue  # skip if the node is already translated by phase1
 
             msgstr = catalog.gettext(msg)
+
+            # See above.
+            if not isinstance(node, LITERAL_TYPE_NODES):
+                msgstr, noqa = parse_noqa(msgstr)
+
             # XXX add marker to untranslated parts
             if not msgstr or msgstr == msg:  # as-of-yet untranslated
                 continue
@@ -265,7 +288,6 @@ class Locale(SphinxTransform):
 
             patch = publish_msgstr(self.app, msgstr, source,
                                    node.line, self.config, settings)
-
             # Structural Subelements phase2
             if isinstance(node, nodes.title):
                 # get <title> node that placed as a first child
@@ -295,13 +317,13 @@ class Locale(SphinxTransform):
             is_autofootnote_ref = NodeMatcher(nodes.footnote_reference, auto=Any)
             old_foot_refs: List[nodes.footnote_reference] = list(node.findall(is_autofootnote_ref))  # NOQA
             new_foot_refs: List[nodes.footnote_reference] = list(patch.findall(is_autofootnote_ref))  # NOQA
-            if len(old_foot_refs) != len(new_foot_refs):
+            if not noqa and len(old_foot_refs) != len(new_foot_refs):
                 old_foot_ref_rawsources = [ref.rawsource for ref in old_foot_refs]
                 new_foot_ref_rawsources = [ref.rawsource for ref in new_foot_refs]
                 logger.warning(__('inconsistent footnote references in translated message.' +
                                   ' original: {0}, translated: {1}')
                                .format(old_foot_ref_rawsources, new_foot_ref_rawsources),
-                               location=node)
+                               location=node, type='i18n', subtype='inconsistent_references')
             old_foot_namerefs: Dict[str, List[nodes.footnote_reference]] = {}
             for r in old_foot_refs:
                 old_foot_namerefs.setdefault(r.get('refname'), []).append(r)
@@ -338,13 +360,13 @@ class Locale(SphinxTransform):
             is_refnamed_ref = NodeMatcher(nodes.reference, refname=Any)
             old_refs: List[nodes.reference] = list(node.findall(is_refnamed_ref))
             new_refs: List[nodes.reference] = list(patch.findall(is_refnamed_ref))
-            if len(old_refs) != len(new_refs):
+            if not noqa and len(old_refs) != len(new_refs):
                 old_ref_rawsources = [ref.rawsource for ref in old_refs]
                 new_ref_rawsources = [ref.rawsource for ref in new_refs]
                 logger.warning(__('inconsistent references in translated message.' +
                                   ' original: {0}, translated: {1}')
                                .format(old_ref_rawsources, new_ref_rawsources),
-                               location=node)
+                               location=node, type='i18n', subtype='inconsistent_references')
             old_ref_names = [r['refname'] for r in old_refs]
             new_ref_names = [r['refname'] for r in new_refs]
             orphans = list(set(old_ref_names) - set(new_ref_names))
@@ -366,13 +388,13 @@ class Locale(SphinxTransform):
             old_foot_refs = list(node.findall(is_refnamed_footnote_ref))
             new_foot_refs = list(patch.findall(is_refnamed_footnote_ref))
             refname_ids_map: Dict[str, List[str]] = {}
-            if len(old_foot_refs) != len(new_foot_refs):
+            if not noqa and len(old_foot_refs) != len(new_foot_refs):
                 old_foot_ref_rawsources = [ref.rawsource for ref in old_foot_refs]
                 new_foot_ref_rawsources = [ref.rawsource for ref in new_foot_refs]
                 logger.warning(__('inconsistent footnote references in translated message.' +
                                   ' original: {0}, translated: {1}')
                                .format(old_foot_ref_rawsources, new_foot_ref_rawsources),
-                               location=node)
+                               location=node, type='i18n', subtype='inconsistent_references')
             for oldf in old_foot_refs:
                 refname_ids_map.setdefault(oldf["refname"], []).append(oldf["ids"])
             for newf in new_foot_refs:
@@ -385,13 +407,13 @@ class Locale(SphinxTransform):
             old_cite_refs: List[nodes.citation_reference] = list(node.findall(is_citation_ref))
             new_cite_refs: List[nodes.citation_reference] = list(patch.findall(is_citation_ref))  # NOQA
             refname_ids_map = {}
-            if len(old_cite_refs) != len(new_cite_refs):
+            if not noqa and len(old_cite_refs) != len(new_cite_refs):
                 old_cite_ref_rawsources = [ref.rawsource for ref in old_cite_refs]
                 new_cite_ref_rawsources = [ref.rawsource for ref in new_cite_refs]
                 logger.warning(__('inconsistent citation references in translated message.' +
                                   ' original: {0}, translated: {1}')
                                .format(old_cite_ref_rawsources, new_cite_ref_rawsources),
-                               location=node)
+                               location=node, type='i18n', subtype='inconsistent_references')
             for oldc in old_cite_refs:
                 refname_ids_map.setdefault(oldc["refname"], []).append(oldc["ids"])
             for newc in new_cite_refs:
@@ -405,13 +427,13 @@ class Locale(SphinxTransform):
             old_xrefs = list(node.findall(addnodes.pending_xref))
             new_xrefs = list(patch.findall(addnodes.pending_xref))
             xref_reftarget_map = {}
-            if len(old_xrefs) != len(new_xrefs):
+            if not noqa and len(old_xrefs) != len(new_xrefs):
                 old_xref_rawsources = [xref.rawsource for xref in old_xrefs]
                 new_xref_rawsources = [xref.rawsource for xref in new_xrefs]
                 logger.warning(__('inconsistent term references in translated message.' +
                                   ' original: {0}, translated: {1}')
                                .format(old_xref_rawsources, new_xref_rawsources),
-                               location=node)
+                               location=node, type='i18n', subtype='inconsistent_references')
 
             def get_ref_key(node: addnodes.pending_xref) -> Optional[Tuple[str, str, str]]:
                 case = node["refdomain"], node["reftype"]
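The recurring change at every warning site is the ``not noqa`` guard plus the keyword pair ``type='i18n', subtype='inconsistent_references'``. For extension authors, a sketch of the same pattern; ``warn_if_refs_differ`` is a hypothetical helper, while ``sphinx.util.logging`` and the keyword arguments are exactly what the diff uses:

    from sphinx.util import logging

    logger = logging.getLogger(__name__)

    def warn_if_refs_differ(old_refs, new_refs, node, noqa=False):
        # Hypothetical helper: skip the check when the translator opted out,
        # otherwise emit a warning that suppress_warnings can silence.
        if not noqa and len(old_refs) != len(new_refs):
            logger.warning('inconsistent references in translated message. '
                           'original: {0}, translated: {1}'
                           .format([r.rawsource for r in old_refs],
                                   [r.rawsource for r in new_refs]),
                           location=node, type='i18n',
                           subtype='inconsistent_references')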
@@ -49,6 +49,14 @@ code blocks
 literal-block
 in list
 
+.. highlight:: none
+
+::
+
+   test_code_for_noqa()
+   continued()
+
+
 doctest blocks
 ==============
 
tests/roots/test-intl/noqa.txt (new file, 16 lines)
@@ -0,0 +1,16 @@
+First section
+=============
+
+Some text with a reference, :ref:`next-section`.
+
+Another reference: :ref:`next-section`.
+
+This should allow to test escaping ``#noqa``.
+
+.. _next-section:
+
+Next section
+============
+
+Some text, again referring to the section: :ref:`next-section`.
+
@@ -77,6 +77,12 @@ msgid "literal-block\n"
 msgstr "LITERAL-BLOCK\n"
 "IN LIST"
 
+msgid "test_code_for_noqa()\n"
+"continued()"
+msgstr ""
+"# TRAILING noqa SHOULD NOT GET STRIPPED\n"
+"# FROM THIS BLOCK. #noqa"
+
 msgid "doctest blocks"
 msgstr "DOCTEST-BLOCKS"
 
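The trailing ``#noqa`` in this catalog entry is expected to survive into the built output, because the ``Locale`` transform only calls ``parse_noqa`` outside literal-type nodes. A sketch of that guard; the exact contents of ``LITERAL_TYPE_NODES`` and the import path are assumptions, not taken from the diff:

    from docutils import nodes
    from sphinx.transforms.i18n import parse_noqa  # assumed import path

    # Assumption: literal and doctest blocks are among LITERAL_TYPE_NODES.
    LITERAL_TYPE_NODES = (nodes.literal_block, nodes.doctest_block)

    node = nodes.literal_block('', '')
    msgstr = '# TRAILING noqa SHOULD NOT GET STRIPPED\n# FROM THIS BLOCK. #noqa'
    if not isinstance(node, LITERAL_TYPE_NODES):   # False for a literal block,
        msgstr, _ = parse_noqa(msgstr)             # so the marker is never stripped
    assert msgstr.endswith('#noqa')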
tests/roots/test-intl/xx/LC_MESSAGES/noqa.po (new file, 46 lines)
@@ -0,0 +1,46 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C)
+# This file is distributed under the same license as the Sphinx intl <Tests> package.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: \n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2022-01-16 15:23+0100\n"
+"PO-Revision-Date: 2022-01-16 15:23+0100\n"
+"Last-Translator: Jean Abou Samra <jean@abou-samra.fr>\n"
+"Language-Team: \n"
+"Language: xx\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"X-Generator: Poedit 3.0\n"
+
+#: ../tests/roots/test-intl/noqa.txt:2
+msgid "First section"
+msgstr "FIRST SECTION"
+
+#: ../tests/roots/test-intl/noqa.txt:4
+msgid "Some text with a reference, :ref:`next-section`."
+msgstr "TRANSLATED TEXT WITHOUT REFERENCE. #noqa"
+
+#: ../tests/roots/test-intl/noqa.txt:6
+msgid "Another reference: :ref:`next-section`."
+msgstr ""
+"TEST noqa WHITESPACE INSENSITIVITY.\n"
+"# \n"
+" noqa"
+
+#: ../tests/roots/test-intl/noqa.txt:8
+msgid "This should allow to test escaping ``#noqa``."
+msgstr "``#noqa`` IS ESCAPED AT THE END OF THIS STRING. \\#noqa"
+
+#: ../tests/roots/test-intl/noqa.txt:13
+msgid "Next section"
+msgstr "NEXT SECTION WITH PARAGRAPH TO TEST BARE noqa"
+
+# This edge case should not fail.
+#: ../tests/roots/test-intl/noqa.txt:15
+msgid "Some text, again referring to the section: :ref:`next-section`."
+msgstr "#noqa"
@@ -186,6 +186,32 @@ def test_text_inconsistency_warnings(app, warning):
     assert_re_search(expected_citation_warning_expr, warnings)
 
 
+@sphinx_intl
+@pytest.mark.sphinx('text')
+@pytest.mark.test_params(shared_result='test_intl_basic')
+def test_noqa(app, warning):
+    app.build()
+    result = (app.outdir / 'noqa.txt').read_text()
+    expect = r"""FIRST SECTION
+*************
+
+TRANSLATED TEXT WITHOUT REFERENCE.
+
+TEST noqa WHITESPACE INSENSITIVITY.
+
+"#noqa" IS ESCAPED AT THE END OF THIS STRING. #noqa
+
+
+NEXT SECTION WITH PARAGRAPH TO TEST BARE noqa
+*********************************************
+
+Some text, again referring to the section: NEXT SECTION WITH PARAGRAPH
+TO TEST BARE noqa.
+"""
+    assert result == expect
+    assert "next-section" not in getwarning(warning)
+
+
 @sphinx_intl
 @pytest.mark.sphinx('text')
 @pytest.mark.test_params(shared_result='test_intl_basic')
@@ -1180,6 +1206,9 @@ def test_additional_targets_should_be_translated(app):
         """<span class="c1"># SYS IMPORTING</span>""")
     assert_count(expected_expr, result, 1)
 
+    # '#noqa' should remain in literal blocks.
+    assert_count("#noqa", result, 1)
+
     # [raw.txt]
 
     result = (app.outdir / 'raw.html').read_text()
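To exercise just the affected tests from a Sphinx checkout (assuming the test dependencies are installed), something along these lines should work:

    # Convenience sketch, not part of the commit.
    import sys
    import pytest

    # '-k' narrows the run to test_noqa and the additional-targets tests.
    sys.exit(pytest.main(['tests/test_intl.py', '-k',
                          'noqa or additional_targets']))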