Mirror of https://github.com/sphinx-doc/sphinx.git
Refactor/speed up test_intl by combining all tests with a certain builder into a generator.
This commit is contained in:
parent a73ab32bc5
commit be72ed6dc7
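For context on the refactor: nose treats a test function that yields as a generator test. Each yielded tuple of a callable plus its arguments is collected and run as a separate test case, so one expensive app.builder.build_all() call can back many independent assertions. A minimal sketch of that pattern (the build_once helper and its return values are hypothetical; only the yield protocol is the point):

from nose.tools import assert_equal


def build_once():
    # hypothetical stand-in for the expensive step (e.g. app.builder.build_all())
    return {'bom.txt': u"\nDatei mit UTF-8", 'rubric.txt': u"\nI18N WITH RUBRIC"}


def test_text_outputs():
    outputs = build_once()  # one build...
    # ...many test cases: nose runs each yielded (callable, *args) tuple separately
    yield assert_equal, outputs['bom.txt'], u"\nDatei mit UTF-8"
    yield assert_equal, outputs['rubric.txt'], u"\nI18N WITH RUBRIC"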
@@ -123,6 +123,9 @@ class path(text_type):
        """
        os.unlink(self)

    def utime(self, arg):
        os.utime(self, arg)

    def write_text(self, text, **kwargs):
        """
        Writes the given `text` to the file.
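The utime helper added above simply forwards to os.utime; later in this diff, test_rebuild_by_mo_mtime calls it with None to bump a .mo file's modification time so that app.env.update() reports the document as outdated again. A standalone sketch of that behavior (the temporary file is a hypothetical stand-in for xx/LC_MESSAGES/bom.mo):

import os
import tempfile
import time

fd, mo_file = tempfile.mkstemp(suffix='.mo')  # hypothetical stand-in for the .mo file
os.close(fd)

before = os.path.getmtime(mo_file)
time.sleep(1)
os.utime(mo_file, None)   # what path.utime(None) boils down to: touch mtime to "now"
after = os.path.getmtime(mo_file)
assert after > before     # a newer .mo mtime is what makes the rebuild pick the file up
os.remove(mo_file)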
@@ -16,15 +16,31 @@ import re
from subprocess import Popen, PIPE
from xml.etree import ElementTree

from nose.tools import assert_equal, assert_in, assert_not_in
from six import string_types

from util import tempdir, rootdir, path, with_app, SkipTest
from util import tempdir, rootdir, path, gen_with_app, SkipTest


root = tempdir / 'test-intl'


def with_intl_app(*args, **kw):
def re_search(regex, text, flags=0):
    if not re.search(regex, text, flags):
        assert False, '%r did not match %r' % (regex, text)


def not_re_search(regex, text, flags=0):
    if re.search(regex, text, flags):
        assert False, '%r did match %r' % (regex, text)


def startswith(thing, prefix):
    if not thing.startswith(prefix):
        assert False, '%r does not start with %r' % (thing, prefix)


def gen_with_intl_app(*args, **kw):
    default_kw = {
        'testroot': 'intl',
        'confoverrides': {
@@ -33,7 +49,7 @@ def with_intl_app(*args, **kw):
        },
    }
    default_kw.update(kw)
    return with_app(*args, **default_kw)
    return gen_with_app(*args, **default_kw)


def setup_module():
@@ -97,126 +113,38 @@ def assert_elem(elem, texts=None, refs=None, names=None):
    assert _names == names

@with_intl_app(buildername='text')
def test_simple(app, status, warning):
    app.builder.build(['bom'])
    result = (app.outdir / 'bom.txt').text(encoding='utf-8')
    expect = (u"\nDatei mit UTF-8"
              u"\n***************\n"  # underline matches new translation
              u"\nThis file has umlauts: äöü.\n")
    assert result == expect
@gen_with_intl_app('text', freshenv=True)
def test_text_builder(app, status, warning):
    app.builder.build_all()


@with_intl_app(buildername='text')
def test_subdir(app, status, warning):
    app.builder.build(['subdir/contents'])
    result = (app.outdir / 'subdir' / 'contents.txt').text(encoding='utf-8')
    assert result.startswith(u"\nsubdir contents\n***************\n")


@with_intl_app(buildername='text')
def test_i18n_warnings_in_translation(app, status, warning):
    app.outdir.rmtree(True)  # for warnings acceleration
    app.doctreedir.rmtree(True)
    app.builder.build(['warnings'])
    result = (app.outdir / 'warnings.txt').text(encoding='utf-8')
    expect = (u"\nI18N WITH REST WARNINGS"
              u"\n***********************\n"
              u"\nLINE OF >>``<<BROKEN LITERAL MARKUP.\n")

    assert result == expect
    # --- warnings in translation

    warnings = warning.getvalue().replace(os.sep, '/')
    warning_expr = u'.*/warnings.txt:4: ' \
                   u'WARNING: Inline literal start-string without end-string.\n'
    assert re.search(warning_expr, warnings)
    yield re_search, warning_expr, warnings

    result = (app.outdir / 'warnings.txt').text(encoding='utf-8')
    expect = (u"\nI18N WITH REST WARNINGS"
              u"\n***********************\n"
              u"\nLINE OF >>``<<BROKEN LITERAL MARKUP.\n")
    yield assert_equal, result, expect

@with_intl_app(buildername='html', freshenv=True)
def test_i18n_footnote_break_refid(app, status, warning):
    # test for #955 cant-build-html-with-footnotes-when-using
    app.builder.build(['footnote'])
    (app.outdir / 'footnote.html').text(encoding='utf-8')
    # expect no error by build
    # --- simple translation; check title underlines

    result = (app.outdir / 'bom.txt').text(encoding='utf-8')
    expect = (u"\nDatei mit UTF-8"
              u"\n***************\n"  # underline matches new translation
              u"\nThis file has umlauts: äöü.\n")
    yield assert_equal, result, expect

@with_intl_app(buildername='xml')
def test_i18n_footnote_regression(app, status, warning):
    # regression test for fix #955, #1176
    #app.builddir.rmtree(True)
    app.builder.build(['footnote'])
    et = ElementTree.parse(app.outdir / 'footnote.xml')
    secs = et.findall('section')
    # --- check translation in subdirs

    para0 = secs[0].findall('paragraph')
    assert_elem(
        para0[0],
        texts=['I18N WITH FOOTNOTE', 'INCLUDE THIS CONTENTS',
               '2', '[ref]', '1', '100', '.'],
        refs=['i18n-with-footnote', 'ref'])
    result = (app.outdir / 'subdir' / 'contents.txt').text(encoding='utf-8')
    yield startswith, result, u"\nsubdir contents\n***************\n"

    footnote0 = secs[0].findall('footnote')
    assert_elem(
        footnote0[0],
        texts=['1', 'THIS IS A AUTO NUMBERED FOOTNOTE.'],
        names=['1'])
    assert_elem(
        footnote0[1],
        texts=['100', 'THIS IS A NUMBERED FOOTNOTE.'],
        names=['100'])
    assert_elem(
        footnote0[2],
        texts=['2', 'THIS IS A AUTO NUMBERED NAMED FOOTNOTE.'],
        names=['named'])
    # --- check warnings for inconsistency in number of references

    citation0 = secs[0].findall('citation')
    assert_elem(
        citation0[0],
        texts=['ref', 'THIS IS A NAMED FOOTNOTE.'],
        names=['ref'])

    warnings = warning.getvalue().replace(os.sep, '/')
    warning_expr = u'.*/footnote.xml:\\d*: SEVERE: Duplicate ID: ".*".\n'
    assert not re.search(warning_expr, warnings)


@with_intl_app(buildername='xml', freshenv=True)
def test_i18n_footnote_backlink(app, status, warning):
    # i18n test for #1058
    app.builder.build(['footnote'])
    et = ElementTree.parse(app.outdir / 'footnote.xml')
    secs = et.findall('section')

    para0 = secs[0].findall('paragraph')
    refs0 = para0[0].findall('footnote_reference')
    refid2id = dict([
        (r.attrib.get('refid'), r.attrib.get('ids')) for r in refs0])

    footnote0 = secs[0].findall('footnote')
    for footnote in footnote0:
        ids = footnote.attrib.get('ids')
        backrefs = footnote.attrib.get('backrefs')
        assert refid2id[ids] == backrefs


@with_intl_app(buildername='xml')
def test_i18n_refs_python_domain(app, status, warning):
    app.builder.build(['refs_python_domain'])
    et = ElementTree.parse(app.outdir / 'refs_python_domain.xml')
    secs = et.findall('section')

    # regression test for fix #1363
    para0 = secs[0].findall('paragraph')
    assert_elem(
        para0[0],
        texts=['SEE THIS DECORATOR:', 'sensitive_variables()', '.'],
        refs=['sensitive.sensitive_variables'])


@with_intl_app(buildername='text', freshenv=True)
def test_i18n_warn_for_number_of_references_inconsistency(app, status, warning):
    #app.builddir.rmtree(True)
    app.builder.build(['refs_inconsistency'])
    result = (app.outdir / 'refs_inconsistency.txt').text(encoding='utf-8')
    expect = (u"\nI18N WITH REFS INCONSISTENCY"
              u"\n****************************\n"
@@ -226,7 +154,7 @@ def test_i18n_warn_for_number_of_references_inconsistency(app, status, warning):
              u"\n[1] THIS IS A AUTO NUMBERED FOOTNOTE.\n"
              u"\n[ref2] THIS IS A NAMED FOOTNOTE.\n"
              u"\n[100] THIS IS A NUMBERED FOOTNOTE.\n")
    assert result == expect
    yield assert_equal, result, expect

    warnings = warning.getvalue().replace(os.sep, '/')
    warning_fmt = u'.*/refs_inconsistency.txt:\\d+: ' \
@@ -235,89 +163,10 @@ def test_i18n_warn_for_number_of_references_inconsistency(app, status, warning):
        warning_fmt % 'footnote references' +
        warning_fmt % 'references' +
        warning_fmt % 'references')
    assert re.search(expected_warning_expr, warnings)
    yield re_search, expected_warning_expr, warnings

    # --- check warning for literal block

@with_intl_app(buildername='html', freshenv=True)
def test_i18n_link_to_undefined_reference(app, status, warning):
    app.builder.build(['refs_inconsistency'])
    result = (app.outdir / 'refs_inconsistency.html').text(encoding='utf-8')

    expected_expr = ('<a class="reference external" '
                     'href="http://www.example.com">reference</a>')
    assert len(re.findall(expected_expr, result)) == 2

    expected_expr = ('<a class="reference internal" '
                     'href="#reference">reference</a>')
    assert len(re.findall(expected_expr, result)) == 0

    expected_expr = ('<a class="reference internal" '
                     'href="#i18n-with-refs-inconsistency">I18N WITH '
                     'REFS INCONSISTENCY</a>')
    assert len(re.findall(expected_expr, result)) == 1


@with_intl_app(buildername='xml', freshenv=True)
def test_i18n_keep_external_links(app, status, warning):
    # regression test for #1044
    app.builder.build(['external_links'])
    et = ElementTree.parse(app.outdir / 'external_links.xml')
    secs = et.findall('section')

    para0 = secs[0].findall('paragraph')
    # external link check
    assert_elem(
        para0[0],
        texts=['EXTERNAL LINK TO', 'Python', '.'],
        refs=['http://python.org/index.html'])

    # internal link check
    assert_elem(
        para0[1],
        texts=['EXTERNAL LINKS', 'IS INTERNAL LINK.'],
        refs=['i18n-with-external-links'])

    # inline link check
    assert_elem(
        para0[2],
        texts=['INLINE LINK BY', 'THE SPHINX SITE', '.'],
        refs=['http://sphinx-doc.org'])

    # unnamed link check
    assert_elem(
        para0[3],
        texts=['UNNAMED', 'LINK', '.'],
        refs=['http://google.com'])

    # link target swapped translation
    para1 = secs[1].findall('paragraph')
    assert_elem(
        para1[0],
        texts=['LINK TO', 'external2', 'AND', 'external1', '.'],
        refs=['http://example.com/external2',
              'http://example.com/external1'])
    assert_elem(
        para1[1],
        texts=['LINK TO', 'THE PYTHON SITE', 'AND', 'THE SPHINX SITE',
               '.'],
        refs=['http://python.org', 'http://sphinx-doc.org'])

    # multiple references in the same line
    para2 = secs[2].findall('paragraph')
    assert_elem(
        para2[0],
        texts=['LINK TO', 'EXTERNAL LINKS', ',', 'Python', ',',
               'THE SPHINX SITE', ',', 'UNNAMED', 'AND',
               'THE PYTHON SITE', '.'],
        refs=['i18n-with-external-links', 'http://python.org/index.html',
              'http://sphinx-doc.org', 'http://google.com',
              'http://python.org'])


@with_intl_app(buildername='text', freshenv=True)
def test_i18n_literalblock_warning(app, status, warning):
    #app.builddir.rmtree(True) # for warnings acceleration
    app.builder.build(['literalblock'])
    result = (app.outdir / 'literalblock.txt').text(encoding='utf-8')
    expect = (u"\nI18N WITH LITERAL BLOCK"
              u"\n***********************\n"
@@ -326,18 +175,15 @@ def test_i18n_literalblock_warning(app, status, warning):
              u"\n literal block\n"
              u"\nMISSING LITERAL BLOCK:\n"
              u"\n<SYSTEM MESSAGE:")
    assert result.startswith(expect)
    yield startswith, result, expect

    warnings = warning.getvalue().replace(os.sep, '/')
    expected_warning_expr = u'.*/literalblock.txt:\\d+: ' \
        u'WARNING: Literal block expected; none found.'
    assert re.search(expected_warning_expr, warnings)
    yield re_search, expected_warning_expr, warnings

    # --- definition terms: regression test for #975

@with_intl_app(buildername='text')
def test_i18n_definition_terms(app, status, warning):
    # regression test for #975
    app.builder.build(['definition_terms'])
    result = (app.outdir / 'definition_terms.txt').text(encoding='utf-8')
    expect = (u"\nI18N WITH DEFINITION TERMS"
              u"\n**************************\n"
@@ -345,15 +191,10 @@ def test_i18n_definition_terms(app, status, warning):
              u"\n THE CORRESPONDING DEFINITION\n"
              u"\nSOME OTHER TERM"
              u"\n THE CORRESPONDING DEFINITION #2\n")
    yield assert_equal, result, expect

    assert result == expect
    # --- glossary terms: regression test for #1090


@with_intl_app(buildername='text')
def test_i18n_glossary_terms(app, status, warning):
    # regression test for #1090
    #app.builddir.rmtree(True) # for warnings acceleration
    app.builder.build(['glossary_terms'])
    result = (app.outdir / 'glossary_terms.txt').text(encoding='utf-8')
    expect = (u"\nI18N WITH GLOSSARY TERMS"
              u"\n************************\n"
@@ -362,146 +203,26 @@ def test_i18n_glossary_terms(app, status, warning):
              u"\nSOME OTHER NEW TERM"
              u"\n THE CORRESPONDING GLOSSARY #2\n"
              u"\nLINK TO *SOME NEW TERM*.\n")
    assert result == expect

    yield assert_equal, result, expect
    warnings = warning.getvalue().replace(os.sep, '/')
    assert 'term not in glossary' not in warnings
    yield assert_not_in, 'term not in glossary', warnings

    # --- glossary term inconsistencies: regression test for #1090

@with_intl_app(buildername='xml')
def test_i18n_role_xref(app, status, warning):
    # regression test for #1090, #1193
    #app.builddir.rmtree(True) # for warnings acceleration
    app.builder.build(['role_xref'])
    et = ElementTree.parse(app.outdir / 'role_xref.xml')
    sec1, sec2 = et.findall('section')

    para1, = sec1.findall('paragraph')
    assert_elem(
        para1,
        texts=['LINK TO', "I18N ROCK'N ROLE XREF", ',', 'CONTENTS', ',',
               'SOME NEW TERM', '.'],
        refs=['i18n-role-xref', 'contents',
              'glossary_terms#term-some-term'])

    para2 = sec2.findall('paragraph')
    assert_elem(
        para2[0],
        texts=['LINK TO', 'SOME OTHER NEW TERM', 'AND', 'SOME NEW TERM',
               '.'],
        refs=['glossary_terms#term-some-other-term',
              'glossary_terms#term-some-term'])
    assert_elem(
        para2[1],
        texts=['LINK TO', 'SAME TYPE LINKS', 'AND',
               "I18N ROCK'N ROLE XREF", '.'],
        refs=['same-type-links', 'i18n-role-xref'])
    assert_elem(
        para2[2],
        texts=['LINK TO', 'I18N WITH GLOSSARY TERMS', 'AND', 'CONTENTS',
               '.'],
        refs=['glossary_terms', 'contents'])
    assert_elem(
        para2[3],
        texts=['LINK TO', '--module', 'AND', '-m', '.'],
        refs=['cmdoption--module', 'cmdoption-m'])
    assert_elem(
        para2[4],
        texts=['LINK TO', 'env2', 'AND', 'env1', '.'],
        refs=['envvar-env2', 'envvar-env1'])
    assert_elem(
        para2[5],
        texts=['LINK TO', 'token2', 'AND', 'token1', '.'],
        refs=[]) # TODO: how do I link token role to productionlist?
    assert_elem(
        para2[6],
        texts=['LINK TO', 'same-type-links', 'AND', "i18n-role-xref", '.'],
        refs=['same-type-links', 'i18n-role-xref'])

    # warnings
    warnings = warning.getvalue().replace(os.sep, '/')
    assert 'term not in glossary' not in warnings
    assert 'undefined label' not in warnings
    assert 'unknown document' not in warnings


@with_intl_app(buildername='xml')
def test_i18n_label_target(app, status, warning):
    # regression test for #1193, #1265
    app.builder.build(['label_target'])
    et = ElementTree.parse(app.outdir / 'label_target.xml')
    secs = et.findall('section')

    para0 = secs[0].findall('paragraph')
    assert_elem(
        para0[0],
        texts=['X SECTION AND LABEL', 'POINT TO', 'implicit-target', 'AND',
               'X SECTION AND LABEL', 'POINT TO', 'section-and-label', '.'],
        refs=['implicit-target', 'section-and-label'])

    para1 = secs[1].findall('paragraph')
    assert_elem(
        para1[0],
        texts=['X EXPLICIT-TARGET', 'POINT TO', 'explicit-target', 'AND',
               'X EXPLICIT-TARGET', 'POINT TO DUPLICATED ID LIKE', 'id1',
               '.'],
        refs=['explicit-target', 'id1'])

    para2 = secs[2].findall('paragraph')
    assert_elem(
        para2[0],
        texts=['X IMPLICIT SECTION NAME', 'POINT TO',
               'implicit-section-name', '.'],
        refs=['implicit-section-name'])

    sec2 = secs[2].findall('section')

    para2_0 = sec2[0].findall('paragraph')
    assert_elem(
        para2_0[0],
        texts=['`X DUPLICATED SUB SECTION`_', 'IS BROKEN LINK.'],
        refs=[])

    para3 = secs[3].findall('paragraph')
    assert_elem(
        para3[0],
        texts=['X', 'bridge label',
               'IS NOT TRANSLATABLE BUT LINKED TO TRANSLATED ' +
               'SECTION TITLE.'],
        refs=['label-bridged-target-section'])
    assert_elem(
        para3[1],
        texts=['X', 'bridge label', 'POINT TO',
               'LABEL BRIDGED TARGET SECTION', 'AND', 'bridge label2',
               'POINT TO', 'SECTION AND LABEL', '. THE SECOND APPEARED',
               'bridge label2', 'POINT TO CORRECT TARGET.'],
        refs=['label-bridged-target-section',
              'section-and-label',
              'section-and-label'])


@with_intl_app(buildername='text')
def test_i18n_glossary_terms_inconsistency(app, status, warning):
    # regression test for #1090
    app.outdir.rmtree(True) # for warnings acceleration
    app.doctreedir.rmtree(True) # for warnings acceleration
    app.builder.build(['glossary_terms_inconsistency'])
    result = (app.outdir / 'glossary_terms_inconsistency.txt').text(encoding='utf-8')
    expect = (u"\nI18N WITH GLOSSARY TERMS INCONSISTENCY"
              u"\n**************************************\n"
              u"\n1. LINK TO *SOME NEW TERM*.\n")
    assert result == expect
    yield assert_equal, result, expect

    warnings = warning.getvalue().replace(os.sep, '/')
    expected_warning_expr = (
        u'.*/glossary_terms_inconsistency.txt:\\d+: '
        u'WARNING: inconsistent term references in translated message\n')
    assert re.search(expected_warning_expr, warnings)
    yield re_search, expected_warning_expr, warnings

    # --- seealso

@with_intl_app(buildername='text')
def test_seealso(app, status, warning):
    app.builder.build(['seealso'])
    result = (app.outdir / 'seealso.txt').text(encoding='utf-8')
    expect = (u"\nI18N WITH SEEALSO"
              u"\n*****************\n"
@@ -509,13 +230,10 @@ def test_seealso(app, status, warning):
              u"\nSee also: LONG TEXT 1\n"
              u"\nSee also: SHORT TEXT 2\n"
              u"\n LONG TEXT 2\n")
    assert result == expect
    yield assert_equal, result, expect

    # --- figure captions: regression test for #940

@with_intl_app(buildername='text')
def test_i18n_figure_caption(app, status, warning):
    # regression test for #940
    app.builder.build(['figure_caption'])
    result = (app.outdir / 'figure_caption.txt').text(encoding='utf-8')
    expect = (u"\nI18N WITH FIGURE CAPTION"
              u"\n************************\n"
@@ -529,14 +247,10 @@ def test_i18n_figure_caption(app, status, warning):
              u"\n [image]MY CAPTION OF THE FIGURE\n"
              u"\n MY DESCRIPTION PARAGRAPH1 OF THE FIGURE.\n"
              u"\n MY DESCRIPTION PARAGRAPH2 OF THE FIGURE.\n")
    yield assert_equal, result, expect

    assert result == expect
    # --- rubric: regression test for pull request #190


@with_intl_app(buildername='text')
def test_i18n_rubric(app, status, warning):
    # regression test for pull request #190
    app.builder.build(['rubric'])
    result = (app.outdir / 'rubric.txt').text(encoding='utf-8')
    expect = (u"\nI18N WITH RUBRIC"
              u"\n****************\n"
@@ -546,14 +260,74 @@ def test_i18n_rubric(app, status, warning):
              u"\n===================\n"
              u"\nBLOCK\n"
              u"\n -[ RUBRIC TITLE ]-\n")
    yield assert_equal, result, expect

    assert result == expect
    # --- docfields

    result = (app.outdir / 'docfields.txt').text(encoding='utf-8')
    expect = (u"\nI18N WITH DOCFIELDS"
              u"\n*******************\n"
              u"\nclass class Cls1\n"
              u"\n Parameters:"
              u"\n **param** -- DESCRIPTION OF PARAMETER param\n"
              u"\nclass class Cls2\n"
              u"\n Parameters:"
              u"\n * **foo** -- DESCRIPTION OF PARAMETER foo\n"
              u"\n * **bar** -- DESCRIPTION OF PARAMETER bar\n"
              u"\nclass class Cls3(values)\n"
              u"\n Raises ValueError:"
              u"\n IF THE VALUES ARE OUT OF RANGE\n"
              u"\nclass class Cls4(values)\n"
              u"\n Raises:"
              u"\n * **TypeError** -- IF THE VALUES ARE NOT VALID\n"
              u"\n * **ValueError** -- IF THE VALUES ARE OUT OF RANGE\n"
              u"\nclass class Cls5\n"
              u"\n Returns:"
              u'\n A NEW "Cls3" INSTANCE\n')
    yield assert_equal, result, expect

    # --- admonitions
    # #1206: gettext did not translate admonition directive's title
    # seealso: http://docutils.sourceforge.net/docs/ref/rst/directives.html#admonitions

    result = (app.outdir / 'admonitions.txt').text(encoding='utf-8')
    directives = (
        "attention", "caution", "danger", "error", "hint",
        "important", "note", "tip", "warning", "admonition")
    for d in directives:
        yield assert_in, d.upper() + " TITLE", result
        yield assert_in, d.upper() + " BODY", result


@with_intl_app(buildername='html')
def test_i18n_index_entries(app, status, warning):
    # regression test for #976
    app.builder.build(['index_entries'])

@gen_with_intl_app('html', freshenv=True)
def test_html_builder(app, status, warning):
    app.builder.build_all()

    # --- test for #955 cant-build-html-with-footnotes-when-using

    # expect no error by build
    (app.outdir / 'footnote.html').text(encoding='utf-8')

    # --- links to undefined reference

    result = (app.outdir / 'refs_inconsistency.html').text(encoding='utf-8')

    expected_expr = ('<a class="reference external" '
                     'href="http://www.example.com">reference</a>')
    yield assert_equal, len(re.findall(expected_expr, result)), 2

    expected_expr = ('<a class="reference internal" '
                     'href="#reference">reference</a>')
    yield assert_equal, len(re.findall(expected_expr, result)), 0

    expected_expr = ('<a class="reference internal" '
                     'href="#i18n-with-refs-inconsistency">I18N WITH '
                     'REFS INCONSISTENCY</a>')
    yield assert_equal, len(re.findall(expected_expr, result)), 1

    # --- index entries: regression test for #976

    result = (app.outdir / 'genindex.html').text(encoding='utf-8')

    def wrap(tag, keyword):
@@ -579,12 +353,10 @@ def test_i18n_index_entries(app, status, warning):
        wrap('a', 'BUILTIN'),
    ]
    for expr in expected_exprs:
        assert re.search(expr, result, re.M)
        yield re_search, expr, result, re.M

    # --- versionchanges

@with_intl_app(buildername='html', freshenv=True)
def test_versionchange(app, status, warning):
    app.builder.build(['versionchange'])
    result = (app.outdir / 'versionchange.html').text(encoding='utf-8')

    def get_content(result, name):
@@ -600,83 +372,266 @@ def test_versionchange(app, status, warning):
        u"""THIS IS THE <em>FIRST</em> PARAGRAPH OF DEPRECATED.</p>\n"""
        u"""<p>THIS IS THE <em>SECOND</em> PARAGRAPH OF DEPRECATED.</p>\n""")
    matched_content = get_content(result, "deprecated")
    assert expect1 == matched_content
    yield assert_equal, expect1, matched_content

    expect2 = (
        u"""<p><span class="versionmodified">New in version 1.0: </span>"""
        u"""THIS IS THE <em>FIRST</em> PARAGRAPH OF VERSIONADDED.</p>\n""")
    matched_content = get_content(result, "versionadded")
    assert expect2 == matched_content
    yield assert_equal, expect2, matched_content

    expect3 = (
        u"""<p><span class="versionmodified">Changed in version 1.0: </span>"""
        u"""THIS IS THE <em>FIRST</em> PARAGRAPH OF VERSIONCHANGED.</p>\n""")
    matched_content = get_content(result, "versionchanged")
    assert expect3 == matched_content
    yield assert_equal, expect3, matched_content

    # --- docfields

@with_intl_app(buildername='text', freshenv=True)
def test_i18n_docfields(app, status, warning):
    app.builder.build(['docfields'])
    result = (app.outdir / 'docfields.txt').text(encoding='utf-8')
    expect = (u"\nI18N WITH DOCFIELDS"
              u"\n*******************\n"
              u"\nclass class Cls1\n"
              u"\n Parameters:"
              u"\n **param** -- DESCRIPTION OF PARAMETER param\n"
              u"\nclass class Cls2\n"
              u"\n Parameters:"
              u"\n * **foo** -- DESCRIPTION OF PARAMETER foo\n"
              u"\n * **bar** -- DESCRIPTION OF PARAMETER bar\n"
              u"\nclass class Cls3(values)\n"
              u"\n Raises ValueError:"
              u"\n IF THE VALUES ARE OUT OF RANGE\n"
              u"\nclass class Cls4(values)\n"
              u"\n Raises:"
              u"\n * **TypeError** -- IF THE VALUES ARE NOT VALID\n"
              u"\n * **ValueError** -- IF THE VALUES ARE OUT OF RANGE\n"
              u"\nclass class Cls5\n"
              u"\n Returns:"
              u'\n A NEW "Cls3" INSTANCE\n')
    assert result == expect


@with_intl_app(buildername='text', freshenv=True)
def test_i18n_admonitions(app, status, warning):
    # #1206: gettext did not translate admonition directive's title
    # seealso: http://docutils.sourceforge.net/docs/ref/rst/directives.html#admonitions
    app.builder.build(['admonitions'])
    result = (app.outdir / 'admonitions.txt').text(encoding='utf-8')
    directives = (
        "attention", "caution", "danger", "error", "hint",
        "important", "note", "tip", "warning", "admonition",)
    for d in directives:
        assert d.upper() + " TITLE" in result
        assert d.upper() + " BODY" in result


@with_intl_app(buildername='html', freshenv=True)
def test_i18n_docfields_html(app, status, warning):
    app.builder.build(['docfields'])
    (app.outdir / 'docfields.html').text(encoding='utf-8')
    # expect no error by build
    (app.outdir / 'docfields.html').text(encoding='utf-8')

    # --- gettext template

@with_intl_app(buildername='html')
def test_gettext_template(app, status, warning):
    app.builder.build_all()
    result = (app.outdir / 'index.html').text(encoding='utf-8')
    assert "WELCOME" in result
    assert "SPHINX 2013.120" in result
    yield assert_in, "WELCOME", result
    yield assert_in, "SPHINX 2013.120", result

    # --- rebuild by .mo mtime

@with_intl_app(buildername='html')
def test_rebuild_by_mo_mtime(app, status, warning):
    app.builder.build_update()
    _, count, _ = app.env.update(app.config, app.srcdir, app.doctreedir, app)
    assert count == 0
    yield assert_equal, count, 0

    mo = (app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').bytes()
    (app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').write_bytes(mo)
    (app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').utime(None)
    _, count, _ = app.env.update(app.config, app.srcdir, app.doctreedir, app)
    assert count == 1
    yield assert_equal, count, 1


@gen_with_intl_app('xml', freshenv=True)
def test_xml_builder(app, status, warning):
    app.builder.build_all()

    # --- footnotes: regression test for fix #955, #1176

    et = ElementTree.parse(app.outdir / 'footnote.xml')
    secs = et.findall('section')

    para0 = secs[0].findall('paragraph')
    yield (assert_elem,
           para0[0],
           ['I18N WITH FOOTNOTE', 'INCLUDE THIS CONTENTS',
            '2', '[ref]', '1', '100', '.'],
           ['i18n-with-footnote', 'ref'])

    footnote0 = secs[0].findall('footnote')
    yield (assert_elem,
           footnote0[0],
           ['1', 'THIS IS A AUTO NUMBERED FOOTNOTE.'],
           None,
           ['1'])
    yield (assert_elem,
           footnote0[1],
           ['100', 'THIS IS A NUMBERED FOOTNOTE.'],
           None,
           ['100'])
    yield (assert_elem,
           footnote0[2],
           ['2', 'THIS IS A AUTO NUMBERED NAMED FOOTNOTE.'],
           None,
           ['named'])

    citation0 = secs[0].findall('citation')
    yield (assert_elem,
           citation0[0],
           ['ref', 'THIS IS A NAMED FOOTNOTE.'],
           None,
           ['ref'])

    warnings = warning.getvalue().replace(os.sep, '/')
    warning_expr = u'.*/footnote.xml:\\d*: SEVERE: Duplicate ID: ".*".\n'
    yield not_re_search, warning_expr, warnings

    # --- footnote backlinks: i18n test for #1058

    et = ElementTree.parse(app.outdir / 'footnote.xml')
    secs = et.findall('section')

    para0 = secs[0].findall('paragraph')
    refs0 = para0[0].findall('footnote_reference')
    refid2id = dict([
        (r.attrib.get('refid'), r.attrib.get('ids')) for r in refs0])

    footnote0 = secs[0].findall('footnote')
    for footnote in footnote0:
        ids = footnote.attrib.get('ids')
        backrefs = footnote.attrib.get('backrefs')
        yield assert_equal, refid2id[ids], backrefs

    # --- refs in the Python domain

    et = ElementTree.parse(app.outdir / 'refs_python_domain.xml')
    secs = et.findall('section')

    # regression test for fix #1363
    para0 = secs[0].findall('paragraph')
    yield (assert_elem,
           para0[0],
           ['SEE THIS DECORATOR:', 'sensitive_variables()', '.'],
           ['sensitive.sensitive_variables'])

    # --- keep external links: regression test for #1044

    et = ElementTree.parse(app.outdir / 'external_links.xml')
    secs = et.findall('section')

    para0 = secs[0].findall('paragraph')
    # external link check
    yield (assert_elem,
           para0[0],
           ['EXTERNAL LINK TO', 'Python', '.'],
           ['http://python.org/index.html'])

    # internal link check
    yield (assert_elem,
           para0[1],
           ['EXTERNAL LINKS', 'IS INTERNAL LINK.'],
           ['i18n-with-external-links'])

    # inline link check
    yield (assert_elem,
           para0[2],
           ['INLINE LINK BY', 'THE SPHINX SITE', '.'],
           ['http://sphinx-doc.org'])

    # unnamed link check
    yield (assert_elem,
           para0[3],
           ['UNNAMED', 'LINK', '.'],
           ['http://google.com'])

    # link target swapped translation
    para1 = secs[1].findall('paragraph')
    yield (assert_elem,
           para1[0],
           ['LINK TO', 'external2', 'AND', 'external1', '.'],
           ['http://example.com/external2',
            'http://example.com/external1'])
    yield (assert_elem,
           para1[1],
           ['LINK TO', 'THE PYTHON SITE', 'AND', 'THE SPHINX SITE', '.'],
           ['http://python.org', 'http://sphinx-doc.org'])

    # multiple references in the same line
    para2 = secs[2].findall('paragraph')
    yield (assert_elem,
           para2[0],
           ['LINK TO', 'EXTERNAL LINKS', ',', 'Python', ',',
            'THE SPHINX SITE', ',', 'UNNAMED', 'AND',
            'THE PYTHON SITE', '.'],
           ['i18n-with-external-links', 'http://python.org/index.html',
            'http://sphinx-doc.org', 'http://google.com',
            'http://python.org'])

    # --- role xref: regression test for #1090, #1193

    et = ElementTree.parse(app.outdir / 'role_xref.xml')
    sec1, sec2 = et.findall('section')

    para1, = sec1.findall('paragraph')
    yield (assert_elem,
           para1,
           ['LINK TO', "I18N ROCK'N ROLE XREF", ',', 'CONTENTS', ',',
            'SOME NEW TERM', '.'],
           ['i18n-role-xref', 'contents',
            'glossary_terms#term-some-term'])

    para2 = sec2.findall('paragraph')
    yield (assert_elem,
           para2[0],
           ['LINK TO', 'SOME OTHER NEW TERM', 'AND', 'SOME NEW TERM', '.'],
           ['glossary_terms#term-some-other-term',
            'glossary_terms#term-some-term'])
    yield (assert_elem,
           para2[1],
           ['LINK TO', 'SAME TYPE LINKS', 'AND',
            "I18N ROCK'N ROLE XREF", '.'],
           ['same-type-links', 'i18n-role-xref'])
    yield (assert_elem,
           para2[2],
           ['LINK TO', 'I18N WITH GLOSSARY TERMS', 'AND', 'CONTENTS', '.'],
           ['glossary_terms', 'contents'])
    yield (assert_elem,
           para2[3],
           ['LINK TO', '--module', 'AND', '-m', '.'],
           ['cmdoption--module', 'cmdoption-m'])
    yield (assert_elem,
           para2[4],
           ['LINK TO', 'env2', 'AND', 'env1', '.'],
           ['envvar-env2', 'envvar-env1'])
    yield (assert_elem,
           para2[5],
           ['LINK TO', 'token2', 'AND', 'token1', '.'],
           []) # TODO: how do I link token role to productionlist?
    yield (assert_elem,
           para2[6],
           ['LINK TO', 'same-type-links', 'AND', "i18n-role-xref", '.'],
           ['same-type-links', 'i18n-role-xref'])

    # warnings
    warnings = warning.getvalue().replace(os.sep, '/')
    yield assert_not_in, 'term not in glossary', warnings
    yield assert_not_in, 'undefined label', warnings
    yield assert_not_in, 'unknown document', warnings

    # --- label targets: regression test for #1193, #1265

    et = ElementTree.parse(app.outdir / 'label_target.xml')
    secs = et.findall('section')

    para0 = secs[0].findall('paragraph')
    yield (assert_elem,
           para0[0],
           ['X SECTION AND LABEL', 'POINT TO', 'implicit-target', 'AND',
            'X SECTION AND LABEL', 'POINT TO', 'section-and-label', '.'],
           ['implicit-target', 'section-and-label'])

    para1 = secs[1].findall('paragraph')
    yield (assert_elem,
           para1[0],
           ['X EXPLICIT-TARGET', 'POINT TO', 'explicit-target', 'AND',
            'X EXPLICIT-TARGET', 'POINT TO DUPLICATED ID LIKE', 'id1',
            '.'],
           ['explicit-target', 'id1'])

    para2 = secs[2].findall('paragraph')
    yield (assert_elem,
           para2[0],
           ['X IMPLICIT SECTION NAME', 'POINT TO',
            'implicit-section-name', '.'],
           ['implicit-section-name'])

    sec2 = secs[2].findall('section')

    para2_0 = sec2[0].findall('paragraph')
    yield (assert_elem,
           para2_0[0],
           ['`X DUPLICATED SUB SECTION`_', 'IS BROKEN LINK.'],
           [])

    para3 = secs[3].findall('paragraph')
    yield (assert_elem,
           para3[0],
           ['X', 'bridge label',
            'IS NOT TRANSLATABLE BUT LINKED TO TRANSLATED ' +
            'SECTION TITLE.'],
           ['label-bridged-target-section'])
    yield (assert_elem,
           para3[1],
           ['X', 'bridge label', 'POINT TO',
            'LABEL BRIDGED TARGET SECTION', 'AND', 'bridge label2',
            'POINT TO', 'SECTION AND LABEL', '. THE SECOND APPEARED',
            'bridge label2', 'POINT TO CORRECT TARGET.'],
           ['label-bridged-target-section',
            'section-and-label',
            'section-and-label'])