Merged in knzm/sphinx-fix-docfields-fork (pull request #96)
@@ -55,6 +55,11 @@ texinfo_documents = [
     'Georg Brandl \\and someone else', 'Sphinx Testing', 'Miscellaneous'),
]

man_pages = [
    ('contents', 'SphinxTests', 'Sphinx Tests Documentation',
     'Georg Brandl and someone else', 1),
]

value_from_conf_py = 84

coverage_c_path = ['special/*.h']
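For context on the configuration added above: each man_pages entry follows the man builder's five-field layout. A minimal annotated sketch with the same values as in the hunk (the comments are editorial, not part of the commit):

    # (source start file, page name, description, authors, manual section)
    man_pages = [
        ('contents',                       # document to render
         'SphinxTests',                    # becomes SphinxTests.1
         'Sphinx Tests Documentation',     # one-line description
         'Georg Brandl and someone else',  # author string
         1),                               # section 1: user commands
    ]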
@@ -8,7 +8,10 @@
   external_links
   refs_inconsistency
   literalblock
   seealso
   definition_terms
   figure_caption
   index_entries
   glossary_terms
   glossary_terms_inconsistency
   docfields
35  tests/roots/test-intl/glossary_terms.po  Normal file
@@ -0,0 +1,35 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) 2012, foof
# This file is distributed under the same license as the foo package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2013-01-29 14:10\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"

msgid "i18n with glossary terms"
msgstr "I18N WITH GLOSSARY TERMS"

msgid "Some term"
msgstr "SOME NEW TERM"

msgid "The corresponding glossary"
msgstr "THE CORRESPONDING GLOSSARY"

msgid "Some other term"
msgstr "SOME OTHER NEW TERM"

msgid "The corresponding glossary #2"
msgstr "THE CORRESPONDING GLOSSARY #2"

msgid "link to :term:`Some term`."
msgstr "LINK TO :term:`SOME NEW TERM`."
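The .po catalogs under tests/roots/test-intl are compiled into binary .mo files before the i18n builds run; the test suite shells out to msgfmt for that (see the setup_module hunk from tests/test_intl.py further down). A rough, self-contained sketch of that step — the output path layout is an assumption, not something this commit specifies:

    import subprocess

    def compile_catalog(po_path, mo_path):
        # msgfmt ships with GNU gettext; -o names the compiled catalog
        p = subprocess.Popen(['msgfmt', po_path, '-o', mo_path],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = p.communicate()
        if p.returncode != 0:
            raise RuntimeError('msgfmt failed: %s' % stderr)

    # assumed layout: test language 'xx', catalogs under xx/LC_MESSAGES/
    compile_catalog('tests/roots/test-intl/glossary_terms.po',
                    'tests/roots/test-intl/xx/LC_MESSAGES/glossary_terms.mo')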
14  tests/roots/test-intl/glossary_terms.txt  Normal file
@@ -0,0 +1,14 @@
:tocdepth: 2

i18n with glossary terms
========================

.. glossary::

   Some term
      The corresponding glossary

   Some other term
      The corresponding glossary #2

link to :term:`Some term`.
23  tests/roots/test-intl/glossary_terms_inconsistency.po  Normal file
@@ -0,0 +1,23 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) 2012, foof
# This file is distributed under the same license as the foo package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2013-01-29 14:10\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"

msgid "i18n with glossary terms inconsistency"
msgstr "I18N WITH GLOSSARY TERMS INCONSISTENCY"

msgid "link to :term:`Some term` and :term:`Some other term`."
msgstr "LINK TO :term:`SOME NEW TERM`."
6  tests/roots/test-intl/glossary_terms_inconsistency.txt  Normal file
@@ -0,0 +1,6 @@
:tocdepth: 2

i18n with glossary terms inconsistency
======================================

1. link to :term:`Some term` and :term:`Some other term`.
33  tests/roots/test-intl/seealso.po  Normal file
@@ -0,0 +1,33 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) 2010, Georg Brandl & Team
# This file is distributed under the same license as the Sphinx <Tests> package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: Sphinx <Tests> 0.6\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2012-12-16 06:06\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"

msgid "i18n with seealso"
msgstr "I18N WITH SEEALSO"

msgid "short text 1"
msgstr "SHORT TEXT 1"

msgid "long text 1"
msgstr "LONG TEXT 1"

msgid "short text 2"
msgstr "SHORT TEXT 2"

msgid "long text 2"
msgstr "LONG TEXT 2"
15  tests/roots/test-intl/seealso.txt  Normal file
@@ -0,0 +1,15 @@
:tocdepth: 2

i18n with seealso
============================
.. #960 directive-seelaso-ignored-in-the-gettext

.. seealso:: short text 1

.. seealso::

   long text 1

.. seealso:: short text 2

   long text 2
@@ -11,6 +11,7 @@

from StringIO import StringIO

from docutils import nodes
from sphinx.application import ExtensionError
from sphinx.domains import Domain

@@ -41,6 +42,12 @@ def test_events(app):
        "Callback called when disconnected"


@with_app()
def test_emit_with_multibyte_name_node(app):
    node = nodes.section(names=[u'\u65e5\u672c\u8a9e'])
    app.emit('my_event', node)


def test_output():
    status, warnings = StringIO(), StringIO()
    app = TestApp(status=status, warning=warnings)
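The new test_emit_with_multibyte_name_node exercises the application event machinery with a node whose name is non-ASCII (u'\u65e5\u672c\u8a9e', "Japanese"). For reference, the general shape of that API in an extension; 'my_event' is only an illustrative name, mirroring the test:

    def setup(app):
        # declare a custom event, then register a listener for it;
        # callbacks receive whatever arguments are later passed to emit()
        app.add_event('my_event')
        app.connect('my_event', lambda app, node: None)

    # later, with a running application and a docutils node in hand:
    # app.emit('my_event', node)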
@@ -14,6 +14,7 @@ import sys
from StringIO import StringIO

from util import *
from nose.tools import with_setup

from docutils.statemachine import ViewList
@@ -22,8 +23,7 @@ from sphinx.ext.autodoc import AutoDirective, add_documenter, \


def setup_module():
    global app, lid, options, directive
    global app
    app = TestApp()
    app.builder.env.app = app
    app.builder.env.temp_data['docname'] = 'dummy'
@@ -31,6 +31,15 @@ def setup_module():
    app.connect('autodoc-process-signature', process_signature)
    app.connect('autodoc-skip-member', skip_member)


def teardown_module():
    app.cleanup()


def setup_test():
    global options, directive
    global processed_docstrings, processed_signatures, _warnings

    options = Struct(
        inherited_members = False,
        undoc_members = False,
@@ -54,8 +63,9 @@ def setup_module():
        filename_set = set(),
    )

def teardown_module():
    app.cleanup()
    processed_docstrings = []
    processed_signatures = []
    _warnings = []


_warnings = []
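The refactoring above separates one-time module state (setup_module/teardown_module, which nose runs once per test module) from per-test state, which setup_test now resets before each test via the with_setup decorator imported earlier from nose.tools. In outline, and only as a sketch of the wiring:

    from nose.tools import with_setup

    def setup_module():        # once per module: build the shared TestApp
        pass

    def teardown_module():     # once per module: clean the TestApp up
        pass

    def setup_test():          # before every decorated test: reset options,
        pass                   # directive and the recorded-call lists

    @with_setup(setup_test)
    def test_something():
        assert True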
@@ -80,12 +90,15 @@ def process_signature(app, what, name, obj, options, args, retann):


def skip_member(app, what, name, obj, skip, options):
    if name in ('__special1__', '__special2__'):
        return skip
    if name.startswith('_'):
        return True
    if name == 'skipmeth':
        return True


@with_setup(setup_test)
def test_parse_name():
    def verify(objtype, name, result):
        inst = AutoDirective._registry[objtype](directive, name)
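skip_member above is connected to the 'autodoc-skip-member' event in setup_module. The handler contract: return True to skip a member, False to force-include it, or None to leave the decision to autodoc; returning the incoming skip value, as done for the __special__ names, simply re-asserts autodoc's own decision. A minimal extension-style sketch of the same hook (handler name and policy are illustrative):

    def hide_private_helpers(app, what, name, obj, skip, options):
        if name.startswith('_') and not name.startswith('__'):
            return True    # hide single-underscore helpers
        return None        # no opinion: let autodoc decide

    def setup(app):
        app.connect('autodoc-skip-member', hide_private_helpers)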
@@ -127,6 +140,7 @@ def test_parse_name():
    del directive.env.temp_data['autodoc:class']


@with_setup(setup_test)
def test_format_signature():
    def formatsig(objtype, name, obj, args, retann):
        inst = AutoDirective._registry[objtype](directive, name)
@@ -165,6 +179,20 @@ def test_format_signature():
    assert formatsig('class', 'C', C, None, None) == '(a, b=None)'
    assert formatsig('class', 'C', D, 'a, b', 'X') == '(a, b) -> X'

    #__init__ have signature at first line of docstring
    class F2:
        '''some docstring for F2.'''
        def __init__(self, *args, **kw):
            '''
            __init__(a1, a2, kw1=True, kw2=False)

            some docstring for __init__.
            '''
    class G2(F2, object):
        pass
    for C in (F2, G2):
        assert formatsig('class', 'C', C, None, None) == '(a1, a2, kw1=True, kw2=False)'

    # test for methods
    class H:
        def foo1(self, b, *c):
@@ -182,7 +210,23 @@ def test_format_signature():
    # test processing by event handler
    assert formatsig('method', 'bar', H.foo1, None, None) == '42'

    # test functions created via functools.partial
    from functools import partial
    curried1 = partial(lambda a, b, c: None, 'A')
    assert formatsig('function', 'curried1', curried1, None, None) == \
        '(b, c)'
    curried2 = partial(lambda a, b, c=42: None, 'A')
    assert formatsig('function', 'curried2', curried2, None, None) == \
        '(b, c=42)'
    curried3 = partial(lambda a, b, *c: None, 'A')
    assert formatsig('function', 'curried3', curried3, None, None) == \
        '(b, *c)'
    curried4 = partial(lambda a, b, c=42, *d, **e: None, 'A')
    assert formatsig('function', 'curried4', curried4, None, None) == \
        '(b, c=42, *d, **e)'


@with_setup(setup_test)
def test_get_doc():
    def getdocl(objtype, obj, encoding=None):
        inst = AutoDirective._registry[objtype](directive, 'tmp')
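The new assertions check that autodoc reports the remaining signature of a functools.partial object, i.e. the wrapped callable's arguments minus the ones the partial has already bound. The idea, illustrated with the era-appropriate inspect.getargspec; this is a sketch of the principle, not Sphinx's actual code path, and it only handles positionally bound arguments:

    import inspect
    from functools import partial

    def remaining_signature(p):
        # introspect the wrapped function and drop the positional
        # arguments the partial already supplies
        args, varargs, varkw, defaults = inspect.getargspec(p.func)
        return inspect.formatargspec(args[len(p.args):], varargs, varkw, defaults)

    curried = partial(lambda a, b, c=42: None, 'A')
    assert remaining_signature(curried) == '(b, c=42)'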
@@ -251,6 +295,7 @@ def test_get_doc():
        '', 'Other', ' lines']


@with_setup(setup_test)
def test_docstring_processing():
    def process(objtype, name, obj):
        inst = AutoDirective._registry[objtype](directive, name)
@@ -301,6 +346,8 @@ def test_docstring_processing():
    assert process('function', 'h', h) == ['first line', 'third line', '']
    app.disconnect(lid)


@with_setup(setup_test)
def test_new_documenter():
    class MyDocumenter(ModuleLevelDocumenter):
        objtype = 'integer'
@@ -328,6 +375,40 @@ def test_new_documenter():
    assert_result_contains('.. py:data:: integer', 'module', 'test_autodoc')


@with_setup(setup_test, AutoDirective._special_attrgetters.clear)
def test_attrgetter_using():
    def assert_getter_works(objtype, name, obj, attrs=[], **kw):
        getattr_spy = []
        def special_getattr(obj, name, *defargs):
            if name in attrs:
                getattr_spy.append((obj, name))
                return None
            return getattr(obj, name, *defargs)
        AutoDirective._special_attrgetters[type] = special_getattr

        del getattr_spy[:]
        inst = AutoDirective._registry[objtype](directive, name)
        inst.generate(**kw)

        hooked_members = [s[1] for s in getattr_spy]
        documented_members = [s[1] for s in processed_signatures]
        for attr in attrs:
            fullname = '.'.join((name, attr))
            assert attr in hooked_members
            assert fullname not in documented_members, \
                '%r was not hooked by special_attrgetter function' % fullname

    options.members = ALL
    options.inherited_members = False
    assert_getter_works('class', 'test_autodoc.Class', Class,
                        ['meth'])

    options.inherited_members = True
    assert_getter_works('class', 'test_autodoc.Class', Class,
                        ['meth', 'inheritedmeth'])


@with_setup(setup_test)
def test_generate():
    def assert_warns(warn_str, objtype, name, **kw):
        inst = AutoDirective._registry[objtype](directive, name)
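test_attrgetter_using drives autodoc through AutoDirective._special_attrgetters, the per-type hook autodoc consults instead of plain getattr() when it walks an object's members. Extensions normally reach the same table through app.add_autodoc_attrgetter(); a minimal sketch, with the tracing purely illustrative:

    def traced_getattr(obj, name, *defargs):
        # called by autodoc in place of getattr() for the registered type
        print 'autodoc fetched %r' % name
        return getattr(obj, name, *defargs)

    def setup(app):
        app.add_autodoc_attrgetter(object, traced_getattr)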
@@ -440,6 +521,15 @@ def test_generate():
    should.append(('method', 'test_autodoc.Class.inheritedmeth'))
    assert_processes(should, 'class', 'Class')

    # test special members
    options.special_members = ['__special1__']
    should.append(('method', 'test_autodoc.Class.__special1__'))
    assert_processes(should, 'class', 'Class')
    options.special_members = ALL
    should.append(('method', 'test_autodoc.Class.__special2__'))
    assert_processes(should, 'class', 'Class')
    options.special_members = False

    options.members = []
    # test module flags
    assert_result_contains('.. py:module:: test_autodoc',
@@ -567,6 +657,13 @@ def test_generate():
    del directive.env.temp_data['autodoc:module']
    del directive.env.temp_data['py:module']

    # test descriptor class documentation
    options.members = ['CustomDataDescriptor']
    assert_result_contains('.. py:class:: CustomDataDescriptor(doc)',
                           'module', 'test_autodoc')
    assert_result_contains(' .. py:method:: CustomDataDescriptor.meth()',
                           'module', 'test_autodoc')

# --- generate fodder ------------

__all__ = ['Class']
@@ -591,6 +688,10 @@ class CustomDataDescriptor(object):
            return self
        return 42

    def meth(self):
        """Function."""
        return "The Answer"

def _funky_classmethod(name, b, c, d, docstring=None):
    """Generates a classmethod for a class from a template by filling out
    some arguments."""
@@ -660,6 +761,13 @@ class Class(Base):
        self.inst_attr_string = None
        """a documented instance attribute"""

    def __special1__(self):
        """documented special method"""

    def __special2__(self):
        # undocumented special method
        pass


class CustomDict(dict):
    """Docstring."""
@@ -58,6 +58,7 @@ else:
@with_app(buildername='man')
def test_man(app):
    app.builder.build_all()
    assert (app.outdir / 'SphinxTests.1').exists()

@with_app(buildername='singlehtml', cleanenv=True)
def test_singlehtml(app):
@@ -84,7 +85,7 @@ def test_multibyte_path(app):
    master_doc = srcdir / 'contents.txt'
    master_doc.write_bytes((master_doc.text() + dedent("""
        .. toctree::

            %(mb_name)s/%(mb_name)s
        """ % locals())
    ).encode('utf-8'))
@@ -39,6 +39,14 @@ def test_build(app):
    assert (app.outdir / 'subdir.pot').isfile()


@with_app(buildername='gettext')
def test_seealso(app):
    # regression test for issue #960
    app.builder.build(['markup'])
    catalog = (app.outdir / 'markup.pot').text(encoding='utf-8')
    assert 'msgid "something, something else, something more"' in catalog


@with_app(buildername='gettext')
def test_gettext(app):
    app.builder.build(['markup'])
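The new test_seealso covers issue #960, where text inside a seealso directive was dropped by the gettext builder and never reached the .pot catalogs. Outside the test harness, the equivalent check is roughly the following; the source directory, output directory and catalog name are illustrative, not taken from this commit:

    import subprocess

    # extract translatable messages with the gettext builder
    subprocess.check_call(['sphinx-build', '-b', 'gettext',
                           'tests/roots/test-intl', '_build/gettext'])

    # with the fix, seealso content shows up in the extracted catalog
    pot = open('_build/gettext/seealso.pot').read()
    assert 'short text 1' in pot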
@@ -62,7 +62,8 @@ def setup_module():
        if p.returncode != 0:
            print stdout
            print stderr
            assert False, 'msgfmt exited with return code %s' % p.returncode
            assert False, \
                'msgfmt exited with return code %s' % p.returncode
        assert mo.isfile(), 'msgfmt failed'
@@ -158,13 +159,17 @@ def test_i18n_link_to_undefined_reference(app):
    app.builder.build(['refs_inconsistency'])
    result = (app.outdir / 'refs_inconsistency.html').text(encoding='utf-8')

    expected_expr = """<a class="reference external" href="http://www.example.com">reference</a>"""
    expected_expr = ('<a class="reference external" '
                     'href="http://www.example.com">reference</a>')
    assert len(re.findall(expected_expr, result)) == 2

    expected_expr = """<a class="reference internal" href="#reference">reference</a>"""
    expected_expr = ('<a class="reference internal" '
                     'href="#reference">reference</a>')
    assert len(re.findall(expected_expr, result)) == 0

    expected_expr = """<a class="reference internal" href="#i18n-with-refs-inconsistency">I18N WITH REFS INCONSISTENCY</a>"""
    expected_expr = ('<a class="reference internal" '
                     'href="#i18n-with-refs-inconsistency">I18N WITH '
                     'REFS INCONSISTENCY</a>')
    assert len(re.findall(expected_expr, result)) == 1
@@ -175,7 +180,8 @@ def test_i18n_keep_external_links(app):
    result = (app.outdir / 'external_links.html').text(encoding='utf-8')

    # external link check
    expect_line = u"""<li>EXTERNAL LINK TO <a class="reference external" href="http://python.org">Python</a>.</li>"""
    expect_line = (u'<li>EXTERNAL LINK TO <a class="reference external" '
                   u'href="http://python.org">Python</a>.</li>')
    matched = re.search('^<li>EXTERNAL LINK TO .*$', result, re.M)
    matched_line = ''
    if matched:
@@ -183,7 +189,9 @@ def test_i18n_keep_external_links(app):
    assert expect_line == matched_line

    # internal link check
    expect_line = u"""<li><a class="reference internal" href="#i18n-with-external-links">EXTERNAL LINKS</a> IS INTERNAL LINK.</li>"""
    expect_line = (u'<li><a class="reference internal" '
                   u'href="#i18n-with-external-links">EXTERNAL '
                   u'LINKS</a> IS INTERNAL LINK.</li>')
    matched = re.search('^<li><a .* IS INTERNAL LINK.</li>$', result, re.M)
    matched_line = ''
    if matched:
@@ -191,7 +199,8 @@ def test_i18n_keep_external_links(app):
    assert expect_line == matched_line

    # inline link check
    expect_line = u"""<li>INLINE LINK BY <a class="reference external" href="http://sphinx-doc.org">SPHINX</a>.</li>"""
    expect_line = (u'<li>INLINE LINK BY <a class="reference external" '
                   u'href="http://sphinx-doc.org">SPHINX</a>.</li>')
    matched = re.search('^<li>INLINE LINK BY .*$', result, re.M)
    matched_line = ''
    if matched:
@@ -199,7 +208,8 @@ def test_i18n_keep_external_links(app):
    assert expect_line == matched_line

    # unnamed link check
    expect_line = u"""<li>UNNAMED <a class="reference external" href="http://google.com">LINK</a>.</li>"""
    expect_line = (u'<li>UNNAMED <a class="reference external" '
                   u'href="http://google.com">LINK</a>.</li>')
    matched = re.search('^<li>UNNAMED .*$', result, re.M)
    matched_line = ''
    if matched:
@@ -242,6 +252,57 @@ def test_i18n_definition_terms(app):
    assert result == expect


@with_intl_app(buildername='text', warning=warnfile)
def test_i18n_glossary_terms(app):
    # regression test for #1090
    app.builddir.rmtree(True) #for warnings acceleration
    app.builder.build(['glossary_terms'])
    result = (app.outdir / 'glossary_terms.txt').text(encoding='utf-8')
    expect = (u"\nI18N WITH GLOSSARY TERMS"
              u"\n************************\n"
              u"\nSOME NEW TERM"
              u"\n THE CORRESPONDING GLOSSARY\n"
              u"\nSOME OTHER NEW TERM"
              u"\n THE CORRESPONDING GLOSSARY #2\n"
              u"\nLINK TO *SOME NEW TERM*.\n")
    assert result == expect

    warnings = warnfile.getvalue().replace(os.sep, '/')
    assert 'term not in glossary' not in warnings
@with_intl_app(buildername='text', warning=warnfile)
def test_i18n_glossary_terms_inconsistency(app):
    # regression test for #1090
    app.builddir.rmtree(True) #for warnings acceleration
    app.builder.build(['glossary_terms_inconsistency'])
    result = (app.outdir / 'glossary_terms_inconsistency.txt'
              ).text(encoding='utf-8')
    expect = (u"\nI18N WITH GLOSSARY TERMS INCONSISTENCY"
              u"\n**************************************\n"
              u"\n1. LINK TO *SOME NEW TERM*.\n")
    assert result == expect

    warnings = warnfile.getvalue().replace(os.sep, '/')
    expected_warning_expr = (
        u'.*/glossary_terms_inconsistency.txt:\\d+: '
        u'WARNING: inconsistent term references in translated message\n')
    assert re.search(expected_warning_expr, warnings)
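The warning asserted here comes from the catalog added earlier in this commit: the original message references two glossary terms while the translation references only one, so the term links cannot all be carried over. Roughly what "inconsistent term references" means, as an illustration rather than Sphinx's internal check:

    import re

    term_re = re.compile(r':term:`(.*?)`')

    msgid = u"link to :term:`Some term` and :term:`Some other term`."
    msgstr = u"LINK TO :term:`SOME NEW TERM`."

    if len(term_re.findall(msgid)) != len(term_re.findall(msgstr)):
        print 'WARNING: inconsistent term references in translated message'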
@with_intl_app(buildername='text')
def test_seealso(app):
    app.builder.build(['seealso'])
    result = (app.outdir / 'seealso.txt').text(encoding='utf-8')
    expect = (u"\nI18N WITH SEEALSO"
              u"\n*****************\n"
              u"\nSee also: SHORT TEXT 1\n"
              u"\nSee also: LONG TEXT 1\n"
              u"\nSee also: SHORT TEXT 2\n"
              u"\n LONG TEXT 2\n")
    assert result == expect


@with_intl_app(buildername='text')
def test_i18n_figure_caption(app):
    # regression test for #940