Complete test suite overhaul.

* rename a few test modules to make the names more consistent

* do not copy/use Sphinx from build/ (unnecessary without 2to3)

* use a temporary dir for *all* test projects, so the source tree
  stays pristine (default is tests/build)

* speed up tests by ~3x by splitting up test projects and avoiding
  rebuilds
Georg Brandl 2014-09-21 17:17:02 +02:00
parent c5dfd5c732
commit d47a7587f9
83 changed files with 1476 additions and 1757 deletions
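
As the commit message notes, every test project now builds inside a temporary directory instead of the source tree. A minimal sketch of the selection logic the reworked tests/run.py applies (the env var name and the tests/build default are taken from the diff below; the standalone form here is only illustrative):

    import os
    from path import path  # the test suite's path helper

    testroot = os.path.dirname(__file__) or '.'
    # honour an externally supplied location, otherwise default to tests/build
    if 'SPHINX_TEST_TEMPDIR' not in os.environ:
        os.environ['SPHINX_TEST_TEMPDIR'] = os.path.abspath(os.path.join(testroot, 'build'))
    tempdir = path(os.environ['SPHINX_TEST_TEMPDIR'])
    if tempdir.exists():       # start from a clean slate on every run
        tempdir.rmtree()
    tempdir.makedirs()

With the build/ copy step gone, make test runs run.py straight from the source tree; the TEST variable is still passed through to nose, so something like make test TEST=test_build_html.py (module name illustrative) narrows the run to a single module.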

View File

@ -7,6 +7,7 @@
^build/
^dist/
^tests/.coverage
^tests/build/
^sphinx/pycode/Grammar.*pickle
^Sphinx.egg-info/
^doc/_build/
@ -18,5 +19,3 @@
~$
^utils/.*3\.py$
^distribute-
^tests/root/_build/*
^tests/root/generated/*

View File

@ -48,10 +48,10 @@ reindent:
@$(PYTHON) utils/reindent.py -r -n .
endif
test: build
test:
@cd tests; $(PYTHON) run.py -d -m '^[tT]est' $(TEST)
covertest: build
covertest:
@cd tests; $(PYTHON) run.py -d -m '^[tT]est' --with-coverage \
--cover-package=sphinx $(TEST)

View File

@ -195,6 +195,9 @@ class path(text_type):
"""
return self.__class__(os.path.join(self, *map(self.__class__, args)))
def listdir(self):
return os.listdir(self)
__div__ = __truediv__ = joinpath
def __repr__(self):
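
The path helper gains a small listdir() wrapper in this hunk, so test roots can be enumerated without dropping back to os.listdir. A hypothetical use over the new roots layout (rootdir is the tests/ path object that util exports in the diffs further down):

    from util import rootdir

    # list every shared test root, e.g. test-api-set-translator, test-intl, ...
    for name in (rootdir / 'roots').listdir():
        print(name)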

View File

@ -3,11 +3,9 @@
import sys, os
sys.path.append(os.path.abspath('.'))
sys.path.append(os.path.abspath('..'))
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.jsmath', 'sphinx.ext.todo',
'sphinx.ext.coverage', 'sphinx.ext.autosummary',
'sphinx.ext.doctest', 'sphinx.ext.extlinks',
'sphinx.ext.coverage', 'sphinx.ext.doctest', 'sphinx.ext.extlinks',
'sphinx.ext.viewcode', 'ext']
jsmath_path = 'dummy.js'
@ -18,7 +16,7 @@ master_doc = 'contents'
source_suffix = '.txt'
project = 'Sphinx <Tests>'
copyright = '2010, Georg Brandl & Team'
copyright = '2010-2014, Georg Brandl & Team'
# If this is changed, remember to update the versionchanges!
version = '0.6'
release = '0.6alpha1'
@ -34,7 +32,8 @@ html_theme = 'testtheme'
html_theme_path = ['.']
html_theme_options = {'testopt': 'testoverride'}
html_sidebars = {'**': 'customsb.html',
'contents': ['contentssb.html', 'localtoc.html'] }
'contents': ['contentssb.html', 'localtoc.html',
'globaltoc.html']}
html_style = 'default.css'
html_static_path = ['_static', 'templated.css_t']
html_extra_path = ['robots.txt']
@ -65,8 +64,6 @@ value_from_conf_py = 84
coverage_c_path = ['special/*.h']
coverage_c_regexes = {'function': r'^PyAPI_FUNC\(.*\)\s+([^_][\w_]+)'}
autosummary_generate = ['autosummary']
extlinks = {'issue': ('http://bugs.python.org/issue%s', 'issue '),
'pyurl': ('http://python.org/%s', None)}

View File

@ -21,15 +21,14 @@ Contents:
bom
math
autodoc
autosummary
metadata
extensions
doctest
extensions
versioning/index
footnote
lists
http://sphinx-doc.org/
Latest reference <http://sphinx-doc.org/latest/>
Python <http://python.org/>
Indices and tables
@ -44,3 +43,13 @@ References
.. [Ref1] Reference target.
.. [Ref_1] Reference target 2.
Test for issue #1157
====================
This used to crash:
.. toctree::
.. toctree::
:hidden:

View File

@ -0,0 +1,3 @@
:orphan:
here: »

View File

@ -1,3 +1,7 @@
import sys, os
sys.path.insert(0, os.path.abspath('.'))
extensions = ['sphinx.ext.autosummary']
# The suffix of source filenames.

View File

@ -1,6 +1,7 @@
.. autosummary::
:nosignatures:
:toctree:
dummy_module
.. autosummary::
:nosignatures:
:toctree:
dummy_module
sphinx

View File

@ -0,0 +1,2 @@
master_doc = 'contents'
source_suffix = '.txt'

View File

@ -0,0 +1,8 @@
.. toctree::
maxwidth
lineblock
nonascii_title
nonascii_table
nonascii_maxwidth
table

View File

@ -0,0 +1,6 @@
* one
| line-block 1
| line-block 2
followed paragraph.

View File

@ -0,0 +1,6 @@
.. seealso:: ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham
* ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham
* ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham
spam egg

View File

@ -0,0 +1,5 @@
abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc
日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語
abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語

View File

@ -0,0 +1,7 @@
.. list-table::
- - spam
- egg
- - 日本語
- 日本語

View File

@ -0,0 +1,2 @@
日本語
======

View File

@ -0,0 +1,7 @@
+-----+-----+
| XXX | XXX |
+-----+-----+
| | XXX |
+-----+-----+
| XXX | |
+-----+-----+

View File

View File

@ -0,0 +1,4 @@
.. toctree::
sub

View File

@ -0,0 +1,3 @@
.. toctree::
contents

View File

@ -1,22 +1,35 @@
Dedent
======
Code blocks
-----------
.. code-block:: ruby
:linenos:
:dedent: 4
def ruby?
false
end
Literal Include
---------------
.. literalinclude:: literal.inc
:language: python
:lines: 10-11
:dedent: 0
.. literalinclude:: literal.inc
:language: python
:lines: 10-11
:dedent: 1
.. literalinclude:: literal.inc
:language: python
:lines: 10-11
:dedent: 2
.. literalinclude:: literal.inc
:language: python
:lines: 10-11
:dedent: 3
.. literalinclude:: literal.inc
:language: python
:lines: 10-11
:dedent: 4
.. literalinclude:: literal.inc
:language: python
:lines: 10-11
:dedent: 1000

View File

@ -0,0 +1,53 @@
Dedent
======
Code blocks
-----------
.. code-block:: ruby
:linenos:
:dedent: 0
def ruby?
false
end
.. code-block:: ruby
:linenos:
:dedent: 1
def ruby?
false
end
.. code-block:: ruby
:linenos:
:dedent: 2
def ruby?
false
end
.. code-block:: ruby
:linenos:
:dedent: 3
def ruby?
false
end
.. code-block:: ruby
:linenos:
:dedent: 4
def ruby?
false
end
.. code-block:: ruby
:linenos:
:dedent: 1000
def ruby?
false
end

View File

@ -0,0 +1,5 @@
extensions = ['sphinx.ext.doctest']
project = 'test project for doctest'
master_doc = 'doctest.txt'
source_suffix = '.txt'

View File

@ -125,5 +125,5 @@ Special directives
.. testcleanup:: *
import test_doctest
test_doctest.cleanup_call()
import test_ext_doctest
test_ext_doctest.cleanup_call()

View File

@ -0,0 +1,5 @@
.. toctree::
:numbered:
sub

View File

@ -0,0 +1,3 @@
.. toctree::
contents

View File

@ -4,10 +4,4 @@ Autosummary templating test
.. autosummary::
:toctree: generated
sphinx.application.Sphinx
.. currentmodule:: sphinx.application
.. autoclass:: TemplateBridge
.. automethod:: render
sphinx.application.TemplateBridge

View File

@ -0,0 +1,3 @@
project = 'versioning test root'
master_doc = 'index'
source_suffix = '.txt'

View File

@ -11,47 +11,37 @@
"""
from __future__ import print_function
import os
import sys
from os import path, chdir, listdir, environ
import shutil
import traceback
from path import path
testroot = path.dirname(__file__) or '.'
if 'BUILD_TEST_PATH' in environ:
# for tox testing
newroot = environ['BUILD_TEST_PATH']
# tox installs the sphinx package, no need for sys.path.insert
else:
newroot = path.join(testroot, path.pardir, 'build')
newroot = path.join(newroot, listdir(newroot)[0], 'tests')
testroot = os.path.dirname(__file__) or '.'
sys.path.insert(0, os.path.abspath(os.path.join(testroot, os.path.pardir)))
shutil.rmtree(newroot, ignore_errors=True)
# just copying test directory to parallel testing
print('Copying sources to build/lib/tests...')
shutil.copytree(testroot, newroot)
# check dependencies before testing
print('Checking dependencies...')
for modname in ('nose', 'mock', 'six', 'docutils', 'jinja2', 'pygments',
'snowballstemmer', 'babel'):
try:
__import__(modname)
except ImportError as err:
traceback.print_exc()
print('The %r package is needed to run the Sphinx test suite.' % modname)
sys.exit(1)
# always test the sphinx package from build/lib/
sys.path.insert(0, path.abspath(path.join(newroot, path.pardir)))
# switch to the copy/converted dir so nose tests the right tests
chdir(newroot)
try:
import nose
except ImportError:
print('The nose package is needed to run the Sphinx test suite.')
sys.exit(1)
try:
import docutils
except ImportError:
print('Sphinx requires the docutils package to be installed.')
sys.exit(1)
try:
import jinja2
except ImportError:
print('Sphinx requires the jinja2 package to be installed.')
sys.exit(1)
# find a temp dir for testing and clean it up now
os.environ['SPHINX_TEST_TEMPDIR'] = \
os.path.abspath(os.path.join(testroot, 'build')) \
if 'SPHINX_TEST_TEMPDIR' not in os.environ \
else os.path.abspath(os.environ['SPHINX_TEST_TEMPDIR'])
tempdir = path(os.environ['SPHINX_TEST_TEMPDIR'])
print('Temporary files will be placed in %s.' % tempdir)
if tempdir.exists():
tempdir.rmtree()
tempdir.makedirs()
print('Running Sphinx test suite...')
import nose
nose.main()

View File

@ -8,82 +8,57 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
from nose.tools import with_setup
from util import with_app, test_roots
from util import with_app, rootdir
def setup_module():
sys.path.insert(0, test_roots / 'test-api-set-translator')
sys.path.insert(0, rootdir / 'roots' / 'test-api-set-translator')
def teardown_module():
sys.path.remove(test_roots / 'test-api-set-translator')
sys.path.remove(rootdir / 'roots' / 'test-api-set-translator')
def teardown_websupport():
(test_roots / 'test-api-set-translator' / 'generated').rmtree(True)
(test_roots / 'test-api-set-translator' / 'websupport').rmtree(True)
@with_app(
buildername='html',
srcdir=(test_roots / 'test-api-set-translator'),
confdir=(test_roots / 'test-api-set-translator' / 'nonext'),
)
def test_html_translator(app):
@with_app('html')
def test_html_translator(app, status, warning):
# no set_translator(), no html_translator_class
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'SmartyPantsHTMLTranslator'
@with_app(
buildername='html',
srcdir=(test_roots / 'test-api-set-translator'),
confdir=(test_roots / 'test-api-set-translator' / 'nonext'),
confoverrides={
'html_translator_class': 'translator.ExtHTMLTranslator'},
)
def test_html_with_html_translator_class(app):
@with_app('html', confoverrides={
'html_translator_class': 'translator.ExtHTMLTranslator'})
def test_html_with_html_translator_class(app, status, warning):
# no set_translator(), but html_translator_class
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ExtHTMLTranslator'
@with_app(
buildername='html',
srcdir=(test_roots / 'test-api-set-translator'),
confdir=(test_roots / 'test-api-set-translator' / 'nonext'),
confoverrides={'html_use_smartypants': False},
)
def test_html_with_smartypants(app):
@with_app('html',
confoverrides={'html_use_smartypants': False})
def test_html_with_smartypants(app, status, warning):
# no set_translator(), html_use_smartypants=False
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'HTMLTranslator'
@with_app(
buildername='html',
srcdir=(test_roots / 'test-api-set-translator'),
)
def test_html_with_set_translator_for_html_(app):
@with_app('html', testroot='api-set-translator')
def test_html_with_set_translator_for_html_(app, status, warning):
# use set_translator(), no html_translator_class
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfHTMLTranslator'
@with_app(
buildername='html',
srcdir=(test_roots / 'test-api-set-translator'),
confoverrides={'html_translator_class': 'ext.ExtHTMLTranslator'},
)
def test_html_with_set_translator_for_html_and_html_translator_class(app):
@with_app('html', testroot='api-set-translator',
confoverrides={'html_translator_class': 'ext.ExtHTMLTranslator'})
def test_html_with_set_translator_for_html_and_html_translator_class(app, status, warning):
# use set_translator() and html_translator_class.
# set_translator() is given priority over html_translator_class.
translator_class = app.builder.translator_class
@ -96,108 +71,70 @@ def test_html_with_set_translator_for_html_and_html_translator_class(app):
# buildername='dirhtml',
# srcdir=(test_roots / 'test-api-set-translator'),
# )
# def test_dirhtml_set_translator_for_dirhtml(app):
# def test_dirhtml_set_translator_for_dirhtml(app, status, warning):
# translator_class = app.builder.translator_class
# assert translator_class
# assert translator_class.__name__ == 'ConfDirHTMLTranslator'
@with_app(
buildername='singlehtml',
srcdir=(test_roots / 'test-api-set-translator'),
)
def test_singlehtml_set_translator_for_singlehtml(app):
@with_app('singlehtml', testroot='api-set-translator')
def test_singlehtml_set_translator_for_singlehtml(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfSingleHTMLTranslator'
@with_app(
buildername='pickle',
srcdir=(test_roots / 'test-api-set-translator'),
)
def test_pickle_set_translator_for_pickle(app):
@with_app('pickle', testroot='api-set-translator')
def test_pickle_set_translator_for_pickle(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfPickleTranslator'
@with_app(
buildername='json',
srcdir=(test_roots / 'test-api-set-translator'),
)
def test_json_set_translator_for_json(app):
@with_app('json', testroot='api-set-translator')
def test_json_set_translator_for_json(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfJsonTranslator'
@with_app(
buildername='latex',
srcdir=(test_roots / 'test-api-set-translator'),
)
def test_html_with_set_translator_for_latex(app):
@with_app('latex', testroot='api-set-translator')
def test_html_with_set_translator_for_latex(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfLaTeXTranslator'
@with_app(
buildername='man',
srcdir=(test_roots / 'test-api-set-translator'),
)
def test_html_with_set_translator_for_man(app):
@with_app('man', testroot='api-set-translator')
def test_html_with_set_translator_for_man(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfManualPageTranslator'
@with_app(
buildername='texinfo',
srcdir=(test_roots / 'test-api-set-translator'),
)
def test_html_with_set_translator_for_texinfo(app):
@with_app('texinfo', testroot='api-set-translator')
def test_html_with_set_translator_for_texinfo(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfTexinfoTranslator'
@with_app(
buildername='text',
srcdir=(test_roots / 'test-api-set-translator'),
)
def test_html_with_set_translator_for_text(app):
@with_app('text', testroot='api-set-translator')
def test_html_with_set_translator_for_text(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfTextTranslator'
@with_setup(teardown=teardown_websupport)
@with_app(
buildername='websupport',
srcdir=(test_roots / 'test-api-set-translator'),
)
def test_html_with_set_translator_for_websupport(app):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfWebSupportTranslator'
@with_app(
buildername='xml',
srcdir=(test_roots / 'test-api-set-translator'),
)
def test_html_with_set_translator_for_xml(app):
@with_app('xml', testroot='api-set-translator')
def test_html_with_set_translator_for_xml(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfXMLTranslator'
@with_app(
buildername='pseudoxml',
srcdir=(test_roots / 'test-api-set-translator'),
)
def test_html_with_set_translator_for_pseudoxml(app):
@with_app('pseudoxml', testroot='api-set-translator')
def test_html_with_set_translator_for_pseudoxml(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfPseudoXMLTranslator'
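
Across the converted test modules, decorated tests no longer assemble srcdir/confdir paths by hand: they name a root under tests/roots/ via testroot= and receive the status and warning streams as extra arguments. A minimal sketch of the new idiom (the root name 'basic' and the assertion are illustrative, not taken from this commit):

    from util import with_app

    @with_app('html', testroot='basic')   # builds tests/roots/test-basic inside the tempdir
    def test_example(app, status, warning):
        app.builder.build_all()
        assert 'WARNING' not in warning.getvalue()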

View File

@ -9,22 +9,21 @@
:license: BSD, see LICENSE for details.
"""
from six import StringIO
from docutils import nodes
from sphinx.application import ExtensionError
from sphinx.domains import Domain
from util import with_app, raises_msg, TestApp
from util import with_app, raises_msg
@with_app()
def test_events(app):
def empty(): pass
def test_events(app, status, warning):
def empty():
pass
raises_msg(ExtensionError, "Unknown event name: invalid",
app.connect, "invalid", empty)
app.add_event("my_event")
raises_msg(ExtensionError, "Event 'my_event' already present",
app.add_event, "my_event")
@ -43,57 +42,49 @@ def test_events(app):
@with_app()
def test_emit_with_nonascii_name_node(app):
def test_emit_with_nonascii_name_node(app, status, warning):
node = nodes.section(names=[u'\u65e5\u672c\u8a9e'])
app.emit('my_event', node)
def test_output():
status, warnings = StringIO(), StringIO()
app = TestApp(status=status, warning=warnings)
try:
status.truncate(0) # __init__ writes to status
status.seek(0)
app.info("Nothing here...")
assert status.getvalue() == "Nothing here...\n"
status.truncate(0)
status.seek(0)
app.info("Nothing here...", True)
assert status.getvalue() == "Nothing here..."
@with_app()
def test_output(app, status, warning):
status.truncate(0) # __init__ writes to status
status.seek(0)
app.info("Nothing here...")
assert status.getvalue() == "Nothing here...\n"
status.truncate(0)
status.seek(0)
app.info("Nothing here...", True)
assert status.getvalue() == "Nothing here..."
old_count = app._warncount
app.warn("Bad news!")
assert warnings.getvalue() == "WARNING: Bad news!\n"
assert app._warncount == old_count + 1
finally:
app.cleanup()
old_count = app._warncount
app.warn("Bad news!")
assert warning.getvalue() == "WARNING: Bad news!\n"
assert app._warncount == old_count + 1
def test_extensions():
status, warnings = StringIO(), StringIO()
app = TestApp(status=status, warning=warnings)
try:
app.setup_extension('shutil')
assert warnings.getvalue().startswith("WARNING: extension 'shutil'")
finally:
app.cleanup()
@with_app()
def test_extensions(app, status, warning):
app.setup_extension('shutil')
assert warning.getvalue().startswith("WARNING: extension 'shutil'")
def test_domain_override():
@with_app()
def test_domain_override(app, status, warning):
class A(Domain):
name = 'foo'
class B(A):
name = 'foo'
class C(Domain):
name = 'foo'
status, warnings = StringIO(), StringIO()
app = TestApp(status=status, warning=warnings)
try:
# No domain named foo is known yet.
raises_msg(ExtensionError, 'domain foo not yet registered',
app.override_domain, A)
assert app.add_domain(A) is None
assert app.override_domain(B) is None
raises_msg(ExtensionError, 'new domain not a subclass of registered '
'foo domain', app.override_domain, C)
finally:
app.cleanup()
# No domain named foo is known yet.
raises_msg(ExtensionError, 'domain foo not yet registered',
app.override_domain, A)
assert app.add_domain(A) is None
assert app.override_domain(B) is None
raises_msg(ExtensionError, 'new domain not a subclass of registered '
'foo domain', app.override_domain, C)

View File

@ -18,7 +18,7 @@ from six import StringIO
from docutils.statemachine import ViewList
from sphinx.ext.autodoc import AutoDirective, add_documenter, \
ModuleLevelDocumenter, FunctionDocumenter, cut_lines, between, ALL
ModuleLevelDocumenter, FunctionDocumenter, cut_lines, between, ALL
app = None

View File

@ -3,114 +3,86 @@
test_build
~~~~~~~~~~
Test all builders that have no special checks.
Test all builders.
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from util import with_app, test_root, path, SkipTest, TestApp
from six import BytesIO
from textwrap import dedent
from util import with_app, rootdir, tempdir, SkipTest, TestApp
try:
from docutils.writers.manpage import Writer as ManWriter
except ImportError:
ManWriter = None
def teardown_module():
(test_root / '_build').rmtree(True)
class MockOpener(object):
def open(self, req, **kwargs):
class result(BytesIO):
headers = None
url = req.url
return result()
import sphinx.builders.linkcheck
sphinx.builders.linkcheck.opener = MockOpener()
def test_build():
for buildername in ('pickle', 'json', 'linkcheck', 'text', 'htmlhelp',
'qthelp', 'epub', 'changes', 'singlehtml', 'xml',
'pseudoxml'):
app = TestApp(buildername=buildername)
yield lambda app: app.builder.build_all(), app
app.cleanup()
@with_app(buildername='man')
def test_man(app):
if ManWriter is None:
def verify_build(buildername, srcdir):
if buildername == 'man' and ManWriter is None:
raise SkipTest('man writer is not available')
app.builder.build_all()
assert (app.outdir / 'SphinxTests.1').exists()
def _test_nonascii_path(app):
srcdir = path(app.srcdir)
mb_name = u'\u65e5\u672c\u8a9e'
app = TestApp(buildername=buildername, srcdir=srcdir)
try:
(srcdir / mb_name).makedirs()
except UnicodeEncodeError:
from path import FILESYSTEMENCODING
raise SkipTest(
'nonascii filename not supported on this filesystem encoding: '
'%s', FILESYSTEMENCODING)
(srcdir / mb_name / (mb_name + '.txt')).write_text(dedent("""
multi byte file name page
==========================
"""))
master_doc = srcdir / 'contents.txt'
master_doc.write_bytes((master_doc.text() + dedent("""
.. toctree::
%(mb_name)s/%(mb_name)s
""" % {'mb_name': mb_name})
).encode('utf-8'))
app.builder.build_all()
def test_nonascii_path():
(test_root / '_build').rmtree(True) #keep this to build first gettext
builder_names = ['gettext', 'html', 'dirhtml', 'singlehtml', 'latex',
'texinfo', 'pickle', 'json', 'linkcheck', 'text',
'htmlhelp', 'qthelp', 'epub', 'changes', 'xml',
'pseudoxml']
if ManWriter is not None:
builder_names.append('man')
for buildername in builder_names:
app = TestApp(buildername=buildername, _copy_to_temp=True)
yield _test_nonascii_path, app
app.builder.build_all()
finally:
app.cleanup()
@with_app(buildername='text', srcdir='(empty)')
def test_circular_toctree(app):
contents = (".. toctree::\n"
"\n"
" sub\n")
(app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
def test_build_all():
# If supported, build in a non-ASCII source dir
test_name = u'\u65e5\u672c\u8a9e'
try:
srcdir = tempdir / test_name
(rootdir / 'root').copytree(tempdir / test_name)
except UnicodeEncodeError:
srcdir = tempdir / 'all'
else:
# add a doc with a non-ASCII file name to the source dir
(srcdir / (test_name + '.txt')).write_text(dedent("""
nonascii file name page
=======================
"""))
contents = (".. toctree::\n"
"\n"
" contents\n")
(app.srcdir / 'sub.rst').write_text(contents, encoding='utf-8')
master_doc = srcdir / 'contents.txt'
master_doc.write_bytes((master_doc.text() + dedent("""
.. toctree::
%(test_name)s/%(test_name)s
""" % {'test_name': test_name})
).encode('utf-8'))
# note: no 'html' - if it's ok with dirhtml it's ok with html
for buildername in ['dirhtml', 'singlehtml', 'latex', 'texinfo',
'pickle', 'json', 'text', 'htmlhelp', 'qthelp', 'epub',
'changes', 'xml', 'pseudoxml', 'man', 'linkcheck']:
yield verify_build, buildername, srcdir
@with_app(buildername='text', testroot='circular')
def test_circular_toctree(app, status, warning):
app.builder.build_all()
warnings = "".join(app._warning.content)
warnings = warning.getvalue()
assert 'circular toctree references detected, ignoring: sub <- contents <- sub' in warnings
assert 'circular toctree references detected, ignoring: contents <- sub <- contents' in warnings
@with_app(buildername='text', srcdir='(empty)')
def test_numbered_circular_toctree(app):
contents = (".. toctree::\n"
" :numbered:\n"
"\n"
" sub\n")
(app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
contents = (".. toctree::\n"
"\n"
" contents\n")
(app.srcdir / 'sub.rst').write_text(contents, encoding='utf-8')
@with_app(buildername='text', testroot='numbered-circular')
def test_numbered_circular_toctree(app, status, warning):
app.builder.build_all()
warnings = "\n".join(app._warning.content)
warnings = warning.getvalue()
assert 'circular toctree references detected, ignoring: sub <- contents <- sub' in warnings
assert 'circular toctree references detected, ignoring: contents <- sub <- contents' in warnings

View File

@ -15,22 +15,17 @@ import os
import re
from subprocess import Popen, PIPE
from util import test_root, test_roots, with_app, SkipTest
from util import with_app, SkipTest
def teardown_module():
(test_root / '_build').rmtree(True)
(test_roots / 'test-intl' / '_build').rmtree(True),
@with_app(buildername='gettext')
def test_all(app):
@with_app('gettext')
def test_all(app, status, warning):
# Generic build; should fail only when the builder is horribly broken.
app.builder.build_all()
@with_app(buildername='gettext')
def test_build(app):
@with_app('gettext')
def test_build(app, status, warning):
# Do messages end up in the correct location?
app.builder.build(['extapi', 'subdir/includes'])
# top-level documents end up in a message catalog
@ -39,16 +34,16 @@ def test_build(app):
assert (app.outdir / 'subdir.pot').isfile()
@with_app(buildername='gettext')
def test_seealso(app):
@with_app('gettext')
def test_seealso(app, status, warning):
# regression test for issue #960
app.builder.build(['markup'])
catalog = (app.outdir / 'markup.pot').text(encoding='utf-8')
assert 'msgid "something, something else, something more"' in catalog
@with_app(buildername='gettext')
def test_gettext(app):
@with_app('gettext')
def test_gettext(app, status, warning):
app.builder.build(['markup'])
(app.outdir / 'en' / 'LC_MESSAGES').makedirs()
@ -58,7 +53,7 @@ def test_gettext(app):
try:
p = Popen(['msginit', '--no-translator', '-i', 'markup.pot',
'--locale', 'en_US'],
stdout=PIPE, stderr=PIPE)
stdout=PIPE, stderr=PIPE)
except OSError:
raise SkipTest # most likely msginit was not found
else:
@ -67,12 +62,12 @@ def test_gettext(app):
print(stdout)
print(stderr)
assert False, 'msginit exited with return code %s' % \
p.returncode
p.returncode
assert (app.outdir / 'en_US.po').isfile(), 'msginit failed'
try:
p = Popen(['msgfmt', 'en_US.po', '-o',
os.path.join('en', 'LC_MESSAGES', 'test_root.mo')],
stdout=PIPE, stderr=PIPE)
os.path.join('en', 'LC_MESSAGES', 'test_root.mo')],
stdout=PIPE, stderr=PIPE)
except OSError:
raise SkipTest # most likely msgfmt was not found
else:
@ -81,9 +76,9 @@ def test_gettext(app):
print(stdout)
print(stderr)
assert False, 'msgfmt exited with return code %s' % \
p.returncode
p.returncode
assert (app.outdir / 'en' / 'LC_MESSAGES' / 'test_root.mo').isfile(), \
'msgfmt failed'
'msgfmt failed'
finally:
os.chdir(cwd)
@ -91,15 +86,14 @@ def test_gettext(app):
assert _("Testing various markup") == u"Testing various markup"
@with_app(buildername='gettext',
srcdir=(test_roots / 'test-intl'),
doctreedir=(test_roots / 'test-intl' / '_build' / 'doctree'),
@with_app('gettext', testroot='intl',
confoverrides={'gettext_compact': False})
def test_gettext_index_entries(app):
def test_gettext_index_entries(app, status, warning):
# regression test for #976
app.builder.build(['index_entries'])
_msgid_getter = re.compile(r'msgid "(.*)"').search
def msgid_getter(msgid):
m = _msgid_getter(msgid)
if m:
@ -139,10 +133,8 @@ def test_gettext_index_entries(app):
assert msgids == []
@with_app(buildername='gettext',
srcdir=(test_roots / 'test-intl'),
doctreedir=(test_roots / 'test-intl' / '_build' / 'doctree'))
def test_gettext_template(app):
@with_app(buildername='gettext', testroot='intl')
def test_gettext_template(app, status, warning):
app.builder.build_all()
assert (app.outdir / 'sphinx.pot').isfile()

View File

@ -15,22 +15,11 @@ import re
from six import PY3, iteritems, StringIO
from six.moves import html_entities
try:
import pygments
except ImportError:
pygments = None
from sphinx import __version__
from util import test_root, test_roots, remove_unicode_literals, gen_with_app, with_app
from util import remove_unicode_literals, gen_with_app
from etree13 import ElementTree as ET
def teardown_module():
(test_root / '_build').rmtree(True)
html_warnfile = StringIO()
ENV_WARNINGS = """\
%(root)s/autodoc_fodder.py:docstring of autodoc_fodder\\.MarkupError:2: \
WARNING: Explicit markup ends without a blank line; unexpected \
@ -44,6 +33,8 @@ reading included file u'.*?wrongenc.inc' seems to be wrong, try giving an \
%(root)s/includes.txt:4: WARNING: download file not readable: .*?nonexisting.png
%(root)s/markup.txt:\\d+: WARNING: Malformed :option: u'Python c option', does \
not contain option marker - or -- or / or \\+
%(root)s/undecodable.txt:3: WARNING: undecodable source characters, replacing \
with "\\?": b?'here: >>>\\\\xbb<<<'
"""
HTML_WARNINGS = ENV_WARNINGS + """\
@ -61,6 +52,7 @@ if PY3:
def tail_check(check):
rex = re.compile(check)
def checker(nodes):
for node in nodes:
if node.tail and rex.search(node.tail):
@ -84,6 +76,8 @@ HTML_XPATH = {
(".//a[@href='../_downloads/img.png']", ''),
(".//img[@src='../_images/img.png']", ''),
(".//p", 'This is an include file.'),
(".//pre/span", 'line 1'),
(".//pre/span", 'line 2'),
],
'includes.html': [
(".//pre", u'Max Strauß'),
@ -91,6 +85,23 @@ HTML_XPATH = {
(".//a[@href='_downloads/img1.png']", ''),
(".//pre", u'"quotes"'),
(".//pre", u"'included'"),
(".//pre/span[@class='s']", u'üöä'),
(".//div[@class='inc-pyobj1 highlight-text']//pre",
r'^class Foo:\n pass\n\s*$'),
(".//div[@class='inc-pyobj2 highlight-text']//pre",
r'^ def baz\(\):\n pass\n\s*$'),
(".//div[@class='inc-lines highlight-text']//pre",
r'^class Foo:\n pass\nclass Bar:\n$'),
(".//div[@class='inc-startend highlight-text']//pre",
u'^foo = "Including Unicode characters: üöä"\\n$'),
(".//div[@class='inc-preappend highlight-text']//pre",
r'(?m)^START CODE$'),
(".//div[@class='inc-pyobj-dedent highlight-python']//span",
r'def'),
(".//div[@class='inc-tab3 highlight-text']//pre",
r'-| |-'),
(".//div[@class='inc-tab8 highlight-python']//pre/span",
r'-| |-'),
],
'autodoc.html': [
(".//dt[@id='test_autodoc.Class']", ''),
@ -215,12 +226,10 @@ HTML_XPATH = {
(".//h4", 'Custom sidebar'),
# docfields
(".//td[@class='field-body']/strong", '^moo$'),
(".//td[@class='field-body']/strong",
tail_check(r'\(Moo\) .* Moo')),
(".//td[@class='field-body']/strong", tail_check(r'\(Moo\) .* Moo')),
(".//td[@class='field-body']/ul/li/strong", '^hour$'),
(".//td[@class='field-body']/ul/li/em", '^DuplicateType$'),
(".//td[@class='field-body']/ul/li/em",
tail_check(r'.* Some parameter')),
(".//td[@class='field-body']/ul/li/em", tail_check(r'.* Some parameter')),
],
'contents.html': [
(".//meta[@name='hc'][@content='hcval']", ''),
@ -241,6 +250,11 @@ HTML_XPATH = {
(".//h4", 'Contents sidebar'),
# custom JavaScript
(".//script[@src='file://moo.js']", ''),
# URL in contents
(".//a[@class='reference external'][@href='http://sphinx-doc.org/']",
'http://sphinx-doc.org/'),
(".//a[@class='reference external'][@href='http://sphinx-doc.org/latest/']",
'Latest reference'),
],
'bom.html': [
(".//title", " File with UTF-8 BOM"),
@ -260,33 +274,19 @@ HTML_XPATH = {
(".//a/strong", "Other"),
(".//a", "entry"),
(".//dt/a", "double"),
]
],
'footnote.html': [
(".//a[@class='footnote-reference'][@href='#id5'][@id='id1']", r"\[1\]"),
(".//a[@class='footnote-reference'][@href='#id6'][@id='id2']", r"\[2\]"),
(".//a[@class='footnote-reference'][@href='#foo'][@id='id3']", r"\[3\]"),
(".//a[@class='reference internal'][@href='#bar'][@id='id4']", r"\[bar\]"),
(".//a[@class='fn-backref'][@href='#id1']", r"\[1\]"),
(".//a[@class='fn-backref'][@href='#id2']", r"\[2\]"),
(".//a[@class='fn-backref'][@href='#id3']", r"\[3\]"),
(".//a[@class='fn-backref'][@href='#id4']", r"\[bar\]"),
],
}
if pygments:
HTML_XPATH['includes.html'].extend([
(".//pre/span[@class='s']", u'üöä'),
(".//div[@class='inc-pyobj1 highlight-text']//pre",
r'^class Foo:\n pass\n\s*$'),
(".//div[@class='inc-pyobj2 highlight-text']//pre",
r'^ def baz\(\):\n pass\n\s*$'),
(".//div[@class='inc-lines highlight-text']//pre",
r'^class Foo:\n pass\nclass Bar:\n$'),
(".//div[@class='inc-startend highlight-text']//pre",
u'^foo = "Including Unicode characters: üöä"\\n$'),
(".//div[@class='inc-preappend highlight-text']//pre",
r'(?m)^START CODE$'),
(".//div[@class='inc-pyobj-dedent highlight-python']//span",
r'def'),
(".//div[@class='inc-tab3 highlight-text']//pre",
r'-| |-'),
(".//div[@class='inc-tab8 highlight-python']//pre/span",
r'-| |-'),
])
HTML_XPATH['subdir/includes.html'].extend([
(".//pre/span", 'line 1'),
(".//pre/span", 'line 2'),
])
class NslessParser(ET.XMLParser):
"""XMLParser that throws away namespaces in tag names."""
@ -320,7 +320,8 @@ def check_xpath(etree, fname, path, check, be_found=True):
else:
assert False, ('%r not found in any node matching '
'path %s in %s: %r' % (check, path, fname,
[node.text for node in nodes]))
[node.text for node in nodes]))
def check_static_entries(outdir):
staticdir = outdir / '_static'
@ -335,21 +336,23 @@ def check_static_entries(outdir):
# a file from _static, but matches exclude_patterns
assert not (staticdir / 'excluded.css').exists()
def check_extra_entries(outdir):
assert (outdir / 'robots.txt').isfile()
@gen_with_app(buildername='html', warning=html_warnfile, cleanenv=True,
@gen_with_app(buildername='html', freshenv=True,
confoverrides={'html_context.hckey_co': 'hcval_co'},
tags=['testtag'])
def test_html(app):
def test_html_output(app, status, warning):
app.builder.build_all()
html_warnings = html_warnfile.getvalue().replace(os.sep, '/')
html_warnings = warning.getvalue().replace(os.sep, '/')
html_warnings_exp = HTML_WARNINGS % {
'root': re.escape(app.srcdir.replace(os.sep, '/'))}
'root': re.escape(app.srcdir.replace(os.sep, '/'))}
assert re.match(html_warnings_exp + '$', html_warnings), \
'Warnings don\'t match:\n' + \
'--- Expected (regex):\n' + html_warnings_exp + \
'--- Got:\n' + html_warnings
'Warnings don\'t match:\n' + \
'--- Expected (regex):\n' + html_warnings_exp + \
'--- Got:\n' + html_warnings
for fname, paths in iteritems(HTML_XPATH):
parser = NslessParser()
@ -365,23 +368,9 @@ def test_html(app):
check_static_entries(app.builder.outdir)
check_extra_entries(app.builder.outdir)
@with_app(buildername='html', srcdir='(empty)',
confoverrides={'html_sidebars': {'*': ['globaltoc.html']}},
)
def test_html_with_globaltoc_and_hidden_toctree(app):
# issue #1157: combination of 'globaltoc.html' and hidden toctree cause
# exception.
(app.srcdir / 'contents.rst').write_text(
'\n.. toctree::'
'\n'
'\n.. toctree::'
'\n :hidden:'
'\n')
app.builder.build_all()
@gen_with_app(buildername='html', srcdir=(test_roots / 'test-tocdepth'))
def test_tocdepth(app):
@gen_with_app(buildername='html', testroot='tocdepth')
def test_tocdepth(app, status, warning):
# issue #1251
app.builder.build_all()
@ -391,14 +380,14 @@ def test_tocdepth(app):
(".//li[@class='toctree-l3']/a", '1.2.1. Foo B1', True),
(".//li[@class='toctree-l3']/a", '2.1.1. Bar A1', False),
(".//li[@class='toctree-l3']/a", '2.2.1. Bar B1', False),
],
],
'foo.html': [
(".//h1", '1. Foo', True),
(".//h2", '1.1. Foo A', True),
(".//h3", '1.1.1. Foo A1', True),
(".//h2", '1.2. Foo B', True),
(".//h3", '1.2.1. Foo B1', True),
],
],
'bar.html': [
(".//h1", '2. Bar', True),
(".//h2", '2.1. Bar A', True),
@ -423,8 +412,8 @@ def test_tocdepth(app):
yield check_xpath, etree, fname, xpath, check, be_found
@gen_with_app(buildername='singlehtml', srcdir=(test_roots / 'test-tocdepth'))
def test_tocdepth_singlehtml(app):
@gen_with_app(buildername='singlehtml', testroot='tocdepth')
def test_tocdepth_singlehtml(app, status, warning):
app.builder.build_all()
expects = {
@ -466,18 +455,3 @@ def test_tocdepth_singlehtml(app):
for xpath, check, be_found in paths:
yield check_xpath, etree, fname, xpath, check, be_found
@with_app(buildername='html', srcdir='(empty)')
def test_url_in_toctree(app):
contents = (".. toctree::\n"
"\n"
" http://sphinx-doc.org/\n"
" Latest reference <http://sphinx-doc.org/latest/>\n")
(app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
app.builder.build_all()
result = (app.outdir / 'contents.html').text(encoding='utf-8')
assert '<a class="reference external" href="http://sphinx-doc.org/">http://sphinx-doc.org/</a>' in result
assert '<a class="reference external" href="http://sphinx-doc.org/latest/">Latest reference</a>' in result

View File

@ -14,20 +14,14 @@ import os
import re
from subprocess import Popen, PIPE
from six import PY3, StringIO
from six import PY3
from sphinx.writers.latex import LaTeXTranslator
from util import test_root, SkipTest, remove_unicode_literals, with_app
from util import SkipTest, remove_unicode_literals, with_app
from test_build_html import ENV_WARNINGS
def teardown_module():
(test_root / '_build').rmtree(True)
latex_warnfile = StringIO()
LATEX_WARNINGS = ENV_WARNINGS + """\
None:None: WARNING: citation not found: missing
None:None: WARNING: no matching candidate for image URI u'foo.\\*'
@ -39,17 +33,17 @@ if PY3:
LATEX_WARNINGS = remove_unicode_literals(LATEX_WARNINGS)
@with_app(buildername='latex', warning=latex_warnfile, cleanenv=True)
def test_latex(app):
@with_app(buildername='latex', freshenv=True)
def test_latex(app, status, warning):
LaTeXTranslator.ignore_missing_images = True
app.builder.build_all()
latex_warnings = latex_warnfile.getvalue().replace(os.sep, '/')
latex_warnings = warning.getvalue().replace(os.sep, '/')
latex_warnings_exp = LATEX_WARNINGS % {
'root': re.escape(app.srcdir.replace(os.sep, '/'))}
'root': re.escape(app.srcdir.replace(os.sep, '/'))}
assert re.match(latex_warnings_exp + '$', latex_warnings), \
'Warnings don\'t match:\n' + \
'--- Expected (regex):\n' + latex_warnings_exp + \
'--- Got:\n' + latex_warnings
'Warnings don\'t match:\n' + \
'--- Expected (regex):\n' + latex_warnings_exp + \
'--- Got:\n' + latex_warnings
# file from latex_additional_files
assert (app.outdir / 'svgimg.svg').isfile()

View File

@ -14,20 +14,14 @@ import os
import re
from subprocess import Popen, PIPE
from six import PY3, StringIO
from six import PY3
from sphinx.writers.texinfo import TexinfoTranslator
from util import test_root, SkipTest, remove_unicode_literals, with_app
from util import SkipTest, remove_unicode_literals, with_app
from test_build_html import ENV_WARNINGS
def teardown_module():
(test_root / '_build').rmtree(True)
texinfo_warnfile = StringIO()
TEXINFO_WARNINGS = ENV_WARNINGS + """\
None:None: WARNING: citation not found: missing
None:None: WARNING: no matching candidate for image URI u'foo.\\*'
@ -38,17 +32,17 @@ if PY3:
TEXINFO_WARNINGS = remove_unicode_literals(TEXINFO_WARNINGS)
@with_app(buildername='texinfo', warning=texinfo_warnfile, cleanenv=True)
def test_texinfo(app):
@with_app('texinfo', freshenv=True)
def test_texinfo(app, status, warning):
TexinfoTranslator.ignore_missing_images = True
app.builder.build_all()
texinfo_warnings = texinfo_warnfile.getvalue().replace(os.sep, '/')
texinfo_warnings = warning.getvalue().replace(os.sep, '/')
texinfo_warnings_exp = TEXINFO_WARNINGS % {
'root': re.escape(app.srcdir.replace(os.sep, '/'))}
'root': re.escape(app.srcdir.replace(os.sep, '/'))}
assert re.match(texinfo_warnings_exp + '$', texinfo_warnings), \
'Warnings don\'t match:\n' + \
'--- Expected (regex):\n' + texinfo_warnings_exp + \
'--- Got:\n' + texinfo_warnings
'Warnings don\'t match:\n' + \
'--- Expected (regex):\n' + texinfo_warnings_exp + \
'--- Got:\n' + texinfo_warnings
# now, try to run makeinfo over it
cwd = os.getcwd()
os.chdir(app.outdir)

View File

@ -18,29 +18,16 @@ from util import with_app
def with_text_app(*args, **kw):
default_kw = {
'buildername': 'text',
'srcdir': '(empty)',
'confoverrides': {
'project': 'text',
'master_doc': 'contents',
},
'testroot': 'build-text',
}
default_kw.update(kw)
return with_app(*args, **default_kw)
@with_text_app()
def test_maxwitdh_with_prefix(app):
long_string = u' '.join([u"ham"] * 30)
contents = (
u".. seealso:: %(long_string)s\n\n"
u"* %(long_string)s\n"
u"* %(long_string)s\n"
u"\nspam egg\n"
) % locals()
(app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
app.builder.build_all()
result = (app.outdir / 'contents.txt').text(encoding='utf-8')
def test_maxwitdh_with_prefix(app, status, warning):
app.builder.build_update()
result = (app.outdir / 'maxwidth.txt').text(encoding='utf-8')
lines = result.splitlines()
line_widths = [column_width(line) for line in lines]
@ -58,105 +45,52 @@ def test_maxwitdh_with_prefix(app):
@with_text_app()
def test_lineblock(app):
def test_lineblock(app, status, warning):
# regression test for #1109: need empty line after line block
contents = (
u"* one\n"
u"\n"
u" | line-block 1\n"
u" | line-block 2\n"
u"\n"
u"followed paragraph.\n"
)
(app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
app.builder.build_all()
result = (app.outdir / 'contents.txt').text(encoding='utf-8')
app.builder.build_update()
result = (app.outdir / 'lineblock.txt').text(encoding='utf-8')
expect = (
u"* one\n"
u"\n"
u" line-block 1\n"
u" line-block 2\n"
u"\n"
u"followed paragraph.\n"
)
u"* one\n"
u"\n"
u" line-block 1\n"
u" line-block 2\n"
u"\n"
u"followed paragraph.\n"
)
assert result == expect
@with_text_app()
def test_nonascii_title_line(app):
title = u'\u65e5\u672c\u8a9e'
underline = u'=' * column_width(title)
content = u'\n'.join((title, underline, u''))
(app.srcdir / 'contents.rst').write_text(content, encoding='utf-8')
app.builder.build_all()
result = (app.outdir / 'contents.txt').text(encoding='utf-8')
expect_underline = underline.replace('=', '*')
def test_nonascii_title_line(app, status, warning):
app.builder.build_update()
result = (app.outdir / 'nonascii_title.txt').text(encoding='utf-8')
expect_underline = '******'
result_underline = result.splitlines()[2].strip()
assert expect_underline == result_underline
@with_text_app()
def test_nonascii_table(app):
text = u'\u65e5\u672c\u8a9e'
contents = (u"\n.. list-table::"
"\n"
"\n - - spam"
"\n - egg"
"\n"
"\n - - %(text)s"
"\n - %(text)s"
"\n" % locals())
(app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
app.builder.build_all()
result = (app.outdir / 'contents.txt').text(encoding='utf-8')
def test_nonascii_table(app, status, warning):
app.builder.build_update()
result = (app.outdir / 'nonascii_table.txt').text(encoding='utf-8')
lines = [line.strip() for line in result.splitlines() if line.strip()]
line_widths = [column_width(line) for line in lines]
assert len(set(line_widths)) == 1 # same widths
@with_text_app()
def test_nonascii_maxwidth(app):
sb_text = u'abc' #length=3
mb_text = u'\u65e5\u672c\u8a9e' #length=3
sb_line = ' '.join([sb_text] * int(MAXWIDTH / 3))
mb_line = ' '.join([mb_text] * int(MAXWIDTH / 3))
mix_line = ' '.join([sb_text, mb_text] * int(MAXWIDTH / 6))
contents = u'\n\n'.join((sb_line, mb_line, mix_line))
(app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
app.builder.build_all()
result = (app.outdir / 'contents.txt').text(encoding='utf-8')
def test_nonascii_maxwidth(app, status, warning):
app.builder.build_update()
result = (app.outdir / 'nonascii_maxwidth.txt').text(encoding='utf-8')
lines = [line.strip() for line in result.splitlines() if line.strip()]
line_widths = [column_width(line) for line in lines]
assert max(line_widths) < MAXWIDTH
@with_text_app()
def test_table_with_empty_cell(app):
contents = (u"""
+-----+-----+
| XXX | XXX |
+-----+-----+
| | XXX |
+-----+-----+
| XXX | |
+-----+-----+
""")
(app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
app.builder.build_all()
result = (app.outdir / 'contents.txt').text(encoding='utf-8')
def test_table_with_empty_cell(app, status, warning):
app.builder.build_update()
result = (app.outdir / 'table.txt').text(encoding='utf-8')
lines = [line.strip() for line in result.splitlines() if line.strip()]
assert lines[0] == "+-------+-------+"
assert lines[1] == "| XXX | XXX |"

View File

@ -1,77 +1,78 @@
# -*- coding: utf-8 -*-
"""
test_build_base
~~~~~~~~~~~~~~~
Test the base build process.
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import shutil
from nose.tools import with_setup
from util import test_roots, with_app, find_files
root = test_roots / 'test-intl'
build_dir = root / '_build'
locale_dir = build_dir / 'locale'
def setup_test():
# Delete remnants left over after failed build
locale_dir.rmtree(True)
# copy all catalogs into locale layout directory
for po in find_files(root, '.po'):
copy_po = (locale_dir / 'en' / 'LC_MESSAGES' / po)
if not copy_po.parent.exists():
copy_po.parent.makedirs()
shutil.copy(root / po, copy_po)
def teardown_test():
build_dir.rmtree(True),
@with_setup(setup_test, teardown_test)
@with_app(buildername='html', srcdir=root,
confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
def test_compile_all_catalogs(app):
app.builder.compile_all_catalogs()
catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
expect = set([
x.replace('.po', '.mo')
for x in find_files(catalog_dir, '.po')
])
actual = set(find_files(catalog_dir, '.mo'))
assert actual # not empty
assert actual == expect
@with_setup(setup_test, teardown_test)
@with_app(buildername='html', srcdir=root,
confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
def test_compile_specific_catalogs(app):
app.builder.compile_specific_catalogs(['admonitions'])
catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
actual = set(find_files(catalog_dir, '.mo'))
assert actual == set(['admonitions.mo'])
@with_setup(setup_test, teardown_test)
@with_app(buildername='html', srcdir=root,
confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
def test_compile_update_catalogs(app):
app.builder.compile_update_catalogs()
catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
expect = set([
x.replace('.po', '.mo')
for x in find_files(catalog_dir, '.po')
])
actual = set(find_files(catalog_dir, '.mo'))
assert actual # not empty
assert actual == expect
# -*- coding: utf-8 -*-
"""
test_build_base
~~~~~~~~~~~~~~~
Test the base build process.
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import shutil
from nose.tools import with_setup
from util import with_app, find_files, rootdir, tempdir
root = tempdir / 'test-intl'
build_dir = root / '_build'
locale_dir = build_dir / 'locale'
def setup_test():
# delete remnants left over after failed build
root.rmtree(True)
(rootdir / 'roots' / 'test-intl').copytree(root)
# copy all catalogs into locale layout directory
for po in find_files(root, '.po'):
copy_po = (locale_dir / 'en' / 'LC_MESSAGES' / po)
if not copy_po.parent.exists():
copy_po.parent.makedirs()
shutil.copy(root / po, copy_po)
def teardown_test():
build_dir.rmtree(True)
@with_setup(setup_test, teardown_test)
@with_app(buildername='html', testroot='intl',
confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
def test_compile_all_catalogs(app, status, warning):
app.builder.compile_all_catalogs()
catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
expect = set([
x.replace('.po', '.mo')
for x in find_files(catalog_dir, '.po')
])
actual = set(find_files(catalog_dir, '.mo'))
assert actual # not empty
assert actual == expect
@with_setup(setup_test, teardown_test)
@with_app(buildername='html', testroot='intl',
confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
def test_compile_specific_catalogs(app, status, warning):
app.builder.compile_specific_catalogs(['admonitions'])
catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
actual = set(find_files(catalog_dir, '.mo'))
assert actual == set(['admonitions.mo'])
@with_setup(setup_test, teardown_test)
@with_app(buildername='html', testroot='intl',
confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
def test_compile_update_catalogs(app, status, warning):
app.builder.compile_update_catalogs()
catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
expect = set([
x.replace('.po', '.mo')
for x in find_files(catalog_dir, '.po')
])
actual = set(find_files(catalog_dir, '.mo'))
assert actual # not empty
assert actual == expect

View File

@ -20,7 +20,7 @@ from sphinx.errors import ExtensionError, ConfigError, VersionRequirementError
@with_app(confoverrides={'master_doc': 'master', 'nonexisting_value': 'True',
'latex_elements.docclass': 'scrartcl',
'modindex_common_prefix': 'path1,path2'})
def test_core_config(app):
def test_core_config(app, status, warning):
cfg = app.config
# simple values
@ -36,7 +36,7 @@ def test_core_config(app):
# simple default values
assert 'locale_dirs' not in cfg.__dict__
assert cfg.locale_dirs == []
assert cfg.trim_footnote_reference_space == False
assert cfg.trim_footnote_reference_space is False
# complex default values
assert 'html_title' not in cfg.__dict__
@ -68,7 +68,7 @@ def test_core_config(app):
@with_app()
def test_extension_values(app):
def test_extension_values(app, status, warning):
cfg = app.config
# default value

View File

@ -1,173 +1,114 @@
# -*- coding: utf-8 -*-
"""
test_directive_code
~~~~~~~~~~~~~~~~~~~
Test the code-block directive.
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from xml.etree import ElementTree
from util import with_app, test_roots
def teardown_module():
(test_roots / 'test-directive-code' / '_build').rmtree(True)
@with_app(buildername='xml',
srcdir=(test_roots / 'test-directive-code'),
_copy_to_temp=True)
def test_code_block(app):
app.builder.build('index')
et = ElementTree.parse(app.outdir / 'index.xml')
secs = et.findall('./section/section')
code_block = secs[0].findall('literal_block')
assert len(code_block) > 0
actual = code_block[0].text
expect = (
" def ruby?\n" +
" false\n" +
" end"
)
assert actual == expect
@with_app(buildername='xml',
srcdir=(test_roots / 'test-directive-code'),
_copy_to_temp=True)
def test_code_block_dedent(app):
outdir = app.outdir
def get_dedent_actual(dedent):
dedent_text = (app.srcdir / 'dedent.rst').text(encoding='utf-8')
dedent_text = re.sub(
r':dedent: \d', ':dedent: %d' % dedent, dedent_text)
(app.srcdir / 'dedent.rst').write_text(dedent_text, encoding='utf-8')
# use another output dir to force rebuild
app.outdir = outdir / str(dedent)
app._init_env(freshenv=True)
app._init_builder(app.builder.name)
app.builder.build(['dedent'], method='specific')
et = ElementTree.parse(app.outdir / 'dedent.xml')
secs = et.findall('./section/section')
code_block = secs[0].findall('literal_block')
assert len(code_block) > 0
actual = code_block[0].text
return actual
for i in range(5): # 0-4
actual = get_dedent_actual(i)
indent = " " * (4 - i)
expect = (
indent + "def ruby?\n" +
indent + " false\n" +
indent + "end"
)
assert (i, actual) == (i, expect)
actual = get_dedent_actual(1000)
assert actual == '\n\n'
@with_app(buildername='html',
srcdir=(test_roots / 'test-directive-code'),
_copy_to_temp=True)
def test_code_block_caption_html(app):
app.builder.build('index')
html = (app.outdir / 'caption.html').text()
caption = '<div class="code-block-caption"><code>caption-test.rb</code></div>'
assert caption in html
@with_app(buildername='latex',
srcdir=(test_roots / 'test-directive-code'),
_copy_to_temp=True)
def test_code_block_caption_latex(app):
app.builder.build('index')
latex = (app.outdir / 'Python.tex').text()
caption = ('{\\colorbox[rgb]{0.9,0.9,0.9}{\\makebox[\\textwidth][l]'
'{\\small\\texttt{caption-test.rb}}}}')
assert caption in latex
@with_app(buildername='xml',
srcdir=(test_roots / 'test-directive-code'),
_copy_to_temp=True)
def test_literal_include(app):
app.builder.build('index')
et = ElementTree.parse(app.outdir / 'index.xml')
secs = et.findall('./section/section')
literal_include = secs[1].findall('literal_block')
literal_src = (app.srcdir / 'literal.inc').text(encoding='utf-8')
assert len(literal_include) > 0
actual = literal_include[0].text
assert actual == literal_src
@with_app(buildername='xml',
srcdir=(test_roots / 'test-directive-code'),
_copy_to_temp=True)
def test_literal_include_dedent(app):
outdir = app.outdir
literal_src = (app.srcdir / 'literal.inc').text(encoding='utf-8')
literal_lines = [l[4:] for l in literal_src.split('\n')[9:11]]
def get_dedent_actual(dedent):
dedent_text = (app.srcdir / 'dedent.rst').text(encoding='utf-8')
dedent_text = re.sub(
r':dedent: \d', ':dedent: %d' % dedent, dedent_text)
(app.srcdir / 'dedent.rst').write_text(dedent_text, encoding='utf-8')
# use another output dir to force rebuild
app.outdir = outdir / str(dedent)
app._init_env(freshenv=True)
app._init_builder(app.builder.name)
app.builder.build(['dedent'])
et = ElementTree.parse(app.outdir / 'dedent.xml')
secs = et.findall('./section/section')
literal_include = secs[1].findall('literal_block')
assert len(literal_include) > 0
actual = literal_include[0].text
return actual
for i in range(5): # 0-4
actual = get_dedent_actual(i)
indent = " " * (4 - i)
expect = '\n'.join(indent + l for l in literal_lines) + '\n'
assert (i, actual) == (i, expect)
actual = get_dedent_actual(1000)
assert actual == '\n\n'
@with_app(buildername='html',
srcdir=(test_roots / 'test-directive-code'),
_copy_to_temp=True)
def test_literalinclude_caption_html(app):
app.builder.build('index')
html = (app.outdir / 'caption.html').text()
caption = '<div class="code-block-caption"><code>caption-test.py</code></div>'
assert caption in html
@with_app(buildername='latex',
srcdir=(test_roots / 'test-directive-code'),
_copy_to_temp=True)
def test_literalinclude_caption_latex(app):
app.builder.build('index')
latex = (app.outdir / 'Python.tex').text()
caption = ('{\\colorbox[rgb]{0.9,0.9,0.9}{\\makebox[\\textwidth][l]'
'{\\small\\texttt{caption-test.py}}}}')
assert caption in latex
# -*- coding: utf-8 -*-
"""
test_directive_code
~~~~~~~~~~~~~~~~~~~
Test the code-block directive.
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from xml.etree import ElementTree
from util import with_app
@with_app('xml', testroot='directive-code')
def test_code_block(app, status, warning):
app.builder.build('index')
et = ElementTree.parse(app.outdir / 'index.xml')
secs = et.findall('./section/section')
code_block = secs[0].findall('literal_block')
assert len(code_block) > 0
actual = code_block[0].text
expect = (
" def ruby?\n" +
" false\n" +
" end"
)
assert actual == expect
@with_app('xml', testroot='directive-code')
def test_code_block_dedent(app, status, warning):
app.builder.build(['dedent_code'])
et = ElementTree.parse(app.outdir / 'dedent_code.xml')
blocks = et.findall('./section/section/literal_block')
for i in range(5): # 0-4
actual = blocks[i].text
indent = " " * (4 - i)
expect = (
indent + "def ruby?\n" +
indent + " false\n" +
indent + "end"
)
assert (i, actual) == (i, expect)
assert blocks[5].text == '\n\n' # dedent: 1000
@with_app('html', testroot='directive-code')
def test_code_block_caption_html(app, status, warning):
app.builder.build(['caption'])
html = (app.outdir / 'caption.html').text()
caption = '<div class="code-block-caption"><code>caption-test.rb</code></div>'
assert caption in html
@with_app('latex', testroot='directive-code')
def test_code_block_caption_latex(app, status, warning):
app.builder.build_all()
latex = (app.outdir / 'Python.tex').text()
caption = ('{\\colorbox[rgb]{0.9,0.9,0.9}{\\makebox[\\textwidth][l]'
'{\\small\\texttt{caption-test.rb}}}}')
assert caption in latex
@with_app('xml', testroot='directive-code')
def test_literal_include(app, status, warning):
app.builder.build(['index'])
et = ElementTree.parse(app.outdir / 'index.xml')
secs = et.findall('./section/section')
literal_include = secs[1].findall('literal_block')
literal_src = (app.srcdir / 'literal.inc').text(encoding='utf-8')
assert len(literal_include) > 0
actual = literal_include[0].text
assert actual == literal_src
@with_app('xml', testroot='directive-code')
def test_literal_include_dedent(app, status, warning):
literal_src = (app.srcdir / 'literal.inc').text(encoding='utf-8')
literal_lines = [l[4:] for l in literal_src.split('\n')[9:11]]
app.builder.build(['dedent'])
et = ElementTree.parse(app.outdir / 'dedent.xml')
blocks = et.findall('./section/section/literal_block')
for i in range(5): # 0-4
actual = blocks[i].text
indent = ' ' * (4 - i)
expect = '\n'.join(indent + l for l in literal_lines) + '\n'
assert (i, actual) == (i, expect)
assert blocks[5].text == '\n\n' # dedent: 1000
@with_app('html', testroot='directive-code')
def test_literalinclude_caption_html(app, status, warning):
app.builder.build('index')
html = (app.outdir / 'caption.html').text()
caption = '<div class="code-block-caption"><code>caption-test.py</code></div>'
assert caption in html
@with_app('latex', testroot='directive-code')
def test_literalinclude_caption_latex(app, status, warning):
app.builder.build('index')
latex = (app.outdir / 'Python.tex').text()
caption = ('{\\colorbox[rgb]{0.9,0.9,0.9}{\\makebox[\\textwidth][l]'
'{\\small\\texttt{caption-test.py}}}}')
assert caption in latex

View File

@ -13,15 +13,11 @@ import re
from docutils import nodes
from util import with_app, test_roots
from util import with_app
def teardown_module():
(test_roots / 'test-directive-only' / '_build').rmtree(True)
@with_app(buildername='text', srcdir=(test_roots / 'test-directive-only'))
def test_sectioning(app):
@with_app('text', testroot='directive-only')
def test_sectioning(app, status, warning):
def getsects(section):
if not isinstance(section, nodes.section):

View File

@ -9,50 +9,17 @@
:license: BSD, see LICENSE for details.
"""
import os
import re
from functools import wraps
from six import StringIO
from util import test_roots, TestApp, path, SkipTest
html_warnfile = StringIO()
root = test_roots / 'test-docutilsconf'
# need cleanenv to rebuild every time.
# a docutils.conf change does not trigger a rebuild on its own.
def with_conf_app(docutilsconf='', *args, **kwargs):
default_kw = {
'srcdir': root,
'cleanenv': True,
}
default_kw.update(kwargs)
def generator(func):
@wraps(func)
def deco(*args2, **kwargs2):
app = TestApp(*args, **default_kw)
(app.srcdir / 'docutils.conf').write_text(docutilsconf)
try:
cwd = os.getcwd()
os.chdir(app.srcdir)
func(app, *args2, **kwargs2)
finally:
os.chdir(cwd)
# don't execute cleanup if test failed
app.cleanup()
return deco
return generator
from util import with_app, path, SkipTest
def regex_count(expr, result):
return len(re.findall(expr, result))
@with_conf_app(buildername='html')
def test_html_with_default_docutilsconf(app):
@with_app('html', testroot='docutilsconf', freshenv=True, docutilsconf='')
def test_html_with_default_docutilsconf(app, status, warning):
app.builder.build(['contents'])
result = (app.outdir / 'contents.html').text(encoding='utf-8')
@ -62,13 +29,13 @@ def test_html_with_default_docutilsconf(app):
assert regex_count(r'<td class="option-group" colspan="2">', result) == 1
@with_conf_app(buildername='html', docutilsconf=(
@with_app('html', testroot='docutilsconf', freshenv=True, docutilsconf=(
'\n[html4css1 writer]'
'\noption-limit:1'
'\nfield-name-limit:1'
'\n')
)
def test_html_with_docutilsconf(app):
def test_html_with_docutilsconf(app, status, warning):
app.builder.build(['contents'])
result = (app.outdir / 'contents.html').text(encoding='utf-8')
@ -78,41 +45,32 @@ def test_html_with_docutilsconf(app):
assert regex_count(r'<td class="option-group" colspan="2">', result) == 2
@with_conf_app(buildername='html', warning=html_warnfile)
def test_html(app):
@with_app('html', testroot='docutilsconf')
def test_html(app, status, warning):
app.builder.build(['contents'])
assert html_warnfile.getvalue() == ''
assert warning.getvalue() == ''
@with_conf_app(buildername='latex', warning=html_warnfile)
def test_latex(app):
@with_app('latex', testroot='docutilsconf')
def test_latex(app, status, warning):
app.builder.build(['contents'])
assert html_warnfile.getvalue() == ''
assert warning.getvalue() == ''
@with_conf_app(buildername='man', warning=html_warnfile)
def test_man(app):
@with_app('man', testroot='docutilsconf')
def test_man(app, status, warning):
app.builder.build(['contents'])
assert html_warnfile.getvalue() == ''
assert warning.getvalue() == ''
@with_conf_app(buildername='texinfo', warning=html_warnfile)
def test_texinfo(app):
@with_app('texinfo', testroot='docutilsconf')
def test_texinfo(app, status, warning):
app.builder.build(['contents'])
@with_conf_app(buildername='html', srcdir='(empty)',
docutilsconf='[general]\nsource_link=true\n')
def test_docutils_source_link(app):
srcdir = path(app.srcdir)
(srcdir / 'conf.py').write_text('')
(srcdir / 'contents.rst').write_text('')
app.builder.build_all()
@with_conf_app(buildername='html', srcdir='(empty)',
docutilsconf='[general]\nsource_link=true\n')
def test_docutils_source_link_with_nonascii_file(app):
@with_app('html', testroot='docutilsconf',
docutilsconf='[general]\nsource_link=true\n')
def test_docutils_source_link_with_nonascii_file(app, status, warning):
srcdir = path(app.srcdir)
mb_name = u'\u65e5\u672c\u8a9e'
try:
@ -123,7 +81,4 @@ def test_docutils_source_link_with_nonascii_file(app):
'nonascii filename not supported on this filesystem encoding: '
'%s', FILESYSTEMENCODING)
(srcdir / 'conf.py').write_text('')
(srcdir / 'contents.rst').write_text('')
app.builder.build_all()

View File

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
"""
test_py_domain
test_domain_py
~~~~~~~~~~~~~~
Tests the Python Domain

View File

@ -1,80 +1,80 @@
# -*- coding: utf-8 -*-
"""
test_domain_std
~~~~~~~~~~~~~~~
Tests the std domain
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from docutils import nodes
from sphinx.domains.std import StandardDomain
from util import mock
def test_process_doc_handle_figure_caption():
env = mock.Mock(domaindata={})
figure_node = nodes.figure(
'',
nodes.caption('caption text', 'caption text'),
)
document = mock.Mock(
nametypes={'testname': True},
nameids={'testname': 'testid'},
ids={'testid': figure_node},
)
domain = StandardDomain(env)
if 'testname' in domain.data['labels']:
del domain.data['labels']['testname']
domain.process_doc(env, 'testdoc', document)
assert 'testname' in domain.data['labels']
assert domain.data['labels']['testname'] == (
'testdoc', 'testid', 'caption text')
def test_process_doc_handle_image_parent_figure_caption():
env = mock.Mock(domaindata={})
img_node = nodes.image('', alt='image alt')
figure_node = nodes.figure(
'',
nodes.caption('caption text', 'caption text'),
img_node,
)
document = mock.Mock(
nametypes={'testname': True},
nameids={'testname': 'testid'},
ids={'testid': img_node},
)
domain = StandardDomain(env)
if 'testname' in domain.data['labels']:
del domain.data['labels']['testname']
domain.process_doc(env, 'testdoc', document)
assert 'testname' in domain.data['labels']
assert domain.data['labels']['testname'] == (
'testdoc', 'testid', 'caption text')
def test_process_doc_handle_table_title():
env = mock.Mock(domaindata={})
table_node = nodes.table(
'',
nodes.title('title text', 'title text'),
)
document = mock.Mock(
nametypes={'testname': True},
nameids={'testname': 'testid'},
ids={'testid': table_node},
)
domain = StandardDomain(env)
if 'testname' in domain.data['labels']:
del domain.data['labels']['testname']
domain.process_doc(env, 'testdoc', document)
assert 'testname' in domain.data['labels']
assert domain.data['labels']['testname'] == (
'testdoc', 'testid', 'title text')
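Taken together, the three tests above pin down the record that StandardDomain.process_doc stores for an explicitly named figure, image or table. Schematically (values copied from the assertions; the unpacking names are ours, not part of the commit):

# each entry in domain.data['labels'] is a (docname, node id, label title) triple
labels_entry = ('testdoc', 'testid', 'caption text')
docname, labelid, sectname = labels_entry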

View File

@ -8,9 +8,12 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
from six import PY3
from util import TestApp, remove_unicode_literals, path, with_app
from util import TestApp, remove_unicode_literals, path
from sphinx.builders.html import StandaloneHTMLBuilder
from sphinx.builders.latex import LaTeXBuilder
@ -18,21 +21,25 @@ from sphinx.builders.latex import LaTeXBuilder
app = env = None
warnings = []
def setup_module():
global app, env
app = TestApp(freshenv=True, _copy_to_temp=True)
app = TestApp(srcdir='env-test')
env = app.env
env.set_warnfunc(lambda *args: warnings.append(args))
def teardown_module():
app.cleanup()
def warning_emitted(file, text):
for warning in warnings:
if len(warning) == 2 and file in warning[1] and text in warning[0]:
return True
return False
# Tests are run in the order they appear in the file, therefore we can
# afford to not run update() in the setup but in its own test
@ -46,6 +53,7 @@ def test_first_update():
# test if exclude_patterns works ok
assert 'subdir/excluded' not in env.found_docs
def test_images():
assert warning_emitted('images', 'image file not readable: foo.png')
assert warning_emitted('images', 'nonlocal image URI found: '
@ -75,6 +83,7 @@ def test_images():
assert set(latexbuilder.images.values()) == \
set(['img.pdf', 'img.png', 'img1.png', 'simg.png', 'svgimg.pdf'])
def test_second_update():
# delete, add and "edit" (change saved mtime) some files and update again
env.all_docs['contents'] = 0
@ -96,19 +105,6 @@ def test_second_update():
assert 'autodoc' not in env.found_docs
@with_app(srcdir='(empty)')
def test_undecodable_source_reading_emit_warnings(app):
# issue #1524
warnings[:] = []
app.env.set_warnfunc(lambda *args: warnings.append(args))
(app.srcdir / 'contents.rst').write_bytes(b'1\xbb2')
_, _, it = app.env.update(app.config, app.srcdir, app.doctreedir, app)
list(it) # the generator does all the work
assert warning_emitted(
'contents', 'undecodable source characters, replacing with "?":'
)
def test_object_inventory():
refs = env.domaindata['py']['objects']

View File

@ -8,49 +8,24 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
from functools import wraps
from six import iteritems, StringIO
from sphinx.ext.autosummary import mangle_signature
from util import test_roots, TestApp
from util import with_app
html_warnfile = StringIO()
def with_autosummary_app(*args, **kw):
default_kw = {
'srcdir': (test_roots / 'test-autosummary'),
'confoverrides': {
'extensions': ['sphinx.ext.autosummary'],
'autosummary_generate': True,
'source_suffix': '.rst'
}
default_kw = {
'testroot': 'autosummary',
'confoverrides': {
'extensions': ['sphinx.ext.autosummary'],
'autosummary_generate': True,
'source_suffix': '.rst'
}
default_kw.update(kw)
def generator(func):
@wraps(func)
def deco(*args2, **kwargs2):
# Now, modify the python path...
srcdir = default_kw['srcdir']
sys.path.insert(0, srcdir)
try:
app = TestApp(*args, **default_kw)
func(app, *args2, **kwargs2)
finally:
if srcdir in sys.path:
sys.path.remove(srcdir)
# remove the auto-generated dummy_module.rst
dummy_rst = srcdir / 'dummy_module.rst'
if dummy_rst.isfile():
dummy_rst.unlink()
# don't execute cleanup if test failed
app.cleanup()
return deco
return generator
}
def test_mangle_signature():
@ -79,10 +54,8 @@ def test_mangle_signature():
assert res == outp, (u"'%s' -> '%s' != '%s'" % (inp, res, outp))
@with_autosummary_app(buildername='html', warning=html_warnfile)
def test_get_items_summary(app):
app.builddir.rmtree(True)
@with_app(buildername='html', **default_kw)
def test_get_items_summary(app, status, warning):
# monkey-patch Autosummary.get_items so we can easily get access to its
# results.
import sphinx.ext.autosummary
@ -96,13 +69,17 @@ def test_get_items_summary(app):
autosummary_items[name] = result
return results
def handler(app, what, name, obj, options, lines):
assert isinstance(lines, list)
app.connect('autodoc-process-docstring', handler)
sphinx.ext.autosummary.Autosummary.get_items = new_get_items
try:
app.builder.build_all()
finally:
sphinx.ext.autosummary.Autosummary.get_items = orig_get_items
html_warnings = html_warnfile.getvalue()
html_warnings = warning.getvalue()
assert html_warnings == ''
expected_values = {
@ -118,13 +95,3 @@ def test_get_items_summary(app):
for key, expected in iteritems(expected_values):
assert autosummary_items[key][2] == expected, 'Summary for %s was %r -'\
' expected %r' % (key, autosummary_items[key], expected)
@with_autosummary_app(buildername='html')
def test_process_doc_event(app):
app.builddir.rmtree(True)
def handler(app, what, name, obj, options, lines):
assert isinstance(lines, list)
app.connect('autodoc-process-docstring', handler)
app.builder.build_all()

View File

@ -15,7 +15,7 @@ from util import with_app
@with_app(buildername='coverage')
def test_build(app):
def test_build(app, status, warning):
app.builder.build_all()
py_undoc = (app.outdir / 'python.txt').text()

View File

@ -12,26 +12,24 @@ from __future__ import print_function
import sys
from six import StringIO
from util import with_app
status = StringIO()
cleanup_called = 0
@with_app(buildername='doctest', status=status)
def test_build(app):
@with_app(buildername='doctest', testroot='doctest')
def test_build(app, status, warning):
global cleanup_called
cleanup_called = 0
app.builder.build_all()
if app.statuscode != 0:
print(status.getvalue(), file=sys.stderr)
assert False, 'failures in doctests'
assert False, 'failures in doctests:' + status.getvalue()
# in doctest.txt, there are two named groups and the default group,
# so the cleanup function must be called three times
assert cleanup_called == 3, 'testcleanup did not get executed enough times'
def cleanup_call():
global cleanup_called
cleanup_called += 1

View File

@ -17,7 +17,7 @@ from docutils import nodes
from sphinx import addnodes
from sphinx.ext.intersphinx import read_inventory_v1, read_inventory_v2, \
load_mappings, missing_reference
load_mappings, missing_reference
from util import with_app, with_tempdir
@ -49,9 +49,9 @@ def test_read_inventory_v1():
f.readline()
invdata = read_inventory_v1(f, '/util', posixpath.join)
assert invdata['py:module']['module'] == \
('foo', '1.0', '/util/foo.html#module-module', '-')
('foo', '1.0', '/util/foo.html#module-module', '-')
assert invdata['py:class']['module.cls'] == \
('foo', '1.0', '/util/foo.html#module.cls', '-')
('foo', '1.0', '/util/foo.html#module.cls', '-')
def test_read_inventory_v2():
@ -68,19 +68,19 @@ def test_read_inventory_v2():
assert len(invdata1['py:module']) == 2
assert invdata1['py:module']['module1'] == \
('foo', '2.0', '/util/foo.html#module-module1', 'Long Module desc')
('foo', '2.0', '/util/foo.html#module-module1', 'Long Module desc')
assert invdata1['py:module']['module2'] == \
('foo', '2.0', '/util/foo.html#module-module2', '-')
('foo', '2.0', '/util/foo.html#module-module2', '-')
assert invdata1['py:function']['module1.func'][2] == \
'/util/sub/foo.html#module1.func'
'/util/sub/foo.html#module1.func'
assert invdata1['c:function']['CFunc'][2] == '/util/cfunc.html#CFunc'
assert invdata1['std:term']['a term'][2] == \
'/util/glossary.html#term-a-term'
'/util/glossary.html#term-a-term'
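As a reading aid (not part of this change), every inventory entry asserted above is a plain 4-tuple. A self-contained illustration, with values taken from the assertions:

# the four fields each entry carries: project name, project version,
# target URI and display name ('-' means "use the object name")
entry = ('foo', '2.0', '/util/foo.html#module-module1', 'Long Module desc')
project, version, uri, dispname = entry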
@with_app()
@with_tempdir
def test_missing_reference(tempdir, app):
def test_missing_reference(tempdir, app, status, warning):
inv_file = tempdir / 'inventory'
inv_file.write_bytes(inventory_v2)
app.config.intersphinx_mapping = {
@ -94,7 +94,7 @@ def test_missing_reference(tempdir, app):
inv = app.env.intersphinx_inventory
assert inv['py:module']['module2'] == \
('foo', '2.0', 'http://docs.python.org/foo.html#module-module2', '-')
('foo', '2.0', 'http://docs.python.org/foo.html#module-module2', '-')
# create fake nodes and check referencing
@ -156,7 +156,7 @@ def test_missing_reference(tempdir, app):
@with_app()
@with_tempdir
def test_load_mappings_warnings(tempdir, app):
def test_load_mappings_warnings(tempdir, app, status, warning):
"""
load_mappings issues a warning if new-style mapping
identifiers are not alphanumeric
@ -174,4 +174,4 @@ def test_load_mappings_warnings(tempdir, app):
app.config.intersphinx_cache_limit = 0
# load the inventory and check if it's done correctly
load_mappings(app)
assert len(app._warning.content) == 2
assert warning.getvalue().count('\n') == 2
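For context, a minimal conf.py sketch (hypothetical URIs, not part of this commit) of the two mapping styles the docstring above refers to; only the new-style form has an identifier, and load_mappings warns when that identifier is not alphanumeric:

intersphinx_mapping = {
    # new-style: identifier -> (target URI, inventory file or None)
    'python': ('http://docs.python.org/', None),
    # old-style: target URI -> inventory file; no identifier, no warning
    'http://docs.python.org/2': None,
}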

View File

@ -9,13 +9,12 @@
:license: BSD, see LICENSE for details.
"""
import os
from util import with_app
@with_app(buildername='html', tags=['test_linkcode'], _copy_to_temp=True)
def test_html(app):
app.builder.build_all()
@with_app('html', tags=['test_linkcode'])
def test_html(app, status, warning):
app.builder.build(['objects'])
stuff = (app.outdir / 'objects.html').text(encoding='utf-8')

View File

@ -1,43 +1,32 @@
# -*- coding: utf-8 -*-
"""
test_ext_viewcode
~~~~~~~~~~~~~~~~~
Test sphinx.ext.viewcode extension.
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from six import StringIO
from util import test_roots, with_app
warnfile = StringIO()
root = test_roots / 'test-ext-viewcode'
doctreedir = root / '_build' / 'doctree'
def teardown_module():
(root / '_build').rmtree(True)
@with_app(srcdir=root, warning=warnfile)
def test_simple(app):
app.builder.build_all()
warnings = re.sub(r'\\+', '/', warnfile.getvalue())
assert re.findall(
r"index.rst:\d+: WARNING: Object named 'func1' not found in include " +
r"file .*/spam/__init__.py'",
warnings
)
result = (app.outdir / 'index.html').text(encoding='utf-8')
assert result.count('href="_modules/spam/mod1.html#func1"') == 2
assert result.count('href="_modules/spam/mod2.html#func2"') == 2
assert result.count('href="_modules/spam/mod1.html#Class1"') == 2
assert result.count('href="_modules/spam/mod2.html#Class2"') == 2
# -*- coding: utf-8 -*-
"""
test_ext_viewcode
~~~~~~~~~~~~~~~~~
Test sphinx.ext.viewcode extension.
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from util import with_app
@with_app(testroot='ext-viewcode')
def test_simple(app, status, warning):
app.builder.build_all()
warnings = re.sub(r'\\+', '/', warning.getvalue())
assert re.findall(
r"index.rst:\d+: WARNING: Object named 'func1' not found in include " +
r"file .*/spam/__init__.py'",
warnings
)
result = (app.outdir / 'index.html').text(encoding='utf-8')
assert result.count('href="_modules/spam/mod1.html#func1"') == 2
assert result.count('href="_modules/spam/mod2.html#func2"') == 2
assert result.count('href="_modules/spam/mod1.html#Class1"') == 2
assert result.count('href="_modules/spam/mod2.html#Class2"') == 2

View File

@ -1,37 +0,0 @@
# -*- coding: utf-8 -*-
"""
test_footnote
~~~~~~~~~~~~~
Test for footnote and citation.
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from util import test_root, with_app
def teardown_module():
(test_root / '_build').rmtree(True)
@with_app(buildername='html')
def test_html(app):
app.builder.build(['footnote'])
result = (app.outdir / 'footnote.html').text(encoding='utf-8')
expects = [
'<a class="footnote-reference" href="#id5" id="id1">[1]</a>',
'<a class="footnote-reference" href="#id6" id="id2">[2]</a>',
'<a class="footnote-reference" href="#foo" id="id3">[3]</a>',
'<a class="reference internal" href="#bar" id="id4">[bar]</a>',
'<a class="fn-backref" href="#id1">[1]</a>',
'<a class="fn-backref" href="#id2">[2]</a>',
'<a class="fn-backref" href="#id3">[3]</a>',
'<a class="fn-backref" href="#id4">[bar]</a>',
]
for expect in expects:
matches = re.findall(re.escape(expect), result)
assert len(matches) == 1

View File

@ -15,12 +15,7 @@ from pygments.formatters.html import HtmlFormatter
from sphinx.highlighting import PygmentsBridge
from util import with_app, SkipTest
try:
import pygments
except ImportError:
raise SkipTest('pygments not available')
from util import with_app
class MyLexer(RegexLexer):
@ -46,13 +41,14 @@ class ComplainOnUnhighlighted(PygmentsBridge):
@with_app()
def test_add_lexer(app):
def test_add_lexer(app, status, warning):
app.add_lexer('test', MyLexer())
bridge = PygmentsBridge('html')
ret = bridge.highlight_block('ab', 'test')
assert '<span class="n">a</span>b' in ret
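For reference, a minimal sketch (hypothetical extension module, not part of this commit) of how a project would register such a custom lexer through the same app.add_lexer API exercised above:

from pygments.lexer import RegexLexer
from pygments.token import Name, Text

class FooLexer(RegexLexer):
    # trivial lexer: 'a' becomes a Name token, anything else is Text
    name = 'foo'
    tokens = {'root': [(r'a', Name), (r'.', Text)]}

def setup(app):
    app.add_lexer('foo', FooLexer())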
def test_detect_interactive():
bridge = ComplainOnUnhighlighted('html')
blocks = [
@ -60,11 +56,12 @@ def test_detect_interactive():
>>> testing()
True
""",
]
]
for block in blocks:
ret = bridge.highlight_block(block.lstrip(), 'python')
assert ret.startswith("<div class=\"highlight\">")
def test_set_formatter():
PygmentsBridge.html_formatter = MyFormatter
try:
@ -74,6 +71,7 @@ def test_set_formatter():
finally:
PygmentsBridge.html_formatter = HtmlFormatter
def test_trim_doctest_flags():
PygmentsBridge.html_formatter = MyFormatter
try:

View File

@ -13,5 +13,5 @@ from util import with_app
@with_app(confoverrides={'language': 'de'})
def test_i18n(app):
def test_i18n(app, status, warning):
app.builder.build_all()

View File

@ -16,20 +16,17 @@ import re
from subprocess import Popen, PIPE
from xml.etree import ElementTree
from six import StringIO, string_types
from six import string_types
from util import test_roots, path, with_app, SkipTest
from util import tempdir, rootdir, path, with_app, SkipTest
warnfile = StringIO()
root = test_roots / 'test-intl'
doctreedir = root / '_build' / 'doctree'
root = tempdir / 'test-intl'
def with_intl_app(*args, **kw):
default_kw = {
'srcdir': root,
'doctreedir': doctreedir,
'testroot': 'intl',
'confoverrides': {
'language': 'xx', 'locale_dirs': ['.'],
'gettext_compact': False,
@ -40,21 +37,21 @@ def with_intl_app(*args, **kw):
def setup_module():
if not root.exists():
(rootdir / 'roots' / 'test-intl').copytree(root)
# Delete remnants left over after failed build
(root / 'xx').rmtree(True)
(root / 'xx' / 'LC_MESSAGES').makedirs()
# Compile all required catalogs into binary format (*.mo).
for dirpath, dirs, files in os.walk(root):
dirpath = path(dirpath)
for f in [f for f in files if f.endswith('.po')]:
po = dirpath / f
mo = root / 'xx' / 'LC_MESSAGES' / (
os.path.relpath(po[:-3], root) + '.mo')
os.path.relpath(po[:-3], root) + '.mo')
if not mo.parent.exists():
mo.parent.makedirs()
try:
p = Popen(['msgfmt', po, '-o', mo],
stdout=PIPE, stderr=PIPE)
stdout=PIPE, stderr=PIPE)
except OSError:
raise SkipTest # most likely msgfmt was not found
else:
@ -67,11 +64,6 @@ def setup_module():
assert mo.isfile(), 'msgfmt failed'
def teardown_module():
(root / '_build').rmtree(True)
(root / 'xx').rmtree(True)
def elem_gettexts(elem):
def itertext(self):
# this function was copied from Python 2.7's 'ElementTree.itertext'.
@ -106,25 +98,26 @@ def assert_elem(elem, texts=None, refs=None, names=None):
@with_intl_app(buildername='text')
def test_simple(app):
def test_simple(app, status, warning):
app.builder.build(['bom'])
result = (app.outdir / 'bom.txt').text(encoding='utf-8')
expect = (u"\nDatei mit UTF-8"
u"\n***************\n" # underline matches new translation
u"\n***************\n" # underline matches new translation
u"\nThis file has umlauts: äöü.\n")
assert result == expect
@with_intl_app(buildername='text')
def test_subdir(app):
def test_subdir(app, status, warning):
app.builder.build(['subdir/contents'])
result = (app.outdir / 'subdir' / 'contents.txt').text(encoding='utf-8')
assert result.startswith(u"\nsubdir contents\n***************\n")
@with_intl_app(buildername='text', warning=warnfile)
def test_i18n_warnings_in_translation(app):
app.builddir.rmtree(True)
@with_intl_app(buildername='text')
def test_i18n_warnings_in_translation(app, status, warning):
app.outdir.rmtree(True) # for warnings acceleration
app.doctreedir.rmtree(True)
app.builder.build(['warnings'])
result = (app.outdir / 'warnings.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH REST WARNINGS"
@ -133,62 +126,62 @@ def test_i18n_warnings_in_translation(app):
assert result == expect
warnings = warnfile.getvalue().replace(os.sep, '/')
warnings = warning.getvalue().replace(os.sep, '/')
warning_expr = u'.*/warnings.txt:4: ' \
u'WARNING: Inline literal start-string without end-string.\n'
u'WARNING: Inline literal start-string without end-string.\n'
assert re.search(warning_expr, warnings)
@with_intl_app(buildername='html', cleanenv=True)
def test_i18n_footnote_break_refid(app):
"""test for #955 cant-build-html-with-footnotes-when-using"""
@with_intl_app(buildername='html', freshenv=True)
def test_i18n_footnote_break_refid(app, status, warning):
# test for #955 cant-build-html-with-footnotes-when-using
app.builder.build(['footnote'])
result = (app.outdir / 'footnote.html').text(encoding='utf-8')
(app.outdir / 'footnote.html').text(encoding='utf-8')
# expect no error by build
@with_intl_app(buildername='xml', warning=warnfile)
def test_i18n_footnote_regression(app):
@with_intl_app(buildername='xml')
def test_i18n_footnote_regression(app, status, warning):
# regression test for fix #955, #1176
app.builddir.rmtree(True)
#app.builddir.rmtree(True)
app.builder.build(['footnote'])
et = ElementTree.parse(app.outdir / 'footnote.xml')
secs = et.findall('section')
para0 = secs[0].findall('paragraph')
assert_elem(
para0[0],
texts=['I18N WITH FOOTNOTE', 'INCLUDE THIS CONTENTS',
'2', '[ref]', '1', '100', '.'],
refs=['i18n-with-footnote', 'ref'])
para0[0],
texts=['I18N WITH FOOTNOTE', 'INCLUDE THIS CONTENTS',
'2', '[ref]', '1', '100', '.'],
refs=['i18n-with-footnote', 'ref'])
footnote0 = secs[0].findall('footnote')
assert_elem(
footnote0[0],
texts=['1','THIS IS A AUTO NUMBERED FOOTNOTE.'],
names=['1'])
footnote0[0],
texts=['1', 'THIS IS A AUTO NUMBERED FOOTNOTE.'],
names=['1'])
assert_elem(
footnote0[1],
texts=['100','THIS IS A NUMBERED FOOTNOTE.'],
names=['100'])
footnote0[1],
texts=['100', 'THIS IS A NUMBERED FOOTNOTE.'],
names=['100'])
assert_elem(
footnote0[2],
texts=['2','THIS IS A AUTO NUMBERED NAMED FOOTNOTE.'],
names=['named'])
footnote0[2],
texts=['2', 'THIS IS A AUTO NUMBERED NAMED FOOTNOTE.'],
names=['named'])
citation0 = secs[0].findall('citation')
assert_elem(
citation0[0],
texts=['ref','THIS IS A NAMED FOOTNOTE.'],
names=['ref'])
citation0[0],
texts=['ref', 'THIS IS A NAMED FOOTNOTE.'],
names=['ref'])
warnings = warnfile.getvalue().replace(os.sep, '/')
warnings = warning.getvalue().replace(os.sep, '/')
warning_expr = u'.*/footnote.xml:\\d*: SEVERE: Duplicate ID: ".*".\n'
assert not re.search(warning_expr, warnings)
@with_intl_app(buildername='xml', cleanenv=True)
def test_i18n_footnote_backlink(app):
@with_intl_app(buildername='xml', freshenv=True)
def test_i18n_footnote_backlink(app, status, warning):
# i18n test for #1058
app.builder.build(['footnote'])
et = ElementTree.parse(app.outdir / 'footnote.xml')
@ -206,8 +199,8 @@ def test_i18n_footnote_backlink(app):
assert refid2id[ids] == backrefs
@with_intl_app(buildername='xml', warning=warnfile)
def test_i18n_refs_python_domain(app):
@with_intl_app(buildername='xml')
def test_i18n_refs_python_domain(app, status, warning):
app.builder.build(['refs_python_domain'])
et = ElementTree.parse(app.outdir / 'refs_python_domain.xml')
secs = et.findall('section')
@ -220,9 +213,9 @@ def test_i18n_refs_python_domain(app):
refs=['sensitive.sensitive_variables'])
@with_intl_app(buildername='text', warning=warnfile, cleanenv=True)
def test_i18n_warn_for_number_of_references_inconsistency(app):
app.builddir.rmtree(True)
@with_intl_app(buildername='text', freshenv=True)
def test_i18n_warn_for_number_of_references_inconsistency(app, status, warning):
#app.builddir.rmtree(True)
app.builder.build(['refs_inconsistency'])
result = (app.outdir / 'refs_inconsistency.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH REFS INCONSISTENCY"
@ -235,9 +228,9 @@ def test_i18n_warn_for_number_of_references_inconsistency(app):
u"\n[100] THIS IS A NUMBERED FOOTNOTE.\n")
assert result == expect
warnings = warnfile.getvalue().replace(os.sep, '/')
warnings = warning.getvalue().replace(os.sep, '/')
warning_fmt = u'.*/refs_inconsistency.txt:\\d+: ' \
u'WARNING: inconsistent %s in translated message\n'
u'WARNING: inconsistent %s in translated message\n'
expected_warning_expr = (
warning_fmt % 'footnote references' +
warning_fmt % 'references' +
@ -245,8 +238,8 @@ def test_i18n_warn_for_number_of_references_inconsistency(app):
assert re.search(expected_warning_expr, warnings)
@with_intl_app(buildername='html', cleanenv=True)
def test_i18n_link_to_undefined_reference(app):
@with_intl_app(buildername='html', freshenv=True)
def test_i18n_link_to_undefined_reference(app, status, warning):
app.builder.build(['refs_inconsistency'])
result = (app.outdir / 'refs_inconsistency.html').text(encoding='utf-8')
@ -264,8 +257,8 @@ def test_i18n_link_to_undefined_reference(app):
assert len(re.findall(expected_expr, result)) == 1
@with_intl_app(buildername='xml', cleanenv=True)
def test_i18n_keep_external_links(app):
@with_intl_app(buildername='xml', freshenv=True)
def test_i18n_keep_external_links(app, status, warning):
# regression test for #1044
app.builder.build(['external_links'])
et = ElementTree.parse(app.outdir / 'external_links.xml')
@ -274,56 +267,56 @@ def test_i18n_keep_external_links(app):
para0 = secs[0].findall('paragraph')
# external link check
assert_elem(
para0[0],
texts=['EXTERNAL LINK TO', 'Python', '.'],
refs=['http://python.org/index.html'])
para0[0],
texts=['EXTERNAL LINK TO', 'Python', '.'],
refs=['http://python.org/index.html'])
# internal link check
assert_elem(
para0[1],
texts=['EXTERNAL LINKS', 'IS INTERNAL LINK.'],
refs=['i18n-with-external-links'])
para0[1],
texts=['EXTERNAL LINKS', 'IS INTERNAL LINK.'],
refs=['i18n-with-external-links'])
# inline link check
assert_elem(
para0[2],
texts=['INLINE LINK BY', 'THE SPHINX SITE', '.'],
refs=['http://sphinx-doc.org'])
para0[2],
texts=['INLINE LINK BY', 'THE SPHINX SITE', '.'],
refs=['http://sphinx-doc.org'])
# unnamed link check
assert_elem(
para0[3],
texts=['UNNAMED', 'LINK', '.'],
refs=['http://google.com'])
para0[3],
texts=['UNNAMED', 'LINK', '.'],
refs=['http://google.com'])
# link target swapped translation
para1 = secs[1].findall('paragraph')
assert_elem(
para1[0],
texts=['LINK TO', 'external2', 'AND', 'external1', '.'],
refs=['http://example.com/external2',
'http://example.com/external1'])
para1[0],
texts=['LINK TO', 'external2', 'AND', 'external1', '.'],
refs=['http://example.com/external2',
'http://example.com/external1'])
assert_elem(
para1[1],
texts=['LINK TO', 'THE PYTHON SITE', 'AND', 'THE SPHINX SITE',
'.'],
refs=['http://python.org', 'http://sphinx-doc.org'])
para1[1],
texts=['LINK TO', 'THE PYTHON SITE', 'AND', 'THE SPHINX SITE',
'.'],
refs=['http://python.org', 'http://sphinx-doc.org'])
# multiple references in the same line
para2 = secs[2].findall('paragraph')
assert_elem(
para2[0],
texts=['LINK TO', 'EXTERNAL LINKS', ',', 'Python', ',',
'THE SPHINX SITE', ',', 'UNNAMED', 'AND',
'THE PYTHON SITE', '.'],
refs=['i18n-with-external-links', 'http://python.org/index.html',
'http://sphinx-doc.org', 'http://google.com',
'http://python.org'])
para2[0],
texts=['LINK TO', 'EXTERNAL LINKS', ',', 'Python', ',',
'THE SPHINX SITE', ',', 'UNNAMED', 'AND',
'THE PYTHON SITE', '.'],
refs=['i18n-with-external-links', 'http://python.org/index.html',
'http://sphinx-doc.org', 'http://google.com',
'http://python.org'])
@with_intl_app(buildername='text', warning=warnfile, cleanenv=True)
def test_i18n_literalblock_warning(app):
app.builddir.rmtree(True) #for warnings acceleration
@with_intl_app(buildername='text', freshenv=True)
def test_i18n_literalblock_warning(app, status, warning):
#app.builddir.rmtree(True) # for warnings acceleration
app.builder.build(['literalblock'])
result = (app.outdir / 'literalblock.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH LITERAL BLOCK"
@ -335,14 +328,14 @@ def test_i18n_literalblock_warning(app):
u"\n<SYSTEM MESSAGE:")
assert result.startswith(expect)
warnings = warnfile.getvalue().replace(os.sep, '/')
warnings = warning.getvalue().replace(os.sep, '/')
expected_warning_expr = u'.*/literalblock.txt:\\d+: ' \
u'WARNING: Literal block expected; none found.'
u'WARNING: Literal block expected; none found.'
assert re.search(expected_warning_expr, warnings)
@with_intl_app(buildername='text')
def test_i18n_definition_terms(app):
def test_i18n_definition_terms(app, status, warning):
# regression test for #975
app.builder.build(['definition_terms'])
result = (app.outdir / 'definition_terms.txt').text(encoding='utf-8')
@ -356,10 +349,10 @@ def test_i18n_definition_terms(app):
assert result == expect
@with_intl_app(buildername='text', warning=warnfile)
def test_i18n_glossary_terms(app):
@with_intl_app(buildername='text')
def test_i18n_glossary_terms(app, status, warning):
# regression test for #1090
app.builddir.rmtree(True) #for warnings acceleration
#app.builddir.rmtree(True) # for warnings acceleration
app.builder.build(['glossary_terms'])
result = (app.outdir / 'glossary_terms.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH GLOSSARY TERMS"
@ -371,69 +364,69 @@ def test_i18n_glossary_terms(app):
u"\nLINK TO *SOME NEW TERM*.\n")
assert result == expect
warnings = warnfile.getvalue().replace(os.sep, '/')
warnings = warning.getvalue().replace(os.sep, '/')
assert 'term not in glossary' not in warnings
@with_intl_app(buildername='xml', warning=warnfile)
def test_i18n_role_xref(app):
@with_intl_app(buildername='xml')
def test_i18n_role_xref(app, status, warning):
# regression test for #1090, #1193
app.builddir.rmtree(True) #for warnings acceleration
#app.builddir.rmtree(True) # for warnings acceleration
app.builder.build(['role_xref'])
et = ElementTree.parse(app.outdir / 'role_xref.xml')
sec1, sec2 = et.findall('section')
para1, = sec1.findall('paragraph')
assert_elem(
para1,
texts=['LINK TO', "I18N ROCK'N ROLE XREF", ',', 'CONTENTS', ',',
'SOME NEW TERM', '.'],
refs=['i18n-role-xref', 'contents',
'glossary_terms#term-some-term'])
para1,
texts=['LINK TO', "I18N ROCK'N ROLE XREF", ',', 'CONTENTS', ',',
'SOME NEW TERM', '.'],
refs=['i18n-role-xref', 'contents',
'glossary_terms#term-some-term'])
para2 = sec2.findall('paragraph')
assert_elem(
para2[0],
texts=['LINK TO', 'SOME OTHER NEW TERM', 'AND', 'SOME NEW TERM',
'.'],
refs=['glossary_terms#term-some-other-term',
'glossary_terms#term-some-term'])
para2[0],
texts=['LINK TO', 'SOME OTHER NEW TERM', 'AND', 'SOME NEW TERM',
'.'],
refs=['glossary_terms#term-some-other-term',
'glossary_terms#term-some-term'])
assert_elem(
para2[1],
texts=['LINK TO', 'SAME TYPE LINKS', 'AND',
"I18N ROCK'N ROLE XREF", '.'],
refs=['same-type-links', 'i18n-role-xref'])
para2[1],
texts=['LINK TO', 'SAME TYPE LINKS', 'AND',
"I18N ROCK'N ROLE XREF", '.'],
refs=['same-type-links', 'i18n-role-xref'])
assert_elem(
para2[2],
texts=['LINK TO', 'I18N WITH GLOSSARY TERMS', 'AND', 'CONTENTS',
'.'],
refs=['glossary_terms', 'contents'])
para2[2],
texts=['LINK TO', 'I18N WITH GLOSSARY TERMS', 'AND', 'CONTENTS',
'.'],
refs=['glossary_terms', 'contents'])
assert_elem(
para2[3],
texts=['LINK TO', '--module', 'AND', '-m', '.'],
refs=['cmdoption--module', 'cmdoption-m'])
para2[3],
texts=['LINK TO', '--module', 'AND', '-m', '.'],
refs=['cmdoption--module', 'cmdoption-m'])
assert_elem(
para2[4],
texts=['LINK TO', 'env2', 'AND', 'env1', '.'],
refs=['envvar-env2', 'envvar-env1'])
para2[4],
texts=['LINK TO', 'env2', 'AND', 'env1', '.'],
refs=['envvar-env2', 'envvar-env1'])
assert_elem(
para2[5],
texts=['LINK TO', 'token2', 'AND', 'token1', '.'],
refs=[]) #TODO: how do I link token role to productionlist?
para2[5],
texts=['LINK TO', 'token2', 'AND', 'token1', '.'],
refs=[]) # TODO: how do I link token role to productionlist?
assert_elem(
para2[6],
texts=['LINK TO', 'same-type-links', 'AND', "i18n-role-xref", '.'],
refs=['same-type-links', 'i18n-role-xref'])
para2[6],
texts=['LINK TO', 'same-type-links', 'AND', "i18n-role-xref", '.'],
refs=['same-type-links', 'i18n-role-xref'])
#warnings
warnings = warnfile.getvalue().replace(os.sep, '/')
# warnings
warnings = warning.getvalue().replace(os.sep, '/')
assert 'term not in glossary' not in warnings
assert 'undefined label' not in warnings
assert 'unknown document' not in warnings
@with_intl_app(buildername='xml', warning=warnfile)
def test_i18n_label_target(app):
@with_intl_app(buildername='xml')
def test_i18n_label_target(app, status, warning):
# regression test for #1193, #1265
app.builder.build(['label_target'])
et = ElementTree.parse(app.outdir / 'label_target.xml')
@ -441,73 +434,73 @@ def test_i18n_label_target(app):
para0 = secs[0].findall('paragraph')
assert_elem(
para0[0],
texts=['X SECTION AND LABEL', 'POINT TO', 'implicit-target', 'AND',
'X SECTION AND LABEL', 'POINT TO', 'section-and-label', '.'],
refs=['implicit-target', 'section-and-label'])
para0[0],
texts=['X SECTION AND LABEL', 'POINT TO', 'implicit-target', 'AND',
'X SECTION AND LABEL', 'POINT TO', 'section-and-label', '.'],
refs=['implicit-target', 'section-and-label'])
para1 = secs[1].findall('paragraph')
assert_elem(
para1[0],
texts=['X EXPLICIT-TARGET', 'POINT TO', 'explicit-target', 'AND',
'X EXPLICIT-TARGET', 'POINT TO DUPLICATED ID LIKE', 'id1',
'.'],
refs=['explicit-target', 'id1'])
para1[0],
texts=['X EXPLICIT-TARGET', 'POINT TO', 'explicit-target', 'AND',
'X EXPLICIT-TARGET', 'POINT TO DUPLICATED ID LIKE', 'id1',
'.'],
refs=['explicit-target', 'id1'])
para2 = secs[2].findall('paragraph')
assert_elem(
para2[0],
texts=['X IMPLICIT SECTION NAME', 'POINT TO',
'implicit-section-name', '.'],
refs=['implicit-section-name'])
para2[0],
texts=['X IMPLICIT SECTION NAME', 'POINT TO',
'implicit-section-name', '.'],
refs=['implicit-section-name'])
sec2 = secs[2].findall('section')
para2_0 = sec2[0].findall('paragraph')
assert_elem(
para2_0[0],
texts=['`X DUPLICATED SUB SECTION`_', 'IS BROKEN LINK.'],
refs=[])
para2_0[0],
texts=['`X DUPLICATED SUB SECTION`_', 'IS BROKEN LINK.'],
refs=[])
para3 = secs[3].findall('paragraph')
assert_elem(
para3[0],
texts=['X', 'bridge label',
'IS NOT TRANSLATABLE BUT LINKED TO TRANSLATED ' +
'SECTION TITLE.'],
refs=['label-bridged-target-section'])
para3[0],
texts=['X', 'bridge label',
'IS NOT TRANSLATABLE BUT LINKED TO TRANSLATED ' +
'SECTION TITLE.'],
refs=['label-bridged-target-section'])
assert_elem(
para3[1],
texts=['X', 'bridge label', 'POINT TO',
'LABEL BRIDGED TARGET SECTION', 'AND', 'bridge label2',
'POINT TO', 'SECTION AND LABEL', '. THE SECOND APPEARED',
'bridge label2', 'POINT TO CORRECT TARGET.'],
refs=['label-bridged-target-section',
'section-and-label',
'section-and-label'])
para3[1],
texts=['X', 'bridge label', 'POINT TO',
'LABEL BRIDGED TARGET SECTION', 'AND', 'bridge label2',
'POINT TO', 'SECTION AND LABEL', '. THE SECOND APPEARED',
'bridge label2', 'POINT TO CORRECT TARGET.'],
refs=['label-bridged-target-section',
'section-and-label',
'section-and-label'])
@with_intl_app(buildername='text', warning=warnfile)
def test_i18n_glossary_terms_inconsistency(app):
@with_intl_app(buildername='text')
def test_i18n_glossary_terms_inconsistency(app, status, warning):
# regression test for #1090
app.builddir.rmtree(True) #for warnings acceleration
app.outdir.rmtree(True) # for warnings acceleration
app.doctreedir.rmtree(True) # for warnings acceleration
app.builder.build(['glossary_terms_inconsistency'])
result = (app.outdir / 'glossary_terms_inconsistency.txt'
).text(encoding='utf-8')
result = (app.outdir / 'glossary_terms_inconsistency.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH GLOSSARY TERMS INCONSISTENCY"
u"\n**************************************\n"
u"\n1. LINK TO *SOME NEW TERM*.\n")
assert result == expect
warnings = warnfile.getvalue().replace(os.sep, '/')
warnings = warning.getvalue().replace(os.sep, '/')
expected_warning_expr = (
u'.*/glossary_terms_inconsistency.txt:\\d+: '
u'WARNING: inconsistent term references in translated message\n')
u'.*/glossary_terms_inconsistency.txt:\\d+: '
u'WARNING: inconsistent term references in translated message\n')
assert re.search(expected_warning_expr, warnings)
@with_intl_app(buildername='text')
def test_seealso(app):
def test_seealso(app, status, warning):
app.builder.build(['seealso'])
result = (app.outdir / 'seealso.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH SEEALSO"
@ -520,7 +513,7 @@ def test_seealso(app):
@with_intl_app(buildername='text')
def test_i18n_figure_caption(app):
def test_i18n_figure_caption(app, status, warning):
# regression test for #940
app.builder.build(['figure_caption'])
result = (app.outdir / 'figure_caption.txt').text(encoding='utf-8')
@ -541,7 +534,7 @@ def test_i18n_figure_caption(app):
@with_intl_app(buildername='text')
def test_i18n_rubric(app):
def test_i18n_rubric(app, status, warning):
# regression test for pull request #190
app.builder.build(['rubric'])
result = (app.outdir / 'rubric.txt').text(encoding='utf-8')
@ -558,7 +551,7 @@ def test_i18n_rubric(app):
@with_intl_app(buildername='html')
def test_i18n_index_entries(app):
def test_i18n_index_entries(app, status, warning):
# regression test for #976
app.builder.build(['index_entries'])
result = (app.outdir / 'genindex.html').text(encoding='utf-8')
@ -589,8 +582,8 @@ def test_i18n_index_entries(app):
assert re.search(expr, result, re.M)
@with_intl_app(buildername='html', cleanenv=True)
def test_versionchange(app):
@with_intl_app(buildername='html', freshenv=True)
def test_versionchange(app, status, warning):
app.builder.build(['versionchange'])
result = (app.outdir / 'versionchange.html').text(encoding='utf-8')
@ -622,8 +615,8 @@ def test_versionchange(app):
assert expect3 == matched_content
@with_intl_app(buildername='text', cleanenv=True)
def test_i18n_docfields(app):
@with_intl_app(buildername='text', freshenv=True)
def test_i18n_docfields(app, status, warning):
app.builder.build(['docfields'])
result = (app.outdir / 'docfields.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH DOCFIELDS"
@ -648,29 +641,29 @@ def test_i18n_docfields(app):
assert result == expect
@with_intl_app(buildername='text', cleanenv=True)
def test_i18n_admonitions(app):
@with_intl_app(buildername='text', freshenv=True)
def test_i18n_admonitions(app, status, warning):
# #1206: gettext did not translate admonition directive's title
# seealso: http://docutils.sourceforge.net/docs/ref/rst/directives.html#admonitions
app.builder.build(['admonitions'])
result = (app.outdir / 'admonitions.txt').text(encoding='utf-8')
directives = (
"attention", "caution", "danger", "error", "hint",
"important", "note", "tip", "warning", "admonition",)
"attention", "caution", "danger", "error", "hint",
"important", "note", "tip", "warning", "admonition",)
for d in directives:
assert d.upper() + " TITLE" in result
assert d.upper() + " BODY" in result
@with_intl_app(buildername='html', cleanenv=True)
def test_i18n_docfields_html(app):
@with_intl_app(buildername='html', freshenv=True)
def test_i18n_docfields_html(app, status, warning):
app.builder.build(['docfields'])
result = (app.outdir / 'docfields.html').text(encoding='utf-8')
(app.outdir / 'docfields.html').text(encoding='utf-8')
# expect no error by build
@with_intl_app(buildername='html')
def test_gettext_template(app):
def test_gettext_template(app, status, warning):
app.builder.build_all()
result = (app.outdir / 'index.html').text(encoding='utf-8')
assert "WELCOME" in result
@ -678,7 +671,7 @@ def test_gettext_template(app):
@with_intl_app(buildername='html')
def test_rebuild_by_mo_mtime(app):
def test_rebuild_by_mo_mtime(app, status, warning):
app.builder.build_update()
_, count, _ = app.env.update(app.config, app.srcdir, app.doctreedir, app)
assert count == 0

View File

@ -23,10 +23,11 @@ from util import TestApp
app = settings = parser = None
def setup_module():
global app, settings, parser
texescape.init() # otherwise done by the latex builder
app = TestApp(cleanenv=True)
app = TestApp()
optparser = frontend.OptionParser(
components=(rst.Parser, HTMLWriter, LaTeXWriter))
settings = optparser.get_default_values()
@ -35,6 +36,7 @@ def setup_module():
settings.env.temp_data['docname'] = 'dummy'
parser = rst.Parser()
def teardown_module():
app.cleanup()
@ -42,12 +44,15 @@ def teardown_module():
class ForgivingTranslator:
def visit_pending_xref(self, node):
pass
def depart_pending_xref(self, node):
pass
class ForgivingHTMLTranslator(SmartyPantsHTMLTranslator, ForgivingTranslator):
pass
class ForgivingLaTeXTranslator(LaTeXTranslator, ForgivingTranslator):
pass

View File

@ -12,27 +12,13 @@
# adapted from an example of bibliographic metadata at
# http://docutils.sourceforge.net/docs/user/rst/demo.txt
from util import TestApp
from util import with_app
from nose.tools import assert_equal
app = env = None
warnings = []
def setup_module():
# Is there a better way of generating this doctree than manually iterating?
global app, env
app = TestApp(_copy_to_temp=True)
env = app.env
msg, num, it = env.update(app.config, app.srcdir, app.doctreedir, app)
for docname in it:
pass
def teardown_module():
app.cleanup()
def test_docinfo():
@with_app('pseudoxml')
def test_docinfo(app, status, warning):
"""
Inspect the 'docinfo' metadata stored in the first node of the document.
Note this doesn't give us access to data stored in subsequence blocks
@ -40,6 +26,8 @@ def test_docinfo():
'dedication' blocks, or the 'meta' role. Doing otherwise is probably more
messing with the internals of sphinx than this rare use case merits.
"""
app.builder.build(['metadata'])
env = app.env
exampledocinfo = env.metadata['metadata']
expecteddocinfo = {
'author': u'David Goodger',

View File

@ -29,8 +29,10 @@ warnfile = StringIO()
def setup_module():
nocolor()
def mock_input(answers, needanswer=False):
called = set()
def input_(prompt):
if prompt in called:
raise AssertionError('answer for %r missing and no default '
@ -50,8 +52,10 @@ def mock_input(answers, needanswer=False):
return ''
return input_
real_input = input
def teardown_module():
qs.term_input = real_input
qs.TERM_ENCODING = getattr(sys.stdin, 'encoding', None)
@ -200,7 +204,7 @@ def test_quickstart_all_answers(tempdir):
assert ns['master_doc'] == 'contents'
assert ns['project'] == u'STASI™'
assert ns['copyright'] == u'%s, Wolfgang Schäuble & G\'Beckstein' % \
time.strftime('%Y')
time.strftime('%Y')
assert ns['version'] == '2.0'
assert ns['release'] == '2.0.1'
assert ns['html_static_path'] == ['.static']
@ -214,7 +218,7 @@ def test_quickstart_all_answers(tempdir):
assert ns['texinfo_documents'] == [
('contents', 'STASI', u'STASI™ Documentation',
u'Wolfgang Schäuble & G\'Beckstein', 'STASI',
'One line description of project.', 'Miscellaneous'),]
'One line description of project.', 'Miscellaneous')]
assert (tempdir / 'build').isdir()
assert (tempdir / 'source' / '.static').isdir()
@ -237,7 +241,7 @@ def test_generated_files_eol(tempdir):
def assert_eol(filename, eol):
content = filename.bytes().decode('unicode-escape')
assert all([l[-len(eol):]==eol for l in content.splitlines(True)])
assert all([l[-len(eol):] == eol for l in content.splitlines(True)])
assert_eol(tempdir / 'make.bat', '\r\n')
assert_eol(tempdir / 'Makefile', '\n')
@ -257,13 +261,13 @@ def test_quickstart_and_build(tempdir):
qs.generate(d)
app = application.Sphinx(
tempdir, #srcdir
tempdir, #confdir
(tempdir / '_build' / 'html'), #outdir
(tempdir / '_build' / '.doctree'), #doctreedir
'html', #buildername
status=StringIO(),
warning=warnfile)
tempdir, # srcdir
tempdir, # confdir
(tempdir / '_build' / 'html'), # outdir
(tempdir / '_build' / '.doctree'), # doctreedir
'html', # buildername
status=StringIO(),
warning=warnfile)
app.builder.build_all()
warnings = warnfile.getvalue()
assert not warnings
@ -273,7 +277,7 @@ def test_quickstart_and_build(tempdir):
def test_default_filename(tempdir):
answers = {
'Root path': tempdir,
'Project name': u'\u30c9\u30a4\u30c4', #Fullwidth characters only
'Project name': u'\u30c9\u30a4\u30c4', # Fullwidth characters only
'Author name': 'Georg Brandl',
'Project version': '0.1',
}

View File

@ -9,33 +9,24 @@
:license: BSD, see LICENSE for details.
"""
import os
from six import StringIO
from sphinx.websupport import WebSupport
from test_websupport import sqlalchemy_missing
from util import test_root, skip_if, skip_unless_importable
def clear_builddir():
(test_root / 'websupport').rmtree(True)
from util import rootdir, tempdir, skip_if, skip_unless_importable
def teardown_module():
(test_root / 'generated').rmtree(True)
clear_builddir()
(tempdir / 'websupport').rmtree(True)
def search_adapter_helper(adapter):
clear_builddir()
settings = {'builddir': os.path.join(test_root, 'websupport'),
settings = {'srcdir': rootdir / 'root',
'builddir': tempdir / 'websupport',
'status': StringIO(),
'warning': StringIO()}
settings.update({'srcdir': test_root,
'search': adapter})
'warning': StringIO(),
'search': adapter}
support = WebSupport(**settings)
support.build()
@ -63,7 +54,7 @@ def search_adapter_helper(adapter):
'%s search adapter returned %s search result(s), should have been 1'\
% (adapter, len(results))
# Make sure it works through the WebSupport API
html = support.get_search_results(u'SomeLongRandomWord')
support.get_search_results(u'SomeLongRandomWord')
@skip_unless_importable('xapian', 'needs xapian bindings installed')

View File

@ -16,11 +16,16 @@ from functools import wraps
import tempfile
import sphinx
from util import with_tempdir, test_roots, SkipTest
from util import rootdir, tempdir, SkipTest
from path import path
from textwrap import dedent
root = test_roots / 'test-setup'
root = tempdir / 'test-setup'
def setup_module():
if not root.exists():
(rootdir / 'roots' / 'test-setup').copytree(root)
def with_setup_command(root, *args, **kwds):

View File

@ -9,28 +9,23 @@
:license: BSD, see LICENSE for details.
"""
from util import test_roots, with_app
from util import with_app
def teardown_module():
(test_roots / 'test-templating' / '_build').rmtree(True),
@with_app(buildername='html', srcdir=(test_roots / 'test-templating'))
def test_layout_overloading(app):
app.builder.build_all()
@with_app('html', testroot='templating')
def test_layout_overloading(app, status, warning):
app.builder.build_update()
result = (app.outdir / 'contents.html').text(encoding='utf-8')
assert '<!-- layout overloading -->' in result
@with_app(buildername='html', srcdir=(test_roots / 'test-templating'))
def test_autosummary_class_template_overloading(app):
app.builder.build_all()
@with_app('html', testroot='templating')
def test_autosummary_class_template_overloading(app, status, warning):
app.builder.build_update()
result = (app.outdir / 'generated' / 'sphinx.application.Sphinx.html').text(
encoding='utf-8')
result = (app.outdir / 'generated' / 'sphinx.application.TemplateBridge.html').text(
encoding='utf-8')
assert 'autosummary/class.rst method block overloading' in result

View File

@ -19,14 +19,14 @@ from util import with_app, raises
@with_app(confoverrides={'html_theme': 'ziptheme',
'html_theme_options.testopt': 'foo'})
def test_theme_api(app):
def test_theme_api(app, status, warning):
cfg = app.config
# test Theme class API
assert set(Theme.themes.keys()) == \
set(['basic', 'default', 'scrolls', 'agogo', 'sphinxdoc', 'haiku',
'traditional', 'testtheme', 'ziptheme', 'epub', 'nature',
'pyramid', 'bizstyle'])
set(['basic', 'default', 'scrolls', 'agogo', 'sphinxdoc', 'haiku',
'traditional', 'testtheme', 'ziptheme', 'epub', 'nature',
'pyramid', 'bizstyle'])
assert Theme.themes['testtheme'][1] is None
assert isinstance(Theme.themes['ziptheme'][1], zipfile.ZipFile)
@ -56,14 +56,15 @@ def test_theme_api(app):
theme.cleanup()
assert not os.path.exists(themedir)
@with_app(buildername='html')
def test_js_source(app):
@with_app()
def test_js_source(app, status, warning):
# Now sphinx provides non-minified JS files for jquery.js and underscore.js
# to clarify the source of the minified files. see also #1434.
# If you update the version of the JS file, please update the source of the
# JS file and version number in this test.
app.builder.build_all()
app.builder.build(['contents'])
v = '1.8.3'
msg = 'jquery.js version does not match to {v}'.format(v=v)

View File

@ -1,163 +1,163 @@
# -*- coding: utf-8 -*-
"""
test_util_i18n
~~~~~~~~~~~~~~
Test i18n util.
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
import os
from os import path
from babel.messages.mofile import read_mo
from sphinx.util import i18n
from util import with_tempdir
def test_catalog_info_for_file_and_path():
cat = i18n.CatalogInfo('path', 'domain')
assert cat.po_file == 'domain.po'
assert cat.mo_file == 'domain.mo'
assert cat.po_path == path.join('path', 'domain.po')
assert cat.mo_path == path.join('path', 'domain.mo')
def test_catalog_info_for_sub_domain_file_and_path():
cat = i18n.CatalogInfo('path', 'sub/domain')
assert cat.po_file == 'sub/domain.po'
assert cat.mo_file == 'sub/domain.mo'
assert cat.po_path == path.join('path', 'sub/domain.po')
assert cat.mo_path == path.join('path', 'sub/domain.mo')
@with_tempdir
def test_catalog_outdated(dir):
(dir / 'test.po').write_text('#')
cat = i18n.CatalogInfo(dir, 'test')
assert cat.is_outdated()  # if the .mo does not exist
mo_file = (dir / 'test.mo')
mo_file.write_text('#')
assert not cat.is_outdated()  # the .mo exists and is newer than the .po
os.utime(mo_file, (os.stat(mo_file).st_mtime - 10,) * 2)  # make the .mo older
assert cat.is_outdated()  # the .mo exists but is older than the .po
@with_tempdir
def test_catalog_write_mo(dir):
(dir / 'test.po').write_text('#')
cat = i18n.CatalogInfo(dir, 'test')
cat.write_mo('en')
assert path.exists(cat.mo_path)
assert read_mo(open(cat.mo_path, 'rb')) is not None
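Pulled together from the assertions above, a compact end-to-end sketch of the CatalogInfo workflow (temporary directory and the same '#' stub catalog the tests use):

import os, tempfile
from sphinx.util import i18n

locdir = tempfile.mkdtemp()
open(os.path.join(locdir, 'sphinx.po'), 'w').write('#')
cat = i18n.CatalogInfo(locdir, 'sphinx')
if cat.is_outdated():   # no .mo yet, or the .mo is older than the .po
    cat.write_mo('en')  # compiles sphinx.po into sphinx.mo alongside it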
@with_tempdir
def test_get_catalogs_for_xx(dir):
(dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.pot').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs()
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.po').write_text('#')
(dir / 'loc1' / 'en' / 'LC_MESSAGES').makedirs()
(dir / 'loc1' / 'en' / 'LC_MESSAGES' / 'test6.po').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_ALL').makedirs()
(dir / 'loc1' / 'xx' / 'LC_ALL' / 'test7.po').write_text('#')
catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=False)
domains = set(c.domain for c in catalogs)
assert domains == set([
'test1',
'test2',
path.normpath('sub/test4'),
path.normpath('sub/test5'),
])
@with_tempdir
def test_get_catalogs_for_en(dir):
(dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'xx_dom.po').write_text('#')
(dir / 'loc1' / 'en' / 'LC_MESSAGES').makedirs()
(dir / 'loc1' / 'en' / 'LC_MESSAGES' / 'en_dom.po').write_text('#')
catalogs = i18n.get_catalogs([dir / 'loc1'], 'en', force_all=False)
domains = set(c.domain for c in catalogs)
assert domains == set(['en_dom'])
@with_tempdir
def test_get_catalogs_with_non_existent_locale(dir):
catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx')
assert not catalogs
catalogs = i18n.get_catalogs([dir / 'loc1'], None)
assert not catalogs
def test_get_catalogs_with_non_existent_locale_dirs():
catalogs = i18n.get_catalogs(['dummy'], 'xx')
assert not catalogs
@with_tempdir
def test_get_catalogs_for_xx_without_outdated(dir):
(dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.mo').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.mo').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.pot').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.mo').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs()
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.mo').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.po').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.mo').write_text('#')
catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=False)
assert not catalogs
catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=True)
domains = set(c.domain for c in catalogs)
assert domains == set([
'test1',
'test2',
path.normpath('sub/test4'),
path.normpath('sub/test5'),
])
@with_tempdir
def test_get_catalogs_from_multiple_locale_dirs(dir):
(dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
(dir / 'loc2' / 'xx' / 'LC_MESSAGES').makedirs()
(dir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
(dir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
catalogs = i18n.get_catalogs([dir / 'loc1', dir / 'loc2'], 'xx')
domains = sorted(c.domain for c in catalogs)
assert domains == ['test1', 'test1', 'test2']
@with_tempdir
def test_get_catalogs_with_compact(dir):
(dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs()
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test3.po').write_text('#')
(dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')
catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', gettext_compact=True)
domains = set(c.domain for c in catalogs)
assert domains == set(['test1', 'test2', 'sub'])
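
Taken together, the catalog tests above pin down the sphinx.util.i18n API they rely on: CatalogInfo knows its .po/.mo paths and whether the .mo is missing or older than the .po, and get_catalogs() collects CatalogInfo objects from one or more locale dirs for a given language, skipping up-to-date catalogs unless force_all is set. A minimal usage sketch built only on what these tests exercise (the 'locale' directory name is hypothetical):

from sphinx.util import i18n

# assumed layout: locale/xx/LC_MESSAGES/<domain>.po
catalogs = i18n.get_catalogs(['locale'], 'xx', force_all=False)
for cat in catalogs:      # only catalogs whose .mo is missing or outdated
    cat.write_mo('xx')    # compiles <domain>.mo next to the .po (cat.mo_path)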

View File

@ -1,121 +1,121 @@
# -*- coding: utf-8 -*-
"""
test_util_nodes
~~~~~~~~~~~~~~~
Tests util.nodes functions.
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from textwrap import dedent
from docutils import nodes
from docutils.parsers import rst
from docutils.utils import new_document
from docutils import frontend
from sphinx.util.nodes import extract_messages
def _get_doctree(text):
settings = frontend.OptionParser(
components=(rst.Parser,)).get_default_values()
document = new_document('dummy.txt', settings)
rst.Parser().parse(text, document)
return document
def assert_node_count(messages, node_type, expect_count):
count = 0
node_list = [node for node, msg in messages]
for node in node_list:
if isinstance(node, node_type):
count += 1
assert count == expect_count, (
"Count of %r in the %r is %d instead of %d"
% (node_type, node_list, count, expect_count))
def test_extract_messages():
text = dedent(
"""
.. admonition:: admonition title
admonition body
"""
)
yield (
assert_node_count,
extract_messages(_get_doctree(text)),
nodes.title, 1,
)
text = dedent(
"""
.. figure:: foo.jpg
this is title
"""
)
yield (
assert_node_count,
extract_messages(_get_doctree(text)),
nodes.caption, 1,
)
text = dedent(
"""
.. rubric:: spam
"""
)
yield (
assert_node_count,
extract_messages(_get_doctree(text)),
nodes.rubric, 1,
)
text = dedent(
"""
| spam
| egg
"""
)
yield (
assert_node_count,
extract_messages(_get_doctree(text)),
nodes.line, 2,
)
text = dedent(
"""
section
=======
+----------------+
| | **Title 1** |
| | Message 1 |
+----------------+
"""
)
yield (
assert_node_count,
extract_messages(_get_doctree(text)),
nodes.line, 2,
)
text = dedent(
"""
* | **Title 1**
| Message 1
"""
)
yield (
assert_node_count,
extract_messages(_get_doctree(text)),
nodes.line, 2,
)

View File

@ -16,39 +16,46 @@ from docutils.parsers.rst.directives.html import MetaBody
from sphinx import addnodes
from sphinx.versioning import add_uids, merge_doctrees, get_ratio
from util import test_root, TestApp
from util import TestApp
app = original = original_uids = None
def setup_module():
global app, original, original_uids
app = TestApp()
app = TestApp(testroot='versioning')
app.builder.env.app = app
app.connect('doctree-resolved', on_doctree_resolved)
app.build()
original = doctrees['versioning/original']
original = doctrees['original']
original_uids = [n.uid for n in add_uids(original, is_paragraph)]
def teardown_module():
app.cleanup()
(test_root / '_build').rmtree(True)
doctrees = {}
def on_doctree_resolved(app, doctree, docname):
doctrees[docname] = doctree
def is_paragraph(node):
return node.__class__.__name__ == 'paragraph'
def test_get_ratio():
assert get_ratio('', 'a')
assert get_ratio('a', '')
def test_add_uids():
assert len(original_uids) == 3
def test_picklablility():
# we have to modify the doctree so we can pickle it
copy = original.copy()
@ -62,44 +69,50 @@ def test_picklablility():
loaded = pickle.loads(pickle.dumps(copy, pickle.HIGHEST_PROTOCOL))
assert all(getattr(n, 'uid', False) for n in loaded.traverse(is_paragraph))
def test_modified():
modified = doctrees['versioning/modified']
modified = doctrees['modified']
new_nodes = list(merge_doctrees(original, modified, is_paragraph))
uids = [n.uid for n in modified.traverse(is_paragraph)]
assert not new_nodes
assert original_uids == uids
def test_added():
added = doctrees['versioning/added']
added = doctrees['added']
new_nodes = list(merge_doctrees(original, added, is_paragraph))
uids = [n.uid for n in added.traverse(is_paragraph)]
assert len(new_nodes) == 1
assert original_uids == uids[:-1]
def test_deleted():
deleted = doctrees['versioning/deleted']
deleted = doctrees['deleted']
new_nodes = list(merge_doctrees(original, deleted, is_paragraph))
uids = [n.uid for n in deleted.traverse(is_paragraph)]
assert not new_nodes
assert original_uids[::2] == uids
def test_deleted_end():
deleted_end = doctrees['versioning/deleted_end']
deleted_end = doctrees['deleted_end']
new_nodes = list(merge_doctrees(original, deleted_end, is_paragraph))
uids = [n.uid for n in deleted_end.traverse(is_paragraph)]
assert not new_nodes
assert original_uids[:-1] == uids
def test_insert():
insert = doctrees['versioning/insert']
insert = doctrees['insert']
new_nodes = list(merge_doctrees(original, insert, is_paragraph))
uids = [n.uid for n in insert.traverse(is_paragraph)]
assert len(new_nodes) == 1
assert original_uids[0] == uids[0]
assert original_uids[1:] == uids[2:]
def test_insert_beginning():
insert_beginning = doctrees['versioning/insert_beginning']
insert_beginning = doctrees['insert_beginning']
new_nodes = list(merge_doctrees(original, insert_beginning, is_paragraph))
uids = [n.uid for n in insert_beginning.traverse(is_paragraph)]
assert len(new_nodes) == 1
@ -107,8 +120,9 @@ def test_insert_beginning():
assert original_uids == uids[1:]
assert original_uids[0] != uids[0]
def test_insert_similar():
insert_similar = doctrees['versioning/insert_similar']
insert_similar = doctrees['insert_similar']
new_nodes = list(merge_doctrees(original, insert_similar, is_paragraph))
uids = [n.uid for n in insert_similar.traverse(is_paragraph)]
assert len(new_nodes) == 1

View File

@ -9,34 +9,33 @@
:license: BSD, see LICENSE for details.
"""
import os
from functools import wraps
from six import StringIO
from sphinx.websupport import WebSupport
from sphinx.websupport.errors import DocumentNotFoundError, \
CommentNotAllowedError, UserNotAuthorizedError
CommentNotAllowedError, UserNotAuthorizedError
from sphinx.websupport.storage import StorageBackend
from sphinx.websupport.storage.differ import CombinedHtmlDiff
try:
from sphinx.websupport.storage.sqlalchemystorage import Session, \
Comment, CommentVote
Comment, CommentVote
from sphinx.websupport.storage.sqlalchemy_db import Node
sqlalchemy_missing = False
except ImportError:
sqlalchemy_missing = True
from util import test_root, raises, skip_if
from util import rootdir, tempdir, raises, skip_if
default_settings = {'builddir': os.path.join(test_root, 'websupport'),
default_settings = {'builddir': tempdir / 'websupport',
'status': StringIO(),
'warning': StringIO()}
def teardown_module():
(test_root / 'generated').rmtree(True)
(test_root / 'websupport').rmtree(True)
(tempdir / 'websupport').rmtree(True)
def with_support(*args, **kwargs):
@ -59,12 +58,12 @@ class NullStorage(StorageBackend):
@with_support(storage=NullStorage())
def test_no_srcdir(support):
"""Make sure the correct exception is raised if srcdir is not given."""
# make sure the correct exception is raised if srcdir is not given.
raises(RuntimeError, support.build)
@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
@with_support(srcdir=test_root)
@with_support(srcdir=rootdir / 'root')
def test_build(support):
support.build()
@ -173,9 +172,9 @@ def test_proposals(support):
source = data['source']
proposal = source[:5] + source[10:15] + 'asdf' + source[15:]
comment = support.add_comment('Proposal comment',
node_id=node.id,
proposal=proposal)
support.add_comment('Proposal comment',
node_id=node.id,
proposal=proposal)
@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
@ -234,6 +233,8 @@ def test_update_username(support):
called = False
def moderation_callback(comment):
global called
called = True
@ -251,7 +252,7 @@ def test_moderation(support):
deleted = support.add_comment('Comment to delete', node_id=node.id,
displayed=False)
# Make sure the moderation_callback is called.
assert called == True
assert called
# Make sure the user must be a moderator.
raises(UserNotAuthorizedError, support.accept_comment, accepted['id'])
raises(UserNotAuthorizedError, support.delete_comment, deleted['id'])

View File

@ -8,22 +8,22 @@
"""
import os
import re
import sys
import tempfile
import shutil
import re
from functools import wraps
from six import StringIO
from nose import tools, SkipTest
from sphinx import application
from sphinx.theming import Theme
from sphinx.ext.autodoc import AutoDirective
from sphinx.pycode import ModuleAnalyzer
from path import path
from nose import tools, SkipTest
try:
# Python >=3.3
from unittest import mock
@ -32,7 +32,7 @@ except ImportError:
__all__ = [
'test_root', 'test_roots', 'raises', 'raises_msg',
'rootdir', 'tempdir', 'raises', 'raises_msg',
'skip_if', 'skip_unless', 'skip_unless_importable', 'Struct',
'ListOutput', 'TestApp', 'with_app', 'gen_with_app',
'path', 'with_tempdir',
@ -41,8 +41,8 @@ __all__ = [
]
test_root = path(__file__).parent.joinpath('root').abspath()
test_roots = path(__file__).parent.joinpath('roots').abspath()
rootdir = path(os.path.dirname(__file__) or '.').abspath()
tempdir = path(os.environ['SPHINX_TEST_TEMPDIR']).abspath()
def _excstr(exc):
@ -50,11 +50,9 @@ def _excstr(exc):
return str(tuple(map(_excstr, exc)))
return exc.__name__
def raises(exc, func, *args, **kwds):
"""
Raise :exc:`AssertionError` if ``func(*args, **kwds)`` does not
raise *exc*.
"""
"""Raise AssertionError if ``func(*args, **kwds)`` does not raise *exc*."""
try:
func(*args, **kwds)
except exc:
@ -63,10 +61,10 @@ def raises(exc, func, *args, **kwds):
raise AssertionError('%s did not raise %s' %
(func.__name__, _excstr(exc)))
def raises_msg(exc, msg, func, *args, **kwds):
"""
Raise :exc:`AssertionError` if ``func(*args, **kwds)`` does not
raise *exc*, and check if the message contains *msg*.
"""Raise AssertionError if ``func(*args, **kwds)`` does not raise *exc*,
and check if the message contains *msg*.
"""
try:
func(*args, **kwds)
@ -76,6 +74,7 @@ def raises_msg(exc, msg, func, *args, **kwds):
raise AssertionError('%s did not raise %s' %
(func.__name__, _excstr(exc)))
def skip_if(condition, msg=None):
"""Decorator to skip test if condition is true."""
def deco(test):
@ -87,10 +86,12 @@ def skip_if(condition, msg=None):
return skipper
return deco
def skip_unless(condition, msg=None):
"""Decorator to skip test if condition is false."""
return skip_if(not condition, msg)
def skip_unless_importable(module, msg=None):
"""Decorator to skip test if module is not importable."""
try:
@ -127,61 +128,47 @@ class TestApp(application.Sphinx):
better default values for the initialization parameters.
"""
def __init__(self, srcdir=None, confdir=None, outdir=None, doctreedir=None,
buildername='html', confoverrides=None,
status=None, warning=None, freshenv=None,
warningiserror=None, tags=None,
confname='conf.py', cleanenv=False,
_copy_to_temp=False,
):
application.CONFIG_FILENAME = confname
self.cleanup_trees = [test_root / 'generated']
if srcdir is None:
srcdir = test_root
elif srcdir == '(empty)':
tempdir = path(tempfile.mkdtemp())
self.cleanup_trees.append(tempdir)
temproot = tempdir / 'root'
temproot.makedirs()
(temproot / 'conf.py').write_text('')
srcdir = temproot
def __init__(self, buildername='html', testroot=None, srcdir=None,
freshenv=False, confoverrides=None, status=None, warning=None,
tags=None, docutilsconf=None):
if testroot is None:
defaultsrcdir = 'root'
testroot = rootdir / 'root'
else:
srcdir = path(srcdir)
defaultsrcdir = 'test-' + testroot
testroot = rootdir / 'roots' / ('test-' + testroot)
if srcdir is None:
srcdir = tempdir / defaultsrcdir
else:
srcdir = tempdir / srcdir
if _copy_to_temp:
tempdir = path(tempfile.mkdtemp())
self.cleanup_trees.append(tempdir)
temproot = tempdir / srcdir.basename()
srcdir.copytree(temproot)
srcdir = temproot
if not srcdir.exists():
testroot.copytree(srcdir)
self.builddir = srcdir.joinpath('_build')
if confdir is None:
confdir = srcdir
if outdir is None:
outdir = srcdir.joinpath(self.builddir, buildername)
if not outdir.isdir():
outdir.makedirs()
self.cleanup_trees.insert(0, outdir)
if doctreedir is None:
doctreedir = srcdir.joinpath(srcdir, self.builddir, 'doctrees')
if not doctreedir.isdir():
doctreedir.makedirs()
if cleanenv:
self.cleanup_trees.insert(0, doctreedir)
if docutilsconf is not None:
(srcdir / 'docutils.conf').write_text(docutilsconf)
builddir = srcdir / '_build'
# if confdir is None:
confdir = srcdir
# if outdir is None:
outdir = builddir.joinpath(buildername)
if not outdir.isdir():
outdir.makedirs()
# if doctreedir is None:
doctreedir = builddir.joinpath('doctrees')
if not doctreedir.isdir():
doctreedir.makedirs()
if confoverrides is None:
confoverrides = {}
if status is None:
status = StringIO()
if warning is None:
warning = ListOutput('stderr')
if freshenv is None:
freshenv = False
if warningiserror is None:
warningiserror = False
# if warningiserror is None:
warningiserror = False
self._saved_path = sys.path[:]
application.Sphinx.__init__(self, srcdir, confdir, outdir, doctreedir,
buildername, confoverrides, status, warning,
@ -190,8 +177,9 @@ class TestApp(application.Sphinx):
def cleanup(self, doctrees=False):
Theme.themes.clear()
AutoDirective._registry.clear()
for tree in self.cleanup_trees:
shutil.rmtree(tree, True)
ModuleAnalyzer.cache.clear()
sys.path[:] = self._saved_path
sys.modules.pop('autodoc_fodder', None)
def __repr__(self):
return '<%s buildername=%r>' % (self.__class__.__name__, self.builder.name)
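
The reworked TestApp resolves its source directory from a test root: with testroot='foo' it copies tests/roots/test-foo into the shared temporary directory (taken from SPHINX_TEST_TEMPDIR) on first use, and without testroot it falls back to the classic root project; build output always lands in <srcdir>/_build/<buildername>. A hedged sketch of direct use, mirroring the versioning setup above:

from util import TestApp

app = TestApp(buildername='html', testroot='versioning')  # copies roots/test-versioning on first use
try:
    app.build()       # writes into <srcdir>/_build/html
finally:
    app.cleanup()     # clears theme/autodoc/analyzer caches and restores sys.path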
@ -205,10 +193,14 @@ def with_app(*args, **kwargs):
def generator(func):
@wraps(func)
def deco(*args2, **kwargs2):
status, warning = StringIO(), StringIO()
kwargs['status'] = status
kwargs['warning'] = warning
app = TestApp(*args, **kwargs)
func(app, *args2, **kwargs2)
# don't execute cleanup if test failed
app.cleanup()
try:
func(app, status, warning, *args2, **kwargs2)
finally:
app.cleanup()
return deco
return generator
@ -221,20 +213,24 @@ def gen_with_app(*args, **kwargs):
def generator(func):
@wraps(func)
def deco(*args2, **kwargs2):
status, warning = StringIO(), StringIO()
kwargs['status'] = status
kwargs['warning'] = warning
app = TestApp(*args, **kwargs)
for item in func(app, *args2, **kwargs2):
yield item
# don't execute cleanup if test failed
app.cleanup()
try:
for item in func(app, status, warning, *args2, **kwargs2):
yield item
finally:
app.cleanup()
return deco
return generator
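
With this change the decorators create the status and warning StringIO streams themselves and inject them into the wrapped test, so functions decorated with with_app (or gen_with_app) now accept (app, status, warning, ...) rather than just (app, ...), and cleanup() runs in a finally block even when the test fails. A sketch of the new calling convention (the test body is hypothetical):

from util import with_app

@with_app(buildername='text')                  # keyword args are forwarded to TestApp
def test_build_text(app, status, warning):     # streams are created by the decorator
    app.build()
    assert not warning.getvalue()              # warning is a StringIO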
def with_tempdir(func):
def new_func(*args, **kwds):
tempdir = path(tempfile.mkdtemp())
func(tempdir, *args, **kwds)
tempdir.rmtree()
new_tempdir = path(tempfile.mkdtemp(dir=tempdir))
func(new_tempdir, *args, **kwds)
new_tempdir.rmtree()  # skipped when the test fails, leaving the dir for inspection
new_func.__name__ = func.__name__
return new_func
@ -242,7 +238,10 @@ def with_tempdir(func):
def sprint(*args):
sys.stderr.write(' '.join(map(str, args)) + '\n')
_unicode_literals_re = re.compile(r'u(".*?")|u(\'.*?\')')
def remove_unicode_literals(s):
return _unicode_literals_re.sub(lambda x: x.group(1) or x.group(2), s)

View File

@ -7,7 +7,7 @@ deps=
sqlalchemy
whoosh
setenv =
BUILD_TEST_PATH = {envdir}/tests
SPHINX_TEST_TEMPDIR = {envdir}/testbuild
commands=
{envpython} tests/run.py {posargs}
sphinx-build -q -W -b html -d {envtmpdir}/doctrees doc {envtmpdir}/html