mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

commit 8cbe1efe8d: Merge branch 'stable'

CHANGES (1 line changed)
@@ -82,6 +82,7 @@ Bugs fixed
 * #3268: Sphinx crashes with requests package from Debian jessie
 * #3284: Sphinx crashes on parallel build with an extension which raises
   unserializable exception
+* #3315: Bibliography crashes on latex build with docclass 'memoir'

 Release 1.5.1 (released Dec 13, 2016)
Makefile (2 lines changed)

@@ -6,7 +6,7 @@ PYTHON ?= python
 DONT_CHECK = -i build -i dist -i sphinx/style/jquery.js \
              -i sphinx/pycode/pgen2 -i sphinx/util/smartypants.py \
              -i .ropeproject -i doc/_build -i tests/path.py \
-             -i tests/coverage.py -i utils/convert.py \
+             -i utils/convert.py \
              -i tests/typing_test_data.py \
              -i tests/test_autodoc_py35.py \
              -i tests/roots/test-warnings/undecodable.rst \

@@ -712,13 +712,13 @@ class StandardDomain(Domain):
         else:
             title = env.config.numfig_format.get(figtype, '')

-        if figname is None and '%{name}' in title:
+        if figname is None and '{name}' in title:
             logger.warning('the link has no caption: %s', title, location=node)
             return contnode
         else:
             fignum = '.'.join(map(str, fignumber))
             if '{name}' in title or 'number' in title:
-                # new style format (cf. "Fig.%{number}")
+                # new style format (cf. "Fig.{number}")
                 if figname:
                     newtitle = title.format(name=figname, number=fignum)
                 else:
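
Note: the '{name}' check fixed above matches the str.format-style placeholders that the
surrounding code already uses (title.format(name=..., number=...)). A minimal sketch of
that formatting path; the numfig_format value and figure data below are hypothetical and
not taken from the diff:

    # Hypothetical conf.py value and figure data, only to illustrate the new-style
    # "{name}"/"{number}" placeholders handled by the code in the hunk above.
    numfig_format = {'figure': 'Fig. {number} ({name})'}

    title = numfig_format.get('figure', '')
    figname, fignumber = 'my-figure', (2, 1)
    fignum = '.'.join(map(str, fignumber))

    if '{name}' in title or 'number' in title:
        # new style format (cf. "Fig.{number}")
        newtitle = title.format(name=figname, number=fignum)
        print(newtitle)  # -> Fig. 2.1 (my-figure)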

@@ -1082,8 +1082,14 @@

 % make commands known to non-Sphinx document classes
 \providecommand*{\sphinxtableofcontents}{\tableofcontents}
-\providecommand*{\sphinxthebibliography}{\thebibliography}
-\providecommand*{\sphinxtheindex}{\theindex}
+\spx@ifundefined{sphinxthebibliography}
+ {\newenvironment
+  {sphinxthebibliography}{\begin{thebibliography}}{\end{thebibliography}}%
+ }
+ {}% else clause of ifundefined
+\spx@ifundefined{sphinxtheindex}
+ {\newenvironment{sphinxtheindex}{\begin{theindex}}{\end{theindex}}}%
+ {}% else clause of ifundefined

 % remove LaTeX's cap on nesting depth if 'maxlistdepth' key used.
 % This is a hack, which works with the standard classes: it assumes \@toodeep

@@ -3,7 +3,7 @@ pytest>=3.0
 pytest-cov
 mock
 six>=1.4
-Jinja2>=2.3,<2.9
+Jinja2>=2.3
 Pygments>=2.0
 docutils>=0.11
 snowballstemmer>=1.1

@@ -1,19 +1,25 @@
 # -*- coding: utf-8 -*-
+from __future__ import print_function
+
 import sys
 import subprocess
+from collections import namedtuple

 import pytest
-from six import StringIO
+from six import StringIO, string_types

-from util import SphinxTestApp, path
+import util


 @pytest.fixture
-def app_params(request):
+def app_params(request, test_params, shared_result):
     """
     parameters that is specified by 'pytest.mark.sphinx' for
     sphinx.application.Sphinx initialization
     """
+
+    # ##### process pytest.mark.sphinx
+
     markers = request.node.get_marker("sphinx")
     pargs = {}
     kwargs = {}
@@ -26,17 +32,99 @@ def app_params(request):
         kwargs.update(info.kwargs)

     args = [pargs[i] for i in sorted(pargs.keys())]
-    return args, kwargs
+
+    # ##### process pytest.mark.test_params
+
+    if test_params['shared_result']:
+        if 'srcdir' in kwargs:
+            raise pytest.Exception('You can not spcify shared_result and '
+                                   'srcdir in same time.')
+        kwargs['srcdir'] = test_params['shared_result']
+        restore = shared_result.restore(test_params['shared_result'])
+        kwargs.update(restore)
+
+    # ##### prepare Application params
+
+    if 'srcdir' in kwargs:
+        srcdir = util.tempdir / kwargs['srcdir']
+    else:
+        srcdir = util.tempdir / kwargs.get('testroot', 'root')
+    kwargs['srcdir'] = srcdir
+
+    if kwargs.get('testroot') is None:
+        testroot_path = util.rootdir / 'root'
+    else:
+        testroot_path = util.rootdir / 'roots' / ('test-' + kwargs['testroot'])
+
+    if not srcdir.exists():
+        testroot_path.copytree(srcdir)
+
+    return namedtuple('app_params', 'args,kwargs')(args, kwargs)
+
+
+@pytest.fixture
+def test_params(request):
+    """
+    test parameters that is specified by 'pytest.mark.test_params'
+
+    :param Union[str] shared_result:
+       If the value is provided, app._status and app._warning objects will be
+       shared in the parametrized test functions and/or test functions that
+       have same 'shared_result' value.
+       **NOTE**: You can not specify shared_result and srcdir in same time.
+    """
+    env = request.node.get_marker('test_params')
+    kwargs = env.kwargs if env else {}
+    result = {
+        'shared_result': None,
+    }
+    result.update(kwargs)
+
+    if (result['shared_result'] and
+            not isinstance(result['shared_result'], string_types)):
+        raise pytest.Exception('You can only provide a string type of value '
+                               'for "shared_result" ')
+    return result
+
+
+class SphinxTestAppWrapperForSkipBuilding(object):
+    """
+    This class is a wrapper for SphinxTestApp to speed up the test by skipping
+    `app.build` process if it is already built and there is even one output
+    file.
+    """
+
+    def __init__(self, app_):
+        self.app = app_
+
+    def __getattr__(self, name):
+        return getattr(self.app, name)
+
+    def build(self, *args, **kw):
+        if not self.app.outdir.listdir():
+            # if listdir is empty, do build.
+            self.app.build(*args, **kw)
+            # otherwise, we can use built cache


 @pytest.fixture(scope='function')
-def app(app_params, make_app):
+def app(test_params, app_params, make_app, shared_result):
     """
     provides sphinx.application.Sphinx object
     """
     args, kwargs = app_params
     app_ = make_app(*args, **kwargs)
-    return app_
+    yield app_
+
+    print('# testroot:', kwargs.get('testroot', 'root'))
+    print('# builder:', app_.buildername)
+    print('# srcdir:', app_.srcdir)
+    print('# outdir:', app_.outdir)
+    print('# status:', '\n' + app_._status.getvalue())
+    print('# warning:', '\n' + app_._warning.getvalue())
+
+    if test_params['shared_result']:
+        shared_result.store(test_params['shared_result'], app_)


 @pytest.fixture(scope='function')
@@ -56,7 +144,7 @@ def warning(app):


 @pytest.fixture()
-def make_app():
+def make_app(test_params):
     """
     provides make_app function to initialize SphinxTestApp instance.
     if you want to initialize 'app' in your test function. please use this
@@ -69,8 +157,10 @@ def make_app():
         status, warning = StringIO(), StringIO()
         kwargs.setdefault('status', status)
         kwargs.setdefault('warning', warning)
-        app_ = SphinxTestApp(*args, **kwargs)
+        app_ = util.SphinxTestApp(*args, **kwargs)
         apps.append(app_)
+        if test_params['shared_result']:
+            app_ = SphinxTestAppWrapperForSkipBuilding(app_)
         return app_
     yield make

@@ -79,6 +169,38 @@ def make_app():
         app_.cleanup()


+class SharedResult(object):
+    cache = {}
+
+    def store(self, key, app_):
+        if key in self.cache:
+            return
+        data = {
+            'status': app_._status.getvalue(),
+            'warning': app_._warning.getvalue(),
+        }
+        self.cache[key] = data
+
+    def restore(self, key):
+        if key not in self.cache:
+            return {}
+        data = self.cache[key]
+        return {
+            'status': StringIO(data['status']),
+            'warning': StringIO(data['warning']),
+        }
+
+
+@pytest.fixture
+def shared_result():
+    return SharedResult()
+
+
+@pytest.fixture(scope='module', autouse=True)
+def _shared_result_cache():
+    SharedResult.cache.clear()
+
+
 @pytest.fixture
 def if_graphviz_found(app):
     """
@@ -105,4 +227,4 @@ def tempdir(tmpdir):
     temporary directory that wrapped with `path` class.
     this fixture is for compat with old test implementation.
     """
-    return path(tmpdir)
+    return util.path(tmpdir)
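
The conftest changes above introduce a pytest.mark.test_params marker whose shared_result
value lets several tests reuse a single build (and its cached status/warning streams)
instead of rebuilding per test; the HTML build tests later in this commit use it exactly
that way. A minimal usage sketch -- the test names and the shared_result key here are
hypothetical:

    import pytest


    @pytest.mark.sphinx('html', testroot='root')
    @pytest.mark.test_params(shared_result='example_shared_build')
    def test_builds_once(app):
        app.build()  # the wrapped app skips the build if output already exists
        assert (app.outdir / 'index.html').exists()


    @pytest.mark.sphinx('html', testroot='root')
    @pytest.mark.test_params(shared_result='example_shared_build')
    def test_reuses_build(app, warning):
        app.build()  # reuses the shared srcdir and cached status/warning streams
        assert 'ERROR' not in warning.getvalue()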

@@ -13,7 +13,7 @@
 # "raises" imported for usage by autodoc
 import six
 import sys
-from util import TestApp, Struct, raises, SkipTest
+from util import SphinxTestApp, Struct
 import pytest

 from six import StringIO
@@ -27,7 +27,7 @@ app = None

 def setup_module():
     global app
-    app = TestApp()
+    app = SphinxTestApp()
     app.builder.env.app = app
     app.builder.env.temp_data['docname'] = 'dummy'
     app.connect('autodoc-process-docstring', process_docstring)
@@ -185,7 +185,7 @@ def test_generate():
                   'Class.meth', more_content=add_content)

     # test check_module
-    inst = FunctionDocumenter(directive, 'raises')
+    inst = FunctionDocumenter(directive, 'add_documenter')
     inst.generate(check_module=True)
     assert len(directive.result) == 0

tests/run.py (10 lines changed)

@@ -17,11 +17,15 @@ import warnings
 import traceback

 from path import path
-import pytest

 testroot = os.path.dirname(__file__) or '.'
 sys.path.insert(0, os.path.abspath(os.path.join(testroot, os.path.pardir)))

+# filter warnings of test dependencies
+warnings.filterwarnings('ignore', category=DeprecationWarning, module='site')  # virtualenv
+warnings.filterwarnings('ignore', category=ImportWarning, module='backports')
+warnings.filterwarnings('ignore', category=PendingDeprecationWarning, module=r'_pytest\..*')
+
 # check dependencies before testing
 print('Checking dependencies...')
 for modname in ('pytest', 'mock', 'six', 'docutils', 'jinja2', 'pygments',
@@ -49,9 +53,6 @@ tempdir.makedirs()
 print('Running Sphinx test suite (with Python %s)...' % sys.version.split()[0])
 sys.stdout.flush()

-# filter warnings of test dependencies
-warnings.filterwarnings('ignore', category=DeprecationWarning, module='site')  # virtualenv
-
 # exclude 'root' and 'roots' dirs for pytest test collector
 ignore_paths = [
     os.path.relpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), sub))
@@ -61,4 +62,5 @@ args = sys.argv[1:]
 for path in ignore_paths:
     args.extend(['--ignore', path])

+import pytest
 sys.exit(pytest.main(args))

@@ -10,8 +10,7 @@
     :license: BSD, see LICENSE for details.
 """

-# "raises" imported for usage by autodoc
-from util import TestApp, Struct, raises, SkipTest  # NOQA
+from util import SphinxTestApp, Struct  # NOQA
 import pytest

 import enum
@@ -26,7 +25,7 @@ app = None

 def setup_module():
     global app
-    app = TestApp()
+    app = SphinxTestApp()
     app.builder.env.app = app
     app.builder.env.temp_data['docname'] = 'dummy'
     app.connect('autodoc-process-docstring', process_docstring)
@@ -125,26 +124,27 @@ def test_parse_name():
     del _warnings[:]

     # for functions/classes
-    verify('function', 'util.raises', ('util', ['raises'], None, None))
-    verify('function', 'util.raises(exc) -> None',
-           ('util', ['raises'], 'exc', 'None'))
-    directive.env.temp_data['autodoc:module'] = 'util'
-    verify('function', 'raises', ('util', ['raises'], None, None))
+    verify('function', 'test_autodoc.raises',
+           ('test_autodoc', ['raises'], None, None))
+    verify('function', 'test_autodoc.raises(exc) -> None',
+           ('test_autodoc', ['raises'], 'exc', 'None'))
+    directive.env.temp_data['autodoc:module'] = 'test_autodoc'
+    verify('function', 'raises', ('test_autodoc', ['raises'], None, None))
     del directive.env.temp_data['autodoc:module']
-    directive.env.ref_context['py:module'] = 'util'
-    verify('function', 'raises', ('util', ['raises'], None, None))
-    verify('class', 'TestApp', ('util', ['TestApp'], None, None))
+    directive.env.ref_context['py:module'] = 'test_autodoc'
+    verify('function', 'raises', ('test_autodoc', ['raises'], None, None))
+    verify('class', 'Base', ('test_autodoc', ['Base'], None, None))

     # for members
     directive.env.ref_context['py:module'] = 'foo'
-    verify('method', 'util.TestApp.cleanup',
-           ('util', ['TestApp', 'cleanup'], None, None))
+    verify('method', 'util.SphinxTestApp.cleanup',
+           ('util', ['SphinxTestApp', 'cleanup'], None, None))
     directive.env.ref_context['py:module'] = 'util'
     directive.env.ref_context['py:class'] = 'Foo'
-    directive.env.temp_data['autodoc:class'] = 'TestApp'
-    verify('method', 'cleanup', ('util', ['TestApp', 'cleanup'], None, None))
-    verify('method', 'TestApp.cleanup',
-           ('util', ['TestApp', 'cleanup'], None, None))
+    directive.env.temp_data['autodoc:class'] = 'SphinxTestApp'
+    verify('method', 'cleanup', ('util', ['SphinxTestApp', 'cleanup'], None, None))
+    verify('method', 'SphinxTestApp.cleanup',
+           ('util', ['SphinxTestApp', 'cleanup'], None, None))

     # and clean up
     del directive.env.ref_context['py:module']
@@ -658,7 +658,7 @@ def test_generate():
                   'Class.meth', more_content=add_content)

     # test check_module
-    inst = FunctionDocumenter(directive, 'raises')
+    inst = FunctionDocumenter(directive, 'add_documenter')
     inst.generate(check_module=True)
     assert len(directive.result) == 0
@@ -878,6 +878,11 @@ __all__ = ['Class']
 integer = 1


+def raises(exc, func, *args, **kwds):
+    """Raise AssertionError if ``func(*args, **kwds)`` does not raise *exc*."""
+    pass
+
+
 class CustomEx(Exception):
     """My custom exception."""
@@ -1086,7 +1091,7 @@ def test_type_hints():
     try:
         from typing_test_data import f0, f1, f2, f3, f4, f5, f6, f7, f8, f9, f10, f11
     except (ImportError, SyntaxError):
-        raise SkipTest('Cannot import Python code with function annotations')
+        pytest.skip('Cannot import Python code with function annotations')

     def verify_arg_spec(f, expected):
         assert formatargspec(f, *getargspec(f)) == expected

@@ -15,14 +15,8 @@ import mock
 import pytest
 from textwrap import dedent
 from sphinx.errors import SphinxError
-import sphinx.builders.linkcheck

-from util import rootdir, tempdir, SkipTest, TestApp, path
+from util import rootdir, tempdir, path

-try:
-    from docutils.writers.manpage import Writer as ManWriter
-except ImportError:
-    ManWriter = None


 def request_session_head(url, **kwargs):
@@ -32,24 +26,17 @@ def request_session_head(url, **kwargs):
     return response


-def verify_build(buildername, srcdir):
-    if buildername == 'man' and ManWriter is None:
-        raise SkipTest('man writer is not available')
-    app = TestApp(buildername=buildername, srcdir=srcdir)
-    try:
-        app.builder.build_all()
-    finally:
-        app.cleanup()
-
-
-def test_build_all():
+@pytest.fixture
+def nonascii_srcdir(request):
     # If supported, build in a non-ASCII source dir
     test_name = u'\u65e5\u672c\u8a9e'
+    basedir = tempdir / request.node.originalname
     try:
-        srcdir = tempdir / test_name
-        (rootdir / 'root').copytree(tempdir / test_name)
+        srcdir = basedir / test_name
+        if not srcdir.exists():
+            (rootdir / 'root').copytree(srcdir)
     except UnicodeEncodeError:
-        srcdir = tempdir / 'all'
+        srcdir = basedir / 'all'
     else:
         # add a doc with a non-ASCII file name to the source dir
         (srcdir / (test_name + '.txt')).write_text(dedent("""
@@ -64,30 +51,32 @@ def test_build_all():
             %(test_name)s/%(test_name)s
             """ % {'test_name': test_name})
         )
+    return srcdir

-    with mock.patch('sphinx.builders.linkcheck.requests') as requests:
-        requests.head = request_session_head

+@pytest.mark.parametrize(
+    "buildername",
+    [
         # note: no 'html' - if it's ok with dirhtml it's ok with html
-        for buildername in ['dirhtml', 'singlehtml', 'latex', 'texinfo', 'pickle',
-                            'json', 'text', 'htmlhelp', 'qthelp', 'epub2', 'epub',
-                            'applehelp', 'changes', 'xml', 'pseudoxml', 'man',
-                            'linkcheck']:
-            yield verify_build, buildername, srcdir
+        'dirhtml', 'singlehtml', 'latex', 'texinfo', 'pickle', 'json', 'text',
+        'htmlhelp', 'qthelp', 'epub2', 'epub', 'applehelp', 'changes', 'xml',
+        'pseudoxml', 'man', 'linkcheck',
+    ],
+)
+@mock.patch('sphinx.builders.linkcheck.requests.head',
+            side_effect=request_session_head)
+def test_build_all(requests_head, make_app, nonascii_srcdir, buildername):
+    app = make_app(buildername, srcdir=nonascii_srcdir)
+    app.build()


-def test_master_doc_not_found(tempdir):
+def test_master_doc_not_found(tempdir, make_app):
     (tempdir / 'conf.py').write_text('master_doc = "index"')
     assert tempdir.listdir() == ['conf.py']

-    try:
-        app = TestApp(buildername='dummy', srcdir=tempdir)
-        app.builder.build_all()
-        assert False  # SphinxError not raised
-    except Exception as exc:
-        assert isinstance(exc, SphinxError)
-    finally:
-        app.cleanup()
+    app = make_app('dummy', srcdir=tempdir)
+    with pytest.raises(SphinxError):
+        app.builder.build_all()


 @pytest.mark.sphinx(buildername='text', testroot='circular')

@@ -17,36 +17,36 @@ from subprocess import Popen, PIPE

 import pytest

-from util import (
-    gen_with_app, SkipTest, assert_in, assert_true, assert_equal
-)
+from sphinx.util.osutil import cd


-@gen_with_app('gettext', srcdir='root-gettext')
-def test_all(app, status, warning):
+@pytest.mark.sphinx('gettext', srcdir='root-gettext')
+def test_build_gettext(app):
     # Generic build; should fail only when the builder is horribly broken.
     app.builder.build_all()

     # Do messages end up in the correct location?
     # top-level documents end up in a message catalog
-    yield assert_true, (app.outdir / 'extapi.pot').isfile()
+    assert (app.outdir / 'extapi.pot').isfile()
     # directory items are grouped into sections
-    yield assert_true, (app.outdir / 'subdir.pot').isfile()
+    assert (app.outdir / 'subdir.pot').isfile()

     # regression test for issue #960
     catalog = (app.outdir / 'markup.pot').text(encoding='utf-8')
-    yield assert_in, 'msgid "something, something else, something more"', catalog
+    assert 'msgid "something, something else, something more"' in catalog
+
+
+@pytest.mark.sphinx('gettext', srcdir='root-gettext')
+def test_msgfmt(app):
+    app.builder.build_all()
     (app.outdir / 'en' / 'LC_MESSAGES').makedirs()
-    cwd = os.getcwd()
-    os.chdir(app.outdir)
-    try:
+    with cd(app.outdir):
         try:
             p = Popen(['msginit', '--no-translator', '-i', 'markup.pot',
                        '--locale', 'en_US'],
                       stdout=PIPE, stderr=PIPE)
         except OSError:
-            raise SkipTest  # most likely msginit was not found
+            pytest.skip()  # most likely msginit was not found
         else:
             stdout, stderr = p.communicate()
             if p.returncode != 0:
@@ -54,13 +54,13 @@ def test_all(app, status, warning):
                 print(stderr)
                 assert False, 'msginit exited with return code %s' % \
                     p.returncode
-        yield assert_true, (app.outdir / 'en_US.po').isfile(), 'msginit failed'
+        assert (app.outdir / 'en_US.po').isfile(), 'msginit failed'
         try:
             p = Popen(['msgfmt', 'en_US.po', '-o',
                        os.path.join('en', 'LC_MESSAGES', 'test_root.mo')],
                       stdout=PIPE, stderr=PIPE)
         except OSError:
-            raise SkipTest  # most likely msgfmt was not found
+            pytest.skip()  # most likely msgfmt was not found
         else:
             stdout, stderr = p.communicate()
             if p.returncode != 0:
@@ -68,20 +68,17 @@ def test_all(app, status, warning):
                 print(stderr)
                 assert False, 'msgfmt exited with return code %s' % \
                     p.returncode
-        yield (assert_true,
-               (app.outdir / 'en' / 'LC_MESSAGES' / 'test_root.mo').isfile(),
-               'msgfmt failed')
-    finally:
-        os.chdir(cwd)
+            mo = app.outdir / 'en' / 'LC_MESSAGES' / 'test_root.mo'
+            assert mo.isfile(), 'msgfmt failed'

     _ = gettext.translation('test_root', app.outdir, languages=['en']).gettext
-    yield assert_equal, _("Testing various markup"), u"Testing various markup"
+    assert _("Testing various markup") == u"Testing various markup"


 @pytest.mark.sphinx(
     'gettext', testroot='intl', srcdir='gettext',
     confoverrides={'gettext_compact': False})
-def test_gettext_index_entries(app, status, warning):
+def test_gettext_index_entries(app):
     # regression test for #976
     app.builder.build(['index_entries'])

@@ -128,8 +125,9 @@ def test_gettext_index_entries(app, status, warning):

 @pytest.mark.sphinx(
     'gettext', testroot='intl', srcdir='gettext',
-    confoverrides={'gettext_compact': False, 'gettext_additional_targets': []})
-def test_gettext_disable_index_entries(app, status, warning):
+    confoverrides={'gettext_compact': False,
+                   'gettext_additional_targets': []})
+def test_gettext_disable_index_entries(app):
     # regression test for #976
     app.builder.build(['index_entries'])

@@ -160,7 +158,7 @@ def test_gettext_disable_index_entries(app, status, warning):


 @pytest.mark.sphinx('gettext', testroot='intl', srcdir='gettext')
-def test_gettext_template(app, status, warning):
+def test_gettext_template(app):
     app.builder.build_all()
     assert (app.outdir / 'sphinx.pot').isfile()


@@ -11,11 +11,12 @@

 import os
 import re
+from itertools import cycle, chain

-from six import PY3, iteritems
+from six import PY3

 from sphinx import __display_version__
-from util import remove_unicode_literals, gen_with_app, strip_escseq
+from util import remove_unicode_literals, strip_escseq
 from etree13 import ElementTree
 from html5lib import getTreeBuilder, HTMLParser
 import pytest
@@ -49,6 +50,31 @@ if PY3:
     HTML_WARNINGS = remove_unicode_literals(HTML_WARNINGS)


+etree_cache = {}
+
+
+@pytest.fixture(scope='module')
+def cached_etree_parse():
+    def parse(fname):
+        if fname in etree_cache:
+            return etree_cache[fname]
+        with (fname).open('rb') as fp:
+            etree = HTML_PARSER.parse(fp)
+            etree_cache.clear()
+            etree_cache[fname] = etree
+            return etree
+    yield parse
+    etree_cache.clear()
+
+
+def flat_dict(d):
+    return chain.from_iterable(
+        [
+            zip(cycle([fname]), values)
+            for fname, values in d.items()
+        ]
+    )
+
+
 def tail_check(check):
     rex = re.compile(check)

@@ -60,7 +86,85 @@ def tail_check(check):
     return checker


-HTML_XPATH = {
+def check_xpath(etree, fname, path, check, be_found=True):
+    nodes = list(etree.findall(path))
+    if check is None:
+        assert nodes == [], ('found any nodes matching xpath '
+                             '%r in file %s' % (path, fname))
+        return
+    else:
+        assert nodes != [], ('did not find any node matching xpath '
+                             '%r in file %s' % (path, fname))
+    if hasattr(check, '__call__'):
+        check(nodes)
+    elif not check:
+        # only check for node presence
+        pass
+    else:
+        def get_text(node):
+            if node.text is not None:
+                return node.text
+            else:
+                # Since pygments-2.1.1, empty <span> tag is inserted at top of
+                # highlighting block
+                if len(node) == 1 and node[0].tag == 'span' and node[0].text is None:
+                    return node[0].tail
+                else:
+                    return ''
+
+        rex = re.compile(check)
+        if be_found:
+            if any(rex.search(get_text(node)) for node in nodes):
+                return
+        else:
+            if all(not rex.search(get_text(node)) for node in nodes):
+                return
+
+        assert False, ('%r not found in any node matching '
+                       'path %s in %s: %r' % (check, path, fname,
+                                              [node.text for node in nodes]))
+
+
+def check_static_entries(outdir):
+    staticdir = outdir / '_static'
+    assert staticdir.isdir()
+    # a file from a directory entry in html_static_path
+    assert (staticdir / 'README').isfile()
+    # a directory from a directory entry in html_static_path
+    assert (staticdir / 'subdir' / 'foo.css').isfile()
+    # a file from a file entry in html_static_path
+    assert (staticdir / 'templated.css').isfile()
+    assert (staticdir / 'templated.css').text().splitlines()[1] == __display_version__
+    # a file from _static, but matches exclude_patterns
+    assert not (staticdir / 'excluded.css').exists()
+
+
+def check_extra_entries(outdir):
+    assert (outdir / 'robots.txt').isfile()
+
+
+@pytest.mark.sphinx('html', testroot='warnings')
+def test_html_warnings(app, warning):
+    app.build()
+    html_warnings = strip_escseq(warning.getvalue().replace(os.sep, '/'))
+    html_warnings_exp = HTML_WARNINGS % {
+        'root': re.escape(app.srcdir.replace(os.sep, '/'))}
+    assert re.match(html_warnings_exp + '$', html_warnings), \
+        'Warnings don\'t match:\n' + \
+        '--- Expected (regex):\n' + html_warnings_exp + \
+        '--- Got:\n' + html_warnings
+
+
+@pytest.mark.sphinx('html', tags=['testtag'], confoverrides={
+    'html_context.hckey_co': 'hcval_co'})
+@pytest.mark.test_params(shared_result='test_build_html_output')
+def test_static_output(app):
+    app.build()
+    check_static_entries(app.builder.outdir)
+    check_extra_entries(app.builder.outdir)
+
+
+@pytest.mark.parametrize("fname,expect", flat_dict({
     'images.html': [
         (".//img[@src='_images/img.png']", ''),
         (".//img[@src='_images/img1.png']", ''),
@ -325,98 +429,16 @@ HTML_XPATH = {
|
|||||||
(".//h1", "Generated section"),
|
(".//h1", "Generated section"),
|
||||||
(".//a[@href='_sources/otherext.foo.txt']", ''),
|
(".//a[@href='_sources/otherext.foo.txt']", ''),
|
||||||
]
|
]
|
||||||
}
|
}))
|
||||||
|
@pytest.mark.sphinx('html', tags=['testtag'], confoverrides={
|
||||||
|
'html_context.hckey_co': 'hcval_co'})
|
||||||
|
@pytest.mark.test_params(shared_result='test_build_html_output')
|
||||||
|
def test_html_output(app, cached_etree_parse, fname, expect):
|
||||||
|
app.build()
|
||||||
|
check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)
|
||||||
|
|
||||||
|
|
||||||
def check_xpath(etree, fname, path, check, be_found=True):
|
@pytest.mark.parametrize("fname,expect", flat_dict({
|
||||||
nodes = list(etree.findall(path))
|
|
||||||
if check is None:
|
|
||||||
assert nodes == [], ('found any nodes matching xpath '
|
|
||||||
'%r in file %s' % (path, fname))
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
assert nodes != [], ('did not find any node matching xpath '
|
|
||||||
'%r in file %s' % (path, fname))
|
|
||||||
if hasattr(check, '__call__'):
|
|
||||||
check(nodes)
|
|
||||||
elif not check:
|
|
||||||
# only check for node presence
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
def get_text(node):
|
|
||||||
if node.text is not None:
|
|
||||||
return node.text
|
|
||||||
else:
|
|
||||||
# Since pygments-2.1.1, empty <span> tag is inserted at top of
|
|
||||||
# highlighting block
|
|
||||||
if len(node) == 1 and node[0].tag == 'span' and node[0].text is None:
|
|
||||||
return node[0].tail
|
|
||||||
else:
|
|
||||||
return ''
|
|
||||||
|
|
||||||
rex = re.compile(check)
|
|
||||||
if be_found:
|
|
||||||
if any(rex.search(get_text(node)) for node in nodes):
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
if all(not rex.search(get_text(node)) for node in nodes):
|
|
||||||
return
|
|
||||||
|
|
||||||
assert False, ('%r not found in any node matching '
|
|
||||||
'path %s in %s: %r' % (check, path, fname,
|
|
||||||
[node.text for node in nodes]))
|
|
||||||
|
|
||||||
|
|
||||||
def check_static_entries(outdir):
|
|
||||||
staticdir = outdir / '_static'
|
|
||||||
assert staticdir.isdir()
|
|
||||||
# a file from a directory entry in html_static_path
|
|
||||||
assert (staticdir / 'README').isfile()
|
|
||||||
# a directory from a directory entry in html_static_path
|
|
||||||
assert (staticdir / 'subdir' / 'foo.css').isfile()
|
|
||||||
# a file from a file entry in html_static_path
|
|
||||||
assert (staticdir / 'templated.css').isfile()
|
|
||||||
assert (staticdir / 'templated.css').text().splitlines()[1] == __display_version__
|
|
||||||
# a file from _static, but matches exclude_patterns
|
|
||||||
assert not (staticdir / 'excluded.css').exists()
|
|
||||||
|
|
||||||
|
|
||||||
def check_extra_entries(outdir):
|
|
||||||
assert (outdir / 'robots.txt').isfile()
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.sphinx('html', testroot='warnings', freshenv=True)
|
|
||||||
def test_html_warnings(app, status, warning):
|
|
||||||
app.builder.build_all()
|
|
||||||
html_warnings = strip_escseq(warning.getvalue().replace(os.sep, '/'))
|
|
||||||
html_warnings_exp = HTML_WARNINGS % {
|
|
||||||
'root': re.escape(app.srcdir.replace(os.sep, '/'))}
|
|
||||||
assert re.match(html_warnings_exp + '$', html_warnings), \
|
|
||||||
'Warnings don\'t match:\n' + \
|
|
||||||
'--- Expected (regex):\n' + html_warnings_exp + \
|
|
||||||
'--- Got:\n' + html_warnings
|
|
||||||
|
|
||||||
|
|
||||||
@gen_with_app(buildername='html', tags=['testtag'],
|
|
||||||
confoverrides={'html_context.hckey_co': 'hcval_co'})
|
|
||||||
def test_html_output(app, status, warning):
|
|
||||||
app.builder.build_all()
|
|
||||||
for fname, paths in iteritems(HTML_XPATH):
|
|
||||||
with (app.outdir / fname).open('rb') as fp:
|
|
||||||
etree = HTML_PARSER.parse(fp)
|
|
||||||
for path, check in paths:
|
|
||||||
yield check_xpath, etree, fname, path, check
|
|
||||||
|
|
||||||
check_static_entries(app.builder.outdir)
|
|
||||||
check_extra_entries(app.builder.outdir)
|
|
||||||
|
|
||||||
|
|
||||||
@gen_with_app(buildername='html', testroot='tocdepth')
|
|
||||||
def test_tocdepth(app, status, warning):
|
|
||||||
# issue #1251
|
|
||||||
app.builder.build_all()
|
|
||||||
|
|
||||||
expects = {
|
|
||||||
'index.html': [
|
'index.html': [
|
||||||
(".//li[@class='toctree-l3']/a", '1.1.1. Foo A1', True),
|
(".//li[@class='toctree-l3']/a", '1.1.1. Foo A1', True),
|
||||||
(".//li[@class='toctree-l3']/a", '1.2.1. Foo B1', True),
|
(".//li[@class='toctree-l3']/a", '1.2.1. Foo B1', True),
|
||||||
@ -447,21 +469,16 @@ def test_tocdepth(app, status, warning):
|
|||||||
'baz.html': [
|
'baz.html': [
|
||||||
(".//h1", '2.1.1. Baz A', True),
|
(".//h1", '2.1.1. Baz A', True),
|
||||||
],
|
],
|
||||||
}
|
}))
|
||||||
|
@pytest.mark.sphinx('html', testroot='tocdepth')
|
||||||
for fname, paths in iteritems(expects):
|
@pytest.mark.test_params(shared_result='test_build_html_tocdepth')
|
||||||
with (app.outdir / fname).open('rb') as fp:
|
def test_tocdepth(app, cached_etree_parse, fname, expect):
|
||||||
etree = HTML_PARSER.parse(fp)
|
app.build()
|
||||||
|
# issue #1251
|
||||||
for xpath, check, be_found in paths:
|
check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)
|
||||||
yield check_xpath, etree, fname, xpath, check, be_found
|
|
||||||
|
|
||||||
|
|
||||||
@gen_with_app(buildername='singlehtml', testroot='tocdepth')
|
@pytest.mark.parametrize("fname,expect", flat_dict({
|
||||||
def test_tocdepth_singlehtml(app, status, warning):
|
|
||||||
app.builder.build_all()
|
|
||||||
|
|
||||||
expects = {
|
|
||||||
'index.html': [
|
'index.html': [
|
||||||
(".//li[@class='toctree-l3']/a", '1.1.1. Foo A1', True),
|
(".//li[@class='toctree-l3']/a", '1.1.1. Foo A1', True),
|
||||||
(".//li[@class='toctree-l3']/a", '1.2.1. Foo B1', True),
|
(".//li[@class='toctree-l3']/a", '1.2.1. Foo B1', True),
|
||||||
@ -487,27 +504,26 @@ def test_tocdepth_singlehtml(app, status, warning):
|
|||||||
# baz.rst
|
# baz.rst
|
||||||
(".//h4", '2.1.1. Baz A', True),
|
(".//h4", '2.1.1. Baz A', True),
|
||||||
],
|
],
|
||||||
}
|
}))
|
||||||
|
@pytest.mark.sphinx('singlehtml', testroot='tocdepth')
|
||||||
for fname, paths in iteritems(expects):
|
@pytest.mark.test_params(shared_result='test_build_html_tocdepth')
|
||||||
with (app.outdir / fname).open('rb') as fp:
|
def test_tocdepth_singlehtml(app, cached_etree_parse, fname, expect):
|
||||||
etree = HTML_PARSER.parse(fp)
|
app.build()
|
||||||
|
check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)
|
||||||
for xpath, check, be_found in paths:
|
|
||||||
yield check_xpath, etree, fname, xpath, check, be_found
|
|
||||||
|
|
||||||
|
|
||||||
@gen_with_app(buildername='html', testroot='numfig')
|
@pytest.mark.sphinx('html', testroot='numfig')
|
||||||
def test_numfig_disabled(app, status, warning):
|
@pytest.mark.test_params(shared_result='test_build_html_numfig')
|
||||||
app.builder.build_all()
|
def test_numfig_disabled_warn(app, warning):
|
||||||
|
app.build()
|
||||||
warnings = warning.getvalue()
|
warnings = warning.getvalue()
|
||||||
assert 'index.rst:47: WARNING: numfig is disabled. :numref: is ignored.' in warnings
|
assert 'index.rst:47: WARNING: numfig is disabled. :numref: is ignored.' in warnings
|
||||||
assert 'index.rst:55: WARNING: no number is assigned for section: index' not in warnings
|
assert 'index.rst:55: WARNING: no number is assigned for section: index' not in warnings
|
||||||
assert 'index.rst:56: WARNING: invalid numfig_format: invalid' not in warnings
|
assert 'index.rst:56: WARNING: invalid numfig_format: invalid' not in warnings
|
||||||
assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' not in warnings
|
assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' not in warnings
|
||||||
|
|
||||||
expects = {
|
|
||||||
|
@pytest.mark.parametrize("fname,expect", flat_dict({
|
||||||
'index.html': [
|
'index.html': [
|
||||||
(".//div[@class='figure']/p[@class='caption']/"
|
(".//div[@class='figure']/p[@class='caption']/"
|
||||||
"span[@class='caption-number']", None, True),
|
"span[@class='caption-number']", None, True),
|
||||||
@ -546,19 +562,20 @@ def test_numfig_disabled(app, status, warning):
|
|||||||
(".//div[@class='code-block-caption']/"
|
(".//div[@class='code-block-caption']/"
|
||||||
"span[@class='caption-number']", None, True),
|
"span[@class='caption-number']", None, True),
|
||||||
],
|
],
|
||||||
}
|
}))
|
||||||
|
@pytest.mark.sphinx('html', testroot='numfig')
|
||||||
for fname, paths in iteritems(expects):
|
@pytest.mark.test_params(shared_result='test_build_html_numfig')
|
||||||
with (app.outdir / fname).open('rb') as fp:
|
def test_numfig_disabled(app, cached_etree_parse, fname, expect):
|
||||||
etree = HTML_PARSER.parse(fp)
|
app.build()
|
||||||
|
check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)
|
||||||
for xpath, check, be_found in paths:
|
|
||||||
yield check_xpath, etree, fname, xpath, check, be_found
|
|
||||||
|
|
||||||
|
|
||||||
@gen_with_app(buildername='html', testroot='numfig', freshenv=True,
|
@pytest.mark.sphinx(
|
||||||
|
'html', testroot='numfig',
|
||||||
|
srcdir='test_numfig_without_numbered_toctree_warn',
|
||||||
confoverrides={'numfig': True})
|
confoverrides={'numfig': True})
|
||||||
def test_numfig_without_numbered_toctree(app, status, warning):
|
def test_numfig_without_numbered_toctree_warn(app, warning):
|
||||||
|
app.build()
|
||||||
# remove :numbered: option
|
# remove :numbered: option
|
||||||
index = (app.srcdir / 'index.rst').text()
|
index = (app.srcdir / 'index.rst').text()
|
||||||
index = re.sub(':numbered:.*', '', index, re.MULTILINE)
|
index = re.sub(':numbered:.*', '', index, re.MULTILINE)
|
||||||
@ -571,7 +588,8 @@ def test_numfig_without_numbered_toctree(app, status, warning):
|
|||||||
assert 'index.rst:56: WARNING: invalid numfig_format: invalid' in warnings
|
assert 'index.rst:56: WARNING: invalid numfig_format: invalid' in warnings
|
||||||
assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' in warnings
|
assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' in warnings
|
||||||
|
|
||||||
expects = {
|
|
||||||
|
@pytest.mark.parametrize("fname,expect", flat_dict({
|
||||||
'index.html': [
|
'index.html': [
|
||||||
(".//div[@class='figure']/p[@class='caption']/"
|
(".//div[@class='figure']/p[@class='caption']/"
|
||||||
"span[@class='caption-number']", '^Fig. 9 $', True),
|
"span[@class='caption-number']", '^Fig. 9 $', True),
|
||||||
@ -650,28 +668,34 @@ def test_numfig_without_numbered_toctree(app, status, warning):
|
|||||||
(".//div[@class='code-block-caption']/"
|
(".//div[@class='code-block-caption']/"
|
||||||
"span[@class='caption-number']", '^Listing 6 $', True),
|
"span[@class='caption-number']", '^Listing 6 $', True),
|
||||||
],
|
],
|
||||||
}
|
}))
|
||||||
|
@pytest.mark.sphinx(
|
||||||
for fname, paths in iteritems(expects):
|
'html', testroot='numfig',
|
||||||
with (app.outdir / fname).open('rb') as fp:
|
srcdir='test_numfig_without_numbered_toctree',
|
||||||
etree = HTML_PARSER.parse(fp)
|
|
||||||
|
|
||||||
for xpath, check, be_found in paths:
|
|
||||||
yield check_xpath, etree, fname, xpath, check, be_found
|
|
||||||
|
|
||||||
|
|
||||||
@gen_with_app(buildername='html', testroot='numfig', srcdir='test_build_html_numfig_on',
|
|
||||||
confoverrides={'numfig': True})
|
confoverrides={'numfig': True})
|
||||||
def test_numfig_with_numbered_toctree(app, status, warning):
|
def test_numfig_without_numbered_toctree(app, cached_etree_parse, fname, expect):
|
||||||
app.builder.build_all()
|
# remove :numbered: option
|
||||||
|
index = (app.srcdir / 'index.rst').text()
|
||||||
|
index = re.sub(':numbered:.*', '', index, re.MULTILINE)
|
||||||
|
(app.srcdir / 'index.rst').write_text(index, encoding='utf-8')
|
||||||
|
|
||||||
|
if not app.outdir.listdir():
|
||||||
|
app.build()
|
||||||
|
check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.sphinx('html', testroot='numfig', confoverrides={'numfig': True})
|
||||||
|
@pytest.mark.test_params(shared_result='test_build_html_numfig_on')
|
||||||
|
def test_numfig_with_numbered_toctree_warn(app, warning):
|
||||||
|
app.build()
|
||||||
warnings = warning.getvalue()
|
warnings = warning.getvalue()
|
||||||
assert 'index.rst:47: WARNING: numfig is disabled. :numref: is ignored.' not in warnings
|
assert 'index.rst:47: WARNING: numfig is disabled. :numref: is ignored.' not in warnings
|
||||||
assert 'index.rst:55: WARNING: no number is assigned for section: index' in warnings
|
assert 'index.rst:55: WARNING: no number is assigned for section: index' in warnings
|
||||||
assert 'index.rst:56: WARNING: invalid numfig_format: invalid' in warnings
|
assert 'index.rst:56: WARNING: invalid numfig_format: invalid' in warnings
|
||||||
assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' in warnings
|
assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' in warnings
|
||||||
|
|
||||||
expects = {
|
|
||||||
|
@pytest.mark.parametrize("fname,expect", flat_dict({
|
||||||
'index.html': [
|
'index.html': [
|
||||||
(".//div[@class='figure']/p[@class='caption']/"
|
(".//div[@class='figure']/p[@class='caption']/"
|
||||||
"span[@class='caption-number']", '^Fig. 1 $', True),
|
"span[@class='caption-number']", '^Fig. 1 $', True),
|
||||||
@ -750,33 +774,31 @@ def test_numfig_with_numbered_toctree(app, status, warning):
|
|||||||
(".//div[@class='code-block-caption']/"
|
(".//div[@class='code-block-caption']/"
|
||||||
"span[@class='caption-number']", '^Listing 2.2 $', True),
|
"span[@class='caption-number']", '^Listing 2.2 $', True),
|
||||||
],
|
],
|
||||||
}
|
}))
|
||||||
|
@pytest.mark.sphinx('html', testroot='numfig', confoverrides={'numfig': True})
|
||||||
for fname, paths in iteritems(expects):
|
@pytest.mark.test_params(shared_result='test_build_html_numfig_on')
|
||||||
with (app.outdir / fname).open('rb') as fp:
|
def test_numfig_with_numbered_toctree(app, cached_etree_parse, fname, expect):
|
||||||
etree = HTML_PARSER.parse(fp)
|
app.build()
|
||||||
|
check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)
|
||||||
for xpath, check, be_found in paths:
|
|
||||||
yield check_xpath, etree, fname, xpath, check, be_found
|
|
||||||
|
|
||||||
|
|
||||||
@gen_with_app(buildername='html', testroot='numfig',
|
@pytest.mark.sphinx('html', testroot='numfig', confoverrides={
|
||||||
srcdir='test_build_html_numfig_format_warn',
|
'numfig': True,
|
||||||
confoverrides={'numfig': True,
|
|
||||||
'numfig_format': {'figure': 'Figure:%s',
|
'numfig_format': {'figure': 'Figure:%s',
|
||||||
'table': 'Tab_%s',
|
'table': 'Tab_%s',
|
||||||
'code-block': 'Code-%s',
|
'code-block': 'Code-%s',
|
||||||
'section': 'SECTION-%s'}})
|
'section': 'SECTION-%s'}})
|
||||||
def test_numfig_with_prefix(app, status, warning):
|
@pytest.mark.test_params(shared_result='test_build_html_numfig_format_warn')
|
||||||
app.builder.build_all()
|
def test_numfig_with_prefix_warn(app, warning):
|
||||||
|
app.build()
|
||||||
warnings = warning.getvalue()
|
warnings = warning.getvalue()
|
||||||
assert 'index.rst:47: WARNING: numfig is disabled. :numref: is ignored.' not in warnings
|
assert 'index.rst:47: WARNING: numfig is disabled. :numref: is ignored.' not in warnings
|
||||||
assert 'index.rst:55: WARNING: no number is assigned for section: index' in warnings
|
assert 'index.rst:55: WARNING: no number is assigned for section: index' in warnings
|
||||||
assert 'index.rst:56: WARNING: invalid numfig_format: invalid' in warnings
|
assert 'index.rst:56: WARNING: invalid numfig_format: invalid' in warnings
|
||||||
assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' in warnings
|
assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' in warnings
|
||||||
|
|
||||||
expects = {
|
|
||||||
|
@pytest.mark.parametrize("fname,expect", flat_dict({
|
||||||
'index.html': [
|
'index.html': [
|
||||||
(".//div[@class='figure']/p[@class='caption']/"
|
(".//div[@class='figure']/p[@class='caption']/"
|
||||||
"span[@class='caption-number']", '^Figure:1 $', True),
|
"span[@class='caption-number']", '^Figure:1 $', True),
|
||||||
@ -855,29 +877,32 @@ def test_numfig_with_prefix(app, status, warning):
|
|||||||
(".//div[@class='code-block-caption']/"
|
(".//div[@class='code-block-caption']/"
|
||||||
"span[@class='caption-number']", '^Code-2.2 $', True),
|
"span[@class='caption-number']", '^Code-2.2 $', True),
|
||||||
],
|
],
|
||||||
}
|
}))
|
||||||
|
@pytest.mark.sphinx('html', testroot='numfig', confoverrides={
|
||||||
for fname, paths in iteritems(expects):
|
'numfig': True,
|
||||||
with (app.outdir / fname).open('rb') as fp:
|
'numfig_format': {'figure': 'Figure:%s',
|
||||||
etree = HTML_PARSER.parse(fp)
|
'table': 'Tab_%s',
|
||||||
|
'code-block': 'Code-%s',
|
||||||
for xpath, check, be_found in paths:
|
'section': 'SECTION-%s'}})
|
||||||
yield check_xpath, etree, fname, xpath, check, be_found
|
@pytest.mark.test_params(shared_result='test_build_html_numfig_format_warn')
|
||||||
|
def test_numfig_with_prefix(app, cached_etree_parse, fname, expect):
|
||||||
|
app.build()
|
||||||
|
check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)
|
||||||
|
|
||||||
|
|
||||||
@gen_with_app(buildername='html', testroot='numfig',
|
@pytest.mark.sphinx('html', testroot='numfig', confoverrides={
|
||||||
srcdir='test_build_html_numfig_depth_2',
|
'numfig': True, 'numfig_secnum_depth': 2})
|
||||||
confoverrides={'numfig': True, 'numfig_secnum_depth': 2})
|
@pytest.mark.test_params(shared_result='test_build_html_numfig_depth_2')
|
||||||
def test_numfig_with_secnum_depth(app, status, warning):
|
def test_numfig_with_secnum_depth_warn(app, warning):
|
||||||
app.builder.build_all()
|
app.build()
|
||||||
|
|
||||||
warnings = warning.getvalue()
|
warnings = warning.getvalue()
|
||||||
assert 'index.rst:47: WARNING: numfig is disabled. :numref: is ignored.' not in warnings
|
assert 'index.rst:47: WARNING: numfig is disabled. :numref: is ignored.' not in warnings
|
||||||
assert 'index.rst:55: WARNING: no number is assigned for section: index' in warnings
|
assert 'index.rst:55: WARNING: no number is assigned for section: index' in warnings
|
||||||
assert 'index.rst:56: WARNING: invalid numfig_format: invalid' in warnings
|
assert 'index.rst:56: WARNING: invalid numfig_format: invalid' in warnings
|
||||||
assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' in warnings
|
assert 'index.rst:57: WARNING: invalid numfig_format: Fig %s %s' in warnings
|
||||||
|
|
||||||
expects = {
|
|
||||||
|
@pytest.mark.parametrize("fname,expect", flat_dict({
|
||||||
'index.html': [
|
'index.html': [
|
||||||
(".//div[@class='figure']/p[@class='caption']/"
|
(".//div[@class='figure']/p[@class='caption']/"
|
||||||
"span[@class='caption-number']", '^Fig. 1 $', True),
|
"span[@class='caption-number']", '^Fig. 1 $', True),
|
||||||
@ -956,23 +981,16 @@ def test_numfig_with_secnum_depth(app, status, warning):
|
|||||||
(".//div[@class='code-block-caption']/"
|
(".//div[@class='code-block-caption']/"
|
||||||
"span[@class='caption-number']", '^Listing 2.1.2 $', True),
|
"span[@class='caption-number']", '^Listing 2.1.2 $', True),
|
||||||
],
|
],
|
||||||
}
|
}))
|
||||||
|
@pytest.mark.sphinx('html', testroot='numfig', confoverrides={
|
||||||
for fname, paths in iteritems(expects):
|
'numfig': True, 'numfig_secnum_depth': 2})
|
||||||
with (app.outdir / fname).open('rb') as fp:
|
@pytest.mark.test_params(shared_result='test_build_html_numfig_depth_2')
|
||||||
etree = HTML_PARSER.parse(fp)
|
def test_numfig_with_secnum_depth(app, cached_etree_parse, fname, expect):
|
||||||
|
app.build()
|
||||||
for xpath, check, be_found in paths:
|
check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)
|
||||||
yield check_xpath, etree, fname, xpath, check, be_found
|
|
||||||
|
|
||||||
|
|
||||||
-@gen_with_app(buildername='singlehtml', testroot='numfig',
-              srcdir='test_build_html_numfig_on',
-              confoverrides={'numfig': True})
-def test_numfig_with_singlehtml(app, status, warning):
-    app.builder.build_all()
-
-    expects = {
+@pytest.mark.parametrize("fname,expect", flat_dict({
     'index.html': [
         (".//div[@class='figure']/p[@class='caption']/"
          "span[@class='caption-number']", '^Fig. 1 $', True),
@@ -1045,21 +1063,16 @@ def test_numfig_with_singlehtml(app, status, warning):
     (".//div[@class='code-block-caption']/"
      "span[@class='caption-number']", '^Listing 2.2 $', True),
     ],
-    }
-
-    for fname, paths in iteritems(expects):
-        with (app.outdir / fname).open('rb') as fp:
-            etree = HTML_PARSER.parse(fp)
-
-            for xpath, check, be_found in paths:
-                yield check_xpath, etree, fname, xpath, check, be_found
+}))
+@pytest.mark.sphinx('singlehtml', testroot='numfig', confoverrides={
+    'numfig': True})
+@pytest.mark.test_params(shared_result='test_build_html_numfig_on')
+def test_numfig_with_singlehtml(app, cached_etree_parse, fname, expect):
+    app.build()
+    check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)


-@gen_with_app(buildername='html', testroot='add_enumerable_node')
-def test_enumerable_node(app, status, warning):
-    app.builder.build_all()
-
-    expects = {
+@pytest.mark.parametrize("fname,expect", flat_dict({
     'index.html': [
         (".//div[@class='figure']/p[@class='caption']/span[@class='caption-number']",
          "Fig. 1", True),
@@ -1075,18 +1088,18 @@ def test_enumerable_node(app, status, warning):
     (".//li/a/span", 'No.1', True),
     (".//li/a/span", 'No.2', True),
     ],
-    }
-
-    for fname, paths in iteritems(expects):
-        with (app.outdir / fname).open('rb') as fp:
-            etree = HTML_PARSER.parse(fp)
-
-            for xpath, check, be_found in paths:
-                yield check_xpath, etree, fname, xpath, check, be_found
+}))
+@pytest.mark.sphinx(
+    'html', testroot='add_enumerable_node',
+    srcdir='test_enumerable_node',
+)
+def test_enumerable_node(app, cached_etree_parse, fname, expect):
+    app.build()
+    check_xpath(cached_etree_parse(app.outdir / fname), fname, *expect)


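The hunks above all apply the same migration: yield-based `@gen_with_app` tests become module-level `@pytest.mark.parametrize` cases built from a `flat_dict` helper. The following is a minimal, self-contained sketch of that pattern only; the `flat_dict` implementation, file names, and XPath checks shown here are illustrative assumptions, not the suite's exact code.

import pytest


def flat_dict(d):
    # Flatten {fname: [expect, ...]} into (fname, expect) pairs so that each
    # former yield becomes its own parametrized test case.
    return [(fname, expect) for fname, expects in d.items() for expect in expects]


@pytest.mark.parametrize("fname,expect", flat_dict({
    'index.html': [(".//h1", '^Title$', True),
                   (".//p", '^Body$', True)],
}))
def test_example(fname, expect):
    # Every (fname, expect) pair shows up as a separate test in the report,
    # which is what the old generator tests emulated with yield.
    assert fname == 'index.html'
    assert len(expect) == 3
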
 @pytest.mark.sphinx('html', testroot='html_assets')
-def test_html_assets(app, status, warning):
+def test_html_assets(app):
     app.builder.build_all()

     # html_static_path
@@ -1114,7 +1127,7 @@ def test_html_assets(app, status, warning):
 @pytest.mark.sphinx('html', confoverrides={'html_sourcelink_suffix': ''})
-def test_html_sourcelink_suffix(app, status, warning):
+def test_html_sourcelink_suffix(app):
     app.builder.build_all()
     content_otherext = (app.outdir / 'otherext.html').text()
     content_images = (app.outdir / 'images.html').text()

@@ -13,9 +13,6 @@ from six import PY3, iteritems
 import pytest
 import mock

-from util import TestApp, gen_with_app, \
-    assert_in, assert_not_in
-
 import sphinx
 from sphinx.config import Config
 from sphinx.errors import ExtensionError, ConfigError, VersionRequirementError
@@ -76,7 +73,6 @@ def test_core_config(app, status, warning):
     assert cfg['project'] == cfg.project == 'Sphinx Tests'


-@pytest.mark.sphinx()
 def test_extension_values(app, status, warning):
     cfg = app.config

@@ -125,39 +121,39 @@ def test_errors_warnings(logger, tempdir):
     assert logger.warning.called is True


-def test_errors_if_setup_is_not_callable(tempdir):
+def test_errors_if_setup_is_not_callable(tempdir, make_app):
     # test the error to call setup() in the config file
     (tempdir / 'conf.py').write_text(u'setup = 1')
     with pytest.raises(ConfigError) as excinfo:
-        TestApp(srcdir=tempdir)
+        make_app(srcdir=tempdir)
     assert 'callable' in str(excinfo.value)


 @mock.patch.object(sphinx, '__display_version__', '1.3.4')
-def test_needs_sphinx():
+def test_needs_sphinx(make_app):
     # micro version
-    app = TestApp(confoverrides={'needs_sphinx': '1.3.3'})  # OK: less
+    app = make_app(confoverrides={'needs_sphinx': '1.3.3'})  # OK: less
     app.cleanup()
-    app = TestApp(confoverrides={'needs_sphinx': '1.3.4'})  # OK: equals
+    app = make_app(confoverrides={'needs_sphinx': '1.3.4'})  # OK: equals
     app.cleanup()
     with pytest.raises(VersionRequirementError):
-        TestApp(confoverrides={'needs_sphinx': '1.3.5'})  # NG: greater
+        make_app(confoverrides={'needs_sphinx': '1.3.5'})  # NG: greater

     # minor version
-    app = TestApp(confoverrides={'needs_sphinx': '1.2'})  # OK: less
+    app = make_app(confoverrides={'needs_sphinx': '1.2'})  # OK: less
     app.cleanup()
-    app = TestApp(confoverrides={'needs_sphinx': '1.3'})  # OK: equals
+    app = make_app(confoverrides={'needs_sphinx': '1.3'})  # OK: equals
     app.cleanup()
     with pytest.raises(VersionRequirementError):
-        TestApp(confoverrides={'needs_sphinx': '1.4'})  # NG: greater
+        make_app(confoverrides={'needs_sphinx': '1.4'})  # NG: greater

     # major version
-    app = TestApp(confoverrides={'needs_sphinx': '0'})  # OK: less
+    app = make_app(confoverrides={'needs_sphinx': '0'})  # OK: less
     app.cleanup()
-    app = TestApp(confoverrides={'needs_sphinx': '1'})  # OK: equals
+    app = make_app(confoverrides={'needs_sphinx': '1'})  # OK: equals
     app.cleanup()
     with pytest.raises(VersionRequirementError):
-        TestApp(confoverrides={'needs_sphinx': '2'})  # NG: greater
+        make_app(confoverrides={'needs_sphinx': '2'})  # NG: greater


 @mock.patch("sphinx.config.logger")
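`make_app` in the hunk above is the suite's app-factory fixture. The sketch below only illustrates the general shape of such a factory fixture, namely creating apps on demand and cleaning them up after the test; the `FakeApp` class is a stand-in for illustration, not the real Sphinx application object.

import pytest


class FakeApp:
    """Stand-in application object, used only to show the fixture's shape."""

    def __init__(self, **kwargs):
        self.kwargs = kwargs
        self.cleaned_up = False

    def cleanup(self):
        self.cleaned_up = True


@pytest.fixture
def make_app():
    created = []

    def make(**kwargs):
        app = FakeApp(**kwargs)
        created.append(app)
        return app

    yield make
    # every app built during the test is cleaned up here, even on failure
    for app in created:
        app.cleanup()


def test_make_app_factory(make_app):
    app = make_app(confoverrides={'needs_sphinx': '1.3'})
    assert app.kwargs['confoverrides'] == {'needs_sphinx': '1.3'}
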
@@ -177,12 +173,14 @@ def test_config_eol(logger, tempdir):
                       'primary_domain': None})
 def test_builtin_conf(app, status, warning):
     warnings = warning.getvalue()
-    assert_in('master_doc', warnings,
-              'override on builtin "master_doc" should raise a type warning')
-    assert_not_in('language', warnings, 'explicitly permitted '
-                  'override on builtin "language" should NOT raise a type warning')
-    assert_not_in('primary_domain', warnings, 'override to None on builtin '
-                  '"primary_domain" should NOT raise a type warning')
+    assert 'master_doc' in warnings, (
+        'override on builtin "master_doc" should raise a type warning')
+    assert 'language' not in warnings, (
+        'explicitly permitted override on builtin "language" should NOT raise '
+        'a type warning')
+    assert 'primary_domain' not in warnings, (
+        'override to None on builtin "primary_domain" should NOT raise a type '
+        'warning')


 # See roots/test-config/conf.py.
@@ -197,7 +195,7 @@ TYPECHECK_WARNINGS = {
     'value8': False,
     'value9': False,
     'value10': False,
-    'value11': True,
+    'value11': False if PY3 else True,
     'value12': False,
     'value13': False,
     'value14': False,
@@ -206,15 +204,17 @@ TYPECHECK_WARNINGS = {
 }


-@gen_with_app(testroot='config')
-def test_gen_check_types(app, status, warning):
-    if PY3:
-        TYPECHECK_WARNINGS['value11'] = False
-
-    for key, should in iteritems(TYPECHECK_WARNINGS):
-        yield assert_in if should else assert_not_in, key, warning.getvalue(), (
-            'override on "%s" should%s raise a type warning' %
-            (key, '' if should else ' NOT')
-        )
+@pytest.mark.parametrize("key,should", iteritems(TYPECHECK_WARNINGS))
+@pytest.mark.sphinx(testroot='config')
+def test_check_types(warning, key, should):
+    warn = warning.getvalue()
+    if should:
+        assert key in warn, (
+            'override on "%s" should raise a type warning' % key
+        )
+    else:
+        assert key not in warn, (
+            'override on "%s" should NOT raise a type warning' % key
+        )

@@ -8,42 +8,30 @@
     :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
-import os
-
-from util import TestApp
+import pytest


-def test_correct_year():
-    try:
-        # save current value of SOURCE_DATE_EPOCH
-        sde = os.environ.pop('SOURCE_DATE_EPOCH', None)
-
-        # test with SOURCE_DATE_EPOCH unset: no modification
-        app = TestApp(buildername='html', testroot='correct-year')
-        app.builder.build_all()
-        content = (app.outdir / 'contents.html').text()
-        app.cleanup()
-        assert '2006-2009' in content
-
-        # test with SOURCE_DATE_EPOCH set: copyright year should be
-        # updated
-        os.environ['SOURCE_DATE_EPOCH'] = "1293840000"
-        app = TestApp(buildername='html', testroot='correct-year')
-        app.builder.build_all()
-        content = (app.outdir / 'contents.html').text()
-        app.cleanup()
-        assert '2006-2011' in content
-
-        os.environ['SOURCE_DATE_EPOCH'] = "1293839999"
-        app = TestApp(buildername='html', testroot='correct-year')
-        app.builder.build_all()
-        content = (app.outdir / 'contents.html').text()
-        app.cleanup()
-        assert '2006-2010' in content
-
-    finally:
-        # Restores SOURCE_DATE_EPOCH
-        if sde is None:
-            os.environ.pop('SOURCE_DATE_EPOCH', None)
-        else:
-            os.environ['SOURCE_DATE_EPOCH'] = sde
+@pytest.fixture(
+    params=[
+        # test with SOURCE_DATE_EPOCH unset: no modification
+        (None, '2006-2009'),
+        # test with SOURCE_DATE_EPOCH set: copyright year should be updated
+        ('1293840000', '2006-2011'),
+        ('1293839999', '2006-2010'),
+    ],
+)
+def expect_date(request, monkeypatch):
+    sde, expect = request.param
+    if sde:
+        monkeypatch.setenv('SOURCE_DATE_EPOCH', sde)
+    else:
+        monkeypatch.delenv('SOURCE_DATE_EPOCH', raising=False)
+    yield expect
+
+
+@pytest.mark.sphinx('html', testroot='correct-year')
+def test_correct_year(expect_date, app):
+    app.build()
+    content = (app.outdir / 'contents.html').text()
+    assert expect_date in content
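The rewrite above leans on pytest's monkeypatch fixture, which undoes environment changes automatically and so replaces the old try/finally bookkeeping. A minimal standalone sketch of the same idea follows; the fixture name, parameter values, and test body here are illustrative, not Sphinx's own.

import os

import pytest


@pytest.fixture(params=[(None, 'unset'), ('1293840000', 'set')])
def source_date_epoch(request, monkeypatch):
    value, label = request.param
    if value:
        monkeypatch.setenv('SOURCE_DATE_EPOCH', value)
    else:
        monkeypatch.delenv('SOURCE_DATE_EPOCH', raising=False)
    # monkeypatch restores the original environment after each test
    return label


def test_source_date_epoch(source_date_epoch):
    if source_date_epoch == 'set':
        assert os.environ['SOURCE_DATE_EPOCH'] == '1293840000'
    else:
        assert 'SOURCE_DATE_EPOCH' not in os.environ
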
@@ -9,9 +9,7 @@
     :license: BSD, see LICENSE for details.
 """

-from six import StringIO
-
-from util import TestApp, path
+from util import SphinxTestApp, path

 from sphinx.builders.html import StandaloneHTMLBuilder
 from sphinx.builders.latex import LaTeXBuilder
@@ -21,7 +19,7 @@ app = env = None

 def setup_module():
     global app, env
-    app = TestApp(srcdir='root-envtest', warning=StringIO())
+    app = SphinxTestApp(srcdir='root-envtest')
     env = app.env

@@ -16,23 +16,13 @@ from sphinx.addnodes import compact_paragraph, only
 from sphinx.builders.html import StandaloneHTMLBuilder
 import pytest

-from util import gen_with_app, assert_node
+from util import assert_node


-@gen_with_app('xml', testroot='toctree')
-def test_basic(app, status, warning):
+@pytest.mark.sphinx('xml', testroot='toctree')
+@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
+def test_process_doc(app):
     app.build()
-    yield _test_process_doc, app
-    yield _test_get_toc_for, app
-    yield _test_get_toc_for_only, app
-    yield _test_get_toc_for_tocdepth, app
-    yield _test_get_toctree_for, app
-    yield _test_get_toctree_for_collapse, app
-    yield _test_get_toctree_for_maxdepth, app
-    yield _test_get_toctree_for_includehidden, app
-
-
-def _test_process_doc(app):
     # tocs
     toctree = app.env.tocs['index']
     assert_node(toctree,
@@ -99,7 +89,7 @@ def _test_process_doc(app):
 @pytest.mark.sphinx('dummy', testroot='toctree-glob')
-def test_glob(app, status, warning):
+def test_glob(app):
     includefiles = ['foo', 'bar/index', 'bar/bar_1', 'bar/bar_2',
                     'bar/bar_3', 'baz', 'qux/index']
@@ -144,7 +134,10 @@ def test_glob(app, status, warning):
     assert app.env.numbered_toctrees == set()


-def _test_get_toc_for(app):
+@pytest.mark.sphinx('xml', testroot='toctree')
+@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
+def test_get_toc_for(app):
+    app.build()
     toctree = app.env.get_toc_for('index', app.builder)

     assert_node(toctree,
@@ -167,7 +160,10 @@ def _test_get_toc_for(app):
                          [compact_paragraph, reference, "Indices and tables"])


-def _test_get_toc_for_only(app):
+@pytest.mark.sphinx('xml', testroot='toctree')
+@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
+def test_get_toc_for_only(app):
+    app.build()
     builder = StandaloneHTMLBuilder(app)
     toctree = app.env.get_toc_for('index', builder)
@@ -194,7 +190,10 @@ def _test_get_toc_for_only(app):
                          [compact_paragraph, reference, "Indices and tables"])


-def _test_get_toc_for_tocdepth(app):
+@pytest.mark.sphinx('xml', testroot='toctree')
+@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
+def test_get_toc_for_tocdepth(app):
+    app.build()
     toctree = app.env.get_toc_for('tocdepth', app.builder)

     assert_node(toctree,
@@ -206,7 +205,10 @@ def _test_get_toc_for_tocdepth(app):
                          [bullet_list, list_item, compact_paragraph, reference, "level 2"])


-def _test_get_toctree_for(app):
+@pytest.mark.sphinx('xml', testroot='toctree')
+@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
+def test_get_toctree_for(app):
+    app.build()
     toctree = app.env.get_toctree_for('index', app.builder, collapse=False)
     assert_node(toctree,
                 [compact_paragraph, ([caption, "Table of Contents"],
@@ -240,7 +242,10 @@ def _test_get_toctree_for(app):
     assert_node(toctree[3][1][0][0], reference, refuri="http://python.org/")


-def _test_get_toctree_for_collapse(app):
+@pytest.mark.sphinx('xml', testroot='toctree')
+@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
+def test_get_toctree_for_collapse(app):
+    app.build()
     toctree = app.env.get_toctree_for('index', app.builder, collapse=True)
     assert_node(toctree,
                 [compact_paragraph, ([caption, "Table of Contents"],
@@ -265,7 +270,10 @@ def _test_get_toctree_for_collapse(app):
     assert_node(toctree[3][1][0][0], reference, refuri="http://python.org/")


-def _test_get_toctree_for_maxdepth(app):
+@pytest.mark.sphinx('xml', testroot='toctree')
+@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
+def test_get_toctree_for_maxdepth(app):
+    app.build()
     toctree = app.env.get_toctree_for('index', app.builder, collapse=False, maxdepth=3)
     assert_node(toctree,
                 [compact_paragraph, ([caption, "Table of Contents"],
@@ -304,7 +312,10 @@ def _test_get_toctree_for_maxdepth(app):
     assert_node(toctree[3][1][0][0], reference, refuri="http://python.org/")


-def _test_get_toctree_for_includehidden(app):
+@pytest.mark.sphinx('xml', testroot='toctree')
+@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
+def test_get_toctree_for_includehidden(app):
+    app.build()
     toctree = app.env.get_toctree_for('index', app.builder, collapse=False,
                                       includehidden=False)
     assert_node(toctree,
File diff suppressed because it is too large
@@ -13,7 +13,7 @@ import re
 import pickle

 from docutils import frontend, utils, nodes
-from docutils.parsers import rst
+from docutils.parsers.rst import Parser as RstParser

 from sphinx import addnodes
 from sphinx.util import texescape
@@ -22,31 +22,37 @@ from sphinx.writers.html import HTMLWriter, SmartyPantsHTMLTranslator
 from sphinx.writers.latex import LaTeXWriter, LaTeXTranslator
 import pytest

-from util import TestApp, assert_node
+from util import assert_node


-app = settings = parser = domain_context = None
-
-
-def setup_module():
-    global app, settings, parser, domain_context
+@pytest.fixture
+def settings(app):
     texescape.init()  # otherwise done by the latex builder
-    app = TestApp()
     optparser = frontend.OptionParser(
-        components=(rst.Parser, HTMLWriter, LaTeXWriter))
+        components=(RstParser, HTMLWriter, LaTeXWriter))
     settings = optparser.get_default_values()
     settings.env = app.builder.env
     settings.env.temp_data['docname'] = 'dummy'
-    parser = rst.Parser()
     domain_context = sphinx_domains(settings.env)
     domain_context.enable()
-
-
-def teardown_module():
-    app.cleanup()
+    yield settings
     domain_context.disable()


+@pytest.fixture
+def parse(settings):
+    def parse_(rst):
+        document = utils.new_document(b'test data', settings)
+        document['file'] = 'dummy'
+        parser = RstParser()
+        parser.parse(rst, document)
+        for msg in document.traverse(nodes.system_message):
+            if msg['level'] == 1:
+                msg.replace_self([])
+        return document
+    return parse_
+
+
 # since we're not resolving the markup afterwards, these nodes may remain
 class ForgivingTranslator:
     def visit_pending_xref(self, node):
@@ -64,93 +70,158 @@ class ForgivingLaTeXTranslator(LaTeXTranslator, ForgivingTranslator):
     pass


-def verify_re(rst, html_expected, latex_expected):
-    document = utils.new_document(b'test data', settings)
-    document['file'] = 'dummy'
-    parser.parse(rst, document)
-    for msg in document.traverse(nodes.system_message):
-        if msg['level'] == 1:
-            msg.replace_self([])
-
-    if html_expected:
+@pytest.fixture
+def verify_re_html(app, parse):
+    def verify(rst, html_expected):
+        document = parse(rst)
         html_translator = ForgivingHTMLTranslator(app.builder, document)
         document.walkabout(html_translator)
         html_translated = ''.join(html_translator.fragment).strip()
         assert re.match(html_expected, html_translated), 'from ' + rst
+    return verify

-    if latex_expected:
+
+@pytest.fixture
+def verify_re_latex(app, parse):
+    def verify(rst, latex_expected):
+        document = parse(rst)
         latex_translator = ForgivingLaTeXTranslator(document, app.builder)
         latex_translator.first_document = -1  # don't write \begin{document}
         document.walkabout(latex_translator)
         latex_translated = ''.join(latex_translator.body).strip()
         assert re.match(latex_expected, latex_translated), 'from ' + repr(rst)
+    return verify


-def verify(rst, html_expected, latex_expected):
-    if html_expected:
-        html_expected = re.escape(html_expected) + '$'
-    if latex_expected:
-        latex_expected = re.escape(latex_expected) + '$'
-    verify_re(rst, html_expected, latex_expected)
+@pytest.fixture
+def verify_re(verify_re_html, verify_re_latex):
+    def verify_re_(rst, html_expected, latex_expected):
+        if html_expected:
+            return verify_re_html(rst, html_expected)
+        if latex_expected:
+            return verify_re_latex(rst, latex_expected)
+    return verify_re_


-def test_inline():
+@pytest.fixture
+def verify(verify_re_html, verify_re_latex):
+    def verify_(rst, html_expected, latex_expected):
+        if html_expected:
+            return verify_re_html(rst, re.escape(html_expected) + '$')
+        if latex_expected:
+            return verify_re_latex(rst, re.escape(latex_expected) + '$')
+    return verify_
+
+
+@pytest.fixture
+def get_verifier(verify, verify_re):
+    v = {
+        'verify': verify,
+        'verify_re': verify_re,
+    }
+
+    def get(name):
+        return v[name]
+    return get
+
+
+@pytest.mark.parametrize('type,rst,html_expected,latex_expected', [
+    (
         # correct interpretation of code with whitespace
-    _html = ('<p><code class="(samp )?docutils literal"><span class="pre">'
-             'code</span>   <span class="pre">sample</span></code></p>')
-    yield verify_re, '``code sample``', _html, r'\\sphinxcode{code sample}'
-    yield verify_re, ':samp:`code sample`', _html, r'\\sphinxcode{code sample}'
+        'verify_re',
+        '``code sample``',
+        ('<p><code class="(samp )?docutils literal"><span class="pre">'
+         'code</span>   <span class="pre">sample</span></code></p>'),
+        r'\\sphinxcode{code sample}',
+    ),
+    (
+        # correct interpretation of code with whitespace
+        'verify_re',
+        ':samp:`code sample`',
+        ('<p><code class="(samp )?docutils literal"><span class="pre">'
+         'code</span>   <span class="pre">sample</span></code></p>'),
+        r'\\sphinxcode{code sample}',
+    ),
+    (
         # interpolation of braces in samp and file roles (HTML only)
-    yield (verify, ':samp:`a{b}c`',
-           '<p><code class="samp docutils literal"><span class="pre">a</span>'
-           '<em><span class="pre">b</span></em>'
-           '<span class="pre">c</span></code></p>',
-           '\\sphinxcode{a\\sphinxstyleemphasis{b}c}')
+        'verify',
+        ':samp:`a{b}c`',
+        ('<p><code class="samp docutils literal"><span class="pre">a</span>'
+         '<em><span class="pre">b</span></em>'
+         '<span class="pre">c</span></code></p>'),
+        '\\sphinxcode{a\\sphinxstyleemphasis{b}c}',
+    ),
+    (
         # interpolation of arrows in menuselection
-    yield (verify, ':menuselection:`a --> b`',
-           u'<p><span class="menuselection">a \N{TRIANGULAR BULLET} b</span></p>',
-           '\\sphinxmenuselection{a \\(\\rightarrow\\) b}')
+        'verify',
+        ':menuselection:`a --> b`',
+        (u'<p><span class="menuselection">a \N{TRIANGULAR BULLET} b</span></p>'),
+        '\\sphinxmenuselection{a \\(\\rightarrow\\) b}',
+    ),
+    (
         # interpolation of ampersands in guilabel/menuselection
-    yield (verify, ':guilabel:`&Foo -&&- &Bar`',
-           u'<p><span class="guilabel"><span class="accelerator">F</span>oo '
-           '-&- <span class="accelerator">B</span>ar</span></p>',
-           r'\sphinxmenuselection{\sphinxaccelerator{F}oo -\&- \sphinxaccelerator{B}ar}')
+        'verify',
+        ':guilabel:`&Foo -&&- &Bar`',
+        (u'<p><span class="guilabel"><span class="accelerator">F</span>oo '
+         '-&- <span class="accelerator">B</span>ar</span></p>'),
+        r'\sphinxmenuselection{\sphinxaccelerator{F}oo -\&- \sphinxaccelerator{B}ar}',
+    ),
+    (
         # non-interpolation of dashes in option role
-    yield (verify_re, ':option:`--with-option`',
-           '<p><code( class="xref std std-option docutils literal")?>'
-           '<span class="pre">--with-option</span></code></p>$',
-           r'\\sphinxcode{-{-}with-option}$')
+        'verify_re',
+        ':option:`--with-option`',
+        ('<p><code( class="xref std std-option docutils literal")?>'
+         '<span class="pre">--with-option</span></code></p>$'),
+        r'\\sphinxcode{-{-}with-option}$',
+    ),
+    (
         # verify smarty-pants quotes
-    yield verify, '"John"', '<p>“John”</p>', "``John''"
+        'verify',
+        '"John"',
+        '<p>“John”</p>',
+        "``John''",
+    ),
+    (
         # ... but not in literal text
-    yield (verify, '``"John"``',
-           '<p><code class="docutils literal"><span class="pre">'
-           '"John"</span></code></p>',
-           '\\sphinxcode{"John"}')
+        'verify',
+        '``"John"``',
+        ('<p><code class="docutils literal"><span class="pre">'
+         '"John"</span></code></p>'),
+        '\\sphinxcode{"John"}',
+    ),
+    (
         # verify classes for inline roles
-    yield (verify, ':manpage:`mp(1)`',
-           '<p><em class="manpage">mp(1)</em></p>',
-           '\\sphinxstyleliteralemphasis{mp(1)}')
-
-
-def test_latex_escaping():
+        'verify',
+        ':manpage:`mp(1)`',
+        '<p><em class="manpage">mp(1)</em></p>',
+        '\\sphinxstyleliteralemphasis{mp(1)}',
+    ),
+    (
         # correct escaping in normal mode
-    yield (verify, u'Γ\\\\∞$', None,
-           r'\(\Gamma\)\textbackslash{}\(\infty\)\$')
+        'verify',
+        u'Γ\\\\∞$',
+        None,
+        r'\(\Gamma\)\textbackslash{}\(\infty\)\$',
+    ),
+    (
         # in verbatim code fragments
-    yield (verify, u'::\n\n @Γ\\∞${}', None,
-           u'\\begin{sphinxVerbatim}[commandchars=\\\\\\{\\}]\n'
-           u'@\\(\\Gamma\\)\\PYGZbs{}\\(\\infty\\)\\PYGZdl{}\\PYGZob{}\\PYGZcb{}\n'
-           u'\\end{sphinxVerbatim}')
+        'verify',
+        u'::\n\n @Γ\\∞${}',
+        None,
+        (u'\\begin{sphinxVerbatim}[commandchars=\\\\\\{\\}]\n'
+         u'@\\(\\Gamma\\)\\PYGZbs{}\\(\\infty\\)\\PYGZdl{}\\PYGZob{}\\PYGZcb{}\n'
+         u'\\end{sphinxVerbatim}'),
+    ),
+    (
         # in URIs
-    yield (verify_re, u'`test <http://example.com/~me/>`_', None,
-           r'\\href{http://example.com/~me/}{test}.*')
+        'verify_re',
+        u'`test <http://example.com/~me/>`_',
+        None,
+        r'\\href{http://example.com/~me/}{test}.*',
+    ),
+])
+def test_inline(get_verifier, type, rst, html_expected, latex_expected):
+    verifier = get_verifier(type)
+    verifier(rst, html_expected, latex_expected)


 @pytest.mark.sphinx('dummy', testroot='prolog')
@@ -12,30 +12,27 @@ from __future__ import print_function
 import os
 import datetime
-from os import path

+import pytest
 from babel.messages.mofile import read_mo
 from sphinx.util import i18n
 from sphinx.errors import SphinxError
-import pytest
-
-from util import TestApp


 def test_catalog_info_for_file_and_path():
     cat = i18n.CatalogInfo('path', 'domain', 'utf-8')
     assert cat.po_file == 'domain.po'
     assert cat.mo_file == 'domain.mo'
-    assert cat.po_path == path.join('path', 'domain.po')
-    assert cat.mo_path == path.join('path', 'domain.mo')
+    assert cat.po_path == os.path.join('path', 'domain.po')
+    assert cat.mo_path == os.path.join('path', 'domain.mo')


 def test_catalog_info_for_sub_domain_file_and_path():
     cat = i18n.CatalogInfo('path', 'sub/domain', 'utf-8')
     assert cat.po_file == 'sub/domain.po'
     assert cat.mo_file == 'sub/domain.mo'
-    assert cat.po_path == path.join('path', 'sub/domain.po')
-    assert cat.mo_path == path.join('path', 'sub/domain.mo')
+    assert cat.po_path == os.path.join('path', 'sub/domain.po')
+    assert cat.mo_path == os.path.join('path', 'sub/domain.mo')


 def test_catalog_outdated(tempdir):
@@ -55,7 +52,7 @@ def test_catalog_write_mo(tempdir):
     (tempdir / 'test.po').write_text('#')
     cat = i18n.CatalogInfo(tempdir, 'test', 'utf-8')
     cat.write_mo('en')
-    assert path.exists(cat.mo_path)
+    assert os.path.exists(cat.mo_path)
     with open(cat.mo_path, 'rb') as f:
         assert read_mo(f) is not None
@@ -189,9 +186,7 @@ def test_format_date():
     assert i18n.format_date(format, date=date) == 'Feb 7, 2016'


-def test_get_filename_for_language():
-    app = TestApp()
-
+def test_get_filename_for_language(app):
     # language is None
     app.env.config.language = None
     assert app.env.config.language is None
@@ -17,6 +17,7 @@ from docutils import frontend
 from sphinx.util.nodes import extract_messages, clean_astext
 from sphinx.transforms import ApplySourceWorkaround
+import pytest


 def _transform(doctree):
@@ -49,57 +50,39 @@ def assert_node_count(messages, node_type, expect_count):
         % (node_type, node_list, count, expect_count))


-def test_extract_messages():
-    text = dedent(
-        """
-        .. admonition:: admonition title
-
-           admonition body
-        """
-    )
-    yield (
-        assert_node_count,
-        extract_messages(_get_doctree(text)),
-        nodes.title, 1,
-    )
-
-    text = dedent(
-        """
-        .. figure:: foo.jpg
-
-           this is title
-        """
-    )
-    yield (
-        assert_node_count,
-        extract_messages(_get_doctree(text)),
-        nodes.caption, 1,
-    )
-
-    text = dedent(
-        """
-        .. rubric:: spam
-        """
-    )
-    yield (
-        assert_node_count,
-        extract_messages(_get_doctree(text)),
-        nodes.rubric, 1,
-    )
-
-    text = dedent(
-        """
-        | spam
-        | egg
-        """
-    )
-    yield (
-        assert_node_count,
-        extract_messages(_get_doctree(text)),
-        nodes.line, 2,
-    )
+@pytest.mark.parametrize(
+    'rst,node_cls,count',
+    [
+        (
+            """
+            .. admonition:: admonition title
+
+               admonition body
+            """,
+            nodes.title, 1
+        ),
+        (
+            """
+            .. figure:: foo.jpg
+
+               this is title
+            """,
+            nodes.caption, 1,
+        ),
+        (
+            """
+            .. rubric:: spam
+            """,
+            nodes.rubric, 1,
+        ),
+        (
+            """
+            | spam
+            | egg
+            """,
+            nodes.line, 2,
+        ),
@@ -108,25 +91,22 @@ def test_extract_messages():
-    text = dedent(
-        """
-        section
-        =======
-
-        | | **Title 1** |
-        | | Message 1   |
-        +----------------+
-        """
-    )
-    yield (
-        assert_node_count,
-        extract_messages(_get_doctree(text)),
-        nodes.line, 2,
-    )
-
-    text = dedent(
-        """
-        * | **Title 1**
-          | Message 1
-        """
-    )
-    yield (
-        assert_node_count,
-        extract_messages(_get_doctree(text)),
-        nodes.line, 2,
-    )
+        (
+            """
+            section
+            =======
+
+            | | **Title 1** |
+            | | Message 1   |
+            +----------------+
+            """,
+            nodes.line, 2,
+        ),
+        (
+            """
+            * | **Title 1**
+              | Message 1
+            """,
+            nodes.line, 2,
+        ),
+    ]
+)
+def test_extract_messages(rst, node_cls, count):
+    msg = extract_messages(_get_doctree(dedent(rst)))
+    assert_node_count(msg, node_cls, count)


 def test_extract_messages_without_rawsource():
@@ -16,7 +16,7 @@ from docutils.parsers.rst.directives.html import MetaBody
 from sphinx import addnodes
 from sphinx.versioning import add_uids, merge_doctrees, get_ratio

-from util import TestApp
+from util import SphinxTestApp


 app = original = original_uids = None
@@ -24,7 +24,7 @@ app = original = original_uids = None

 def setup_module():
     global app, original, original_uids
-    app = TestApp(testroot='versioning')
+    app = SphinxTestApp(testroot='versioning')
     app.builder.env.app = app
     app.connect('doctree-resolved', on_doctree_resolved)
     app.build()
@@ -23,7 +23,7 @@ except ImportError:
     sqlalchemy_missing = True

 import pytest
-from util import rootdir, tempdir, skip_if
+from util import rootdir, tempdir


 @pytest.fixture
@@ -57,13 +57,13 @@ def test_no_srcdir(support):
     support.build()


-@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
+@pytest.mark.skipif(sqlalchemy_missing, reason='needs sqlalchemy')
 @with_support()
 def test_build(support):
     support.build()


-@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
+@pytest.mark.skipif(sqlalchemy_missing, reason='needs sqlalchemy')
 @with_support()
 def test_get_document(support):
     with pytest.raises(DocumentNotFoundError):
@@ -74,7 +74,7 @@ def test_get_document(support):
         and contents['sidebar'] and contents['relbar']


-@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
+@pytest.mark.skipif(sqlalchemy_missing, reason='needs sqlalchemy')
 @with_support()
 def test_comments(support):
     session = Session()
@@ -123,7 +123,7 @@ def test_comments(support):
     assert children[0]['text'] == '<p>Child test comment</p>\n'


-@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
+@pytest.mark.skipif(sqlalchemy_missing, reason='needs sqlalchemy')
 @with_support()
 def test_user_delete_comments(support):
     def get_comment():
@@ -152,7 +152,7 @@ def moderation_callback(comment):
     called = True


-@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
+@pytest.mark.skipif(sqlalchemy_missing, reason='needs sqlalchemy')
 @with_support(moderation_callback=moderation_callback)
 def test_moderation(support):
     session = Session()
@@ -178,7 +178,7 @@ def test_moderation(support):
     assert len(comments) == 1


-@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
+@pytest.mark.skipif(sqlalchemy_missing, reason='needs sqlalchemy')
 @with_support()
 def test_moderator_delete_comments(support):
     def get_comment():
@@ -194,7 +194,7 @@ def test_moderator_delete_comments(support):
     get_comment()


-@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
+@pytest.mark.skipif(sqlalchemy_missing, reason='needs sqlalchemy')
 @with_support()
 def test_update_username(support):
     support.update_username('user_two', 'new_user_two')
@@ -213,7 +213,7 @@ def test_update_username(support):
     assert len(votes) == 0


-@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
+@pytest.mark.skipif(sqlalchemy_missing, reason='needs sqlalchemy')
 @with_support()
 def test_proposals(support):
     session = Session()
@@ -229,7 +229,7 @@ def test_proposals(support):
                          proposal=proposal)


-@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
+@pytest.mark.skipif(sqlalchemy_missing, reason='needs sqlalchemy')
 @with_support()
 def test_voting(support):
     session = Session()
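All of the hunks above swap the suite's old skip_if helper for pytest's built-in skipif marker, which takes the condition plus a reason keyword. A standalone sketch of that replacement follows; the test name and body are illustrative only.

import pytest

try:
    import sqlalchemy  # noqa: F401
    sqlalchemy_missing = False
except ImportError:
    sqlalchemy_missing = True


@pytest.mark.skipif(sqlalchemy_missing, reason='needs sqlalchemy')
def test_requires_sqlalchemy():
    # runs only when sqlalchemy is importable; otherwise reported as skipped
    import sqlalchemy
    assert sqlalchemy.__version__
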
@ -207,7 +207,6 @@ def strip_escseq(text):
|
|||||||
# #############################################
|
# #############################################
|
||||||
# DEPRECATED implementations
|
# DEPRECATED implementations
|
||||||
|
|
||||||
import tempfile
|
|
||||||
from six import StringIO
|
from six import StringIO
|
||||||
|
|
||||||
|
|
||||||