Merge branch '2.0' into 6178_hidden_tocs
This commit is contained in commit 021749429d.
@@ -37,6 +37,7 @@ matrix:
+services: xvfb

install:
  - "sudo apt-get install graphviz"
  - if [ $IS_PYTHON = true ]; then pip install -U tox codecov; fi
  - if [ $IS_PYTHON = false ]; then npm install; fi
CHANGES
@@ -32,6 +32,9 @@ Bugs fixed
  classes attribute refers missing citation (refs: #6147)
* #2155: Support ``code`` directive
* C++, fix parsing of braced initializers.
+* #6172: AttributeError is raised for old styled index nodes
+* #4872: inheritance_diagram: correctly describe behavior of ``parts`` option in
+  docs, allow negative values.
* #6178: i18n: Captions missing in translations for hidden TOCs

Testing
@@ -7,7 +7,7 @@ import sphinx

extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo',
              'sphinx.ext.autosummary', 'sphinx.ext.extlinks',
-             'sphinx.ext.viewcode']
+             'sphinx.ext.viewcode', 'sphinx.ext.inheritance_diagram']

master_doc = 'contents'
templates_path = ['_templates']
@@ -48,7 +48,7 @@ epub_fix_images = False
epub_max_image_width = 0
epub_show_urls = 'inline'
epub_use_index = False
-epub_guide = (('toc', 'contents.xhtml', u'Table of Contents'),)
+epub_guide = (('toc', 'contents.xhtml', 'Table of Contents'),)
epub_description = 'Sphinx documentation generator system manual'

latex_documents = [('contents', 'sphinx.tex', 'Sphinx Documentation',
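An aside on the ``u''``-prefix removals in this merge: on Python 3 the prefix is redundant, so dropping it is a pure cleanup. A quick stand-alone check (not Sphinx code):

    # Python 3 string literals are already Unicode, so the ``u`` prefix changes nothing.
    assert u'Table of Contents' == 'Table of Contents'
    assert type(u'Table of Contents') is str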
@@ -25,12 +25,18 @@ It adds this directive:
   graph.

   This directive supports an option called ``parts`` that, if given, must be an
-  integer, advising the directive to remove that many parts of module names
-  from the displayed names. (For example, if all your class names start with
-  ``lib.``, you can give ``:parts: 1`` to remove that prefix from the displayed
-  node names.)
+  integer, advising the directive to keep that many dot-separated parts
+  in the displayed names (from right to left). For example, ``parts=1`` will
+  only display class names, without the names of the modules that contain
+  them.

-  It also supports a ``private-bases`` flag option; if given, private base
+  .. versionchanged:: 2.0
+     The value of ``parts`` can also be negative, indicating how many
+     parts to drop from the left. For example, if all your class names start
+     with ``lib.``, you can give ``:parts: -1`` to remove that prefix from the
+     displayed node names.
+
+  The directive also supports a ``private-bases`` flag option; if given, private base
   classes (those whose name starts with ``_``) will be included.

   You can use ``caption`` option to give a caption to the diagram.
@@ -92,6 +98,41 @@ It adds this directive:
      Added ``top-classes`` option to limit the scope of inheritance graphs.


+Examples
+--------
+
+The following are different inheritance diagrams for the internal
+``InheritanceDiagram`` class that implements the directive.
+
+With full names::
+
+   .. inheritance-diagram:: sphinx.ext.inheritance_diagram.InheritanceDiagram
+
+.. inheritance-diagram:: sphinx.ext.inheritance_diagram.InheritanceDiagram
+
+
+Showing class names only::
+
+   .. inheritance-diagram:: sphinx.ext.inheritance_diagram.InheritanceDiagram
+      :parts: 1
+
+.. inheritance-diagram:: sphinx.ext.inheritance_diagram.InheritanceDiagram
+   :parts: 1
+
+Stopping the diagram at :class:`sphinx.util.docutils.SphinxDirective` (the
+highest superclass still part of Sphinx), and dropping the common left-most
+part (``sphinx``) from all names::
+
+   .. inheritance-diagram:: sphinx.ext.inheritance_diagram.InheritanceDiagram
+      :top-classes: sphinx.util.docutils.SphinxDirective
+      :parts: -1
+
+.. inheritance-diagram:: sphinx.ext.inheritance_diagram.InheritanceDiagram
+   :top-classes: sphinx.util.docutils.SphinxDirective
+   :parts: -1
+
+
Configuration
-------------
@@ -148,8 +148,8 @@ class ChangesBuilder(Builder):
                'text': text
            }
            f.write(self.templates.render('changes/rstsource.html', ctx))
-        themectx = dict(('theme_' + key, val) for (key, val) in
-                        self.theme.get_options({}).items())
+        themectx = {'theme_' + key: val for (key, val) in
+                    self.theme.get_options({}).items()}
        copy_asset_file(path.join(package_dir, 'themes', 'default', 'static', 'default.css_t'),
                        self.outdir, context=themectx, renderer=self.templates)
        copy_asset_file(path.join(package_dir, 'themes', 'basic', 'static', 'basic.css'),
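A note on the recurring pattern in this merge: a ``dict()`` call over a generator and the equivalent dict comprehension build the same mapping. A stand-alone sketch with made-up theme options (not Sphinx code):

    options = {'sidebar': 'yes', 'stickysidebar': 'no'}   # hypothetical values

    old_style = dict(('theme_' + key, val) for (key, val) in options.items())
    new_style = {'theme_' + key: val for (key, val) in options.items()}

    assert old_style == new_style == {'theme_sidebar': 'yes', 'theme_stickysidebar': 'no'}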
@@ -187,7 +187,7 @@ class BuildInfo:
        self.tags_hash = ''

        if config:
-            values = dict((c.name, c.value) for c in config.filter(config_categories))
+            values = {c.name: c.value for c in config.filter(config_categories)}
            self.config_hash = get_stable_hash(values)

        if tags:
@@ -349,8 +349,7 @@ document is a custom template, you can also set this to another filename.'''))
            d['extensions'].append('sphinx.ext.%s' % name)

    # Handle conflicting options
-    if set(['sphinx.ext.imgmath', 'sphinx.ext.mathjax']).issubset(
-            d['extensions']):
+    if {'sphinx.ext.imgmath', 'sphinx.ext.mathjax'}.issubset(d['extensions']):
        print(__('Note: imgmath and mathjax cannot be enabled at the same '
                 'time. imgmath has been deselected.'))
        d['extensions'].remove('sphinx.ext.imgmath')
@@ -469,7 +468,7 @@ def valid_dir(d):
    if not path.isdir(dir):
        return False

-    if set(['Makefile', 'make.bat']) & set(os.listdir(dir)):
+    if {'Makefile', 'make.bat'} & set(os.listdir(dir)):
        return False

    if d['sep']:
@@ -590,7 +589,7 @@ def main(argv=sys.argv[1:]):

    d = vars(args)
    # delete None or False value
-    d = dict((k, v) for k, v in d.items() if v is not None)
+    d = {k: v for k, v in d.items() if v is not None}

    # handle use of CSV-style extension values
    d.setdefault('extensions', [])
@@ -601,12 +600,12 @@ def main(argv=sys.argv[1:]):

    try:
        if 'quiet' in d:
-            if not set(['project', 'author']).issubset(d):
+            if not {'project', 'author'}.issubset(d):
                print(__('''"quiet" is specified, but any of "project" or \
"author" is not specified.'''))
                return 1

-        if set(['quiet', 'project', 'author']).issubset(d):
+        if {'quiet', 'project', 'author'}.issubset(d):
            # quiet mode with all required params satisfied, use default
            d.setdefault('version', '')
            d.setdefault('release', d['version'])
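The set-literal forms above are drop-in replacements for the ``set([...])`` calls they replace; a small stand-alone check with hypothetical quickstart answers:

    d = {'quiet': True, 'project': 'demo', 'author': 'me'}   # made-up values

    assert {'project', 'author'}.issubset(d)            # issubset() iterates the dict's keys
    assert {'quiet', 'project', 'author'}.issubset(d)
    assert not ({'Makefile', 'make.bat'} & {'conf.py', 'index.rst'})   # empty intersection is falsy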
@@ -37,7 +37,7 @@ def deprecated_alias(modname, objects, warning):
    sys.modules[modname] = _ModuleWrapper(module, modname, objects, warning)  # type: ignore


-class _ModuleWrapper(object):
+class _ModuleWrapper:
    def __init__(self, module, modname, objects, warning):
        # type: (Any, str, Dict, Type[Warning]) -> None
        self._module = module
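For readers unfamiliar with the ``deprecated_alias`` trick: a wrapper object installed into ``sys.modules`` can intercept attribute access and warn before delegating to the real module. A rough sketch of the idea (hypothetical class name, not the actual Sphinx implementation):

    import warnings


    class _DeprecationProxy:
        """Warn when a deprecated module attribute is looked up, then delegate."""

        def __init__(self, module, deprecated, warning_cls):
            self._module = module
            self._deprecated = deprecated      # mapping: old attribute name -> replacement object
            self._warning_cls = warning_cls

        def __getattr__(self, name):
            if name in self._deprecated:
                warnings.warn('%s is deprecated' % name, self._warning_cls, stacklevel=2)
                return self._deprecated[name]
            return getattr(self._module, name)

    # Hypothetical use: sys.modules[__name__] = _DeprecationProxy(sys.modules[__name__], {...}, DeprecationWarning)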
@@ -72,12 +72,12 @@ class CObject(ObjectDescription):

    # These C types aren't described anywhere, so don't try to create
    # a cross-reference to them
-    stopwords = set((
+    stopwords = {
        'const', 'void', 'char', 'wchar_t', 'int', 'short',
        'long', 'float', 'double', 'unsigned', 'signed', 'FILE',
        'clock_t', 'time_t', 'ptrdiff_t', 'size_t', 'ssize_t',
        'struct', '_Bool',
-    ))
+    }

    def _parse_type(self, node, ctype):
        # type: (nodes.Element, str) -> None
@@ -45,7 +45,7 @@ else:
    ]

INITPY = '__init__.py'
-PY_SUFFIXES = set(['.py', '.pyx'])
+PY_SUFFIXES = {'.py', '.pyx'}


def makename(package, module):
@@ -80,7 +80,7 @@ def members_set_option(arg):
    """Used to convert the :members: option to auto directives."""
    if arg is None:
        return ALL
-    return set(x.strip() for x in arg.split(','))
+    return {x.strip() for x in arg.split(',')}


SUPPRESS = object()
@@ -56,7 +56,7 @@ class IfConfig(SphinxDirective):

def process_ifconfig_nodes(app, doctree, docname):
    # type: (Sphinx, nodes.document, str) -> None
-    ns = dict((confval.name, confval.value) for confval in app.config)
+    ns = {confval.name: confval.value for confval in app.config}
    ns.update(app.config.__dict__.copy())
    ns['builder'] = app.builder.name
    for node in doctree.traverse(ifconfig):
@@ -167,11 +167,17 @@ class InheritanceGraph:
        """Return name and bases for all classes that are ancestors of
        *classes*.

-        *parts* gives the number of dotted name parts that is removed from the
-        displayed node names.
+        *parts* gives the number of dotted name parts to include in the
+        displayed node names, from right to left. If given as a negative, the
+        number of parts to drop from the left. A value of 0 displays the full
+        dotted name. E.g. ``sphinx.ext.inheritance_diagram.InheritanceGraph``
+        with ``parts=2`` or ``parts=-2`` gets displayed as
+        ``inheritance_diagram.InheritanceGraph``, and as
+        ``ext.inheritance_diagram.InheritanceGraph`` with ``parts=3`` or
+        ``parts=-1``.

-        *top_classes* gives the name(s) of the top most ancestor class to traverse
-        to. Multiple names can be specified separated by comma.
+        *top_classes* gives the name(s) of the top most ancestor class to
+        traverse to. Multiple names can be specified separated by comma.
        """
        all_classes = {}
        py_builtins = vars(builtins).values()
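The right-to-left / drop-from-the-left rule described in the new docstring can be sketched with a tiny helper (illustrative only; ``shorten_name`` is a made-up name, not the extension's real code):

    def shorten_name(fullname, parts):
        tokens = fullname.split('.')
        if parts > 0:
            kept = tokens[len(tokens) - parts:]   # keep ``parts`` components from the right
        elif parts < 0:
            kept = tokens[-parts:]                # drop ``-parts`` components from the left
        else:
            kept = tokens                         # 0 keeps the full dotted name
        return '.'.join(kept)

    name = 'sphinx.ext.inheritance_diagram.InheritanceGraph'
    assert shorten_name(name, 2) == shorten_name(name, -2) == 'inheritance_diagram.InheritanceGraph'
    assert shorten_name(name, 3) == shorten_name(name, -1) == 'ext.inheritance_diagram.InheritanceGraph'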
@@ -332,7 +338,7 @@ class InheritanceDiagram(SphinxDirective):
    optional_arguments = 0
    final_argument_whitespace = True
    option_spec = {
-        'parts': directives.nonnegative_int,
+        'parts': int,
        'private-bases': directives.flag,
        'caption': directives.unchanged,
        'top-classes': directives.unchanged_required,
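Context for the converter swap (general docutils behaviour, not something introduced by this diff): each ``option_spec`` value is a callable applied to the option's raw string, and ``directives.nonnegative_int`` rejects negative input, so plain ``int`` is what lets ``:parts: -1`` through:

    from docutils.parsers.rst import directives

    assert directives.nonnegative_int('1') == 1
    assert int('-1') == -1
    # directives.nonnegative_int('-1') raises ValueError, which is why the
    # converter had to become plain ``int`` once negative values were allowed.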
@@ -150,9 +150,9 @@ def _get_safe_url(url):
    else:
        frags = list(parts)
        if parts.port:
-            frags[1] = '{0}@{1}:{2}'.format(parts.username, parts.hostname, parts.port)
+            frags[1] = '{}@{}:{}'.format(parts.username, parts.hostname, parts.port)
        else:
-            frags[1] = '{0}@{1}'.format(parts.username, parts.hostname)
+            frags[1] = '{}@{}'.format(parts.username, parts.hostname)

    return urlunsplit(frags)
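Automatic field numbering is equivalent to the explicit indices it replaces when the fields are used in order; a quick stand-alone check with made-up credentials:

    assert '{}@{}:{}'.format('user', 'example.org', 8080) == \
           '{0}@{1}:{2}'.format('user', 'example.org', 8080) == 'user@example.org:8080'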
@@ -202,7 +202,7 @@ class SphinxFileInput(FileInput):
    def __init__(self, *args, **kwargs):
        # type: (Any, Any) -> None
        kwargs['error_handler'] = 'sphinx'
-        super(SphinxFileInput, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)


class SphinxRSTFileInput(SphinxBaseFileInput):
@@ -24,7 +24,7 @@ logger = logging.getLogger(__name__)
EXCLUDE_PATHS = ['**/_sources', '.#*', '**/.#*', '*.lproj/**']


-class Project(object):
+class Project:
    """A project is source code set of Sphinx document."""

    def __init__(self, srcdir, source_suffix):
@@ -310,9 +310,9 @@ class IndexBuilder:
            rv = {}
            for k, v in mapping.items():
                if isinstance(v, int):
-                    rv[k] = set([index2fn[v]])
+                    rv[k] = {index2fn[v]}
                else:
-                    rv[k] = set(index2fn[i] for i in v)
+                    rv[k] = {index2fn[i] for i in v}
            return rv

        self._mapping = load_terms(frozen['terms'])
@@ -381,12 +381,11 @@ class IndexBuilder:
        """Create a usable data structure for serializing."""
        docnames, titles = zip(*sorted(self._titles.items()))
        filenames = [self._filenames.get(docname) for docname in docnames]
-        fn2index = dict((f, i) for (i, f) in enumerate(docnames))
+        fn2index = {f: i for (i, f) in enumerate(docnames)}
        terms, title_terms = self.get_terms(fn2index)

        objects = self.get_objects(fn2index)  # populates _objtypes
-        objtypes = dict((v, k[0] + ':' + k[1])
-                        for (k, v) in self._objtypes.items())
+        objtypes = {v: k[0] + ':' + k[1] for (k, v) in self._objtypes.items()}
        objnames = self._objnames
        return dict(docnames=docnames, filenames=filenames, titles=titles, terms=terms,
                    objects=objects, objtypes=objtypes, objnames=objnames,
@@ -155,14 +155,14 @@ class JanomeSplitter(BaseSplitter):


class DefaultSplitter(BaseSplitter):
-    patterns_ = dict([(re.compile(pattern), value) for pattern, value in {
+    patterns_ = {re.compile(pattern): value for pattern, value in {
        '[一二三四五六七八九十百千万億兆]': 'M',
        '[一-龠々〆ヵヶ]': 'H',
        '[ぁ-ん]': 'I',
        '[ァ-ヴーｱ-ﾝﾞｰ]': 'K',
        '[a-zA-Zａ-ｚＡ-Ｚ]': 'A',
        '[0-9０-９]': 'N',
-    }.items()])
+    }.items()}
    BIAS__ = -332
    BC1__ = {'HH': 6, 'II': 2461, 'KH': 406, 'OH': -1378}
    BC2__ = {'AA': -3267, 'AI': 2744, 'AN': -878, 'HH': -4070, 'HM': -1711,
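The comprehension still keys the table by compiled pattern, which is how TinySegmenter-style splitters classify each character. A toy version of the same construction (simplified character classes, not the real table):

    import re

    patterns = {re.compile(p): v for p, v in {'[0-9]': 'N', '[a-z]': 'A'}.items()}

    def categorize(char):
        for pattern, category in patterns.items():
            if pattern.match(char):
                return category
        return 'O'   # "other"

    assert categorize('7') == 'N' and categorize('q') == 'A' and categorize('!') == 'O'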
@@ -133,8 +133,8 @@ class SphinxTestApp(application.Sphinx):
        self._saved_directives = directives._directives.copy()  # type: ignore
        self._saved_roles = roles._roles.copy()  # type: ignore

-        self._saved_nodeclasses = set(v for v in dir(nodes.GenericNodeVisitor)
-                                      if v.startswith('visit_'))
+        self._saved_nodeclasses = {v for v in dir(nodes.GenericNodeVisitor)
+                                   if v.startswith('visit_')}

        try:
            super().__init__(srcdir, confdir, outdir, doctreedir,
@@ -36,11 +36,11 @@ if False:

logger = logging.getLogger(__name__)

-default_substitutions = set([
+default_substitutions = {
    'version',
    'release',
    'today',
-])
+}


class SphinxTransform(Transform):
@@ -138,7 +138,7 @@ class FilenameUniqDict(dict):
        while uniquename in self._existing:
            i += 1
            uniquename = '%s%s%s' % (base, i, ext)
-        self[newfile] = (set([docname]), uniquename)
+        self[newfile] = ({docname}, uniquename)
        self._existing.add(uniquename)
        return uniquename
@@ -58,12 +58,12 @@ class IndexEntriesMigrator(SphinxTransform):
    def apply(self, **kwargs):
        # type: (Any) -> None
        for node in self.document.traverse(addnodes.index):
-            for entries in node['entries']:
+            for i, entries in enumerate(node['entries']):
                if len(entries) == 4:
                    source, line = get_source_line(node)
                    warnings.warn('An old styled index node found: %r at (%s:%s)' %
                                  (node, source, line), RemovedInSphinx40Warning)
-                    entries.extend([None])
+                    node['entries'][i] = entries + (None,)


def setup(app):
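Why the in-place ``extend`` had to go (a sketch of the failure mode behind #6172, using made-up entry values, not code from this diff): old-style 4-item entries can be tuples, and tuples cannot be extended, so the migrator now builds a new 5-item entry and writes it back into the list:

    entries = ('single', 'example term', 'index-0', '')   # hypothetical old-style 4-item entry
    try:
        entries.extend([None])      # AttributeError: 'tuple' object has no attribute 'extend'
    except AttributeError:
        pass
    entries = entries + (None,)     # build a new 5-item entry instead of mutating in place
    assert len(entries) == 5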
@@ -61,7 +61,7 @@ def test_extension_in_blacklist(app, status, warning):
@pytest.mark.filterwarnings('ignore:The config variable "source_parsers"')
@pytest.mark.filterwarnings('ignore:app.add_source_parser\\(\\) does not support suffix')
def test_add_source_parser(app, status, warning):
-    assert set(app.config.source_suffix) == set(['.rst', '.md', '.test'])
+    assert set(app.config.source_suffix) == {'.rst', '.md', '.test'}

    # .rst; only in :confval:`source_suffix`
    assert '.rst' not in app.registry.get_source_parsers()
@@ -28,7 +28,7 @@ def test_incremental_reading(app):
    # second reading
    updated = app.builder.read()

-    assert set(updated) == set(['index', 'new'])
+    assert set(updated) == {'index', 'new'}
    assert 'autodoc' not in app.env.all_docs
    assert 'autodoc' not in app.env.found_docs
@@ -44,4 +44,4 @@ def test_incremental_reading_for_missing_files(app):

    # "index" is listed up to updated because it contains references
    # to nonexisting downloadable or image files
-    assert set(updated) == set(['index'])
+    assert set(updated) == {'index'}
@@ -42,10 +42,10 @@ def test_compile_all_catalogs(app, status, warning):

    locale_dir = app.srcdir / 'locale'
    catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
-    expect = set([
+    expect = {
        x.replace('.po', '.mo')
        for x in find_files(catalog_dir, '.po')
-    ])
+    }
    actual = set(find_files(catalog_dir, '.mo'))
    assert actual  # not empty
    assert actual == expect
@@ -66,7 +66,7 @@ def test_compile_specific_catalogs(app, status, warning):
    actual_on_boot = get_actual()  # sphinx.mo might be included
    app.builder.compile_specific_catalogs([app.srcdir / 'admonitions.txt'])
    actual = get_actual() - actual_on_boot
-    assert actual == set(['admonitions.mo'])
+    assert actual == {'admonitions.mo'}


@pytest.mark.usefixtures('setup_test')
@@ -79,10 +79,10 @@ def test_compile_update_catalogs(app, status, warning):

    locale_dir = app.srcdir / 'locale'
    catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
-    expect = set([
+    expect = {
        x.replace('.po', '.mo')
        for x in find_files(catalog_dir, '.po')
-    ])
+    }
    actual = set(find_files(catalog_dir, '.mo'))
    assert actual  # not empty
    assert actual == expect
@@ -25,21 +25,20 @@ def test_images(app):
    htmlbuilder.imgpath = 'dummy'
    htmlbuilder.post_process_images(tree)
    assert set(htmlbuilder.images.keys()) == \
-        set(['subdir/img.png', 'img.png', 'subdir/simg.png', 'svgimg.svg',
-             'img.foo.png'])
+        {'subdir/img.png', 'img.png', 'subdir/simg.png', 'svgimg.svg', 'img.foo.png'}
    assert set(htmlbuilder.images.values()) == \
-        set(['img.png', 'img1.png', 'simg.png', 'svgimg.svg', 'img.foo.png'])
+        {'img.png', 'img1.png', 'simg.png', 'svgimg.svg', 'img.foo.png'}

    latexbuilder = LaTeXBuilder(app)
    latexbuilder.set_environment(app.env)
    latexbuilder.init()
    latexbuilder.post_process_images(tree)
    assert set(latexbuilder.images.keys()) == \
-        set(['subdir/img.png', 'subdir/simg.png', 'img.png', 'img.pdf',
-             'svgimg.pdf', 'img.foo.png'])
+        {'subdir/img.png', 'subdir/simg.png', 'img.png', 'img.pdf',
+         'svgimg.pdf', 'img.foo.png'}
    assert set(latexbuilder.images.values()) == \
-        set(['img.pdf', 'img.png', 'img1.png', 'simg.png',
-             'svgimg.pdf', 'img.foo.png'])
+        {'img.pdf', 'img.png', 'img1.png', 'simg.png',
+         'svgimg.pdf', 'img.foo.png'}


@pytest.mark.sphinx('dummy')
@@ -75,11 +75,11 @@ def test_process_doc(app):
    # other collections
    assert app.env.toc_num_entries['index'] == 6
    assert app.env.toctree_includes['index'] == ['foo', 'bar', 'baz']
-    assert app.env.files_to_rebuild['foo'] == set(['index'])
-    assert app.env.files_to_rebuild['bar'] == set(['index'])
-    assert app.env.files_to_rebuild['baz'] == set(['index'])
+    assert app.env.files_to_rebuild['foo'] == {'index'}
+    assert app.env.files_to_rebuild['bar'] == {'index'}
+    assert app.env.files_to_rebuild['baz'] == {'index'}
    assert app.env.glob_toctrees == set()
-    assert app.env.numbered_toctrees == set(['index'])
+    assert app.env.numbered_toctrees == {'index'}

    # qux has no section title
    assert len(app.env.tocs['qux']) == 0
@@ -37,7 +37,7 @@ def test_build(app, status, warning):
    undoc_py, undoc_c = pickle.loads((app.outdir / 'undoc.pickle').bytes())
    assert len(undoc_c) == 1
    # the key is the full path to the header file, which isn't testable
-    assert list(undoc_c.values())[0] == set([('function', 'Py_SphinxTest')])
+    assert list(undoc_c.values())[0] == {('function', 'Py_SphinxTest')}

    assert 'autodoc_target' in undoc_py
    assert 'funcs' in undoc_py['autodoc_target']
@@ -121,7 +121,7 @@ def test_import_classes(rootdir):

    # all of classes in the module
    classes = import_classes('sphinx.application', None)
-    assert set(classes) == set([Sphinx, TemplateBridge])
+    assert set(classes) == {Sphinx, TemplateBridge}

    # specified class in the module
    classes = import_classes('sphinx.application.Sphinx', None)
@@ -36,7 +36,7 @@ class NamedtupleSubclass(namedtuple('NamedtupleSubclass', ('attr1', 'attr2'))):
    __slots__ = ()

    def __new__(cls, attr1, attr2=None):
-        return super(NamedtupleSubclass, cls).__new__(cls, attr1, attr2)
+        return super().__new__(cls, attr1, attr2)


class BaseDocstringTest(TestCase):
@@ -54,7 +54,7 @@ def test_todo(app, status, warning):

    # check handled event
    assert len(todos) == 3
-    assert set(todo[1].astext() for todo in todos) == {'todo in foo',
+    assert {todo[1].astext() for todo in todos} == {'todo in foo',
                                                        'todo in bar',
                                                        'todo in param field'}
@@ -92,7 +92,7 @@ def test_todo_not_included(app, status, warning):

    # check handled event
    assert len(todos) == 3
-    assert set(todo[1].astext() for todo in todos) == {'todo in foo',
+    assert {todo[1].astext() for todo in todos} == {'todo in foo',
                                                        'todo in bar',
                                                        'todo in param field'}
@@ -831,8 +831,7 @@ def test_xml_footnote_backlinks(app):

    para0 = secs[0].findall('paragraph')
    refs0 = para0[0].findall('footnote_reference')
-    refid2id = dict([
-        (r.attrib.get('refid'), r.attrib.get('ids')) for r in refs0])
+    refid2id = {r.attrib.get('refid'): r.attrib.get('ids') for r in refs0}

    footnote0 = secs[0].findall('footnote')
    for footnote in footnote0:
@@ -70,13 +70,13 @@ def test_get_catalogs_for_xx(tempdir):
    (tempdir / 'loc1' / 'xx' / 'LC_ALL' / 'test7.po').write_text('#')

    catalogs = i18n.find_catalog_source_files([tempdir / 'loc1'], 'xx', force_all=False)
-    domains = set(c.domain for c in catalogs)
-    assert domains == set([
+    domains = {c.domain for c in catalogs}
+    assert domains == {
        'test1',
        'test2',
        'sub/test4',
        'sub/test5',
-    ])
+    }


def test_get_catalogs_for_en(tempdir):
@@ -86,8 +86,8 @@ def test_get_catalogs_for_en(tempdir):
    (tempdir / 'loc1' / 'en' / 'LC_MESSAGES' / 'en_dom.po').write_text('#')

    catalogs = i18n.find_catalog_source_files([tempdir / 'loc1'], 'en', force_all=False)
-    domains = set(c.domain for c in catalogs)
-    assert domains == set(['en_dom'])
+    domains = {c.domain for c in catalogs}
+    assert domains == {'en_dom'}


def test_get_catalogs_with_non_existent_locale(tempdir):
@@ -121,13 +121,13 @@ def test_get_catalogs_for_xx_without_outdated(tempdir):
    assert not catalogs

    catalogs = i18n.find_catalog_source_files([tempdir / 'loc1'], 'xx', force_all=True)
-    domains = set(c.domain for c in catalogs)
-    assert domains == set([
+    domains = {c.domain for c in catalogs}
+    assert domains == {
        'test1',
        'test2',
        'sub/test4',
        'sub/test5',
-    ])
+    }


def test_get_catalogs_from_multiple_locale_dirs(tempdir):
@@ -152,8 +152,8 @@ def test_get_catalogs_with_compact(tempdir):
    (tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')

    catalogs = i18n.find_catalog_source_files([tempdir / 'loc1'], 'xx', gettext_compact=True)
-    domains = set(c.domain for c in catalogs)
-    assert domains == set(['test1', 'test2', 'sub/test3', 'sub/test4'])
+    domains = {c.domain for c in catalogs}
+    assert domains == {'test1', 'test2', 'sub/test3', 'sub/test4'}


def test_get_catalogs_excluded(tempdir):
@@ -163,8 +163,8 @@ def test_get_catalogs_excluded(tempdir):

    catalogs = i18n.find_catalog_source_files(
        [tempdir / 'loc1'], 'en', force_all=False, excluded=lambda path: '.git' in path)
-    domains = set(c.domain for c in catalogs)
-    assert domains == set(['en_dom'])
+    domains = {c.domain for c in catalogs}
+    assert domains == {'en_dom'}


def test_format_date():
@@ -352,7 +352,7 @@ def test_set_sorting():


def test_set_sorting_fallback():
-    set_ = set((None, 1))
+    set_ = {None, 1}
    description = inspect.object_description(set_)
    assert description in ("{1, None}", "{None, 1}")
@@ -79,7 +79,7 @@ function splitQuery(query) {
}
''' % (fold(singles, ','), fold(ranges, '],'))

-js_test_src = u'''
+js_test_src = '''
// This is regression test for https://github.com/sphinx-doc/sphinx/issues/3150
// generated by compat_regexp_generator.py
// it needs node.js for testing