Python-3-only clean ups discovered by pyupgrade

https://github.com/asottile/pyupgrade

> A tool to automatically upgrade syntax for newer versions of the
> language.

- Drop the `u` string-literal prefix
- Drop base object inheritance
- Drop args to super()
- Use set literals
- Use dict comprehension
- Use set comprehension
This commit is contained in:
Jon Dufresne &lt;jon.dufresne&gt; 2019-03-17 12:49:36 -07:00, committed by Takeshi KOMIYA
parent 33ba281e0a
commit 22afc77c48
30 changed files with 72 additions and 76 deletions

View File

@ -48,7 +48,7 @@ epub_fix_images = False
epub_max_image_width = 0 epub_max_image_width = 0
epub_show_urls = 'inline' epub_show_urls = 'inline'
epub_use_index = False epub_use_index = False
epub_guide = (('toc', 'contents.xhtml', u'Table of Contents'),) epub_guide = (('toc', 'contents.xhtml', 'Table of Contents'),)
epub_description = 'Sphinx documentation generator system manual' epub_description = 'Sphinx documentation generator system manual'
latex_documents = [('contents', 'sphinx.tex', 'Sphinx Documentation', latex_documents = [('contents', 'sphinx.tex', 'Sphinx Documentation',

View File

@ -148,8 +148,8 @@ class ChangesBuilder(Builder):
'text': text 'text': text
} }
f.write(self.templates.render('changes/rstsource.html', ctx)) f.write(self.templates.render('changes/rstsource.html', ctx))
themectx = dict(('theme_' + key, val) for (key, val) in themectx = {'theme_' + key: val for (key, val) in
self.theme.get_options({}).items()) self.theme.get_options({}).items()}
copy_asset_file(path.join(package_dir, 'themes', 'default', 'static', 'default.css_t'), copy_asset_file(path.join(package_dir, 'themes', 'default', 'static', 'default.css_t'),
self.outdir, context=themectx, renderer=self.templates) self.outdir, context=themectx, renderer=self.templates)
copy_asset_file(path.join(package_dir, 'themes', 'basic', 'static', 'basic.css'), copy_asset_file(path.join(package_dir, 'themes', 'basic', 'static', 'basic.css'),

View File

@ -187,7 +187,7 @@ class BuildInfo:
self.tags_hash = '' self.tags_hash = ''
if config: if config:
values = dict((c.name, c.value) for c in config.filter(config_categories)) values = {c.name: c.value for c in config.filter(config_categories)}
self.config_hash = get_stable_hash(values) self.config_hash = get_stable_hash(values)
if tags: if tags:

View File

@ -349,8 +349,7 @@ document is a custom template, you can also set this to another filename.'''))
d['extensions'].append('sphinx.ext.%s' % name) d['extensions'].append('sphinx.ext.%s' % name)
# Handle conflicting options # Handle conflicting options
if set(['sphinx.ext.imgmath', 'sphinx.ext.mathjax']).issubset( if {'sphinx.ext.imgmath', 'sphinx.ext.mathjax'}.issubset(d['extensions']):
d['extensions']):
print(__('Note: imgmath and mathjax cannot be enabled at the same ' print(__('Note: imgmath and mathjax cannot be enabled at the same '
'time. imgmath has been deselected.')) 'time. imgmath has been deselected.'))
d['extensions'].remove('sphinx.ext.imgmath') d['extensions'].remove('sphinx.ext.imgmath')
@ -469,7 +468,7 @@ def valid_dir(d):
if not path.isdir(dir): if not path.isdir(dir):
return False return False
if set(['Makefile', 'make.bat']) & set(os.listdir(dir)): if {'Makefile', 'make.bat'} & set(os.listdir(dir)):
return False return False
if d['sep']: if d['sep']:
@ -590,7 +589,7 @@ def main(argv=sys.argv[1:]):
d = vars(args) d = vars(args)
# delete None or False value # delete None or False value
d = dict((k, v) for k, v in d.items() if v is not None) d = {k: v for k, v in d.items() if v is not None}
# handle use of CSV-style extension values # handle use of CSV-style extension values
d.setdefault('extensions', []) d.setdefault('extensions', [])
@ -601,12 +600,12 @@ def main(argv=sys.argv[1:]):
try: try:
if 'quiet' in d: if 'quiet' in d:
if not set(['project', 'author']).issubset(d): if not {'project', 'author'}.issubset(d):
print(__('''"quiet" is specified, but any of "project" or \ print(__('''"quiet" is specified, but any of "project" or \
"author" is not specified.''')) "author" is not specified.'''))
return 1 return 1
if set(['quiet', 'project', 'author']).issubset(d): if {'quiet', 'project', 'author'}.issubset(d):
# quiet mode with all required params satisfied, use default # quiet mode with all required params satisfied, use default
d.setdefault('version', '') d.setdefault('version', '')
d.setdefault('release', d['version']) d.setdefault('release', d['version'])

View File

@ -37,7 +37,7 @@ def deprecated_alias(modname, objects, warning):
sys.modules[modname] = _ModuleWrapper(module, modname, objects, warning) # type: ignore sys.modules[modname] = _ModuleWrapper(module, modname, objects, warning) # type: ignore
class _ModuleWrapper(object): class _ModuleWrapper:
def __init__(self, module, modname, objects, warning): def __init__(self, module, modname, objects, warning):
# type: (Any, str, Dict, Type[Warning]) -> None # type: (Any, str, Dict, Type[Warning]) -> None
self._module = module self._module = module

View File

@ -72,12 +72,12 @@ class CObject(ObjectDescription):
# These C types aren't described anywhere, so don't try to create # These C types aren't described anywhere, so don't try to create
# a cross-reference to them # a cross-reference to them
stopwords = set(( stopwords = {
'const', 'void', 'char', 'wchar_t', 'int', 'short', 'const', 'void', 'char', 'wchar_t', 'int', 'short',
'long', 'float', 'double', 'unsigned', 'signed', 'FILE', 'long', 'float', 'double', 'unsigned', 'signed', 'FILE',
'clock_t', 'time_t', 'ptrdiff_t', 'size_t', 'ssize_t', 'clock_t', 'time_t', 'ptrdiff_t', 'size_t', 'ssize_t',
'struct', '_Bool', 'struct', '_Bool',
)) }
def _parse_type(self, node, ctype): def _parse_type(self, node, ctype):
# type: (nodes.Element, str) -> None # type: (nodes.Element, str) -> None

View File

@ -45,7 +45,7 @@ else:
] ]
INITPY = '__init__.py' INITPY = '__init__.py'
PY_SUFFIXES = set(['.py', '.pyx']) PY_SUFFIXES = {'.py', '.pyx'}
def makename(package, module): def makename(package, module):

View File

@ -80,7 +80,7 @@ def members_set_option(arg):
"""Used to convert the :members: option to auto directives.""" """Used to convert the :members: option to auto directives."""
if arg is None: if arg is None:
return ALL return ALL
return set(x.strip() for x in arg.split(',')) return {x.strip() for x in arg.split(',')}
SUPPRESS = object() SUPPRESS = object()

View File

@ -56,7 +56,7 @@ class IfConfig(SphinxDirective):
def process_ifconfig_nodes(app, doctree, docname): def process_ifconfig_nodes(app, doctree, docname):
# type: (Sphinx, nodes.document, str) -> None # type: (Sphinx, nodes.document, str) -> None
ns = dict((confval.name, confval.value) for confval in app.config) ns = {confval.name: confval.value for confval in app.config}
ns.update(app.config.__dict__.copy()) ns.update(app.config.__dict__.copy())
ns['builder'] = app.builder.name ns['builder'] = app.builder.name
for node in doctree.traverse(ifconfig): for node in doctree.traverse(ifconfig):

View File

@ -150,9 +150,9 @@ def _get_safe_url(url):
else: else:
frags = list(parts) frags = list(parts)
if parts.port: if parts.port:
frags[1] = '{0}@{1}:{2}'.format(parts.username, parts.hostname, parts.port) frags[1] = '{}@{}:{}'.format(parts.username, parts.hostname, parts.port)
else: else:
frags[1] = '{0}@{1}'.format(parts.username, parts.hostname) frags[1] = '{}@{}'.format(parts.username, parts.hostname)
return urlunsplit(frags) return urlunsplit(frags)

View File

@ -202,7 +202,7 @@ class SphinxFileInput(FileInput):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
# type: (Any, Any) -> None # type: (Any, Any) -> None
kwargs['error_handler'] = 'sphinx' kwargs['error_handler'] = 'sphinx'
super(SphinxFileInput, self).__init__(*args, **kwargs) super().__init__(*args, **kwargs)
class SphinxRSTFileInput(SphinxBaseFileInput): class SphinxRSTFileInput(SphinxBaseFileInput):

View File

@ -24,7 +24,7 @@ logger = logging.getLogger(__name__)
EXCLUDE_PATHS = ['**/_sources', '.#*', '**/.#*', '*.lproj/**'] EXCLUDE_PATHS = ['**/_sources', '.#*', '**/.#*', '*.lproj/**']
class Project(object): class Project:
"""A project is source code set of Sphinx document.""" """A project is source code set of Sphinx document."""
def __init__(self, srcdir, source_suffix): def __init__(self, srcdir, source_suffix):

View File

@ -310,9 +310,9 @@ class IndexBuilder:
rv = {} rv = {}
for k, v in mapping.items(): for k, v in mapping.items():
if isinstance(v, int): if isinstance(v, int):
rv[k] = set([index2fn[v]]) rv[k] = {index2fn[v]}
else: else:
rv[k] = set(index2fn[i] for i in v) rv[k] = {index2fn[i] for i in v}
return rv return rv
self._mapping = load_terms(frozen['terms']) self._mapping = load_terms(frozen['terms'])
@ -381,12 +381,11 @@ class IndexBuilder:
"""Create a usable data structure for serializing.""" """Create a usable data structure for serializing."""
docnames, titles = zip(*sorted(self._titles.items())) docnames, titles = zip(*sorted(self._titles.items()))
filenames = [self._filenames.get(docname) for docname in docnames] filenames = [self._filenames.get(docname) for docname in docnames]
fn2index = dict((f, i) for (i, f) in enumerate(docnames)) fn2index = {f: i for (i, f) in enumerate(docnames)}
terms, title_terms = self.get_terms(fn2index) terms, title_terms = self.get_terms(fn2index)
objects = self.get_objects(fn2index) # populates _objtypes objects = self.get_objects(fn2index) # populates _objtypes
objtypes = dict((v, k[0] + ':' + k[1]) objtypes = {v: k[0] + ':' + k[1] for (k, v) in self._objtypes.items()}
for (k, v) in self._objtypes.items())
objnames = self._objnames objnames = self._objnames
return dict(docnames=docnames, filenames=filenames, titles=titles, terms=terms, return dict(docnames=docnames, filenames=filenames, titles=titles, terms=terms,
objects=objects, objtypes=objtypes, objnames=objnames, objects=objects, objtypes=objtypes, objnames=objnames,

View File

@ -155,14 +155,14 @@ class JanomeSplitter(BaseSplitter):
class DefaultSplitter(BaseSplitter): class DefaultSplitter(BaseSplitter):
patterns_ = dict([(re.compile(pattern), value) for pattern, value in { patterns_ = {re.compile(pattern): value for pattern, value in {
'[一二三四五六七八九十百千万億兆]': 'M', '[一二三四五六七八九十百千万億兆]': 'M',
'[一-龠々〆ヵヶ]': 'H', '[一-龠々〆ヵヶ]': 'H',
'[ぁ-ん]': 'I', '[ぁ-ん]': 'I',
'[ァ-ヴーア-ン゙ー]': 'K', '[ァ-ヴーア-ン゙ー]': 'K',
'[a-zA-Z--]': 'A', '[a-zA-Z--]': 'A',
'[0-9-]': 'N', '[0-9-]': 'N',
}.items()]) }.items()}
BIAS__ = -332 BIAS__ = -332
BC1__ = {'HH': 6, 'II': 2461, 'KH': 406, 'OH': -1378} BC1__ = {'HH': 6, 'II': 2461, 'KH': 406, 'OH': -1378}
BC2__ = {'AA': -3267, 'AI': 2744, 'AN': -878, 'HH': -4070, 'HM': -1711, BC2__ = {'AA': -3267, 'AI': 2744, 'AN': -878, 'HH': -4070, 'HM': -1711,

View File

@ -133,8 +133,8 @@ class SphinxTestApp(application.Sphinx):
self._saved_directives = directives._directives.copy() # type: ignore self._saved_directives = directives._directives.copy() # type: ignore
self._saved_roles = roles._roles.copy() # type: ignore self._saved_roles = roles._roles.copy() # type: ignore
self._saved_nodeclasses = set(v for v in dir(nodes.GenericNodeVisitor) self._saved_nodeclasses = {v for v in dir(nodes.GenericNodeVisitor)
if v.startswith('visit_')) if v.startswith('visit_')}
try: try:
super().__init__(srcdir, confdir, outdir, doctreedir, super().__init__(srcdir, confdir, outdir, doctreedir,

View File

@ -36,11 +36,11 @@ if False:
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
default_substitutions = set([ default_substitutions = {
'version', 'version',
'release', 'release',
'today', 'today',
]) }
class SphinxTransform(Transform): class SphinxTransform(Transform):

View File

@ -138,7 +138,7 @@ class FilenameUniqDict(dict):
while uniquename in self._existing: while uniquename in self._existing:
i += 1 i += 1
uniquename = '%s%s%s' % (base, i, ext) uniquename = '%s%s%s' % (base, i, ext)
self[newfile] = (set([docname]), uniquename) self[newfile] = ({docname}, uniquename)
self._existing.add(uniquename) self._existing.add(uniquename)
return uniquename return uniquename

View File

@ -61,7 +61,7 @@ def test_extension_in_blacklist(app, status, warning):
@pytest.mark.filterwarnings('ignore:The config variable "source_parsers"') @pytest.mark.filterwarnings('ignore:The config variable "source_parsers"')
@pytest.mark.filterwarnings('ignore:app.add_source_parser\\(\\) does not support suffix') @pytest.mark.filterwarnings('ignore:app.add_source_parser\\(\\) does not support suffix')
def test_add_source_parser(app, status, warning): def test_add_source_parser(app, status, warning):
assert set(app.config.source_suffix) == set(['.rst', '.md', '.test']) assert set(app.config.source_suffix) == {'.rst', '.md', '.test'}
# .rst; only in :confval:`source_suffix` # .rst; only in :confval:`source_suffix`
assert '.rst' not in app.registry.get_source_parsers() assert '.rst' not in app.registry.get_source_parsers()

View File

@ -28,7 +28,7 @@ def test_incremental_reading(app):
# second reading # second reading
updated = app.builder.read() updated = app.builder.read()
assert set(updated) == set(['index', 'new']) assert set(updated) == {'index', 'new'}
assert 'autodoc' not in app.env.all_docs assert 'autodoc' not in app.env.all_docs
assert 'autodoc' not in app.env.found_docs assert 'autodoc' not in app.env.found_docs
@ -44,4 +44,4 @@ def test_incremental_reading_for_missing_files(app):
# "index" is listed up to updated because it contains references # "index" is listed up to updated because it contains references
# to nonexisting downloadable or image files # to nonexisting downloadable or image files
assert set(updated) == set(['index']) assert set(updated) == {'index'}

View File

@ -42,10 +42,10 @@ def test_compile_all_catalogs(app, status, warning):
locale_dir = app.srcdir / 'locale' locale_dir = app.srcdir / 'locale'
catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES' catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
expect = set([ expect = {
x.replace('.po', '.mo') x.replace('.po', '.mo')
for x in find_files(catalog_dir, '.po') for x in find_files(catalog_dir, '.po')
]) }
actual = set(find_files(catalog_dir, '.mo')) actual = set(find_files(catalog_dir, '.mo'))
assert actual # not empty assert actual # not empty
assert actual == expect assert actual == expect
@ -66,7 +66,7 @@ def test_compile_specific_catalogs(app, status, warning):
actual_on_boot = get_actual() # sphinx.mo might be included actual_on_boot = get_actual() # sphinx.mo might be included
app.builder.compile_specific_catalogs([app.srcdir / 'admonitions.txt']) app.builder.compile_specific_catalogs([app.srcdir / 'admonitions.txt'])
actual = get_actual() - actual_on_boot actual = get_actual() - actual_on_boot
assert actual == set(['admonitions.mo']) assert actual == {'admonitions.mo'}
@pytest.mark.usefixtures('setup_test') @pytest.mark.usefixtures('setup_test')
@ -79,10 +79,10 @@ def test_compile_update_catalogs(app, status, warning):
locale_dir = app.srcdir / 'locale' locale_dir = app.srcdir / 'locale'
catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES' catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
expect = set([ expect = {
x.replace('.po', '.mo') x.replace('.po', '.mo')
for x in find_files(catalog_dir, '.po') for x in find_files(catalog_dir, '.po')
]) }
actual = set(find_files(catalog_dir, '.mo')) actual = set(find_files(catalog_dir, '.mo'))
assert actual # not empty assert actual # not empty
assert actual == expect assert actual == expect

View File

@ -25,21 +25,20 @@ def test_images(app):
htmlbuilder.imgpath = 'dummy' htmlbuilder.imgpath = 'dummy'
htmlbuilder.post_process_images(tree) htmlbuilder.post_process_images(tree)
assert set(htmlbuilder.images.keys()) == \ assert set(htmlbuilder.images.keys()) == \
set(['subdir/img.png', 'img.png', 'subdir/simg.png', 'svgimg.svg', {'subdir/img.png', 'img.png', 'subdir/simg.png', 'svgimg.svg', 'img.foo.png'}
'img.foo.png'])
assert set(htmlbuilder.images.values()) == \ assert set(htmlbuilder.images.values()) == \
set(['img.png', 'img1.png', 'simg.png', 'svgimg.svg', 'img.foo.png']) {'img.png', 'img1.png', 'simg.png', 'svgimg.svg', 'img.foo.png'}
latexbuilder = LaTeXBuilder(app) latexbuilder = LaTeXBuilder(app)
latexbuilder.set_environment(app.env) latexbuilder.set_environment(app.env)
latexbuilder.init() latexbuilder.init()
latexbuilder.post_process_images(tree) latexbuilder.post_process_images(tree)
assert set(latexbuilder.images.keys()) == \ assert set(latexbuilder.images.keys()) == \
set(['subdir/img.png', 'subdir/simg.png', 'img.png', 'img.pdf', {'subdir/img.png', 'subdir/simg.png', 'img.png', 'img.pdf',
'svgimg.pdf', 'img.foo.png']) 'svgimg.pdf', 'img.foo.png'}
assert set(latexbuilder.images.values()) == \ assert set(latexbuilder.images.values()) == \
set(['img.pdf', 'img.png', 'img1.png', 'simg.png', {'img.pdf', 'img.png', 'img1.png', 'simg.png',
'svgimg.pdf', 'img.foo.png']) 'svgimg.pdf', 'img.foo.png'}
@pytest.mark.sphinx('dummy') @pytest.mark.sphinx('dummy')

View File

@ -75,11 +75,11 @@ def test_process_doc(app):
# other collections # other collections
assert app.env.toc_num_entries['index'] == 6 assert app.env.toc_num_entries['index'] == 6
assert app.env.toctree_includes['index'] == ['foo', 'bar', 'baz'] assert app.env.toctree_includes['index'] == ['foo', 'bar', 'baz']
assert app.env.files_to_rebuild['foo'] == set(['index']) assert app.env.files_to_rebuild['foo'] == {'index'}
assert app.env.files_to_rebuild['bar'] == set(['index']) assert app.env.files_to_rebuild['bar'] == {'index'}
assert app.env.files_to_rebuild['baz'] == set(['index']) assert app.env.files_to_rebuild['baz'] == {'index'}
assert app.env.glob_toctrees == set() assert app.env.glob_toctrees == set()
assert app.env.numbered_toctrees == set(['index']) assert app.env.numbered_toctrees == {'index'}
# qux has no section title # qux has no section title
assert len(app.env.tocs['qux']) == 0 assert len(app.env.tocs['qux']) == 0

View File

@ -37,7 +37,7 @@ def test_build(app, status, warning):
undoc_py, undoc_c = pickle.loads((app.outdir / 'undoc.pickle').bytes()) undoc_py, undoc_c = pickle.loads((app.outdir / 'undoc.pickle').bytes())
assert len(undoc_c) == 1 assert len(undoc_c) == 1
# the key is the full path to the header file, which isn't testable # the key is the full path to the header file, which isn't testable
assert list(undoc_c.values())[0] == set([('function', 'Py_SphinxTest')]) assert list(undoc_c.values())[0] == {('function', 'Py_SphinxTest')}
assert 'autodoc_target' in undoc_py assert 'autodoc_target' in undoc_py
assert 'funcs' in undoc_py['autodoc_target'] assert 'funcs' in undoc_py['autodoc_target']

View File

@ -121,7 +121,7 @@ def test_import_classes(rootdir):
# all of classes in the module # all of classes in the module
classes = import_classes('sphinx.application', None) classes = import_classes('sphinx.application', None)
assert set(classes) == set([Sphinx, TemplateBridge]) assert set(classes) == {Sphinx, TemplateBridge}
# specified class in the module # specified class in the module
classes = import_classes('sphinx.application.Sphinx', None) classes = import_classes('sphinx.application.Sphinx', None)

View File

@ -36,7 +36,7 @@ class NamedtupleSubclass(namedtuple('NamedtupleSubclass', ('attr1', 'attr2'))):
__slots__ = () __slots__ = ()
def __new__(cls, attr1, attr2=None): def __new__(cls, attr1, attr2=None):
return super(NamedtupleSubclass, cls).__new__(cls, attr1, attr2) return super().__new__(cls, attr1, attr2)
class BaseDocstringTest(TestCase): class BaseDocstringTest(TestCase):

View File

@ -54,9 +54,9 @@ def test_todo(app, status, warning):
# check handled event # check handled event
assert len(todos) == 3 assert len(todos) == 3
assert set(todo[1].astext() for todo in todos) == {'todo in foo', assert {todo[1].astext() for todo in todos} == {'todo in foo',
'todo in bar', 'todo in bar',
'todo in param field'} 'todo in param field'}
@pytest.mark.sphinx('html', testroot='ext-todo', freshenv=True, @pytest.mark.sphinx('html', testroot='ext-todo', freshenv=True,
@ -92,9 +92,9 @@ def test_todo_not_included(app, status, warning):
# check handled event # check handled event
assert len(todos) == 3 assert len(todos) == 3
assert set(todo[1].astext() for todo in todos) == {'todo in foo', assert {todo[1].astext() for todo in todos} == {'todo in foo',
'todo in bar', 'todo in bar',
'todo in param field'} 'todo in param field'}
@pytest.mark.sphinx('latex', testroot='ext-todo', freshenv=True, @pytest.mark.sphinx('latex', testroot='ext-todo', freshenv=True,

View File

@ -829,8 +829,7 @@ def test_xml_footnote_backlinks(app):
para0 = secs[0].findall('paragraph') para0 = secs[0].findall('paragraph')
refs0 = para0[0].findall('footnote_reference') refs0 = para0[0].findall('footnote_reference')
refid2id = dict([ refid2id = {r.attrib.get('refid'): r.attrib.get('ids') for r in refs0}
(r.attrib.get('refid'), r.attrib.get('ids')) for r in refs0])
footnote0 = secs[0].findall('footnote') footnote0 = secs[0].findall('footnote')
for footnote in footnote0: for footnote in footnote0:

View File

@ -70,13 +70,13 @@ def test_get_catalogs_for_xx(tempdir):
(tempdir / 'loc1' / 'xx' / 'LC_ALL' / 'test7.po').write_text('#') (tempdir / 'loc1' / 'xx' / 'LC_ALL' / 'test7.po').write_text('#')
catalogs = i18n.find_catalog_source_files([tempdir / 'loc1'], 'xx', force_all=False) catalogs = i18n.find_catalog_source_files([tempdir / 'loc1'], 'xx', force_all=False)
domains = set(c.domain for c in catalogs) domains = {c.domain for c in catalogs}
assert domains == set([ assert domains == {
'test1', 'test1',
'test2', 'test2',
'sub/test4', 'sub/test4',
'sub/test5', 'sub/test5',
]) }
def test_get_catalogs_for_en(tempdir): def test_get_catalogs_for_en(tempdir):
@ -86,8 +86,8 @@ def test_get_catalogs_for_en(tempdir):
(tempdir / 'loc1' / 'en' / 'LC_MESSAGES' / 'en_dom.po').write_text('#') (tempdir / 'loc1' / 'en' / 'LC_MESSAGES' / 'en_dom.po').write_text('#')
catalogs = i18n.find_catalog_source_files([tempdir / 'loc1'], 'en', force_all=False) catalogs = i18n.find_catalog_source_files([tempdir / 'loc1'], 'en', force_all=False)
domains = set(c.domain for c in catalogs) domains = {c.domain for c in catalogs}
assert domains == set(['en_dom']) assert domains == {'en_dom'}
def test_get_catalogs_with_non_existent_locale(tempdir): def test_get_catalogs_with_non_existent_locale(tempdir):
@ -121,13 +121,13 @@ def test_get_catalogs_for_xx_without_outdated(tempdir):
assert not catalogs assert not catalogs
catalogs = i18n.find_catalog_source_files([tempdir / 'loc1'], 'xx', force_all=True) catalogs = i18n.find_catalog_source_files([tempdir / 'loc1'], 'xx', force_all=True)
domains = set(c.domain for c in catalogs) domains = {c.domain for c in catalogs}
assert domains == set([ assert domains == {
'test1', 'test1',
'test2', 'test2',
'sub/test4', 'sub/test4',
'sub/test5', 'sub/test5',
]) }
def test_get_catalogs_from_multiple_locale_dirs(tempdir): def test_get_catalogs_from_multiple_locale_dirs(tempdir):
@ -152,8 +152,8 @@ def test_get_catalogs_with_compact(tempdir):
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#') (tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')
catalogs = i18n.find_catalog_source_files([tempdir / 'loc1'], 'xx', gettext_compact=True) catalogs = i18n.find_catalog_source_files([tempdir / 'loc1'], 'xx', gettext_compact=True)
domains = set(c.domain for c in catalogs) domains = {c.domain for c in catalogs}
assert domains == set(['test1', 'test2', 'sub/test3', 'sub/test4']) assert domains == {'test1', 'test2', 'sub/test3', 'sub/test4'}
def test_get_catalogs_excluded(tempdir): def test_get_catalogs_excluded(tempdir):
@ -163,8 +163,8 @@ def test_get_catalogs_excluded(tempdir):
catalogs = i18n.find_catalog_source_files( catalogs = i18n.find_catalog_source_files(
[tempdir / 'loc1'], 'en', force_all=False, excluded=lambda path: '.git' in path) [tempdir / 'loc1'], 'en', force_all=False, excluded=lambda path: '.git' in path)
domains = set(c.domain for c in catalogs) domains = {c.domain for c in catalogs}
assert domains == set(['en_dom']) assert domains == {'en_dom'}
def test_format_date(): def test_format_date():

View File

@ -352,7 +352,7 @@ def test_set_sorting():
def test_set_sorting_fallback(): def test_set_sorting_fallback():
set_ = set((None, 1)) set_ = {None, 1}
description = inspect.object_description(set_) description = inspect.object_description(set_)
assert description in ("{1, None}", "{None, 1}") assert description in ("{1, None}", "{None, 1}")

View File

@ -79,7 +79,7 @@ function splitQuery(query) {
} }
''' % (fold(singles, ','), fold(ranges, '],')) ''' % (fold(singles, ','), fold(ranges, '],'))
js_test_src = u''' js_test_src = '''
// This is regression test for https://github.com/sphinx-doc/sphinx/issues/3150 // This is regression test for https://github.com/sphinx-doc/sphinx/issues/3150
// generated by compat_regexp_generator.py // generated by compat_regexp_generator.py
// it needs node.js for testing // it needs node.js for testing