Merge pull request #6110 from tk0miya/refactor_catalog

Refactor catalog detection and compilation
Takeshi KOMIYA 2019-02-27 23:36:14 +09:00 committed by GitHub
commit 8d0d75fea7
40 changed files with 200 additions and 97 deletions


@ -17,6 +17,9 @@ Deprecated
* ``sphinx.ext.autodoc.importer.MockLoader``
* ``sphinx.ext.autodoc.importer.mock()``
* ``sphinx.ext.autosummary.autolink_role()``
* ``sphinx.util.i18n.find_catalog()``
* ``sphinx.util.i18n.find_catalog_files()``
* ``sphinx.util.i18n.find_catalog_source_files()``
Features added
--------------


@ -254,6 +254,21 @@ The following is a list of deprecated interfaces.
- 4.0
- ``sphinx.ext.autosummary.AutoLink``
* - ``sphinx.util.i18n.find_catalog()``
- 2.1
- 4.0
- ``sphinx.util.i18n.docname_to_domain()``
* - ``sphinx.util.i18n.find_catalog_files()``
- 2.1
- 4.0
- ``sphinx.util.i18n.CatalogRepository``
* - ``sphinx.util.i18n.find_catalog_source_files()``
- 2.1
- 4.0
- ``sphinx.util.i18n.CatalogRepository``
* - ``encoding`` argument of ``autodoc.Documenter.get_doc()``,
``autodoc.DocstringSignatureMixin.get_doc()``,
``autodoc.DocstringSignatureMixin._find_signature()``, and


@ -40,7 +40,7 @@ from sphinx.util import logging
from sphinx.util.build_phase import BuildPhase
from sphinx.util.console import bold # type: ignore
from sphinx.util.docutils import directive_helper
from sphinx.util.i18n import find_catalog_source_files
from sphinx.util.i18n import CatalogRepository
from sphinx.util.logging import prefixed_warnings
from sphinx.util.osutil import abspath, ensuredir, relpath
from sphinx.util.tags import Tags
@ -265,21 +265,21 @@ class Sphinx:
"""Load translated strings from the configured localedirs if enabled in
the configuration.
"""
if self.config.language is not None:
if self.config.language is None:
self.translator, has_translation = locale.init([], None)
else:
logger.info(bold(__('loading translations [%s]... ') % self.config.language),
nonl=True)
user_locale_dirs = [
path.join(self.srcdir, x) for x in self.config.locale_dirs]
# compile mo files if sphinx.po file in user locale directories are updated
for catinfo in find_catalog_source_files(
user_locale_dirs, self.config.language, domains=['sphinx'],
charset=self.config.source_encoding):
catinfo.write_mo(self.config.language)
locale_dirs = [None, path.join(package_dir, 'locale')] + user_locale_dirs
else:
locale_dirs = []
self.translator, has_translation = locale.init(locale_dirs, self.config.language)
if self.config.language is not None:
repo = CatalogRepository(self.srcdir, self.config.locale_dirs,
self.config.language, self.config.source_encoding)
for catalog in repo.catalogs:
if catalog.domain == 'sphinx' and catalog.is_outdated():
catalog.write_mo(self.config.language)
locale_dirs = [None, path.join(package_dir, 'locale')] + list(repo.locale_dirs)
self.translator, has_translation = locale.init(locale_dirs, self.config.language)
if has_translation or self.config.language == 'en':
# "en" never needs to be translated
logger.info(__('done'))


@ -19,12 +19,11 @@ from sphinx.environment.adapters.asset import ImageAdapter
from sphinx.errors import SphinxError
from sphinx.io import read_doc
from sphinx.locale import __
from sphinx.util import i18n, import_object, logging, rst, progress_message, status_iterator
from sphinx.util import import_object, logging, rst, progress_message, status_iterator
from sphinx.util.build_phase import BuildPhase
from sphinx.util.console import bold # type: ignore
from sphinx.util.docutils import sphinx_domains
from sphinx.util.i18n import find_catalog
from sphinx.util.matching import Matcher
from sphinx.util.i18n import CatalogRepository, docname_to_domain
from sphinx.util.osutil import SEP, ensuredir, relative_uri, relpath
from sphinx.util.parallel import ParallelTasks, SerialTasks, make_chunks, \
parallel_available
@ -236,14 +235,10 @@ class Builder:
def compile_all_catalogs(self):
# type: () -> None
catalogs = i18n.find_catalog_source_files(
[path.join(self.srcdir, x) for x in self.config.locale_dirs],
self.config.language,
charset=self.config.source_encoding,
force_all=True,
excluded=Matcher(['**/.?**']))
message = __('all of %d po files') % len(catalogs)
self.compile_catalogs(catalogs, message)
repo = CatalogRepository(self.srcdir, self.config.locale_dirs,
self.config.language, self.config.source_encoding)
message = __('all of %d po files') % len(list(repo.catalogs))
self.compile_catalogs(set(repo.catalogs), message)
def compile_specific_catalogs(self, specified_files):
# type: (List[str]) -> None
@ -251,28 +246,25 @@ class Builder:
# type: (str) -> str
docname = self.env.path2doc(path.abspath(fpath))
if docname:
return find_catalog(docname, self.config.gettext_compact)
return docname_to_domain(docname, self.config.gettext_compact)
else:
return None
specified_domains = set(map(to_domain, specified_files))
specified_domains.discard(None)
catalogs = i18n.find_catalog_source_files(
[path.join(self.srcdir, x) for x in self.config.locale_dirs],
self.config.language,
domains=list(specified_domains),
charset=self.config.source_encoding,
excluded=Matcher(['**/.?**']))
catalogs = set()
domains = set(map(to_domain, specified_files))
repo = CatalogRepository(self.srcdir, self.config.locale_dirs,
self.config.language, self.config.source_encoding)
for catalog in repo.catalogs:
if catalog.domain in domains and catalog.is_outdated():
catalogs.add(catalog)
message = __('targets for %d po files that are specified') % len(catalogs)
self.compile_catalogs(catalogs, message)
def compile_update_catalogs(self):
# type: () -> None
catalogs = i18n.find_catalog_source_files(
[path.join(self.srcdir, x) for x in self.config.locale_dirs],
self.config.language,
charset=self.config.source_encoding,
excluded=Matcher(['**/.?**']))
repo = CatalogRepository(self.srcdir, self.config.locale_dirs,
self.config.language, self.config.source_encoding)
catalogs = {c for c in repo.catalogs if c.is_outdated()}
message = __('targets for %d po files that are out of date') % len(catalogs)
self.compile_catalogs(catalogs, message)
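
All three compile_*_catalogs() variants above now get their catalogs from CatalogRepository and differ only in how they filter repo.catalogs; the shared staleness test is catalog.is_outdated(). As a minimal sketch of what such a check typically amounts to (an mtime comparison between a .po source and its compiled .mo; an illustration, not the upstream CatalogInfo code):

from os import path

def is_outdated(po_path, mo_path):
    # recompile when the .mo is missing or older than its .po source
    return (not path.exists(mo_path) or
            path.getmtime(mo_path) < path.getmtime(po_path))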


@ -22,7 +22,7 @@ from sphinx.errors import ThemeError
from sphinx.locale import __
from sphinx.util import split_index_msg, logging, status_iterator
from sphinx.util.console import bold # type: ignore
from sphinx.util.i18n import find_catalog
from sphinx.util.i18n import docname_to_domain
from sphinx.util.nodes import extract_messages, traverse_translatable_index
from sphinx.util.osutil import relpath, ensuredir, canon_path
from sphinx.util.tags import Tags
@ -140,7 +140,7 @@ class I18nBuilder(Builder):
def write_doc(self, docname, doctree):
# type: (str, nodes.document) -> None
catalog = self.catalogs[find_catalog(docname, self.config.gettext_compact)]
catalog = self.catalogs[docname_to_domain(docname, self.config.gettext_compact)]
for node, msg in extract_messages(doctree):
catalog.add(msg, node)


@ -25,7 +25,7 @@ from sphinx.transforms import SphinxTransformer
from sphinx.util import DownloadFiles, FilenameUniqDict
from sphinx.util import logging
from sphinx.util.docutils import LoggingReporter
from sphinx.util.i18n import find_catalog_files
from sphinx.util.i18n import CatalogRepository, docname_to_domain
from sphinx.util.nodes import is_translatable
if False:
@ -395,15 +395,13 @@ class BuildEnvironment:
# move i18n process into the writing phase, and remove these lines.
if builder.use_message_catalog:
# add catalog mo file dependency
repo = CatalogRepository(self.srcdir, self.config.locale_dirs,
self.config.language, self.config.source_encoding)
for docname in self.found_docs:
catalog_files = find_catalog_files(
docname,
self.srcdir,
self.config.locale_dirs,
self.config.language,
self.config.gettext_compact)
for filename in catalog_files:
self.dependencies[docname].add(filename)
domain = docname_to_domain(docname, self.config.gettext_compact)
for catalog in repo.catalogs:
if catalog.domain == domain:
self.dependencies[docname].add(catalog.mo_path)
except OSError as exc:
raise DocumentError(__('Failed to scan documents in %s: %r') % (self.srcdir, exc))


@ -21,7 +21,7 @@ from sphinx.domains.std import make_glossary_term, split_term_classifiers
from sphinx.locale import __, init as init_locale
from sphinx.transforms import SphinxTransform
from sphinx.util import split_index_msg, logging
from sphinx.util.i18n import find_catalog
from sphinx.util.i18n import docname_to_domain
from sphinx.util.nodes import (
LITERAL_TYPE_NODES, IMAGE_TYPE_NODES, NodeMatcher,
extract_messages, is_pending_meta, traverse_translatable_index,
@ -94,7 +94,7 @@ class Locale(SphinxTransform):
assert source.startswith(self.env.srcdir)
docname = path.splitext(relative_path(path.join(self.env.srcdir, 'dummy'),
source))[0]
textdomain = find_catalog(docname, self.config.gettext_compact)
textdomain = docname_to_domain(docname, self.config.gettext_compact)
# fetch translations
dirs = [path.join(self.env.srcdir, directory)


@ -19,19 +19,19 @@ import babel.dates
from babel.messages.mofile import write_mo
from babel.messages.pofile import read_po
from sphinx.deprecation import RemovedInSphinx30Warning
from sphinx.deprecation import RemovedInSphinx30Warning, RemovedInSphinx40Warning
from sphinx.errors import SphinxError
from sphinx.locale import __
from sphinx.util import logging
from sphinx.util.matching import Matcher
from sphinx.util.osutil import SEP, relpath
from sphinx.util.osutil import SEP, canon_path, relpath
logger = logging.getLogger(__name__)
if False:
# For type annotation
from typing import Callable, List, Set # NOQA
from typing import Callable, Generator, List, Set, Tuple # NOQA
from sphinx.environment import BuildEnvironment # NOQA
LocaleFileInfoBase = namedtuple('CatalogInfo', 'base_dir,domain,charset')
@ -81,8 +81,55 @@ class CatalogInfo(LocaleFileInfoBase):
logger.warning(__('writing error: %s, %s'), self.mo_path, exc)
class CatalogRepository:
"""A repository for message catalogs."""
def __init__(self, basedir, locale_dirs, language, encoding):
# type: (str, List[str], str, str) -> None
self.basedir = basedir
self._locale_dirs = locale_dirs
self.language = language
self.encoding = encoding
@property
def locale_dirs(self):
# type: () -> Generator[str, None, None]
if not self.language:
return
for locale_dir in self._locale_dirs:
locale_dir = path.join(self.basedir, locale_dir)
if path.exists(path.join(locale_dir, self.language, 'LC_MESSAGES')):
yield locale_dir
@property
def pofiles(self):
# type: () -> Generator[Tuple[str, str], None, None]
for locale_dir in self.locale_dirs:
basedir = path.join(locale_dir, self.language, 'LC_MESSAGES')
for root, dirnames, filenames in os.walk(basedir):
# skip dot-directories
for dirname in dirnames:
if dirname.startswith('.'):
dirnames.remove(dirname)
for filename in filenames:
if filename.endswith('.po'):
fullpath = path.join(root, filename)
yield basedir, relpath(fullpath, basedir)
@property
def catalogs(self):
# type: () -> Generator[CatalogInfo, None, None]
for basedir, filename in self.pofiles:
domain = canon_path(path.splitext(filename)[0])
yield CatalogInfo(basedir, domain, self.encoding)
def find_catalog(docname, compaction):
# type: (str, bool) -> str
warnings.warn('find_catalog() is deprecated.',
RemovedInSphinx40Warning, stacklevel=2)
if compaction:
ret = docname.split(SEP, 1)[0]
else:
@ -91,8 +138,19 @@ def find_catalog(docname, compaction):
return ret
def docname_to_domain(docname, compation):
# type: (str, bool) -> str
"""Convert docname to domain for catalogs."""
if compation:
return docname.split(SEP, 1)[0]
else:
return docname
def find_catalog_files(docname, srcdir, locale_dirs, lang, compaction):
# type: (str, str, List[str], str, bool) -> List[str]
warnings.warn('find_catalog_files() is deprecated.',
RemovedInSphinx40Warning, stacklevel=2)
if not(lang and locale_dirs):
return []
@ -120,6 +178,8 @@ def find_catalog_source_files(locale_dirs, locale, domains=None, gettext_compact
default is False.
:return: [CatalogInfo(), ...]
"""
warnings.warn('find_catalog_source_files() is deprecated.',
RemovedInSphinx40Warning, stacklevel=2)
if gettext_compact is not None:
warnings.warn('gettext_compact argument for find_catalog_source_files() '
'is deprecated.', RemovedInSphinx30Warning, stacklevel=2)
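
Taken together, CatalogRepository and docname_to_domain() replace the three deprecated helpers above. A rough migration sketch (not part of the changeset; the srcdir, locale_dirs, language and docname values are placeholders):

from sphinx.util.i18n import CatalogRepository, docname_to_domain

# find_catalog(docname, compaction)  ->  docname_to_domain(docname, compaction)
docname_to_domain('usage/install', True)    # 'usage' (gettext_compact enabled)
docname_to_domain('usage/install', False)   # 'usage/install'

# find_catalog_files() / find_catalog_source_files()  ->  CatalogRepository
repo = CatalogRepository('/path/to/srcdir', ['locale'], 'xx', 'utf-8')
list(repo.locale_dirs)            # locale dirs that contain an xx/LC_MESSAGES
for catalog in repo.catalogs:     # CatalogInfo objects, one per .po file found
    if catalog.is_outdated():
        catalog.write_mo('xx')

Because locale_dirs, pofiles and catalogs are generator properties, callers that need a count or repeated iteration wrap them in list() or set() first, as the builder code in this commit does.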


@ -0,0 +1 @@
language = 'xx'


@ -17,18 +17,19 @@ from sphinx.testing.util import find_files
@pytest.fixture
def setup_test(app_params):
srcdir = app_params.kwargs['srcdir']
locale_dir = srcdir / 'locale'
src_locale_dir = srcdir / 'xx' / 'LC_MESSAGES'
dest_locale_dir = srcdir / 'locale'
# copy all catalogs into locale layout directory
for po in find_files(srcdir, '.po'):
copy_po = (locale_dir / 'en' / 'LC_MESSAGES' / po)
for po in find_files(src_locale_dir, '.po'):
copy_po = (dest_locale_dir / 'en' / 'LC_MESSAGES' / po)
if not copy_po.parent.exists():
copy_po.parent.makedirs()
shutil.copy(srcdir / po, copy_po)
shutil.copy(src_locale_dir / po, copy_po)
yield
# delete remnants left over after failed build
locale_dir.rmtree(True)
dest_locale_dir.rmtree(True)
(srcdir / '_build').rmtree(True)


@ -41,31 +41,21 @@ def write_mo(pathname, po):
return mofile.write_mo(f, po)
@pytest.fixture
def build_mo():
def builder(srcdir):
"""
:param str srcdir: app.srcdir
"""
srcdir = path(srcdir)
for dirpath, dirs, files in os.walk(srcdir):
dirpath = path(dirpath)
for f in [f for f in files if f.endswith('.po')]:
po = dirpath / f
mo = srcdir / 'xx' / 'LC_MESSAGES' / (
os.path.relpath(po[:-3], srcdir) + '.mo')
if not mo.parent.exists():
mo.parent.makedirs()
if not mo.exists() or mo.stat().st_mtime < po.stat().st_mtime:
# compile .mo file only if needed
write_mo(mo, read_po(po))
return builder
@pytest.fixture(autouse=True)
def setup_intl(app_params, build_mo):
build_mo(app_params.kwargs['srcdir'])
def setup_intl(app_params):
srcdir = path(app_params.kwargs['srcdir'])
for dirpath, dirs, files in os.walk(srcdir):
dirpath = path(dirpath)
for f in [f for f in files if f.endswith('.po')]:
po = dirpath / f
mo = srcdir / 'xx' / 'LC_MESSAGES' / (
os.path.relpath(po[:-3], srcdir) + '.mo')
if not mo.parent.exists():
mo.parent.makedirs()
if not mo.exists() or mo.stat().st_mtime < po.stat().st_mtime:
# compile .mo file only if needed
write_mo(mo, read_po(po))
@pytest.fixture(autouse=True)
@ -296,7 +286,7 @@ def test_text_glossary_term_inconsistencies(app, warning):
def test_gettext_section(app):
app.build()
# --- section
expect = read_po(app.srcdir / 'section.po')
expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'section.po')
actual = read_po(app.outdir / 'section.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
@ -309,7 +299,7 @@ def test_text_section(app):
app.build()
# --- section
result = (app.outdir / 'section.txt').text()
expect = read_po(app.srcdir / 'section.po')
expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'section.po')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.string in result
@ -445,7 +435,7 @@ def test_text_admonitions(app):
def test_gettext_toctree(app):
app.build()
# --- toctree
expect = read_po(app.srcdir / 'index.po')
expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'index.po')
actual = read_po(app.outdir / 'index.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
@ -457,7 +447,7 @@ def test_gettext_toctree(app):
def test_gettext_table(app):
app.build()
# --- toctree
expect = read_po(app.srcdir / 'table.po')
expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'table.po')
actual = read_po(app.outdir / 'table.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
@ -470,7 +460,7 @@ def test_text_table(app):
app.build()
# --- toctree
result = (app.outdir / 'table.txt').text()
expect = read_po(app.srcdir / 'table.po')
expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'table.po')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.string in result
@ -481,7 +471,7 @@ def test_text_table(app):
def test_gettext_topic(app):
app.build()
# --- topic
expect = read_po(app.srcdir / 'topic.po')
expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'topic.po')
actual = read_po(app.outdir / 'topic.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
@ -494,7 +484,7 @@ def test_text_topic(app):
app.build()
# --- topic
result = (app.outdir / 'topic.txt').text()
expect = read_po(app.srcdir / 'topic.po')
expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'topic.po')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.string in result
@ -505,7 +495,7 @@ def test_text_topic(app):
def test_gettext_definition_terms(app):
app.build()
# --- definition terms: regression test for #2198, #2205
expect = read_po(app.srcdir / 'definition_terms.po')
expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'definition_terms.po')
actual = read_po(app.outdir / 'definition_terms.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
@ -517,7 +507,7 @@ def test_gettext_definition_terms(app):
def test_gettext_glossary_terms(app, warning):
app.build()
# --- glossary terms: regression test for #1090
expect = read_po(app.srcdir / 'glossary_terms.po')
expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'glossary_terms.po')
actual = read_po(app.outdir / 'glossary_terms.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
@ -531,7 +521,7 @@ def test_gettext_glossary_terms(app, warning):
def test_gettext_glossary_term_inconsistencies(app):
app.build()
# --- glossary term inconsistencies: regression test for #1090
expect = read_po(app.srcdir / 'glossary_terms_inconsistency.po')
expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'glossary_terms_inconsistency.po')
actual = read_po(app.outdir / 'glossary_terms_inconsistency.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
@ -543,7 +533,7 @@ def test_gettext_glossary_term_inconsistencies(app):
def test_gettext_literalblock(app):
app.build()
# --- gettext builder always ignores ``only`` directive
expect = read_po(app.srcdir / 'literalblock.po')
expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'literalblock.po')
actual = read_po(app.outdir / 'literalblock.pot')
for expect_msg in [m for m in expect if m.id]:
if len(expect_msg.id.splitlines()) == 1:
@ -559,7 +549,7 @@ def test_gettext_literalblock(app):
def test_gettext_buildr_ignores_only_directive(app):
app.build()
# --- gettext builder always ignores ``only`` directive
expect = read_po(app.srcdir / 'only.po')
expect = read_po(app.srcdir / 'xx' / 'LC_MESSAGES' / 'only.po')
actual = read_po(app.outdir / 'only.pot')
for expect_msg in [m for m in expect if m.id]:
assert expect_msg.id in [m.id for m in actual if m.id]
@ -568,7 +558,7 @@ def test_gettext_buildr_ignores_only_directive(app):
@sphinx_intl
# use individual shared_result directory to avoid "incompatible doctree" error
@pytest.mark.sphinx(testroot='builder-gettext-dont-rebuild-mo')
def test_gettext_dont_rebuild_mo(make_app, app_params, build_mo):
def test_gettext_dont_rebuild_mo(make_app, app_params):
# --- don't rebuild by .mo mtime
def get_number_of_update_targets(app_):
app_.env.find_files(app_.config, app_.builder)
@ -579,7 +569,6 @@ def test_gettext_dont_rebuild_mo(make_app, app_params, build_mo):
# phase1: build document with non-gettext builder and generate mo file in srcdir
app0 = make_app('dummy', *args, **kwargs)
build_mo(app0.srcdir)
app0.build()
assert (app0.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').exists()
# Since it is after the build, the number of documents to be updated is 0


@ -255,3 +255,47 @@ def test_get_filename_for_language(app):
app.env.config.figure_language_filename = '{root}.{invalid}{ext}'
with pytest.raises(SphinxError):
i18n.get_image_filename_for_language('foo.png', app.env)
def test_CatalogRepository(tempdir):
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs()
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test3.po').write_text('#')
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / '.dotdir').makedirs()
(tempdir / 'loc1' / 'xx' / 'LC_MESSAGES' / '.dotdir' / 'test5.po').write_text('#')
(tempdir / 'loc1' / 'yy' / 'LC_MESSAGES').makedirs()
(tempdir / 'loc1' / 'yy' / 'LC_MESSAGES' / 'test6.po').write_text('#')
(tempdir / 'loc2' / 'xx' / 'LC_MESSAGES').makedirs()
(tempdir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
(tempdir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test7.po').write_text('#')
# for language xx
repo = i18n.CatalogRepository(tempdir, ['loc1', 'loc2'], 'xx', 'utf-8')
assert list(repo.locale_dirs) == [str(tempdir / 'loc1'),
str(tempdir / 'loc2')]
assert all(isinstance(c, i18n.CatalogInfo) for c in repo.catalogs)
assert sorted(c.domain for c in repo.catalogs) == ['sub/test3', 'sub/test4',
'test1', 'test1', 'test2', 'test7']
# for language yy
repo = i18n.CatalogRepository(tempdir, ['loc1', 'loc2'], 'yy', 'utf-8')
assert sorted(c.domain for c in repo.catalogs) == ['test6']
# unknown languages
repo = i18n.CatalogRepository(tempdir, ['loc1', 'loc2'], 'zz', 'utf-8')
assert sorted(c.domain for c in repo.catalogs) == []
# no languages
repo = i18n.CatalogRepository(tempdir, ['loc1', 'loc2'], None, 'utf-8')
assert sorted(c.domain for c in repo.catalogs) == []
# unknown locale_dirs
repo = i18n.CatalogRepository(tempdir, ['loc3'], None, 'utf-8')
assert sorted(c.domain for c in repo.catalogs) == []
# no locale_dirs
repo = i18n.CatalogRepository(tempdir, [], None, 'utf-8')
assert sorted(c.domain for c in repo.catalogs) == []
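
For reference, the domains asserted above follow directly from the catalogs property: each .po path is taken relative to <locale_dir>/<language>/LC_MESSAGES, the extension is dropped, and separators are canonicalized. A small illustration (the /tmp paths are made up, not taken from the test):

from os import path
from sphinx.util.osutil import canon_path, relpath

basedir = '/tmp/loc1/xx/LC_MESSAGES'
po = '/tmp/loc1/xx/LC_MESSAGES/sub/test3.po'
canon_path(path.splitext(relpath(po, basedir))[0])   # 'sub/test3'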