merge with stable

This commit is contained in:
shimizukawa 2015-07-25 18:15:09 +02:00
commit d51228152e
18 changed files with 104 additions and 36 deletions

31
CHANGES
View File

@ -19,6 +19,37 @@ Bugs fixed
lines after class definitions.
* #1790: ``literalinclude`` strips empty lines at the head and tail.
* #1913: C++, fix assert bug for enumerators in next-to-global and global scope.
* #1790: ``literalinclude`` strips empty lines at the head and tail
* #1802: load plugin themes automatically when theme.conf uses it as 'inherit'. Thanks to
Takayuki Hirai.
* #1794: custom theme extended from alabaster or sphinx_rtd_theme can't find base theme.
* #1834: compatibility for docutils-0.13: handle_io_errors keyword argument for
docutils.io.FileInput cause TypeError.
* #1823: '.' as <module_path> for sphinx-apidoc caused an unfriendly error. Now '.'
is converted to absolute path automatically.
* Fix a crash when setting up extensions which do not support metadata.
* #1784: Provide non-minified JS code in ``sphinx/search/*.py``
* #1822, #1892: Fix regression for #1061. autosummary can't generate doc for imported
members since sphinx-1.3b3. Thanks to Eric Larson.
* #1793, #1819: "see also" misses a linebreak in text output. Thanks to Takayuki Hirai.
* #1780, #1866: "make text" shows "class" keyword twice. Thanks to Takayuki
Hirai.
* #1871: Fix for LaTeX output of tables with one column and multirows.
* Work around the lack of the HTMLParserError exception in Python 3.5.
* #1949: Use ``safe_getattr`` in the coverage builder to avoid aborting with
descriptors that have custom behavior.
* #1915: Do not generate smart quotes in doc field type annotations.
* #1796: On py3, automated .mo building caused UnicodeDecodeError.
* #1923: Use babel features only if the babel latex element is nonempty.
* #1942: Fix a KeyError in websupport.
* #1903: Fix strange id generation for glossary terms.
* #1796: On py3, automated .mo building caused a UnicodeDecodeError
* Fix: ``make text`` will crash if a definition list item has more than one classifier as:
* #1796: On py3, automated .mo building caused a UnicodeDecodeError
* ``make text`` will crash if a definition list item has more than one classifier as:
* Fixed #1855: make gettext generates broken po file for definition lists with classifier.
``term : classifier1 : classifier2``.
* #1855: make gettext generates broken po file for definition lists with classifier.
Documentation
-------------

View File

@ -53,6 +53,10 @@ It adds these directives:
"bar" -- "baz";
.. note:: The graph name is passed unchanged to Graphviz. If it contains
non-alphanumeric characters (e.g. a dash), you will have to double-quote
it.
.. rst:directive:: digraph

View File

@ -26,8 +26,8 @@ This extension is quite simple, and features only one directive:
:file:`conf.py`, e.g.::
def setup(app):
app.add_config_value('releaselevel', '', True)
app.add_config_value('releaselevel', '', 'env')
The second argument is the default value, the third should always be ``True``
The second argument is the default value, the third should always be ``'env'``
for such values (it selects if Sphinx re-reads the documents if the value
changes).

View File

@ -149,7 +149,7 @@ The new elements are added in the extension's setup function. Let us create a
new Python module called :file:`todo.py` and add the setup function::
def setup(app):
app.add_config_value('todo_include_todos', False, False)
app.add_config_value('todo_include_todos', False, 'html')
app.add_node(todolist)
app.add_node(todo,
@ -171,7 +171,7 @@ the individual calls do is the following:
new *config value* ``todo_include_todos``, whose default value should be
``False`` (this also tells Sphinx that it is a boolean value).
If the third argument was ``True``, all documents would be re-read if the
If the third argument was ``'html'``, HTML documents would be fully rebuilt if the
config value changed its value. This is needed for config values that
influence reading (build phase 1).

View File

@ -202,7 +202,8 @@ class Sphinx(object):
else:
locale_dirs = []
self.translator, has_translation = locale.init(locale_dirs,
self.config.language)
self.config.language,
charset=self.config.source_encoding)
if self.config.language is not None:
if has_translation or self.config.language == 'en':
# "en" never needs to be translated

View File

@ -170,6 +170,7 @@ class Builder(object):
catalogs = i18n.find_catalog_source_files(
[path.join(self.srcdir, x) for x in self.config.locale_dirs],
self.config.language,
charset=self.config.source_encoding,
gettext_compact=self.config.gettext_compact,
force_all=True)
message = 'all of %d po files' % len(catalogs)
@ -186,6 +187,7 @@ class Builder(object):
[path.join(self.srcdir, x) for x in self.config.locale_dirs],
self.config.language,
domains=list(specified_domains),
charset=self.config.source_encoding,
gettext_compact=self.config.gettext_compact)
message = 'targets for %d po files that are specified' % len(catalogs)
self.compile_catalogs(catalogs, message)
@ -194,6 +196,7 @@ class Builder(object):
catalogs = i18n.find_catalog_source_files(
[path.join(self.srcdir, x) for x in self.config.locale_dirs],
self.config.language,
charset=self.config.source_encoding,
gettext_compact=self.config.gettext_compact)
message = 'targets for %d po files that are out of date' % len(catalogs)
self.compile_catalogs(catalogs, message)

View File

@ -114,6 +114,8 @@ class WebSupportBuilder(PickleHTMLBuilder):
doc_ctx = {
'body': ctx.get('body', ''),
'title': ctx.get('title', ''),
'css': ctx.get('css', ''),
'script': ctx.get('script', ''),
}
# partially render the html template to get at interesting macros
template = self.templates.environment.get_template(templatename)

View File

@ -224,7 +224,7 @@ def make_termnodes_from_paragraph_node(env, node, new_id=None):
termtext = node.astext()
if new_id is None:
new_id = 'term-' + nodes.make_id(termtext)
new_id = nodes.make_id('term-' + termtext)
if new_id in gloss_entries:
new_id = 'term-' + str(len(gloss_entries))
gloss_entries.add(new_id)

View File

@ -195,7 +195,7 @@ else:
return translators['sphinx'].ugettext(message)
def init(locale_dirs, language, catalog='sphinx'):
def init(locale_dirs, language, catalog='sphinx', charset='utf-8'):
"""Look for message catalogs in `locale_dirs` and *ensure* that there is at
least a NullTranslations catalog set in `translators`. If called multiple
times or if several ``.mo`` files are found, their contents are merged
@ -212,7 +212,8 @@ def init(locale_dirs, language, catalog='sphinx'):
# compile mo files if po file is updated
# TODO: remove circular importing
from sphinx.util.i18n import find_catalog_source_files
for catinfo in find_catalog_source_files(locale_dirs, language, domains=[catalog]):
for catinfo in find_catalog_source_files(locale_dirs, language, domains=[catalog],
charset=charset):
catinfo.write_mo(language)
# loading

View File

@ -225,7 +225,8 @@ class Locale(Transform):
dirs = [path.join(env.srcdir, directory)
for directory in env.config.locale_dirs]
catalog, has_catalog = init_locale(dirs, env.config.language,
textdomain)
textdomain,
charset=env.config.source_encoding)
if not has_catalog:
return

View File

@ -9,6 +9,7 @@
:license: BSD, see LICENSE for details.
"""
import gettext
import io
from os import path
from collections import namedtuple
@ -19,7 +20,7 @@ from sphinx.util.osutil import walk
from sphinx.util import SEP
LocaleFileInfoBase = namedtuple('CatalogInfo', 'base_dir,domain')
LocaleFileInfoBase = namedtuple('CatalogInfo', 'base_dir,domain,charset')
class CatalogInfo(LocaleFileInfoBase):
@ -46,8 +47,8 @@ class CatalogInfo(LocaleFileInfoBase):
path.getmtime(self.mo_path) < path.getmtime(self.po_path))
def write_mo(self, locale):
with open(self.po_path, 'rt') as po:
with open(self.mo_path, 'wb') as mo:
with io.open(self.po_path, 'rt', encoding=self.charset) as po:
with io.open(self.mo_path, 'wb') as mo:
write_mo(mo, read_po(po, locale))
@ -72,7 +73,7 @@ def find_catalog_files(docname, srcdir, locale_dirs, lang, compaction):
def find_catalog_source_files(locale_dirs, locale, domains=None, gettext_compact=False,
force_all=False):
charset='utf-8', force_all=False):
"""
:param list locale_dirs:
list of path as `['locale_dir1', 'locale_dir2', ...]` to find
@ -112,7 +113,7 @@ def find_catalog_source_files(locale_dirs, locale, domains=None, gettext_compact
domain = domain.replace(path.sep, SEP)
if domains and domain not in domains:
continue
cat = CatalogInfo(base_dir, domain)
cat = CatalogInfo(base_dir, domain, charset)
if force_all or cat.is_outdated():
catalogs.add(cat)

View File

@ -36,20 +36,23 @@ caption_ref_re = explicit_title_re # b/w compat alias
def apply_source_workaround(node):
# workaround: nodes.term have wrong rawsource if classifier is specified.
# The behavior of docutils-0.11, 0.12 is:
# * when ``term text : classifier1 : classifier2`` is specified,
# * rawsource of term node will have: ``term text : classifier1 : classifier2``
# * rawsource of classifier node will be None
if isinstance(node, nodes.classifier) and not node.rawsource:
definition_list_item = node.parent
node.source = definition_list_item.source
node.line = definition_list_item.line - 1
node.rawsource = node.astext() # set 'classifier1' (or 'classifier2')
if isinstance(node, nodes.term):
# overwrite: ``term : classifier1 : classifier2`` -> ``term text``
node.rawsource = node.astext()
if node.source and node.rawsource:
return
# workaround: nodes.term doesn't have source, line and rawsource
# (fixed in Docutils r7495)
if isinstance(node, nodes.term):
definition_list_item = node.parent
if definition_list_item.line is not None:
node.source = definition_list_item.source
node.line = definition_list_item.line - 1
node.rawsource = definition_list_item. \
rawsource.split("\n", 2)[0]
return
# workaround: docutils-0.10.0 or older's nodes.caption for nodes.figure
# and nodes.title for nodes.admonition doesn't have source, line.
# this issue was filed to Docutils tracker:

View File

@ -342,8 +342,12 @@ class LaTeXTranslator(nodes.NodeVisitor):
else:
language = 'english'
babel_prefix = '\\addto\\captions%s{' % language
babel_suffix = '}'
if self.elements['babel']:
babel_prefix = '\\addto\\captions%s{' % language
babel_suffix = '}'
else:
babel_prefix = ''
babel_suffix = ''
figure = self.builder.config.numfig_format['figure'].split('%s', 1)
if len(figure) == 1:

View File

@ -628,8 +628,7 @@ class TextTranslator(nodes.NodeVisitor):
self.end_state(first='%s. ' % self.list_counter[-1])
def visit_definition_list_item(self, node):
self._li_has_classifier = len(node) >= 2 and \
isinstance(node[1], nodes.classifier)
self._classifier_count_in_li = len(node.traverse(nodes.classifier))
def depart_definition_list_item(self, node):
pass
@ -638,7 +637,7 @@ class TextTranslator(nodes.NodeVisitor):
self.new_state(0)
def depart_term(self, node):
if not self._li_has_classifier:
if not self._classifier_count_in_li:
self.end_state(end=None)
def visit_termsep(self, node):
@ -649,7 +648,9 @@ class TextTranslator(nodes.NodeVisitor):
self.add_text(' : ')
def depart_classifier(self, node):
self.end_state(end=None)
self._classifier_count_in_li -= 1
if not self._classifier_count_in_li:
self.end_state(end=None)
def visit_definition(self, node):
self.new_state()

View File

@ -30,3 +30,13 @@ msgstr "SOME OTHER TERM"
msgid "The corresponding definition #2"
msgstr "THE CORRESPONDING DEFINITION #2"
msgid "Some term with"
msgstr "SOME TERM WITH"
msgid "classifier1"
msgstr "CLASSIFIER1"
msgid "classifier2"
msgstr "CLASSIFIER2"

View File

@ -9,3 +9,6 @@ Some term
Some other term
The corresponding definition #2
Some term with : classifier1 : classifier2
The corresponding definition

View File

@ -183,7 +183,10 @@ def test_text_builder(app, status, warning):
u"\nSOME TERM"
u"\n THE CORRESPONDING DEFINITION\n"
u"\nSOME OTHER TERM"
u"\n THE CORRESPONDING DEFINITION #2\n")
u"\n THE CORRESPONDING DEFINITION #2\n"
u"\nSOME TERM WITH : CLASSIFIER1 : CLASSIFIER2"
u"\n THE CORRESPONDING DEFINITION\n"
)
yield assert_equal, result, expect
# --- glossary terms: regression test for #1090

View File

@ -20,7 +20,7 @@ from util import with_tempdir
def test_catalog_info_for_file_and_path():
cat = i18n.CatalogInfo('path', 'domain')
cat = i18n.CatalogInfo('path', 'domain', 'utf-8')
assert cat.po_file == 'domain.po'
assert cat.mo_file == 'domain.mo'
assert cat.po_path == path.join('path', 'domain.po')
@ -28,7 +28,7 @@ def test_catalog_info_for_file_and_path():
def test_catalog_info_for_sub_domain_file_and_path():
cat = i18n.CatalogInfo('path', 'sub/domain')
cat = i18n.CatalogInfo('path', 'sub/domain', 'utf-8')
assert cat.po_file == 'sub/domain.po'
assert cat.mo_file == 'sub/domain.mo'
assert cat.po_path == path.join('path', 'sub/domain.po')
@ -38,7 +38,7 @@ def test_catalog_info_for_sub_domain_file_and_path():
@with_tempdir
def test_catalog_outdated(dir):
(dir / 'test.po').write_text('#')
cat = i18n.CatalogInfo(dir, 'test')
cat = i18n.CatalogInfo(dir, 'test', 'utf-8')
assert cat.is_outdated() # if mo is not exist
mo_file = (dir / 'test.mo')
@ -52,7 +52,7 @@ def test_catalog_outdated(dir):
@with_tempdir
def test_catalog_write_mo(dir):
(dir / 'test.po').write_text('#')
cat = i18n.CatalogInfo(dir, 'test')
cat = i18n.CatalogInfo(dir, 'test', 'utf-8')
cat.write_mo('en')
assert path.exists(cat.mo_path)
assert read_mo(open(cat.mo_path, 'rb')) is not None