mirror of https://github.com/sphinx-doc/sphinx.git
synced 2025-02-25 18:55:22 -06:00

merge commit 1bd7aaed7f

CHANGES
@@ -1,6 +1,8 @@
 Release 1.2 (in development)
 ============================

+* Fix: text builder did not respect wide/fullwidth characters.
+
 * #1062: sphinx.ext.autodoc use __init__ method signature for class signature.

 * PR#111: Respect add_autodoc_attrgetter() even when inherited-members is set.
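
The first new CHANGES entry is the heart of this commit: the text builder
measured titles and table cells with len(), which undercounts East Asian
wide/fullwidth characters. A minimal sketch of the distinction (my own
illustration; display_width() is a hypothetical stand-in for docutils'
column_width()):

    from unicodedata import east_asian_width

    def display_width(text):
        # Wide ('W') and fullwidth ('F') characters occupy two columns each.
        return sum(2 if east_asian_width(c) in ('F', 'W') else 1
                   for c in text)

    title = u'\u65e5\u672c\u8a9e'      # three Japanese characters
    assert len(title) == 3             # character count
    assert display_width(title) == 6   # columns actually occupied
    # A section underline therefore needs six markers, not three.
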
setup.cfg
@@ -4,6 +4,7 @@ tag_date = true

 [aliases]
 release = egg_info -RDb ''
+upload = upload --sign --identity=36580288

 [extract_messages]
 mapping_file = babel.cfg
sphinx/environment.py
@@ -26,28 +26,27 @@ from itertools import izip, groupby
 from docutils import nodes
 from docutils.io import FileInput, NullOutput
 from docutils.core import Publisher
-from docutils.utils import Reporter, relative_path, new_document, \
-    get_source_line
+from docutils.utils import Reporter, relative_path, get_source_line
 from docutils.readers import standalone
-from docutils.parsers.rst import roles, directives, Parser as RSTParser
+from docutils.parsers.rst import roles, directives
 from docutils.parsers.rst.languages import en as english
 from docutils.parsers.rst.directives.html import MetaBody
 from docutils.writers import UnfilteredWriter
-from docutils.transforms import Transform
-from docutils.transforms.parts import ContentsFilter

 from sphinx import addnodes
 from sphinx.util import url_re, get_matching_docs, docname_join, split_into, \
-    split_index_msg, FilenameUniqDict
-from sphinx.util.nodes import clean_astext, make_refnode, extract_messages, \
-    traverse_translatable_index, WarningStream
-from sphinx.util.osutil import SEP, ustrftime, find_catalog, fs_encoding
+    FilenameUniqDict
+from sphinx.util.nodes import clean_astext, make_refnode, WarningStream
+from sphinx.util.osutil import SEP, fs_encoding
 from sphinx.util.matching import compile_matchers
-from sphinx.util.pycompat import all, class_types
+from sphinx.util.pycompat import class_types
 from sphinx.util.websupport import is_commentable
 from sphinx.errors import SphinxError, ExtensionError
-from sphinx.locale import _, init as init_locale
+from sphinx.locale import _
 from sphinx.versioning import add_uids, merge_doctrees
+from sphinx.transforms import DefaultSubstitutions, MoveModuleTargets, \
+    HandleCodeBlocks, SortIds, CitationReferences, Locale, \
+    RemoveTranslatableInline, SphinxContentsFilter


 orig_role_function = roles.role
@@ -73,12 +72,6 @@ default_settings = {
 ENV_VERSION = 42 + (sys.version_info[0] - 2)


-default_substitutions = set([
-    'version',
-    'release',
-    'today',
-])
-
 dummy_reporter = Reporter('', 4, 4)

 versioning_conditions = {
@@ -93,261 +86,6 @@ class NoUri(Exception):
     pass


-class DefaultSubstitutions(Transform):
-    """
-    Replace some substitutions if they aren't defined in the document.
-    """
-    # run before the default Substitutions
-    default_priority = 210
-
-    def apply(self):
-        config = self.document.settings.env.config
-        # only handle those not otherwise defined in the document
-        to_handle = default_substitutions - set(self.document.substitution_defs)
-        for ref in self.document.traverse(nodes.substitution_reference):
-            refname = ref['refname']
-            if refname in to_handle:
-                text = config[refname]
-                if refname == 'today' and not text:
-                    # special handling: can also specify a strftime format
-                    text = ustrftime(config.today_fmt or _('%B %d, %Y'))
-                ref.replace_self(nodes.Text(text, text))
-
-
-class MoveModuleTargets(Transform):
-    """
-    Move module targets that are the first thing in a section to the section
-    title.
-
-    XXX Python specific
-    """
-    default_priority = 210
-
-    def apply(self):
-        for node in self.document.traverse(nodes.target):
-            if not node['ids']:
-                continue
-            if (node.has_key('ismod') and
-                node.parent.__class__ is nodes.section and
-                # index 0 is the section title node
-                node.parent.index(node) == 1):
-                node.parent['ids'][0:0] = node['ids']
-                node.parent.remove(node)
-
-
-class HandleCodeBlocks(Transform):
-    """
-    Several code block related transformations.
-    """
-    default_priority = 210
-
-    def apply(self):
-        # move doctest blocks out of blockquotes
-        for node in self.document.traverse(nodes.block_quote):
-            if all(isinstance(child, nodes.doctest_block) for child
-                   in node.children):
-                node.replace_self(node.children)
-        # combine successive doctest blocks
-        #for node in self.document.traverse(nodes.doctest_block):
-        #    if node not in node.parent.children:
-        #        continue
-        #    parindex = node.parent.index(node)
-        #    while len(node.parent) > parindex+1 and \
-        #            isinstance(node.parent[parindex+1], nodes.doctest_block):
-        #        node[0] = nodes.Text(node[0] + '\n\n' +
-        #                             node.parent[parindex+1][0])
-        #        del node.parent[parindex+1]
-
-
-class SortIds(Transform):
-    """
-    Sort section IDs so that the "id[0-9]+" one comes last.
-    """
-    default_priority = 261
-
-    def apply(self):
-        for node in self.document.traverse(nodes.section):
-            if len(node['ids']) > 1 and node['ids'][0].startswith('id'):
-                node['ids'] = node['ids'][1:] + [node['ids'][0]]
-
-
-class CitationReferences(Transform):
-    """
-    Replace citation references by pending_xref nodes before the default
-    docutils transform tries to resolve them.
-    """
-    default_priority = 619
-
-    def apply(self):
-        for citnode in self.document.traverse(nodes.citation_reference):
-            cittext = citnode.astext()
-            refnode = addnodes.pending_xref(cittext, reftype='citation',
-                                            reftarget=cittext, refwarn=True,
-                                            ids=citnode["ids"])
-            refnode.line = citnode.line or citnode.parent.line
-            refnode += nodes.Text('[' + cittext + ']')
-            citnode.parent.replace(citnode, refnode)
-
-
-class Locale(Transform):
-    """
-    Replace translatable nodes with their translated doctree.
-    """
-    default_priority = 0
-
-    def apply(self):
-        env = self.document.settings.env
-        settings, source = self.document.settings, self.document['source']
-        # XXX check if this is reliable
-        assert source.startswith(env.srcdir)
-        docname = path.splitext(relative_path(env.srcdir, source))[0]
-        textdomain = find_catalog(docname,
-                                  self.document.settings.gettext_compact)
-
-        # fetch translations
-        dirs = [path.join(env.srcdir, directory)
-                for directory in env.config.locale_dirs]
-        catalog, has_catalog = init_locale(dirs, env.config.language,
-                                           textdomain)
-        if not has_catalog:
-            return
-
-        parser = RSTParser()
-
-        for node, msg in extract_messages(self.document):
-            msgstr = catalog.gettext(msg)
-            # XXX add marker to untranslated parts
-            if not msgstr or msgstr == msg:  # as-of-yet untranslated
-                continue
-
-            # Avoid "Literal block expected; none found." warnings.
-            # If msgstr ends with '::', parser.parse() would emit that
-            # warning; the literal-block warning appears only in this case.
-            if msgstr.strip().endswith('::'):
-                msgstr += '\n\n dummy literal'
-                # the dummy literal node is discarded by 'patch = patch[0]'
-
-            patch = new_document(source, settings)
-            parser.parse(msgstr, patch)
-            patch = patch[0]
-            # XXX doctest and other block markup
-            if not isinstance(patch, nodes.paragraph):
-                continue  # skip for now
-
-            # auto-numbered footnote refs should use the original 'ids'.
-            def is_autonumber_footnote_ref(node):
-                return isinstance(node, nodes.footnote_reference) and \
-                    node.get('auto') == 1
-            old_foot_refs = node.traverse(is_autonumber_footnote_ref)
-            new_foot_refs = patch.traverse(is_autonumber_footnote_ref)
-            if len(old_foot_refs) != len(new_foot_refs):
-                env.warn_node('inconsistent footnote references in '
-                              'translated message', node)
-            for old, new in zip(old_foot_refs, new_foot_refs):
-                new['ids'] = old['ids']
-                self.document.autofootnote_refs.remove(old)
-                self.document.note_autofootnote_ref(new)
-
-            # references should use the original 'refname'.
-            # * reference target ".. _Python: ..." is not translatable.
-            # * section refname is not translatable.
-            # * inline reference "`Python <...>`_" has no 'refname'.
-            def is_refnamed_ref(node):
-                return isinstance(node, nodes.reference) and \
-                    'refname' in node
-            old_refs = node.traverse(is_refnamed_ref)
-            new_refs = patch.traverse(is_refnamed_ref)
-            applied_refname_map = {}
-            if len(old_refs) != len(new_refs):
-                env.warn_node('inconsistent references in '
-                              'translated message', node)
-            for new in new_refs:
-                if new['refname'] in applied_refname_map:
-                    # 2nd appearance of the reference
-                    new['refname'] = applied_refname_map[new['refname']]
-                elif old_refs:
-                    # 1st appearance of the reference in old_refs
-                    old = old_refs.pop(0)
-                    refname = old['refname']
-                    new['refname'] = refname
-                    applied_refname_map[new['refname']] = refname
-                else:
-                    # the reference is not found in old_refs
-                    applied_refname_map[new['refname']] = new['refname']
-
-                self.document.note_refname(new)
-
-            # refnamed footnotes and citations should use the original 'ids'.
-            def is_refnamed_footnote_ref(node):
-                footnote_ref_classes = (nodes.footnote_reference,
-                                        nodes.citation_reference)
-                return isinstance(node, footnote_ref_classes) and \
-                    'refname' in node
-            old_refs = node.traverse(is_refnamed_footnote_ref)
-            new_refs = patch.traverse(is_refnamed_footnote_ref)
-            refname_ids_map = {}
-            if len(old_refs) != len(new_refs):
-                env.warn_node('inconsistent references in '
-                              'translated message', node)
-            for old in old_refs:
-                refname_ids_map[old["refname"]] = old["ids"]
-            for new in new_refs:
-                refname = new["refname"]
-                if refname in refname_ids_map:
-                    new["ids"] = refname_ids_map[refname]
-
-            # The original pending_xref['reftarget'] contains the
-            # untranslated target name; the new pending_xref must use it.
-            old_refs = node.traverse(addnodes.pending_xref)
-            new_refs = patch.traverse(addnodes.pending_xref)
-            if len(old_refs) != len(new_refs):
-                env.warn_node('inconsistent term references in '
-                              'translated message', node)
-            for old, new in zip(old_refs, new_refs):
-                new['reftarget'] = old['reftarget']
-
-            # update leaves
-            for child in patch.children:
-                child.parent = node
-            node.children = patch.children
-
-        # Extract and translate messages for index entries.
-        for node, entries in traverse_translatable_index(self.document):
-            new_entries = []
-            for type, msg, tid, main in entries:
-                msg_parts = split_index_msg(type, msg)
-                msgstr_parts = []
-                for part in msg_parts:
-                    msgstr = catalog.gettext(part)
-                    if not msgstr:
-                        msgstr = part
-                    msgstr_parts.append(msgstr)
-
-                new_entries.append((type, ';'.join(msgstr_parts), tid, main))
-
-            node['raw_entries'] = entries
-            node['entries'] = new_entries
-
-
-class RemoveTranslatableInline(Transform):
-    """
-    Remove inline nodes used for translation as placeholders.
-    """
-    default_priority = 999
-
-    def apply(self):
-        from sphinx.builders.gettext import MessageCatalogBuilder
-        env = self.document.settings.env
-        builder = env.app.builder
-        if isinstance(builder, MessageCatalogBuilder):
-            return
-        for inline in self.document.traverse(nodes.inline):
-            if 'translatable' in inline:
-                inline.parent.remove(inline)
-                inline.parent += inline.children
-
-
 class SphinxStandaloneReader(standalone.Reader):
     """
     Add our own transforms.
@@ -367,20 +105,6 @@ class SphinxDummyWriter(UnfilteredWriter):
     pass


-class SphinxContentsFilter(ContentsFilter):
-    """
-    Used with BuildEnvironment.add_toc_from() to discard cross-file links
-    within table-of-contents link nodes.
-    """
-    def visit_pending_xref(self, node):
-        text = node.astext()
-        self.parent.append(nodes.literal(text, text))
-        raise nodes.SkipNode
-
-    def visit_image(self, node):
-        raise nodes.SkipNode
-
-
 class BuildEnvironment:
     """
     The environment in which the ReST files are translated.
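
The body of SphinxStandaloneReader is elided in this hunk. For orientation,
a hedged sketch of the conventional docutils pattern it relies on; the exact
transform list here is my assumption, not shown in the diff:

    from docutils.readers import standalone
    from sphinx.transforms import (DefaultSubstitutions, MoveModuleTargets,
                                   HandleCodeBlocks, SortIds,
                                   CitationReferences, Locale,
                                   RemoveTranslatableInline)

    class SphinxStandaloneReader(standalone.Reader):
        # The reader contributes Sphinx's transforms on top of the docutils
        # defaults; after this commit they come from sphinx.transforms.
        transforms = [Locale, CitationReferences, DefaultSubstitutions,
                      MoveModuleTargets, HandleCodeBlocks, SortIds,
                      RemoveTranslatableInline]

        def get_transforms(self):
            return standalone.Reader.get_transforms(self) + self.transforms
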

sphinx/transforms.py (new file, 337 lines)
@@ -0,0 +1,337 @@
# -*- coding: utf-8 -*-
"""
    sphinx.transforms
    ~~~~~~~~~~~~~~~~~

    Docutils transforms used by Sphinx when reading documents.

    :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

from os import path

from docutils import nodes
from docutils.utils import new_document, relative_path
from docutils.parsers.rst import Parser as RSTParser
from docutils.transforms import Transform
from docutils.transforms.parts import ContentsFilter

from sphinx import addnodes
from sphinx.locale import _, init as init_locale
from sphinx.util import split_index_msg
from sphinx.util.nodes import traverse_translatable_index, extract_messages
from sphinx.util.osutil import ustrftime, find_catalog
from sphinx.util.pycompat import all


default_substitutions = set([
    'version',
    'release',
    'today',
])

class DefaultSubstitutions(Transform):
    """
    Replace some substitutions if they aren't defined in the document.
    """
    # run before the default Substitutions
    default_priority = 210

    def apply(self):
        config = self.document.settings.env.config
        # only handle those not otherwise defined in the document
        to_handle = default_substitutions - set(self.document.substitution_defs)
        for ref in self.document.traverse(nodes.substitution_reference):
            refname = ref['refname']
            if refname in to_handle:
                text = config[refname]
                if refname == 'today' and not text:
                    # special handling: can also specify a strftime format
                    text = ustrftime(config.today_fmt or _('%B %d, %Y'))
                ref.replace_self(nodes.Text(text, text))


class MoveModuleTargets(Transform):
    """
    Move module targets that are the first thing in a section to the section
    title.

    XXX Python specific
    """
    default_priority = 210

    def apply(self):
        for node in self.document.traverse(nodes.target):
            if not node['ids']:
                continue
            if (node.has_key('ismod') and
                node.parent.__class__ is nodes.section and
                # index 0 is the section title node
                node.parent.index(node) == 1):
                node.parent['ids'][0:0] = node['ids']
                node.parent.remove(node)


class HandleCodeBlocks(Transform):
    """
    Several code block related transformations.
    """
    default_priority = 210

    def apply(self):
        # move doctest blocks out of blockquotes
        for node in self.document.traverse(nodes.block_quote):
            if all(isinstance(child, nodes.doctest_block) for child
                   in node.children):
                node.replace_self(node.children)
        # combine successive doctest blocks
        #for node in self.document.traverse(nodes.doctest_block):
        #    if node not in node.parent.children:
        #        continue
        #    parindex = node.parent.index(node)
        #    while len(node.parent) > parindex+1 and \
        #            isinstance(node.parent[parindex+1], nodes.doctest_block):
        #        node[0] = nodes.Text(node[0] + '\n\n' +
        #                             node.parent[parindex+1][0])
        #        del node.parent[parindex+1]


class SortIds(Transform):
    """
    Sort section IDs so that the "id[0-9]+" one comes last.
    """
    default_priority = 261

    def apply(self):
        for node in self.document.traverse(nodes.section):
            if len(node['ids']) > 1 and node['ids'][0].startswith('id'):
                node['ids'] = node['ids'][1:] + [node['ids'][0]]


class CitationReferences(Transform):
    """
    Replace citation references by pending_xref nodes before the default
    docutils transform tries to resolve them.
    """
    default_priority = 619

    def apply(self):
        for citnode in self.document.traverse(nodes.citation_reference):
            cittext = citnode.astext()
            refnode = addnodes.pending_xref(cittext, reftype='citation',
                                            reftarget=cittext, refwarn=True,
                                            ids=citnode["ids"])
            refnode.line = citnode.line or citnode.parent.line
            refnode += nodes.Text('[' + cittext + ']')
            citnode.parent.replace(citnode, refnode)


class CustomLocaleReporter(object):
    """
    Replacer for the document.reporter.get_source_and_line method.

    reST text lines for translation do not have the original source line
    numbers.  This class provides the correct line numbers when reporting.
    """
    def __init__(self, source, line):
        self.source, self.line = source, line

        try:
            from docutils import __version__ as du_version
            v = tuple([int(x) for x in du_version.split('.')[:2]])
        except ImportError:
            v = (99, 99)
        self.du_version = v

    def set_reporter(self, document):
        if self.du_version < (0, 9):
            document.reporter.locator = self.get_source_and_line
        else:
            document.reporter.get_source_and_line = self.get_source_and_line

    def get_source_and_line(self, lineno=None):
        return self.source, self.line


class Locale(Transform):
    """
    Replace translatable nodes with their translated doctree.
    """
    default_priority = 0

    def apply(self):
        env = self.document.settings.env
        settings, source = self.document.settings, self.document['source']
        # XXX check if this is reliable
        assert source.startswith(env.srcdir)
        docname = path.splitext(relative_path(env.srcdir, source))[0]
        textdomain = find_catalog(docname,
                                  self.document.settings.gettext_compact)

        # fetch translations
        dirs = [path.join(env.srcdir, directory)
                for directory in env.config.locale_dirs]
        catalog, has_catalog = init_locale(dirs, env.config.language,
                                           textdomain)
        if not has_catalog:
            return

        parser = RSTParser()

        for node, msg in extract_messages(self.document):
            msgstr = catalog.gettext(msg)
            # XXX add marker to untranslated parts
            if not msgstr or msgstr == msg:  # as-of-yet untranslated
                continue

            # Avoid "Literal block expected; none found." warnings.
            # If msgstr ends with '::', parser.parse() would emit that
            # warning; the literal-block warning appears only in this case.
            if msgstr.strip().endswith('::'):
                msgstr += '\n\n dummy literal'
                # the dummy literal node is discarded by 'patch = patch[0]'

            patch = new_document(source, settings)
            CustomLocaleReporter(node.source, node.line).set_reporter(patch)
            parser.parse(msgstr, patch)
            patch = patch[0]
            # XXX doctest and other block markup
            if not isinstance(patch, nodes.paragraph):
                continue  # skip for now

            # auto-numbered footnote refs should use the original 'ids'.
            def is_autonumber_footnote_ref(node):
                return isinstance(node, nodes.footnote_reference) and \
                    node.get('auto') == 1
            old_foot_refs = node.traverse(is_autonumber_footnote_ref)
            new_foot_refs = patch.traverse(is_autonumber_footnote_ref)
            if len(old_foot_refs) != len(new_foot_refs):
                env.warn_node('inconsistent footnote references in '
                              'translated message', node)
            for old, new in zip(old_foot_refs, new_foot_refs):
                new['ids'] = old['ids']
                for id in new['ids']:
                    self.document.ids[id] = new
                self.document.autofootnote_refs.remove(old)
                self.document.note_autofootnote_ref(new)

            # references should use the original 'refname'.
            # * reference target ".. _Python: ..." is not translatable.
            # * section refname is not translatable.
            # * inline reference "`Python <...>`_" has no 'refname'.
            def is_refnamed_ref(node):
                return isinstance(node, nodes.reference) and \
                    'refname' in node
            old_refs = node.traverse(is_refnamed_ref)
            new_refs = patch.traverse(is_refnamed_ref)
            applied_refname_map = {}
            if len(old_refs) != len(new_refs):
                env.warn_node('inconsistent references in '
                              'translated message', node)
            for new in new_refs:
                if new['refname'] in applied_refname_map:
                    # 2nd appearance of the reference
                    new['refname'] = applied_refname_map[new['refname']]
                elif old_refs:
                    # 1st appearance of the reference in old_refs
                    old = old_refs.pop(0)
                    refname = old['refname']
                    new['refname'] = refname
                    applied_refname_map[new['refname']] = refname
                else:
                    # the reference is not found in old_refs
                    applied_refname_map[new['refname']] = new['refname']

                self.document.note_refname(new)

            # refnamed footnotes and citations should use the original 'ids'.
            def is_refnamed_footnote_ref(node):
                footnote_ref_classes = (nodes.footnote_reference,
                                        nodes.citation_reference)
                return isinstance(node, footnote_ref_classes) and \
                    'refname' in node
            old_refs = node.traverse(is_refnamed_footnote_ref)
            new_refs = patch.traverse(is_refnamed_footnote_ref)
            refname_ids_map = {}
            if len(old_refs) != len(new_refs):
                env.warn_node('inconsistent references in '
                              'translated message', node)
            for old in old_refs:
                refname_ids_map[old["refname"]] = old["ids"]
            for new in new_refs:
                refname = new["refname"]
                if refname in refname_ids_map:
                    new["ids"] = refname_ids_map[refname]

            # The original pending_xref['reftarget'] contains the
            # untranslated target name; the new pending_xref must use the
            # original one.  This keeps the translation from changing
            # ref-targets.
            old_refs = node.traverse(addnodes.pending_xref)
            new_refs = patch.traverse(addnodes.pending_xref)
            xref_reftarget_map = {}
            if len(old_refs) != len(new_refs):
                env.warn_node('inconsistent term references in '
                              'translated message', node)
            for old in old_refs:
                key = old["reftype"], old["refdomain"]
                xref_reftarget_map[key] = old["reftarget"]
            for new in new_refs:
                key = new["reftype"], new["refdomain"]
                if key in xref_reftarget_map:
                    new['reftarget'] = xref_reftarget_map[key]

            # update leaves
            for child in patch.children:
                child.parent = node
            node.children = patch.children

        # Extract and translate messages for index entries.
        for node, entries in traverse_translatable_index(self.document):
            new_entries = []
            for type, msg, tid, main in entries:
                msg_parts = split_index_msg(type, msg)
                msgstr_parts = []
                for part in msg_parts:
                    msgstr = catalog.gettext(part)
                    if not msgstr:
                        msgstr = part
                    msgstr_parts.append(msgstr)

                new_entries.append((type, ';'.join(msgstr_parts), tid, main))

            node['raw_entries'] = entries
            node['entries'] = new_entries


class RemoveTranslatableInline(Transform):
    """
    Remove inline nodes used for translation as placeholders.
    """
    default_priority = 999

    def apply(self):
        from sphinx.builders.gettext import MessageCatalogBuilder
        env = self.document.settings.env
        builder = env.app.builder
        if isinstance(builder, MessageCatalogBuilder):
            return
        for inline in self.document.traverse(nodes.inline):
            if 'translatable' in inline:
                inline.parent.remove(inline)
                inline.parent += inline.children


class SphinxContentsFilter(ContentsFilter):
    """
    Used with BuildEnvironment.add_toc_from() to discard cross-file links
    within table-of-contents link nodes.
    """
    def visit_pending_xref(self, node):
        text = node.astext()
        self.parent.append(nodes.literal(text, text))
        raise nodes.SkipNode

    def visit_image(self, node):
        raise nodes.SkipNode
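
A hedged sketch (my illustration, not part of the commit) of what
CustomLocaleReporter above accomplishes: a parsed msgstr snippet carries no
original source line numbers, so the patched reporter pins every system
message to the position of the node being translated:

    from docutils.frontend import OptionParser
    from docutils.parsers.rst import Parser as RSTParser
    from docutils.utils import new_document

    settings = OptionParser(components=(RSTParser,)).get_default_values()
    patch = new_document('<msgstr>', settings)

    # Equivalent of CustomLocaleReporter('warnings.txt', 4).set_reporter(patch)
    # on docutils >= 0.9:
    patch.reporter.get_source_and_line = lambda lineno=None: ('warnings.txt', 4)

    RSTParser().parse(u'LINE OF ``BROKEN LITERAL MARKUP.', patch)
    # The resulting "Inline literal start-string without end-string" warning
    # is now attributed to warnings.txt:4, which is exactly what
    # test_i18n_warnings_in_translation asserts below.
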
sphinx/writers/text.py
@@ -13,6 +13,7 @@ import re
 import textwrap

 from docutils import nodes, writers
+from docutils.utils import column_width

 from sphinx import addnodes
 from sphinx.locale import admonitionlabels, versionlabels, _
@@ -165,7 +166,8 @@ class TextTranslator(nodes.NodeVisitor):
         char = '^'
         text = ''.join(x[1] for x in self.states.pop() if x[0] == -1)
         self.stateindent.pop()
-        self.states[-1].append((0, ['', text, '%s' % (char * len(text)), '']))
+        self.states[-1].append(
+            (0, ['', text, '%s' % (char * column_width(text)), '']))

     def visit_subtitle(self, node):
         pass
@@ -391,7 +393,7 @@ class TextTranslator(nodes.NodeVisitor):
        for i, cell in enumerate(line):
            par = my_wrap(cell, width=colwidths[i])
            if par:
-               maxwidth = max(map(len, par))
+               maxwidth = max(map(column_width, par))
            else:
                maxwidth = 0
            realwidths[i] = max(realwidths[i], maxwidth)
@@ -411,7 +413,9 @@ class TextTranslator(nodes.NodeVisitor):
            out = ['|']
            for i, cell in enumerate(line):
                if cell:
-                   out.append(' ' + cell.ljust(realwidths[i]+1))
+                   adjust_len = len(cell) - column_width(cell)
+                   out.append(' ' + cell.ljust(
+                       realwidths[i] + 1 + adjust_len))
                else:
                    out.append(' ' * (realwidths[i] + 2))
                out.append('|')
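
The last hunk above is the subtle one: str.ljust() pads to a character
count, not a display width, so a cell of wide characters would otherwise be
padded too far. A small sketch (assuming docutils' column_width(), as the
hunk does) of the adjust_len compensation:

    from docutils.utils import column_width

    def pad_cell(cell, width):
        # ljust() counts characters; each wide glyph already covers two
        # columns, so shrink the pad target accordingly (len - column_width
        # is <= 0 for text containing wide characters).
        adjust_len = len(cell) - column_width(cell)
        return cell.ljust(width + adjust_len)

    assert column_width(pad_cell(u'spam', 6)) == 6          # ASCII: unchanged
    assert column_width(pad_cell(u'\u65e5\u672c', 6)) == 6  # 2 wide + 2 spaces
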
tests/roots/test-intl/contents.txt
@@ -1,9 +1,13 @@
CONTENTS
========

.. toctree::
   :maxdepth: 2
   :numbered:

   subdir/contents
   bom
   warnings
   footnote
   external_links
   refs_inconsistency
@@ -12,6 +16,7 @@
   definition_terms
   figure_caption
   index_entries
   role_xref
   glossary_terms
   glossary_terms_inconsistency
   docfields

tests/roots/test-intl/role_xref.po (new file, 23 lines)
@@ -0,0 +1,23 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) 2012, foof
# This file is distributed under the same license as the foo package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2013-02-04 14:00\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"

msgid "i18n role xref"
msgstr "I18N ROCK'N ROLE XREF"

msgid "link to :term:`Some term`, :ref:`i18n-role-xref`, :doc:`contents`."
msgstr "LINK TO :ref:`i18n-role-xref`, :doc:`contents`, :term:`SOME NEW TERM`."

tests/roots/test-intl/role_xref.txt (new file, 9 lines)
@@ -0,0 +1,9 @@
:tocdepth: 2

.. _i18n-role-xref:

i18n role xref
==============

link to :term:`Some term`, :ref:`i18n-role-xref`, :doc:`contents`.
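
Note how the translated msgstr above reorders the three cross-references and
renames the term. A toy sketch (my own, using plain dicts in place of
pending_xref nodes) of the reftarget restoration in sphinx.transforms.Locale
that makes this safe:

    old_refs = [{'reftype': 'term', 'refdomain': 'std',
                 'reftarget': 'Some term'}]
    new_refs = [{'reftype': 'term', 'refdomain': 'std',
                 'reftarget': 'SOME NEW TERM'}]

    # Key the original targets by (reftype, refdomain) and copy them onto
    # the translated xrefs, so :term:`SOME NEW TERM` still resolves to the
    # original "Some term" glossary entry.
    xref_reftarget_map = {}
    for old in old_refs:
        xref_reftarget_map[old['reftype'], old['refdomain']] = old['reftarget']
    for new in new_refs:
        key = new['reftype'], new['refdomain']
        if key in xref_reftarget_map:
            new['reftarget'] = xref_reftarget_map[key]

    assert new_refs[0]['reftarget'] == 'Some term'
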
tests/roots/test-intl/warnings.po (new file, 23 lines)
@@ -0,0 +1,23 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) 2010, Georg Brandl & Team
# This file is distributed under the same license as the Sphinx <Tests> package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: Sphinx <Tests> 0.6\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2013-02-04 13:06\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"

msgid "i18n with reST warnings"
msgstr "I18N WITH REST WARNINGS"

msgid "line of ``literal`` markup."
msgstr "LINE OF ``BROKEN LITERAL MARKUP."
tests/roots/test-intl/warnings.txt (new file, 5 lines)
@@ -0,0 +1,5 @@
i18n with reST warnings
========================

line of ``literal`` markup.
tests/test_build_text.py (new file, 65 lines)
@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
"""
    test_build_text
    ~~~~~~~~~~~~~~~

    Test the build process with Text builder with the test root.

    :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

from textwrap import dedent

from docutils.utils import column_width

from util import *


def with_text_app(*args, **kw):
    default_kw = {
        'buildername': 'text',
        'srcdir': '(empty)',
        'confoverrides': {
            'project': 'text',
            'master_doc': 'contents',
        },
    }
    default_kw.update(kw)
    return with_app(*args, **default_kw)


@with_text_app()
def test_multibyte_title_line(app):
    title = u'\u65e5\u672c\u8a9e'
    underline = u'=' * column_width(title)
    content = u'\n'.join((title, underline, u''))

    (app.srcdir / 'contents.rst').write_text(content, encoding='utf-8')
    app.builder.build_all()
    result = (app.outdir / 'contents.txt').text(encoding='utf-8')

    expect_underline = underline.replace('=', '*')
    result_underline = result.splitlines()[2].strip()
    assert expect_underline == result_underline


@with_text_app()
def test_multibyte_table(app):
    text = u'\u65e5\u672c\u8a9e'
    contents = (u"\n.. list-table::"
                "\n"
                "\n   - - spam"
                "\n     - egg"
                "\n"
                "\n   - - %(text)s"
                "\n     - %(text)s"
                "\n" % locals())

    (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
    app.builder.build_all()
    result = (app.outdir / 'contents.txt').text(encoding='utf-8')

    lines = [line.strip() for line in result.splitlines() if line.strip()]
    line_widths = [column_width(line) for line in lines]
    assert len(set(line_widths)) == 1  # same widths
tests/test_intl.py
@@ -89,6 +89,23 @@ def test_subdir(app):
     assert result.startswith(u"\nsubdir contents\n***************\n")


+@with_intl_app(buildername='text', warning=warnfile)
+def test_i18n_warnings_in_translation(app):
+    app.builddir.rmtree(True)
+    app.builder.build(['warnings'])
+    result = (app.outdir / 'warnings.txt').text(encoding='utf-8')
+    expect = (u"\nI18N WITH REST WARNINGS"
+              u"\n***********************\n"
+              u"\nLINE OF >>``<<BROKEN LITERAL MARKUP.\n")
+
+    assert result == expect
+
+    warnings = warnfile.getvalue().replace(os.sep, '/')
+    warning_expr = u'.*/warnings.txt:4: ' \
+                   u'WARNING: Inline literal start-string without end-string.\n'
+    assert re.search(warning_expr, warnings)
+
+
 @with_intl_app(buildername='html', cleanenv=True)
 def test_i18n_footnote_break_refid(app):
     """test for #955 cant-build-html-with-footnotes-when-using"""
@@ -97,9 +114,10 @@ def test_i18n_footnote_break_refid(app):
     # expect no error by build


-@with_intl_app(buildername='text', cleanenv=True)
+@with_intl_app(buildername='text', warning=warnfile)
 def test_i18n_footnote_regression(app):
     """regression test for fix #955"""
+    app.builddir.rmtree(True)
     app.builder.build(['footnote'])
     result = (app.outdir / 'footnote.txt').text(encoding='utf-8')
     expect = (u"\nI18N WITH FOOTNOTE"
@@ -110,6 +128,10 @@ def test_i18n_footnote_regression(app):
               u"\n[100] THIS IS A NUMBERED FOOTNOTE.\n")
     assert result == expect

+    warnings = warnfile.getvalue().replace(os.sep, '/')
+    warning_expr = u'.*/footnote.txt:\\d*: SEVERE: Duplicate ID: ".*".\n'
+    assert not re.search(warning_expr, warnings)
+

 @with_intl_app(buildername='html', cleanenv=True)
 def test_i18n_footnote_backlink(app):
@@ -271,6 +293,24 @@ def test_i18n_glossary_terms(app):
     assert 'term not in glossary' not in warnings


+@with_intl_app(buildername='text', warning=warnfile)
+def test_i18n_role_xref(app):
+    # regression test for #1090
+    app.builddir.rmtree(True)  # for warnings acceleration
+    app.builder.build(['role_xref'])
+    result = (app.outdir / 'role_xref.txt').text(encoding='utf-8')
+    expect = (u"\nI18N ROCK'N ROLE XREF"
+              u"\n*********************\n"
+              u"\nLINK TO *I18N ROCK'N ROLE XREF*, *CONTENTS*, *SOME NEW TERM*.\n")
+
+    warnings = warnfile.getvalue().replace(os.sep, '/')
+    assert 'term not in glossary' not in warnings
+    assert 'undefined label' not in warnings
+    assert 'unknown document' not in warnings
+
+    assert result == expect
+
+
 @with_intl_app(buildername='text', warning=warnfile)
 def test_i18n_glossary_terms_inconsistency(app):
     # regression test for #1090
tests/util.py
@@ -142,6 +142,13 @@ class TestApp(application.Sphinx):
             temproot = tempdir / 'root'
             test_root.copytree(temproot)
             srcdir = temproot
+        elif srcdir == '(empty)':
+            tempdir = path(tempfile.mkdtemp())
+            self.cleanup_trees.append(tempdir)
+            temproot = tempdir / 'root'
+            temproot.makedirs()
+            (temproot / 'conf.py').write_text('')
+            srcdir = temproot
         else:
             srcdir = path(srcdir)
         self.builddir = srcdir.joinpath('_build')