Fix multiple cross references (term, ref, doc) in the same line returning the same link with i18n. refs #1090, #1193

Takayuki Shimizukawa 2013-06-16 23:57:08 +09:00
parent 472a3a83b2
commit deb01d8e30
6 changed files with 151 additions and 36 deletions
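For context, a rough sketch of why the old mapping collapsed same-type links and how keying on the target fixes it. This is illustrative only, with plain dicts standing in for pending_xref nodes; it is not the actual transform code.

# Two :ref: roles appearing in one translated paragraph.
old_refs = [
    {'refdomain': 'std', 'reftype': 'ref', 'reftarget': 'i18n-role-xref'},
    {'refdomain': 'std', 'reftype': 'ref', 'reftarget': 'same-type-links'},
]

# Old behaviour: the original->translated target map was keyed only by
# (reftype, refdomain), so the second reference overwrote the first and
# both links ended up pointing at the same target.
broken_map = {}
for ref in old_refs:
    broken_map[ref['reftype'], ref['refdomain']] = ref['reftarget']
assert broken_map == {('ref', 'std'): 'same-type-links'}

# New behaviour: the reftarget becomes part of the key, so every
# reference keeps its own original target.
fixed_map = {}
for ref in old_refs:
    fixed_map[ref['refdomain'], ref['reftype'], ref['reftarget']] = ref['reftarget']
assert len(fixed_map) == 2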

View File

@@ -32,6 +32,8 @@ Bugs fixed
characters to "Project name" on quickstart.
* #1190: Output TeX/texinfo/man filename has no basename (only extension)
when using multibyte characters to "Project name" on quickstart.
* #1090: Fix multiple cross references (term, ref, doc) in the same line
returning the same link with i18n.
Release 1.2 (beta1 released Mar 31, 2013)

View File

@@ -205,6 +205,41 @@ class OptionXRefRole(XRefRole):
        return title, target

def make_termnodes_from_paragraph_node(env, node):
    gloss_entries = env.temp_data.setdefault('gloss_entries', set())
    objects = env.domaindata['std']['objects']
    termtext = node.astext()
    new_id = 'term-' + nodes.make_id(termtext)
    if new_id in gloss_entries:
        new_id = 'term-' + str(len(gloss_entries))
    gloss_entries.add(new_id)
    objects['term', termtext.lower()] = env.docname, new_id
    # add an index entry too
    indexnode = addnodes.index()
    indexnode['entries'] = [('single', termtext, new_id, 'main')]
    new_termnodes = []
    new_termnodes.append(indexnode)
    new_termnodes.extend(node.children)
    new_termnodes.append(addnodes.termsep())
    for termnode in new_termnodes:
        termnode.source, termnode.line = node.source, node.line
    return new_id, termtext, new_termnodes

def make_term_from_paragraph_node(termnodes, ids):
    # make a single "term" node with all the terms, separated by termsep
    # nodes (remove the dangling trailing separator)
    term = nodes.term('', '', *termnodes[:-1])
    term.source, term.line = termnodes[0].source, termnodes[0].line
    term.rawsource = term.astext()
    term['ids'].extend(ids)
    term['names'].extend(ids)
    return term
class Glossary(Directive):
"""
Directive to create a glossary with cross-reference targets for :term:
@@ -221,8 +256,6 @@ class Glossary(Directive):
def run(self):
env = self.state.document.settings.env
objects = env.domaindata['std']['objects']
gloss_entries = env.temp_data.setdefault('gloss_entries', set())
node = addnodes.glossary()
node.document = self.state.document
@@ -296,31 +329,15 @@ class Glossary(Directive):
# get a text-only representation of the term and register it
# as a cross-reference target
tmp = nodes.paragraph('', '', *res[0])
termtext = tmp.astext()
new_id = 'term-' + nodes.make_id(termtext)
if new_id in gloss_entries:
new_id = 'term-' + str(len(gloss_entries))
gloss_entries.add(new_id)
tmp.source = source
tmp.line = lineno
new_id, termtext, new_termnodes = \
make_termnodes_from_paragraph_node(env, tmp)
ids.append(new_id)
objects['term', termtext.lower()] = env.docname, new_id
termtexts.append(termtext)
# add an index entry too
indexnode = addnodes.index()
indexnode['entries'] = [('single', termtext, new_id, 'main')]
new_termnodes = []
new_termnodes.append(indexnode)
new_termnodes.extend(res[0])
new_termnodes.append(addnodes.termsep())
for termnode in new_termnodes:
termnode.source, termnode.line = source, lineno
termnodes.extend(new_termnodes)
# make a single "term" node with all the terms, separated by termsep
# nodes (remove the dangling trailing separator)
term = nodes.term('', '', *termnodes[:-1])
term.source, term.line = termnodes[0].source, termnodes[0].line
term.rawsource = term.astext()
term['ids'].extend(ids)
term['names'].extend(ids)
term = make_term_from_paragraph_node(termnodes, ids)
term += system_messages
defnode = nodes.definition()
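For orientation, a minimal sketch of how the two helpers extracted above are meant to be combined. `env` stands in for a real BuildEnvironment and `paragraph` for a docutils paragraph node holding the (possibly translated) term text; both are assumed to exist in the caller.

# Illustrative usage of the new helpers; not taken from the commit itself.
from sphinx.domains.std import (
    make_term_from_paragraph_node,
    make_termnodes_from_paragraph_node,
)

def build_term(env, paragraph):
    # register the term text as a cross-reference target and collect the
    # index entry, the term's child nodes, and a trailing separator
    new_id, termtext, termnodes = \
        make_termnodes_from_paragraph_node(env, paragraph)
    # fold those nodes into a single `term` node carrying the new id
    return make_term_from_paragraph_node(termnodes, [new_id])

This is the same pattern the Locale transform below now uses to recompute a translated term's ids.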

View File

@@ -24,6 +24,10 @@ from sphinx.util.nodes import traverse_translatable_index, extract_messages
from sphinx.util.osutil import ustrftime, find_catalog
from sphinx.util.compat import docutils_version
from sphinx.util.pycompat import all
from sphinx.domains.std import (
    make_term_from_paragraph_node,
    make_termnodes_from_paragraph_node,
)
default_substitutions = set([
@@ -259,6 +263,16 @@ class Locale(Transform):
if refname in refname_ids_map:
new["ids"] = refname_ids_map[refname]
# glossary terms update refid
if isinstance(node, nodes.term):
new_id, _, termnodes = \
make_termnodes_from_paragraph_node(env, patch)
term = make_term_from_paragraph_node(
termnodes, [new_id])
patch = term
node['ids'] = patch['ids']
node['names'] = patch['names']
# The original pending_xref['reftarget'] contains the untranslated
# target name; the new pending_xref must reuse that original value.
# This restricts which ref-targets may change in the translation.
@@ -268,11 +282,24 @@ class Locale(Transform):
if len(old_refs) != len(new_refs):
env.warn_node('inconsistent term references in '
'translated message', node)
def get_key(node):
key = node["refdomain"], node["reftype"]
if key == ('std', 'term'):
key = None
elif key == ('std', 'ref'):
key += (node['reftarget'],)
elif key == ('', 'doc'):
key += (node['reftarget'],)
else:
pass
return key
for old in old_refs:
key = old["reftype"], old["refdomain"]
xref_reftarget_map[key] = old["reftarget"]
key = get_key(old)
if key:
xref_reftarget_map[key] = old["reftarget"]
for new in new_refs:
key = new["reftype"], new["refdomain"]
key = get_key(new)
if key in xref_reftarget_map:
new['reftarget'] = xref_reftarget_map[key]
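To illustrate what get_key does, a condensed standalone copy follows (get_key itself is a local function inside the transform, so it cannot be imported); plain dicts stand in for pending_xref nodes.

def get_key(node):
    key = node["refdomain"], node["reftype"]
    if key == ('std', 'term'):
        key = None   # terms keep the translated target set earlier
    elif key in (('std', 'ref'), ('', 'doc')):
        key += (node['reftarget'],)   # the target is part of the key
    return key

assert get_key({'refdomain': 'std', 'reftype': 'term',
                'reftarget': 'Some term'}) is None
assert get_key({'refdomain': 'std', 'reftype': 'ref',
                'reftarget': 'same-type-links'}) == ('std', 'ref', 'same-type-links')
assert get_key({'refdomain': '', 'reftype': 'doc',
                'reftarget': 'contents'}) == ('', 'doc', 'contents')

Because the reftarget is part of the key, two references of the same type in one line no longer share a map entry, and glossary terms are skipped entirely so their translated targets survive.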

View File

@@ -21,3 +21,16 @@ msgstr "I18N ROCK'N ROLE XREF"
msgid "link to :term:`Some term`, :ref:`i18n-role-xref`, :doc:`contents`."
msgstr "LINK TO :ref:`i18n-role-xref`, :doc:`contents`, :term:`SOME NEW TERM`."
msgid "same type links"
msgstr "SAME TYPE LINKS"
msgid "link to :term:`Some term` and :term:`Some other term`."
msgstr "LINK TO :term:`SOME OTHER NEW TERM` AND :term:`SOME NEW TERM`."
msgid "link to :ref:`i18n-role-xref` and :ref:`same-type-links`."
msgstr "LINK TO :ref:`same-type-links` AND :ref:`i18n-role-xref`."
msgid "link to :doc:`contents` and :doc:`glossary_terms`."
msgstr "LINK TO :doc:`glossary_terms` AND :doc:`contents`."

View File

@@ -7,3 +7,13 @@ i18n role xref
link to :term:`Some term`, :ref:`i18n-role-xref`, :doc:`contents`.
.. _same-type-links:
same type links
=================
link to :term:`Some term` and :term:`Some other term`.
link to :ref:`i18n-role-xref` and :ref:`same-type-links`.
link to :doc:`contents` and :doc:`glossary_terms`.

View File

@@ -14,6 +14,7 @@ import os
import re
from StringIO import StringIO
from subprocess import Popen, PIPE
from xml.etree import ElementTree
from sphinx.util.pycompat import relpath
@@ -292,25 +293,70 @@ def test_i18n_glossary_terms(app):
assert 'term not in glossary' not in warnings
@with_intl_app(buildername='text', warning=warnfile)
@with_intl_app(buildername='xml', warning=warnfile)
def test_i18n_role_xref(app):
# regression test for #1090
def gettexts(elem):
def itertext(self):
# this function copied from Python-2.7 'ElementTree.itertext'.
# for compatibility to Python-2.5, 2.6, 3.1
tag = self.tag
if not isinstance(tag, basestring) and tag is not None:
return
if self.text:
yield self.text
for e in self:
for s in itertext(e):
yield s
if e.tail:
yield e.tail
return filter(None, [s.strip() for s in itertext(elem)])
def getref(elem):
return elem.attrib.get('refid') or elem.attrib.get('refuri')
def assert_text_refs(elem, text, refs):
_text = gettexts(elem)
assert _text == text
_refs = map(getref, elem.findall('reference'))
assert _refs == refs
app.builddir.rmtree(True) #for warnings acceleration
app.builder.build(['role_xref'])
result = (app.outdir / 'role_xref.txt').text(encoding='utf-8')
expect = (
u"\nI18N ROCK'N ROLE XREF"
u"\n*********************\n"
u"\nLINK TO *I18N ROCK'N ROLE XREF*, *CONTENTS*, *SOME NEW TERM*.\n"
)
et = ElementTree.parse(app.outdir / 'role_xref.xml')
sec1, sec2 = et.findall('section')
para1, = sec1.findall('paragraph')
assert_text_refs(
para1,
['LINK TO', "I18N ROCK'N ROLE XREF", ',', 'CONTENTS', ',',
'SOME NEW TERM', '.'],
['i18n-role-xref',
'contents',
'glossary_terms#term-some-new-term'])
para21, para22, para23 = sec2.findall('paragraph')
assert_text_refs(
para21,
['LINK TO', 'SOME OTHER NEW TERM', 'AND', 'SOME NEW TERM', '.'],
['glossary_terms#term-some-other-new-term',
'glossary_terms#term-some-new-term'])
assert_text_refs(
para22,
['LINK TO', 'SAME TYPE LINKS', 'AND', "I18N ROCK'N ROLE XREF", '.'],
['same-type-links', 'i18n-role-xref'])
assert_text_refs(
para23,
['LINK TO', 'I18N WITH GLOSSARY TERMS', 'AND', 'CONTENTS', '.'],
['glossary_terms', 'contents'])
#warnings
warnings = warnfile.getvalue().replace(os.sep, '/')
assert 'term not in glossary' not in warnings
assert 'undefined label' not in warnings
assert 'unknown document' not in warnings
assert result == expect
@with_intl_app(buildername='text', warning=warnfile)
def test_i18n_glossary_terms_inconsistency(app):
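As a usage note, this is roughly what the gettexts/getref helpers in the new test see for a single paragraph. The XML below is hand-written to mirror the xml builder's output; it is not taken from a real build.

from xml.etree import ElementTree

para = ElementTree.fromstring(
    '<paragraph>LINK TO <reference refid="same-type-links">SAME TYPE LINKS'
    '</reference> AND <reference refid="i18n-role-xref">I18N ROLE XREF'
    '</reference>.</paragraph>')
# gettexts(para) strips whitespace and drops empty strings, yielding:
#   ['LINK TO', 'SAME TYPE LINKS', 'AND', 'I18N ROLE XREF', '.']
# map(getref, para.findall('reference')) yields the refids in order:
#   ['same-type-links', 'i18n-role-xref']
# assert_text_refs() simply compares both lists against the expected values.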