Now term nodes in a glossary directive are wrapped with a termset node to handle multiple terms correctly.

Fix #2251; Line breaks in .rst files are transferred to .pot files in a wrong way.
This commit is contained in:
shimizukawa 2016-02-13 17:51:44 +09:00
parent b0897a47c4
commit 804e866404
10 changed files with 61 additions and 61 deletions

View File

@ -21,6 +21,9 @@ Incompatible changes
refers to :confval:`exclude_patterns` to exclude extra files and directories. refers to :confval:`exclude_patterns` to exclude extra files and directories.
* #2300: enhance autoclass:: to use the docstring of __new__ if __init__ method's is missing * #2300: enhance autoclass:: to use the docstring of __new__ if __init__ method's is missing
of empty of empty
* #2251: term nodes in a glossary directive are wrapped with ``termset`` node to handle
multiple terms correctly. ``termsep`` node is removed and ``termset`` is added.
Because of this change, every writer must have visit_termset and depart_termset methods.
Features added Features added
-------------- --------------
@ -84,6 +87,7 @@ Bugs fixed
* #2074: make gettext should use canonical relative paths for .pot. Thanks to * #2074: make gettext should use canonical relative paths for .pot. Thanks to
anatoly techtonik. anatoly techtonik.
* #2311: Fix sphinx.ext.inheritance_diagram raises AttributeError * #2311: Fix sphinx.ext.inheritance_diagram raises AttributeError
* #2251: Line breaks in .rst files are transferred to .pot files in a wrong way.
Documentation Documentation

View File

@ -54,4 +54,4 @@ You should not need to generate the nodes below in extensions.
.. autoclass:: start_of_file .. autoclass:: start_of_file
.. autoclass:: productionlist .. autoclass:: productionlist
.. autoclass:: production .. autoclass:: production
.. autoclass:: termsep .. autoclass:: termset

View File

@ -208,8 +208,8 @@ class abbreviation(nodes.Inline, nodes.TextElement):
"""Node for abbreviations with explanations.""" """Node for abbreviations with explanations."""
class termsep(nodes.Structural, nodes.Element): class termset(nodes.Structural, nodes.Element):
"""Separates two terms within a <term> node.""" """A set of <term> node"""
class manpage(nodes.Inline, nodes.TextElement): class manpage(nodes.Inline, nodes.TextElement):

View File

@ -214,7 +214,7 @@ class OptionXRefRole(XRefRole):
return title, target return title, target
def make_termnodes_from_paragraph_node(env, node, new_id=None): def register_term_to_glossary(env, node, new_id=None):
gloss_entries = env.temp_data.setdefault('gloss_entries', set()) gloss_entries = env.temp_data.setdefault('gloss_entries', set())
objects = env.domaindata['std']['objects'] objects = env.domaindata['std']['objects']
@ -229,25 +229,18 @@ def make_termnodes_from_paragraph_node(env, node, new_id=None):
# add an index entry too # add an index entry too
indexnode = addnodes.index() indexnode = addnodes.index()
indexnode['entries'] = [('single', termtext, new_id, 'main')] indexnode['entries'] = [('single', termtext, new_id, 'main')]
new_termnodes = [] indexnode.source, indexnode.line = node.source, node.line
new_termnodes.append(indexnode) node.append(indexnode)
new_termnodes.extend(node.children) node['ids'].append(new_id)
new_termnodes.append(addnodes.termsep()) node['names'].append(new_id)
for termnode in new_termnodes:
termnode.source, termnode.line = node.source, node.line
return new_id, termtext, new_termnodes
def make_term_from_paragraph_node(termnodes, ids): def make_termset_from_termnodes(termnodes):
# make a single "term" node with all the terms, separated by termsep # make a single "termset" node with all the terms
# nodes (remove the dangling trailing separator) termset = addnodes.termset('', *termnodes)
term = nodes.term('', '', *termnodes[:-1]) termset.source, termset.line = termnodes[0].source, termnodes[0].line
term.source, term.line = termnodes[0].source, termnodes[0].line termset.rawsource = termset.astext()
term.rawsource = term.astext() return termset
term['ids'].extend(ids)
term['names'].extend(ids)
return term
class Glossary(Directive): class Glossary(Directive):
@ -330,7 +323,6 @@ class Glossary(Directive):
termtexts = [] termtexts = []
termnodes = [] termnodes = []
system_messages = [] system_messages = []
ids = []
for line, source, lineno in terms: for line, source, lineno in terms:
# parse the term with inline markup # parse the term with inline markup
res = self.state.inline_text(line, lineno) res = self.state.inline_text(line, lineno)
@ -338,17 +330,15 @@ class Glossary(Directive):
# get a text-only representation of the term and register it # get a text-only representation of the term and register it
# as a cross-reference target # as a cross-reference target
tmp = nodes.paragraph('', '', *res[0]) term = nodes.term('', '', *res[0])
tmp.source = source term.source = source
tmp.line = lineno term.line = lineno
new_id, termtext, new_termnodes = \ register_term_to_glossary(env, term)
make_termnodes_from_paragraph_node(env, tmp) termtexts.append(term.astext())
ids.append(new_id) termnodes.append(term)
termtexts.append(termtext)
termnodes.extend(new_termnodes)
term = make_term_from_paragraph_node(termnodes, ids) termset = make_termset_from_termnodes(termnodes)
term += system_messages termset += system_messages
defnode = nodes.definition() defnode = nodes.definition()
if definition: if definition:
@ -356,7 +346,7 @@ class Glossary(Directive):
defnode) defnode)
items.append((termtexts, items.append((termtexts,
nodes.definition_list_item('', term, defnode))) nodes.definition_list_item('', termset, defnode)))
if 'sorted' in self.options: if 'sorted' in self.options:
items.sort(key=lambda x: items.sort(key=lambda x:

View File

@ -27,10 +27,7 @@ from sphinx.util.nodes import (
from sphinx.util.osutil import ustrftime from sphinx.util.osutil import ustrftime
from sphinx.util.i18n import find_catalog from sphinx.util.i18n import find_catalog
from sphinx.util.pycompat import indent from sphinx.util.pycompat import indent
from sphinx.domains.std import ( from sphinx.domains.std import register_term_to_glossary
make_term_from_paragraph_node,
make_termnodes_from_paragraph_node,
)
default_substitutions = set([ default_substitutions = set([
@ -340,18 +337,10 @@ class Locale(Transform):
# glossary terms update refid # glossary terms update refid
if isinstance(node, nodes.term): if isinstance(node, nodes.term):
gloss_entries = env.temp_data.setdefault('gloss_entries', set()) gloss_entries = env.temp_data.setdefault('gloss_entries', set())
ids = []
termnodes = []
for _id in node['names']: for _id in node['names']:
if _id in gloss_entries: if _id in gloss_entries:
gloss_entries.remove(_id) gloss_entries.remove(_id)
_id, _, new_termnodes = \ register_term_to_glossary(env, patch, _id)
make_termnodes_from_paragraph_node(env, patch, _id)
ids.append(_id)
termnodes.extend(new_termnodes)
if termnodes and ids:
patch = make_term_from_paragraph_node(termnodes, ids)
node['ids'] = patch['ids'] node['ids'] = patch['ids']
node['names'] = patch['names'] node['names'] = patch['names']
processed = True processed = True

View File

@ -629,9 +629,11 @@ class HTMLTranslator(BaseTranslator):
def depart_abbreviation(self, node): def depart_abbreviation(self, node):
self.body.append('</abbr>') self.body.append('</abbr>')
def visit_termsep(self, node): def visit_termset(self, node):
self.body.append('<br />') pass
raise nodes.SkipNode
def depart_termset(self, node):
pass
def visit_manpage(self, node): def visit_manpage(self, node):
return self.visit_literal_emphasis(node) return self.visit_literal_emphasis(node)
@ -692,6 +694,15 @@ class HTMLTranslator(BaseTranslator):
(self.builder.current_docname, node.line)) (self.builder.current_docname, node.line))
raise nodes.SkipNode raise nodes.SkipNode
# overwritten to do not add '</dt>' in 'visit_definition' state.
def visit_definition(self, node):
self.body.append(self.starttag(node, 'dd', ''))
self.set_first_last(node)
# overwritten to add '</dt>' in 'depart_term' state.
def depart_term(self, node):
self.body.append('</dt>\n')
def unknown_visit(self, node): def unknown_visit(self, node):
raise NotImplementedError('Unknown node: ' + node.__class__.__name__) raise NotImplementedError('Unknown node: ' + node.__class__.__name__)

View File

@ -1222,9 +1222,11 @@ class LaTeXTranslator(nodes.NodeVisitor):
self.unrestrict_footnote(node) self.unrestrict_footnote(node)
self.in_term -= 1 self.in_term -= 1
def visit_termsep(self, node): def visit_termset(self, node):
self.body.append(', ') pass
raise nodes.SkipNode
def depart_termset(self, node):
pass
def visit_classifier(self, node): def visit_classifier(self, node):
self.body.append('{[}') self.body.append('{[}')

View File

@ -200,9 +200,11 @@ class ManualPageTranslator(BaseTranslator):
def depart_versionmodified(self, node): def depart_versionmodified(self, node):
self.depart_paragraph(node) self.depart_paragraph(node)
def visit_termsep(self, node): def visit_termset(self, node):
self.body.append(', ') pass
raise nodes.SkipNode
def depart_termset(self, node):
pass
# overwritten -- we don't want source comments to show up # overwritten -- we don't want source comments to show up
def visit_comment(self, node): def visit_comment(self, node):

View File

@ -952,10 +952,10 @@ class TexinfoTranslator(nodes.NodeVisitor):
def depart_term(self, node): def depart_term(self, node):
pass pass
def visit_termsep(self, node): def visit_termset(self, node):
self.body.append('\n%s ' % self.at_item_x) pass
def depart_termsep(self, node): def depart_termset(self, node):
pass pass
def visit_classifier(self, node): def visit_classifier(self, node):

View File

@ -640,9 +640,11 @@ class TextTranslator(nodes.NodeVisitor):
if not self._classifier_count_in_li: if not self._classifier_count_in_li:
self.end_state(end=None) self.end_state(end=None)
def visit_termsep(self, node): def visit_termset(self, node):
self.add_text(', ') pass
raise nodes.SkipNode
def depart_termset(self, node):
pass
def visit_classifier(self, node): def visit_classifier(self, node):
self.add_text(' : ') self.add_text(' : ')