diff --git a/CHANGES b/CHANGES index c46ea7aa3..f65817ab0 100644 --- a/CHANGES +++ b/CHANGES @@ -21,6 +21,9 @@ Incompatible changes refers to :confval:`exclude_patterns` to exclude extra files and directories. * #2300: enhance autoclass:: to use the docstring of __new__ if __init__ method's is missing of empty +* #2251: term nodes in a glossary directive are wrapped with ``termset`` node to handle + multiple terms correctly. ``termsep`` node is removed and ``termset`` is added. + By this change, every writer must have visit_termset and depart_termset methods. Features added -------------- @@ -84,6 +87,7 @@ Bugs fixed * #2074: make gettext should use canonical relative paths for .pot. Thanks to anatoly techtonik. * #2311: Fix sphinx.ext.inheritance_diagram raises AttributeError +* #2251: Line breaks in .rst files are transferred to .pot files incorrectly. Documentation diff --git a/doc/extdev/nodes.rst b/doc/extdev/nodes.rst index e67fa3da6..e00a2ab8b 100644 --- a/doc/extdev/nodes.rst +++ b/doc/extdev/nodes.rst @@ -54,4 +54,4 @@ You should not need to generate the nodes below in extensions. .. autoclass:: start_of_file .. autoclass:: productionlist .. autoclass:: production -.. autoclass:: termsep +.. 
autoclass:: termset diff --git a/sphinx/addnodes.py b/sphinx/addnodes.py index 3ff5772ca..a834adc32 100644 --- a/sphinx/addnodes.py +++ b/sphinx/addnodes.py @@ -208,8 +208,8 @@ class abbreviation(nodes.Inline, nodes.TextElement): """Node for abbreviations with explanations.""" -class termsep(nodes.Structural, nodes.Element): - """Separates two terms within a node.""" +class termset(nodes.Structural, nodes.Element): + """A set of term nodes.""" class manpage(nodes.Inline, nodes.TextElement): diff --git a/sphinx/domains/std.py b/sphinx/domains/std.py index 5adf12c07..193f7bd69 100644 --- a/sphinx/domains/std.py +++ b/sphinx/domains/std.py @@ -214,7 +214,7 @@ class OptionXRefRole(XRefRole): return title, target -def make_termnodes_from_paragraph_node(env, node, new_id=None): +def register_term_to_glossary(env, node, new_id=None): gloss_entries = env.temp_data.setdefault('gloss_entries', set()) objects = env.domaindata['std']['objects'] @@ -229,25 +229,18 @@ def make_termnodes_from_paragraph_node(env, node, new_id=None): # add an index entry too indexnode = addnodes.index() indexnode['entries'] = [('single', termtext, new_id, 'main')] - new_termnodes = [] - new_termnodes.append(indexnode) - new_termnodes.extend(node.children) - new_termnodes.append(addnodes.termsep()) - for termnode in new_termnodes: - termnode.source, termnode.line = node.source, node.line - - return new_id, termtext, new_termnodes + indexnode.source, indexnode.line = node.source, node.line + node.append(indexnode) + node['ids'].append(new_id) + node['names'].append(new_id) -def make_term_from_paragraph_node(termnodes, ids): - # make a single "term" node with all the terms, separated by termsep - # nodes (remove the dangling trailing separator) - term = nodes.term('', '', *termnodes[:-1]) - term.source, term.line = termnodes[0].source, termnodes[0].line - term.rawsource = term.astext() - term['ids'].extend(ids) - term['names'].extend(ids) - return term +def make_termset_from_termnodes(termnodes): + # make a 
single "termset" node with all the terms + termset = addnodes.termset('', *termnodes) + termset.source, termset.line = termnodes[0].source, termnodes[0].line + termset.rawsource = termset.astext() + return termset class Glossary(Directive): @@ -330,7 +323,6 @@ class Glossary(Directive): termtexts = [] termnodes = [] system_messages = [] - ids = [] for line, source, lineno in terms: # parse the term with inline markup res = self.state.inline_text(line, lineno) @@ -338,17 +330,15 @@ class Glossary(Directive): # get a text-only representation of the term and register it # as a cross-reference target - tmp = nodes.paragraph('', '', *res[0]) - tmp.source = source - tmp.line = lineno - new_id, termtext, new_termnodes = \ - make_termnodes_from_paragraph_node(env, tmp) - ids.append(new_id) - termtexts.append(termtext) - termnodes.extend(new_termnodes) + term = nodes.term('', '', *res[0]) + term.source = source + term.line = lineno + register_term_to_glossary(env, term) + termtexts.append(term.astext()) + termnodes.append(term) - term = make_term_from_paragraph_node(termnodes, ids) - term += system_messages + termset = make_termset_from_termnodes(termnodes) + termset += system_messages defnode = nodes.definition() if definition: @@ -356,7 +346,7 @@ class Glossary(Directive): defnode) items.append((termtexts, - nodes.definition_list_item('', term, defnode))) + nodes.definition_list_item('', termset, defnode))) if 'sorted' in self.options: items.sort(key=lambda x: diff --git a/sphinx/transforms.py b/sphinx/transforms.py index 99490b5c9..0d22020af 100644 --- a/sphinx/transforms.py +++ b/sphinx/transforms.py @@ -27,10 +27,7 @@ from sphinx.util.nodes import ( from sphinx.util.osutil import ustrftime from sphinx.util.i18n import find_catalog from sphinx.util.pycompat import indent -from sphinx.domains.std import ( - make_term_from_paragraph_node, - make_termnodes_from_paragraph_node, -) +from sphinx.domains.std import register_term_to_glossary default_substitutions = set([ @@ 
-340,18 +337,10 @@ class Locale(Transform): # glossary terms update refid if isinstance(node, nodes.term): gloss_entries = env.temp_data.setdefault('gloss_entries', set()) - ids = [] - termnodes = [] for _id in node['names']: if _id in gloss_entries: gloss_entries.remove(_id) - _id, _, new_termnodes = \ - make_termnodes_from_paragraph_node(env, patch, _id) - ids.append(_id) - termnodes.extend(new_termnodes) - - if termnodes and ids: - patch = make_term_from_paragraph_node(termnodes, ids) + register_term_to_glossary(env, patch, _id) node['ids'] = patch['ids'] node['names'] = patch['names'] processed = True diff --git a/sphinx/writers/html.py b/sphinx/writers/html.py index 54975ae8a..fac40cdb4 100644 --- a/sphinx/writers/html.py +++ b/sphinx/writers/html.py @@ -629,9 +629,11 @@ class HTMLTranslator(BaseTranslator): def depart_abbreviation(self, node): self.body.append('') - def visit_termsep(self, node): - self.body.append('
') - raise nodes.SkipNode + def visit_termset(self, node): + pass + + def depart_termset(self, node): + pass def visit_manpage(self, node): return self.visit_literal_emphasis(node) @@ -692,6 +694,15 @@ class HTMLTranslator(BaseTranslator): (self.builder.current_docname, node.line)) raise nodes.SkipNode + # overwritten to do not add '' in 'visit_definition' state. + def visit_definition(self, node): + self.body.append(self.starttag(node, 'dd', '')) + self.set_first_last(node) + + # overwritten to add '' in 'depart_term' state. + def depart_term(self, node): + self.body.append('\n') + def unknown_visit(self, node): raise NotImplementedError('Unknown node: ' + node.__class__.__name__) diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py index f0c226f52..13430fbbe 100644 --- a/sphinx/writers/latex.py +++ b/sphinx/writers/latex.py @@ -1222,9 +1222,11 @@ class LaTeXTranslator(nodes.NodeVisitor): self.unrestrict_footnote(node) self.in_term -= 1 - def visit_termsep(self, node): - self.body.append(', ') - raise nodes.SkipNode + def visit_termset(self, node): + pass + + def depart_termset(self, node): + pass def visit_classifier(self, node): self.body.append('{[}') diff --git a/sphinx/writers/manpage.py b/sphinx/writers/manpage.py index d1c65bfb7..1950c1e48 100644 --- a/sphinx/writers/manpage.py +++ b/sphinx/writers/manpage.py @@ -200,9 +200,11 @@ class ManualPageTranslator(BaseTranslator): def depart_versionmodified(self, node): self.depart_paragraph(node) - def visit_termsep(self, node): - self.body.append(', ') - raise nodes.SkipNode + def visit_termset(self, node): + pass + + def depart_termset(self, node): + pass # overwritten -- we don't want source comments to show up def visit_comment(self, node): diff --git a/sphinx/writers/texinfo.py b/sphinx/writers/texinfo.py index 14345e664..78ff02bd5 100644 --- a/sphinx/writers/texinfo.py +++ b/sphinx/writers/texinfo.py @@ -952,10 +952,10 @@ class TexinfoTranslator(nodes.NodeVisitor): def depart_term(self, node): pass 
- def visit_termsep(self, node): - self.body.append('\n%s ' % self.at_item_x) + def visit_termset(self, node): + pass - def depart_termsep(self, node): + def depart_termset(self, node): pass def visit_classifier(self, node): diff --git a/sphinx/writers/text.py b/sphinx/writers/text.py index 1e8bbf023..d646064f0 100644 --- a/sphinx/writers/text.py +++ b/sphinx/writers/text.py @@ -640,9 +640,11 @@ class TextTranslator(nodes.NodeVisitor): if not self._classifier_count_in_li: self.end_state(end=None) - def visit_termsep(self, node): - self.add_text(', ') - raise nodes.SkipNode + def visit_termset(self, node): + pass + + def depart_termset(self, node): + pass def visit_classifier(self, node): self.add_text(' : ')