Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
Merge pull request #2318 from sphinx-doc/glossary-term-structure
Restructuring glossary term node to fix #2251
This commit is contained in: commit 58c29968f9
CHANGES: 6 changed lines
@@ -21,6 +21,11 @@ Incompatible changes

   refers to :confval:`exclude_patterns` to exclude extra files and directories.
 * #2300: enhance autoclass:: to use the docstring of __new__ if the __init__
   method's docstring is missing or empty
+* #2251: Previously, under glossary directives, multiple terms for one definition
+  were converted into a single ``term`` node, and the terms within it were
+  separated by ``termsep`` nodes. In the new implementation each term becomes an
+  individual ``term`` node and the ``termsep`` node is removed. As a result, the
+  output layout of every builder changes slightly.

 Features added
 --------------

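The changelog entry above is the heart of the change. A minimal sketch of what it means for the doctree (my own illustration, not part of the diff; it assumes a Sphinx 1.4-era tree where addnodes.termsep still exists and uses simplified term contents):

    from docutils import nodes
    from sphinx import addnodes

    # Old structure: one <term> node holds both terms, joined by <termsep>.
    old_item = nodes.definition_list_item(
        '',
        nodes.term('', '', nodes.Text('foo'), addnodes.termsep(), nodes.Text('bar')),
        nodes.definition('', nodes.paragraph(text='shared definition')),
    )

    # New structure: one <term> node per term, no <termsep> at all.
    new_item = nodes.definition_list_item(
        '',
        nodes.term('', '', nodes.Text('foo')),
        nodes.term('', '', nodes.Text('bar')),
        nodes.definition('', nodes.paragraph(text='shared definition')),
    )
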
@@ -84,6 +89,7 @@ Bugs fixed

 * #2074: make gettext should use canonical relative paths for .pot. Thanks to
   anatoly techtonik.
 * #2311: Fix sphinx.ext.inheritance_diagram raises AttributeError
+* #2251: Line breaks in .rst files are transferred to .pot files in the wrong way.


 Documentation

@@ -9,6 +9,8 @@
     :license: BSD, see LICENSE for details.
 """

+import warnings
+
 from docutils import nodes

@@ -209,7 +211,16 @@ class abbreviation(nodes.Inline, nodes.TextElement):


 class termsep(nodes.Structural, nodes.Element):
-    """Separates two terms within a <term> node."""
+    """Separates two terms within a <term> node.
+
+    .. versionchanged:: 1.4
+       sphinx.addnodes.termsep is deprecated. It will be removed at Sphinx-1.5.
+    """
+
+    def __init__(self, *args, **kw):
+        warnings.warn('sphinx.addnodes.termsep will be removed at Sphinx-1.5',
+                      DeprecationWarning, stacklevel=2)
+        super(termsep, self).__init__(*args, **kw)


 class manpage(nodes.Inline, nodes.TextElement):

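A quick sketch of what the deprecation shim above means in practice (not part of the diff; it assumes the Sphinx 1.4 code base, where addnodes.termsep still exists): instantiating the node now emits a DeprecationWarning but otherwise behaves as before.

    import warnings

    from sphinx import addnodes

    # Creating a termsep node still works, but now warns; extensions have
    # until Sphinx 1.5 to stop relying on it.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        sep = addnodes.termsep()

    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
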
@@ -214,7 +214,7 @@ class OptionXRefRole(XRefRole):
         return title, target


-def make_termnodes_from_paragraph_node(env, node, new_id=None):
+def register_term_to_glossary(env, node, new_id=None):
     gloss_entries = env.temp_data.setdefault('gloss_entries', set())
     objects = env.domaindata['std']['objects']

@@ -229,25 +229,10 @@ def make_termnodes_from_paragraph_node(env, node, new_id=None):
     # add an index entry too
     indexnode = addnodes.index()
     indexnode['entries'] = [('single', termtext, new_id, 'main')]
-    new_termnodes = []
-    new_termnodes.append(indexnode)
-    new_termnodes.extend(node.children)
-    new_termnodes.append(addnodes.termsep())
-    for termnode in new_termnodes:
-        termnode.source, termnode.line = node.source, node.line
-
-    return new_id, termtext, new_termnodes
-
-
-def make_term_from_paragraph_node(termnodes, ids):
-    # make a single "term" node with all the terms, separated by termsep
-    # nodes (remove the dangling trailing separator)
-    term = nodes.term('', '', *termnodes[:-1])
-    term.source, term.line = termnodes[0].source, termnodes[0].line
-    term.rawsource = term.astext()
-    term['ids'].extend(ids)
-    term['names'].extend(ids)
-    return term
+    indexnode.source, indexnode.line = node.source, node.line
+    node.append(indexnode)
+    node['ids'].append(new_id)
+    node['names'].append(new_id)


 class Glossary(Directive):

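A rough usage sketch of the reworked helper (my own, not from the diff): instead of returning an id, a term text, and a list of new child nodes, register_term_to_glossary mutates the <term> node it is given, registering it as a cross-reference target and appending the index node in place. The FakeEnv class below is a hypothetical stand-in for Sphinx's BuildEnvironment, providing only the attributes the helper appears to touch.

    from docutils import nodes
    from sphinx.domains.std import register_term_to_glossary  # Sphinx 1.4 line only


    class FakeEnv(object):
        """Hypothetical stand-in for BuildEnvironment, enough for this sketch."""
        docname = 'glossary'
        temp_data = {}
        domaindata = {'std': {'objects': {}}}


    term = nodes.term('', '', nodes.Text('builder'))
    term.source, term.line = 'glossary.rst', 10

    register_term_to_glossary(FakeEnv(), term)

    print(term['ids'])    # e.g. ['term-builder']
    print(term['names'])  # mirrors term['ids']
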
@@ -330,7 +315,6 @@ class Glossary(Directive):
             termtexts = []
             termnodes = []
             system_messages = []
-            ids = []
             for line, source, lineno in terms:
                 # parse the term with inline markup
                 res = self.state.inline_text(line, lineno)

@@ -338,25 +322,22 @@
                 # get a text-only representation of the term and register it
                 # as a cross-reference target
-                tmp = nodes.paragraph('', '', *res[0])
-                tmp.source = source
-                tmp.line = lineno
-                new_id, termtext, new_termnodes = \
-                    make_termnodes_from_paragraph_node(env, tmp)
-                ids.append(new_id)
-                termtexts.append(termtext)
-                termnodes.extend(new_termnodes)
+                term = nodes.term('', '', *res[0])
+                term.source = source
+                term.line = lineno
+                register_term_to_glossary(env, term)
+                termtexts.append(term.astext())
+                termnodes.append(term)

-            term = make_term_from_paragraph_node(termnodes, ids)
-            term += system_messages
+            termnodes.extend(system_messages)

             defnode = nodes.definition()
             if definition:
                 self.state.nested_parse(definition, definition.items[0][1],
                                         defnode)
+            termnodes.append(defnode)

             items.append((termtexts,
-                          nodes.definition_list_item('', term, defnode)))
+                          nodes.definition_list_item('', *termnodes)))

         if 'sorted' in self.options:
             items.sort(key=lambda x:

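To make the new assembly explicit, here is a small standalone restatement (a hypothetical helper, not part of the diff): after this change every ``term`` node, any system messages, and the ``definition`` node all become direct children of one ``definition_list_item``, rather than being folded into a single ``term``.

    from docutils import nodes


    def make_glossary_item(term_nodes, system_messages, defnode):
        """Hypothetical restatement of the loop above: all term nodes plus the
        definition become siblings inside one definition_list_item."""
        children = list(term_nodes) + list(system_messages) + [defnode]
        return nodes.definition_list_item('', *children)


    # Two terms sharing one definition:
    item = make_glossary_item(
        [nodes.term('', '', nodes.Text('foo')),
         nodes.term('', '', nodes.Text('bar'))],
        [],
        nodes.definition('', nodes.paragraph(text='shared definition')),
    )
    print(item.pformat())
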
@@ -27,10 +27,7 @@ from sphinx.util.nodes import (
 from sphinx.util.osutil import ustrftime
 from sphinx.util.i18n import find_catalog
 from sphinx.util.pycompat import indent
-from sphinx.domains.std import (
-    make_term_from_paragraph_node,
-    make_termnodes_from_paragraph_node,
-)
+from sphinx.domains.std import register_term_to_glossary


 default_substitutions = set([

@@ -340,18 +337,10 @@ class Locale(Transform):
             # glossary terms update refid
             if isinstance(node, nodes.term):
                 gloss_entries = env.temp_data.setdefault('gloss_entries', set())
-                ids = []
-                termnodes = []
                 for _id in node['names']:
                     if _id in gloss_entries:
                         gloss_entries.remove(_id)
-                    _id, _, new_termnodes = \
-                        make_termnodes_from_paragraph_node(env, patch, _id)
-                    ids.append(_id)
-                    termnodes.extend(new_termnodes)
-
-                if termnodes and ids:
-                    patch = make_term_from_paragraph_node(termnodes, ids)
+                    register_term_to_glossary(env, patch, _id)
                     node['ids'] = patch['ids']
                     node['names'] = patch['names']
                     processed = True

@@ -13,6 +13,7 @@ import sys
 import posixpath
 import os
 import copy
+import warnings

 from six import string_types
 from docutils import nodes

@@ -629,7 +630,26 @@ class HTMLTranslator(BaseTranslator):
     def depart_abbreviation(self, node):
         self.body.append('</abbr>')

+    # overwritten (but not changed) to keep pair of visit/depart_term
+    def visit_term(self, node):
+        self.body.append(self.starttag(node, 'dt', ''))
+
+    # overwritten to add '</dt>' in 'depart_term' state.
+    def depart_term(self, node):
+        self.body.append('</dt>\n')
+
+    # overwritten to not add '</dt>' in 'visit_definition' state.
+    def visit_definition(self, node):
+        self.body.append(self.starttag(node, 'dd', ''))
+        self.set_first_last(node)
+
+    # overwritten (but not changed) to keep pair of visit/depart_definition
+    def depart_definition(self, node):
+        self.body.append('</dd>\n')
+
     def visit_termsep(self, node):
+        warnings.warn('sphinx.addnodes.termsep will be removed at Sphinx-1.5',
+                      DeprecationWarning)
         self.body.append('<br />')
         raise nodes.SkipNode

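For orientation, roughly what the HTML builder emits for a glossary entry with two terms once each term is its own node (illustrative strings only, written by hand; exact ids, attributes, and CSS classes depend on the Sphinx and docutils versions):

    # Illustrative only: each term now opens and closes its own <dt>,
    # so two terms give two <dt> elements followed by the shared <dd>.
    new_markup = (
        '<dt id="term-foo">foo</dt>\n'
        '<dt id="term-bar">bar</dt>\n'
        '<dd>shared definition</dd>\n'
    )

    # Previously both terms shared a single <dt>, with termsep rendered
    # as a line break between them.
    old_markup = '<dt id="term-foo">foo<br />bar</dt>\n'
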
@@ -15,6 +15,7 @@
 import re
 import sys
 from os import path
+import warnings

 from six import itervalues, text_type
 from docutils import nodes, writers

@@ -1223,6 +1224,8 @@ class LaTeXTranslator(nodes.NodeVisitor):
         self.in_term -= 1

     def visit_termsep(self, node):
+        warnings.warn('sphinx.addnodes.termsep will be removed at Sphinx-1.5',
+                      DeprecationWarning)
         self.body.append(', ')
         raise nodes.SkipNode

@@ -9,6 +9,8 @@
     :license: BSD, see LICENSE for details.
 """

+import warnings
+
 from docutils import nodes
 from docutils.writers.manpage import (
     MACRO_DEF,

@@ -201,6 +203,8 @@ class ManualPageTranslator(BaseTranslator):
         self.depart_paragraph(node)

     def visit_termsep(self, node):
+        warnings.warn('sphinx.addnodes.termsep will be removed at Sphinx-1.5',
+                      DeprecationWarning)
         self.body.append(', ')
         raise nodes.SkipNode

@@ -12,6 +12,7 @@
 import re
 import textwrap
 from os import path
+import warnings

 from six import itervalues
 from six.moves import range

@@ -953,6 +954,8 @@ class TexinfoTranslator(nodes.NodeVisitor):
         pass

     def visit_termsep(self, node):
+        warnings.warn('sphinx.addnodes.termsep will be removed at Sphinx-1.5',
+                      DeprecationWarning)
         self.body.append('\n%s ' % self.at_item_x)

     def depart_termsep(self, node):

@@ -12,6 +12,7 @@ import os
 import re
 import textwrap
 from itertools import groupby
+import warnings

 from six.moves import zip_longest

@@ -641,6 +642,8 @@ class TextTranslator(nodes.NodeVisitor):
         self.end_state(end=None)

     def visit_termsep(self, node):
+        warnings.warn('sphinx.addnodes.termsep will be removed at Sphinx-1.5',
+                      DeprecationWarning)
         self.add_text(', ')
         raise nodes.SkipNode