mirror of https://github.com/sphinx-doc/sphinx.git

Merge branch '1.8'

This commit is contained in: commit 16f47cded9
@@ -92,6 +92,12 @@ Bugs fixed
 * #5419: incompatible math_block node has been generated
 * #5548: Fix ensuredir() in case of pre-existing file
 * #5549: graphviz: Correctly deal with non-existing static dir
+* #3002: i18n: multiple footnote_references referring same footnote causes
+  duplicated node_ids
+* #5563: latex: footnote_references generated by extension causes LaTeX builder
+  crashed
+* #5561: make all-pdf fails with old xindy version
+* #5557: quickstart: --no-batchfile isn't honored

 Testing
 --------
@@ -349,14 +349,14 @@ class Builder:
         else:
             logger.info(__('none found'))

-        # save the environment
-        from sphinx.application import ENV_PICKLE_FILENAME
-        logger.info(bold(__('pickling environment... ')), nonl=True)
-        with open(path.join(self.doctreedir, ENV_PICKLE_FILENAME), 'wb') as f:
-            pickle.dump(self.env, f, pickle.HIGHEST_PROTOCOL)
-        logger.info(__('done'))
-
+        if updated_docnames:
+            # save the environment
+            from sphinx.application import ENV_PICKLE_FILENAME
+            logger.info(bold(__('pickling environment... ')), nonl=True)
+            with open(path.join(self.doctreedir, ENV_PICKLE_FILENAME), 'wb') as f:
+                pickle.dump(self.env, f, pickle.HIGHEST_PROTOCOL)
+            logger.info(__('done'))

         # global actions
         self.app.phase = BuildPhase.CONSISTENCY_CHECK
         logger.info(bold(__('checking consistency... ')), nonl=True)
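The Builder hunk above moves the environment pickling under `if updated_docnames:`, so the environment is only re-serialized when the incremental read actually updated some documents; a no-op rebuild no longer rewrites the pickle file. A minimal standalone sketch of the guard (hypothetical helper, assuming the pickle filename is environment.pickle as in Sphinx):

    import os
    import pickle

    def save_environment(env, doctreedir, updated_docnames):
        # Mirror of the new guard: skip re-pickling when nothing was re-read.
        if not updated_docnames:
            return False
        with open(os.path.join(doctreedir, 'environment.pickle'), 'wb') as f:
            pickle.dump(env, f, pickle.HIGHEST_PROTOCOL)
        return True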
@@ -27,7 +27,7 @@ URI_SCHEMES = ('mailto:', 'http:', 'https:', 'ftp:')

 class FootnoteDocnameUpdater(SphinxTransform):
     """Add docname to footnote and footnote_reference nodes."""
-    default_priority = 200
+    default_priority = 700
     TARGET_NODES = (nodes.footnote, nodes.footnote_reference)

     def apply(self):
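Raising `default_priority` from 200 to 700 defers this transform so that it runs after lower-priority transforms, including ones registered by extensions that inject their own footnote_reference nodes, which is presumably the #5563 scenario from the changelog. Transforms are applied in ascending priority order; a small illustrative sketch (dummy classes, not the real Sphinx registry):

    class DummyTransform:
        default_priority = 500

        def apply(self):
            print('%s (priority %d)' % (type(self).__name__, self.default_priority))

    class ExtensionAddsFootnotes(DummyTransform):
        default_priority = 600           # hypothetical extension transform

    class FootnoteDocnameUpdaterSketch(DummyTransform):
        default_priority = 700           # was 200: it used to run far too early

    # lower priority runs first, so the updater now sees the extension's nodes
    for transform in sorted([FootnoteDocnameUpdaterSketch(), ExtensionAddsFootnotes()],
                            key=lambda t: t.default_priority):
        transform.apply()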
@@ -528,7 +528,7 @@ Makefile to be used with sphinx-build.
 """))

     parser.add_argument('-q', '--quiet', action='store_true', dest='quiet',
-                        default=False,
+                        default=None,
                         help=__('quiet mode'))
     parser.add_argument('--version', action='version', dest='show_version',
                         version='%%(prog)s %s' % __display_version__)
@@ -537,7 +537,7 @@ Makefile to be used with sphinx-build.
                         help=__('output path'))

     group = parser.add_argument_group(__('Structure options'))
-    group.add_argument('--sep', action='store_true',
+    group.add_argument('--sep', action='store_true', default=None,
                        help=__('if specified, separate source and build dirs'))
     group.add_argument('--dot', metavar='DOT',
                        help=__('replacement for dot in _templates etc.'))
@@ -569,11 +569,11 @@ Makefile to be used with sphinx-build.
                        action='append', help=__('enable arbitrary extensions'))

     group = parser.add_argument_group(__('Makefile and Batchfile creation'))
-    group.add_argument('--makefile', action='store_true', dest='makefile',
+    group.add_argument('--makefile', action='store_true', dest='makefile', default=None,
                        help=__('create makefile'))
     group.add_argument('--no-makefile', action='store_false', dest='makefile',
                        help=__('do not create makefile'))
-    group.add_argument('--batchfile', action='store_true', dest='batchfile',
+    group.add_argument('--batchfile', action='store_true', dest='batchfile', default=None,
                        help=__('create batchfile'))
     group.add_argument('--no-batchfile', action='store_false',
                        dest='batchfile',
@@ -613,7 +613,7 @@ def main(argv=sys.argv[1:]):

     d = vars(args)
     # delete None or False value
-    d = dict((k, v) for k, v in d.items() if not (v is None or v is False))
+    d = dict((k, v) for k, v in d.items() if v is not None)

     try:
         if 'quiet' in d:
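These quickstart hunks are what make #5557 work: with `action='store_true', default=False`, an explicit `--no-batchfile` (`store_false`) produced `False`, and `main()` then filtered out both `None` and `False`, so the user's choice was dropped and quickstart fell back to its default behaviour. Giving the paired options a default of `None` and filtering only `None` lets an explicit `False` survive. A standalone sketch of the pattern (not the real quickstart parser):

    import argparse

    parser = argparse.ArgumentParser()
    # Paired flags sharing one dest: default must be None so that "flag not
    # given" (None) can be told apart from an explicit --no-batchfile (False).
    parser.add_argument('--batchfile', dest='batchfile',
                        action='store_true', default=None)
    parser.add_argument('--no-batchfile', dest='batchfile', action='store_false')

    opts = vars(parser.parse_args(['--no-batchfile']))
    # keep only options the user actually set; False must survive this filter
    given = {k: v for k, v in opts.items() if v is not None}
    print(given)   # {'batchfile': False} -- previously this dict came out empty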
@@ -33,8 +33,6 @@ XINDYOPTS += -M LICRcyr2utf8.xdy
 {% if xindy_cyrillic -%}
 XINDYOPTS += -M LatinRules.xdy
 {% endif -%}
-# also with pdflatex as LICRlatin2utf8.xdy replaces xindy's /tex/inputenc/utf8.xdy
-XINDYOPTS += -I xelatex
 {% endif -%}
 # format: pdf or dvi (used only by archive targets)
 FMT = pdf
@@ -4,6 +4,9 @@ heading_prefix " \\bigletter "

 preamble "\\begin{sphinxtheindex}
 \\let\\bigletter\\sphinxstyleindexlettergroup
+\\let\\spxpagem \\sphinxstyleindexpagemain
+\\let\\spxentry \\sphinxstyleindexentry
+\\let\\spxextra \\sphinxstyleindexextra

"
@@ -1623,8 +1623,11 @@

 % additional customizable styling
 \def\sphinxstyleindexentry #1{\texttt{#1}}
-\def\sphinxstyleindexextra #1{ \emph{(#1)}}
+\def\sphinxstyleindexextra #1{ (\emph{#1})}
 \def\sphinxstyleindexpageref #1{, \pageref{#1}}
+\def\sphinxstyleindexpagemain#1{\textbf{#1}}
+\protected\def\spxentry#1{#1}% will get \let to \sphinxstyleindexentry in index
+\protected\def\spxextra#1{#1}% will get \let to \sphinxstyleindexextra in index
 \def\sphinxstyleindexlettergroup #1%
  {{\Large\sffamily#1}\nopagebreak\vspace{1mm}}
 \def\sphinxstyleindexlettergroupDefault #1%
@@ -3,11 +3,12 @@
 ;; Unfortunately xindy is out-of-the-box hyperref-incompatible. This
 ;; configuration is a workaround, which requires to pass option
 ;; hyperindex=false to hyperref.
-;; textit and emph not currently used by Sphinx LaTeX writer.
-(define-attributes (("textbf" "textit" "emph" "default")))
+;; textit and emph not currently used, spxpagem replaces former textbf
+(define-attributes (("textbf" "textit" "emph" "spxpagem" "default")))
 (markup-locref :open "\textbf{\hyperpage{" :close "}}" :attr "textbf")
 (markup-locref :open "\textit{\hyperpage{" :close "}}" :attr "textit")
 (markup-locref :open "\emph{\hyperpage{" :close "}}" :attr "emph")
+(markup-locref :open "\spxpagem{\hyperpage{" :close "}}" :attr "spxpagem")
 (markup-locref :open "\hyperpage{" :close "}" :attr "default")

 (require "numeric-sort.xdy")
@@ -193,6 +194,9 @@
 (markup-index :open "\begin{sphinxtheindex}
 \let\lettergroup\sphinxstyleindexlettergroup
 \let\lettergroupDefault\sphinxstyleindexlettergroupDefault
+\let\spxpagem\sphinxstyleindexpagemain
+\let\spxentry\sphinxstyleindexentry
+\let\spxextra\sphinxstyleindexextra

"
 :close "
@@ -354,7 +354,7 @@ class Locale(SphinxTransform):
             is_refnamed_footnote_ref = NodeMatcher(nodes.footnote_reference, refname=Any)
             old_foot_refs = node.traverse(is_refnamed_footnote_ref)
             new_foot_refs = patch.traverse(is_refnamed_footnote_ref)
-            refname_ids_map = {}
+            refname_ids_map = {}  # type: Dict[unicode, List[unicode]]
             if len(old_foot_refs) != len(new_foot_refs):
                 old_foot_ref_rawsources = [ref.rawsource for ref in old_foot_refs]
                 new_foot_ref_rawsources = [ref.rawsource for ref in new_foot_refs]
@@ -363,11 +363,11 @@ class Locale(SphinxTransform):
                                .format(old_foot_ref_rawsources, new_foot_ref_rawsources),
                                location=node)
             for old in old_foot_refs:
-                refname_ids_map[old["refname"]] = old["ids"]
+                refname_ids_map.setdefault(old["refname"], []).append(old["ids"])
             for new in new_foot_refs:
                 refname = new["refname"]
-                if refname in refname_ids_map:
-                    new["ids"] = refname_ids_map[refname]
+                if refname_ids_map.get(refname):
+                    new["ids"] = refname_ids_map[refname].pop(0)

             # citation should use original 'ids'.
             is_citation_ref = NodeMatcher(nodes.citation_reference, refname=Any)
@@ -382,11 +382,11 @@ class Locale(SphinxTransform):
                                .format(old_cite_ref_rawsources, new_cite_ref_rawsources),
                                location=node)
             for old in old_cite_refs:
-                refname_ids_map[old["refname"]] = old["ids"]
+                refname_ids_map.setdefault(old["refname"], []).append(old["ids"])
             for new in new_cite_refs:
                 refname = new["refname"]
-                if refname in refname_ids_map:
-                    new["ids"] = refname_ids_map[refname]
+                if refname_ids_map.get(refname):
+                    new["ids"] = refname_ids_map[refname].pop()

             # Original pending_xref['reftarget'] contain not-translated
             # target name, new pending_xref must use original one.
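The Locale change above is the #3002 fix: the map now collects a list of original `ids` per refname and hands one entry back per translated reference, so two footnote_references pointing at the same footnote keep distinct node ids instead of sharing one. A standalone sketch of the bookkeeping (plain dicts instead of docutils nodes):

    # Old refs carry the ids assigned when the original document was parsed;
    # new (translated) refs need those ids back, one per reference.
    old_refs = [{'refname': '100', 'ids': ['id1']},
                {'refname': '100', 'ids': ['id2']}]
    new_refs = [{'refname': '100', 'ids': []},
                {'refname': '100', 'ids': []}]

    refname_ids_map = {}
    for old in old_refs:
        # keep every original ids list, not just the last one seen
        refname_ids_map.setdefault(old['refname'], []).append(old['ids'])

    for new in new_refs:
        refname = new['refname']
        if refname_ids_map.get(refname):
            new['ids'] = refname_ids_map[refname].pop(0)

    assert new_refs[0]['ids'] == ['id1']
    assert new_refs[1]['ids'] == ['id2']   # previously both ended up as ['id2']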
@@ -167,6 +167,8 @@ ADDITIONAL_SETTINGS = {
     },
 }  # type: Dict[unicode, Dict[unicode, unicode]]

+EXTRA_RE = re.compile(r'^(.*\S)\s+\(([^()]*)\)\s*$')
+

 class collected_footnote(nodes.footnote):
     """Footnotes that are collected are assigned this class."""
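`EXTRA_RE` is new here and is used by the `style()` helper added further down in `visit_index()`: it splits an index entry of the form "name (extra)" into the name and the trailing parenthesized part. A quick standalone check of the pattern:

    import re

    # Same pattern as EXTRA_RE above.
    EXTRA_RE = re.compile(r'^(.*\S)\s+\(([^()]*)\)\s*$')

    print(EXTRA_RE.match('Timer (class in threading)').groups())
    # -> ('Timer', 'class in threading')
    print(EXTRA_RE.match('Timer'))
    # -> None; entries without an "(extra)" part get a plain \spxentry{} wrapper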
@@ -1829,8 +1831,8 @@ class LaTeXTranslator(nodes.NodeVisitor):
         # type: (nodes.Node) -> None
         self.body.append('\n\\end{flushright}\n')

-    def visit_index(self, node, scre=re.compile(r';\s*')):
-        # type: (nodes.Node, Pattern) -> None
+    def visit_index(self, node, scre = None):
+        # type: (nodes.Node, None) -> None
         def escape(value):
             value = self.encode(value)
             value = value.replace(r'\{', r'\sphinxleftcurlybrace{}')
@@ -1840,33 +1842,57 @@ class LaTeXTranslator(nodes.NodeVisitor):
             value = value.replace('!', '"!')
             return value

+        def style(string):
+            match = EXTRA_RE.match(string)
+            if match:
+                return match.expand(r'\\spxentry{\1}\\spxextra{\2}')
+            else:
+                return '\\spxentry{%s}' % string
+
+        if scre:
+            warnings.warn(('LaTeXTranslator.visit_index() optional argument '
+                           '"scre" is deprecated. It is ignored.'),
+                          RemovedInSphinx30Warning, stacklevel=2)
         if not node.get('inline', True):
             self.body.append('\n')
         entries = node['entries']
         for type, string, tid, ismain, key_ in entries:
             m = ''
             if ismain:
-                m = '|textbf'
+                m = '|spxpagem'
             try:
                 if type == 'single':
-                    p = scre.sub('!', escape(string))
-                    self.body.append(r'\index{%s%s}' % (p, m))
+                    try:
+                        p1, p2 = [escape(x) for x in split_into(2, 'single', string)]
+                        P1, P2 = style(p1), style(p2)
+                        self.body.append(r'\index{%s@%s!%s@%s%s}' % (p1, P1, p2, P2, m))
+                    except ValueError:
+                        p = escape(split_into(1, 'single', string)[0])
+                        P = style(p)
+                        self.body.append(r'\index{%s@%s%s}' % (p, P, m))
                 elif type == 'pair':
                     p1, p2 = [escape(x) for x in split_into(2, 'pair', string)]
-                    self.body.append(r'\index{%s!%s%s}\index{%s!%s%s}' %
-                                     (p1, p2, m, p2, p1, m))
+                    P1, P2 = style(p1), style(p2)
+                    self.body.append(r'\index{%s@%s!%s@%s%s}\index{%s@%s!%s@%s%s}' %
+                                     (p1, P1, p2, P2, m, p2, P2, p1, P1, m))
                 elif type == 'triple':
                     p1, p2, p3 = [escape(x) for x in split_into(3, 'triple', string)]
+                    P1, P2, P3 = style(p1), style(p2), style(p3)
                     self.body.append(
-                        r'\index{%s!%s %s%s}\index{%s!%s, %s%s}'
-                        r'\index{%s!%s %s%s}' %
-                        (p1, p2, p3, m, p2, p3, p1, m, p3, p1, p2, m))
+                        r'\index{%s@%s!%s %s@%s %s%s}'
+                        r'\index{%s@%s!%s, %s@%s, %s%s}'
+                        r'\index{%s@%s!%s %s@%s %s%s}' %
+                        (p1, P1, p2, p3, P2, P3, m,
+                         p2, P2, p3, p1, P3, P1, m,
+                         p3, P3, p1, p2, P1, P2, m))
                 elif type == 'see':
                     p1, p2 = [escape(x) for x in split_into(2, 'see', string)]
-                    self.body.append(r'\index{%s|see{%s}}' % (p1, p2))
+                    P1 = style(p1)
+                    self.body.append(r'\index{%s@%s|see{%s}}' % (p1, P1, p2))
                 elif type == 'seealso':
                     p1, p2 = [escape(x) for x in split_into(2, 'seealso', string)]
-                    self.body.append(r'\index{%s|see{%s}}' % (p1, p2))
+                    P1 = style(p1)
+                    self.body.append(r'\index{%s@%s|see{%s}}' % (p1, P1, p2))
                 else:
                     logger.warning(__('unknown index entry type %s found'), type)
             except ValueError as err:
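Taken together with the sphinx.sty, makeindex and xindy hunks above, the writer now emits index entries with an explicit sort key before `@`, wraps the displayed text in `\spxentry`/`\spxextra`, and marks main entries with `|spxpagem` instead of `|textbf`; the index processors then `\let` those macros to the customizable `\sphinxstyleindex*` definitions. A rough sketch of the strings the new code produces (simplified escaping, not the real LaTeXTranslator):

    import re

    EXTRA_RE = re.compile(r'^(.*\S)\s+\(([^()]*)\)\s*$')

    def style(string):
        match = EXTRA_RE.match(string)
        if match:
            return match.expand(r'\\spxentry{\1}\\spxextra{\2}')
        return '\\spxentry{%s}' % string

    def single_index_entry(p, ismain=False):
        # mirrors the 'single' branch above for an entry without '!' subparts
        m = '|spxpagem' if ismain else ''
        return r'\index{%s@%s%s}' % (p, style(p), m)

    print(single_index_entry('famous'))
    # \index{famous@\spxentry{famous}}
    print(single_index_entry('boson', ismain=True))
    # \index{boson@\spxentry{boson}|spxpagem}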
@@ -19,8 +19,10 @@ msgstr ""
 msgid "i18n with Footnote"
 msgstr "I18N WITH FOOTNOTE"

-msgid "[100]_ Contents [#]_ for `i18n with Footnote`_ [ref]_ [#named]_ [*]_."
-msgstr "`I18N WITH FOOTNOTE`_ INCLUDE THIS CONTENTS [#named]_ [ref]_ [#]_ [100]_ [*]_."
+msgid "[100]_ Contents [#]_ for `i18n with Footnote`_ [ref]_ [#named]_ [*]_. "
+"second footnote_ref [100]_."
+msgstr "`I18N WITH FOOTNOTE`_ INCLUDE THIS CONTENTS [#named]_ [ref]_ [#]_ [100]_ [*]_. "
+"SECOND FOOTNOTE_REF [100]_."

 msgid "This is a auto numbered footnote."
 msgstr "THIS IS A AUTO NUMBERED FOOTNOTE."
@@ -5,6 +5,7 @@ i18n with Footnote
 .. #955 cant-build-html-with-footnotes-when-using

 [100]_ Contents [#]_ for `i18n with Footnote`_ [ref]_ [#named]_ [*]_.
+second footnote_ref [100]_.

 .. [#] This is a auto numbered footnote.
 .. [ref] This is a named footnote.
@@ -1227,9 +1227,13 @@ def test_latex_index(app, status, warning):
     app.builder.build_all()

     result = (app.outdir / 'Python.tex').text(encoding='utf8')
-    assert 'A \\index{famous}famous \\index{equation}equation:\n' in result
-    assert '\n\\index{Einstein}\\index{relativity}\\ignorespaces \nand' in result
-    assert '\n\\index{main \\sphinxleftcurlybrace{}}\\ignorespaces ' in result
+    assert ('A \\index{famous@\\spxentry{famous}}famous '
+            '\\index{equation@\\spxentry{equation}}equation:\n' in result)
+    assert ('\n\\index{Einstein@\\spxentry{Einstein}}'
+            '\\index{relativity@\\spxentry{relativity}}'
+            '\\ignorespaces \nand') in result
+    assert ('\n\\index{main \\sphinxleftcurlybrace{}@\\spxentry{'
+            'main \\sphinxleftcurlybrace{}}}\\ignorespaces ' in result)


 @pytest.mark.sphinx('latex', testroot='latex-equations')
@@ -1297,20 +1301,22 @@ def test_latex_glossary(app, status, warning):
     app.builder.build_all()

     result = (app.outdir / 'test.tex').text(encoding='utf8')
-    assert (u'\\item[{änhlich\\index{änhlich|textbf}\\phantomsection'
+    assert (u'\\item[{änhlich\\index{änhlich@\\spxentry{änhlich}|spxpagem}'
+            r'\phantomsection'
             r'\label{\detokenize{index:term-anhlich}}}] \leavevmode' in result)
-    assert (r'\item[{boson\index{boson|textbf}\phantomsection'
+    assert (r'\item[{boson\index{boson@\spxentry{boson}|spxpagem}\phantomsection'
             r'\label{\detokenize{index:term-boson}}}] \leavevmode' in result)
-    assert (r'\item[{\sphinxstyleemphasis{fermion}\index{fermion|textbf}'
+    assert (r'\item[{\sphinxstyleemphasis{fermion}'
+            r'\index{fermion@\spxentry{fermion}|spxpagem}'
             r'\phantomsection'
             r'\label{\detokenize{index:term-fermion}}}] \leavevmode' in result)
-    assert (r'\item[{tauon\index{tauon|textbf}\phantomsection'
+    assert (r'\item[{tauon\index{tauon@\spxentry{tauon}|spxpagem}\phantomsection'
             r'\label{\detokenize{index:term-tauon}}}] \leavevmode'
-            r'\item[{myon\index{myon|textbf}\phantomsection'
+            r'\item[{myon\index{myon@\spxentry{myon}|spxpagem}\phantomsection'
             r'\label{\detokenize{index:term-myon}}}] \leavevmode'
-            r'\item[{electron\index{electron|textbf}\phantomsection'
+            r'\item[{electron\index{electron@\spxentry{electron}|spxpagem}\phantomsection'
             r'\label{\detokenize{index:term-electron}}}] \leavevmode' in result)
-    assert (u'\\item[{über\\index{über|textbf}\\phantomsection'
+    assert (u'\\item[{über\\index{über@\\spxentry{über}|spxpagem}\\phantomsection'
             r'\label{\detokenize{index:term-uber}}}] \leavevmode' in result)

@@ -780,9 +780,13 @@ def test_xml_footnotes(app, warning):
     assert_elem(
         para0[0],
         ['I18N WITH FOOTNOTE', 'INCLUDE THIS CONTENTS',
-         '2', '[ref]', '1', '100', '*', '.'],
+         '2', '[ref]', '1', '100', '*', '. SECOND FOOTNOTE_REF', '100', '.'],
         ['i18n-with-footnote', 'ref'])

+    # check node_id for footnote_references which refer same footnote (refs: #3002)
+    assert para0[0][4].text == para0[0][6].text == '100'
+    assert para0[0][4].attrib['ids'] != para0[0][6].attrib['ids']
+
     footnote0 = secs[0].findall('footnote')
     assert_elem(
         footnote0[0],
@@ -834,8 +838,8 @@ def test_xml_footnote_backlinks(app):
     footnote0 = secs[0].findall('footnote')
     for footnote in footnote0:
         ids = footnote.attrib.get('ids')
-        backrefs = footnote.attrib.get('backrefs')
-        assert refid2id[ids] == backrefs
+        backrefs = footnote.attrib.get('backrefs').split()
+        assert refid2id[ids] in backrefs


 @sphinx_intl
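The backlinks test adapts to the #3002 fix as well: a footnote referenced twice now carries one backref id per reference, stored space-separated in its `backrefs` attribute, so the test splits the attribute and checks membership instead of equality. A minimal illustration of the attribute shape this assumes (hypothetical element, not real builder output):

    import xml.etree.ElementTree as ElementTree

    # One footnote referenced twice, so 'backrefs' holds two space-separated ids.
    xml = '<footnote ids="footnote-100" backrefs="id1 id2"/>'
    footnote = ElementTree.fromstring(xml)

    backrefs = footnote.attrib.get('backrefs').split()
    assert 'id1' in backrefs and 'id2' in backrefs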