Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

merge with 0.6

Commit 2acec48b86

CHANGES: +4 lines

@@ -62,6 +62,10 @@ Release 1.0 (in development)

 Release 0.6.4 (in development)
 ==============================

+* Restore compatibility with Pygments >= 1.2.
+
+* #295: Fix escaping of hyperref targets in LaTeX output.
+
 * #302: Fix links generated by the ``:doc:`` role for LaTeX output.

 * #286: collect todo nodes after the whole document has been read;

@@ -133,8 +133,8 @@ tables of contents. The ``toctree`` directive is the central element.

 In the end, all documents in the :term:`source directory` (or subdirectories)
 must occur in some ``toctree`` directive; Sphinx will emit a warning if it
 finds a file that is not included, because that means that this file will not
-be reachable through standard navigation. Use :confval:`unused_documents` to
-explicitly exclude documents from building, and :confval:`exclude_dirs` to
+be reachable through standard navigation. Use :confval:`unused_docs` to
+explicitly exclude documents from building, and :confval:`exclude_trees` to
 exclude whole directories.

 The "master document" (selected by :confval:`master_doc`) is the "root" of

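Both config values named in the corrected text are set in ``conf.py``. A minimal sketch, with made-up document and directory names::

    # conf.py -- illustrative values only; adjust to the real project layout
    master_doc = 'index'

    # Documents deliberately not referenced from any toctree (per the text above).
    unused_docs = ['draft-notes']

    # Whole source directories to exclude from building.
    exclude_trees = ['_build', 'old-api']
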
@@ -20,7 +20,7 @@ except ImportError:
     # parser is not available on Jython
     parser = None

-from sphinx.util.texescape import tex_hl_escape_map
+from sphinx.util.texescape import tex_hl_escape_map_old, tex_hl_escape_map_new

 try:
     import pygments

@@ -130,7 +130,7 @@ class PygmentsBridge(object):
         # first, escape highlighting characters like Pygments does
         source = source.translate(escape_hl_chars)
         # then, escape all characters nonrepresentable in LaTeX
-        source = source.translate(tex_hl_escape_map)
+        source = source.translate(tex_hl_escape_map_old)
         return '\\begin{Verbatim}[commandchars=@\\[\\]]\n' + \
                source + '\\end{Verbatim}\n'

@@ -215,7 +215,10 @@ class PygmentsBridge(object):
                 return highlight(source, lexer, self.fmter[bool(linenos)])
             else:
                 hlsource = highlight(source, lexer, self.fmter[bool(linenos)])
-                return hlsource.translate(tex_hl_escape_map)
+                if hlsource.startswith(r'\begin{Verbatim}[commandchars=\\\{\}'):
+                    # Pygments >= 1.2
+                    return hlsource.translate(tex_hl_escape_map_new)
+                return hlsource.translate(tex_hl_escape_map_old)
         except ErrorToken:
             # this is most probably not the selected language,
             # so let it pass unhighlighted

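The reason for the branch: the escape sequences injected by the translation map must use the same command characters as the ``Verbatim`` environment Pygments emitted, or they come out as literal text. A minimal sketch of that idea (not Sphinx's actual helper; the new-style prefix is taken from the hunk above, the old ``@ [ ]`` style is assumed from the unhighlighted-block hunk)::

    # Pick the escape table matching the fancyvrb commandchars in the
    # formatter output.
    def pick_escape_map(hlsource, old_map, new_map):
        if hlsource.startswith(r'\begin{Verbatim}[commandchars=\\\{\}'):
            return new_map   # Pygments >= 1.2: escapes written with \ { }
        return old_map       # older Pygments (assumed): escapes with @ [ ]
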
@@ -278,7 +278,6 @@ def patfilter(names, pat):
     Return the subset of the list NAMES that match PAT.
     Adapted from fnmatch module.
     """
-    result = []
     if pat not in _pat_cache:
         _pat_cache[pat] = re.compile(_translate_pattern(pat))
     match = _pat_cache[pat].match

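The removed ``result = []`` was dead code; the function works from the compiled, cached pattern. A self-contained sketch of the same compile-and-cache approach using the standard library's ``fnmatch.translate`` (Sphinx's own ``_translate_pattern`` has slightly extended glob semantics that this sketch does not reproduce)::

    import fnmatch
    import re

    _pat_cache = {}

    def patfilter(names, pat):
        """Return the subset of NAMES matching PAT (plain fnmatch rules)."""
        if pat not in _pat_cache:
            _pat_cache[pat] = re.compile(fnmatch.translate(pat))
        match = _pat_cache[pat].match
        return [name for name in names if match(name)]

    print(patfilter(['index.rst', 'conf.py', 'api.rst'], '*.rst'))
    # -> ['index.rst', 'api.rst']
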
@@ -409,7 +408,6 @@ def movefile(source, dest):
 def copytimes(source, dest):
     """Copy a file's modification times."""
     st = os.stat(source)
-    mode = stat.S_IMODE(st.st_mode)
     if hasattr(os, 'utime'):
         os.utime(dest, (st.st_atime, st.st_mtime))

@@ -99,8 +99,9 @@ tex_replacements = [
 ]

 tex_escape_map = {}
-tex_hl_escape_map = {}
-_new_cmd_chars = {ord(u'\\'): u'@', ord(u'{'): u'[', ord(u'}'): u']'}
+tex_hl_escape_map_old = {}  # replacement map for Pygments <= 1.1
+tex_hl_escape_map_new = {}  # replacement map for Pygments >= 1.2
+_old_cmd_chars = {ord(u'\\'): u'@', ord(u'{'): u'[', ord(u'}'): u']'}

 def init():
     for a, b in tex_replacements:

@@ -108,4 +109,5 @@ def init():

     for a, b in tex_replacements:
         if a in u'[]{}\\': continue
-        tex_hl_escape_map[ord(a)] = b.translate(_new_cmd_chars)
+        tex_hl_escape_map_new[ord(a)] = b
+        tex_hl_escape_map_old[ord(a)] = b.translate(_old_cmd_chars)

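To see what the split buys, here is a toy reconstruction of one entry in each map; the replacement chosen for ``^`` is recalled from ``tex_replacements`` and may not be letter-exact::

    # -*- coding: utf-8 -*-
    # Toy illustration of the two escape tables (Python 2 era, like the code above).
    _old_cmd_chars = {ord(u'\\'): u'@', ord(u'{'): u'[', ord(u'}'): u']'}

    replacement = u'\\textasciicircum{}'      # assumed entry for u'^'
    tex_hl_escape_map_new = {ord(u'^'): replacement}
    tex_hl_escape_map_old = {ord(u'^'): replacement.translate(_old_cmd_chars)}

    sample = u'a ^ b'
    print(sample.translate(tex_hl_escape_map_new))   # a \textasciicircum{} b
    print(sample.translate(tex_hl_escape_map_old))   # a @textasciicircum[] b
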
@@ -253,6 +253,9 @@ class LaTeXTranslator(nodes.NodeVisitor):
         return (HEADER % self.elements + self.highlighter.get_stylesheet() +
                 u''.join(self.body) + FOOTER % self.elements)

+    def idescape(self, id):
+        return str(unicode(id).translate(tex_escape_map))
+
     def visit_document(self, node):
         self.footnotestack.append(self.collect_footnotes(node))
         self.curfilestack.append(node.get('docname', ''))

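The new ``idescape`` helper exists because node ids can contain characters such as ``_`` that are unsafe when interpolated into ``\hypertarget`` (the #295 fix in CHANGES). A minimal sketch of the effect, assuming ``tex_escape_map`` maps ``_`` to ``\_`` (Python 2, matching the surrounding code)::

    # Stand-in for the real tex_escape_map, which covers many more characters.
    tex_escape_map = {ord(u'_'): u'\\_'}

    def idescape(id):
        return str(unicode(id).translate(tex_escape_map))

    hyper = '\\hypertarget{%s}{}' % idescape(u'module.some_function')
    print(hyper)   # \hypertarget{module.some\_function}{}
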
@@ -466,7 +469,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
         d = self.descstack[-1]
         d.cls = d.cls.rstrip('.')
         if node.parent['desctype'] != 'describe' and node['ids']:
-            hyper = '\\hypertarget{%s}{}' % node['ids'][0]
+            hyper = '\\hypertarget{%s}{}' % self.idescape(node['ids'][0])
         else:
             hyper = ''
         if d.count == 0:

@@ -757,7 +760,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
     def visit_term(self, node):
         ctx = '] \\leavevmode'
         if node.has_key('ids') and node['ids']:
-            ctx += '\\hypertarget{%s}{}' % node['ids'][0]
+            ctx += '\\hypertarget{%s}{}' % self.idescape(node['ids'][0])
         self.body.append('\\item[')
         self.context.append(ctx)
     def depart_term(self, node):