Merged revisions 65566-65567,65623,65625 via svnmerge from
svn+ssh://pythondev@svn.python.org/doctools/branches/0.4.x

........
r65566 | georg.brandl | 2008-08-07 09:11:11 +0000 (Thu, 07 Aug 2008) | 2 lines

Clarification for the ref role.
........
r65567 | georg.brandl | 2008-08-07 09:11:25 +0000 (Thu, 07 Aug 2008) | 2 lines

Rebuild everything if extensions change.
........
r65623 | georg.brandl | 2008-08-10 11:18:42 +0000 (Sun, 10 Aug 2008) | 2 lines

Unify handling of LaTeX escaping, and add some more replacements.
........
r65625 | georg.brandl | 2008-08-10 11:25:41 +0000 (Sun, 10 Aug 2008) | 2 lines

Make tex escapes a module.
........
parent ac8a346f45
commit e93af0e57c
Makefile (2 changes)
@@ -4,7 +4,7 @@ export PYTHONPATH = $(shell echo "$$PYTHONPATH"):./sphinx
 
 .PHONY: all check clean clean-pyc clean-patchfiles pylint reindent test
 
-all: clean-pyc check
+all: clean-pyc check test
 
 check:
 	@$(PYTHON) utils/check_sources.py -i sphinx/style/jquery.js sphinx
@@ -182,10 +182,12 @@ The following role creates a cross-reference to the term in the glossary:
 Cross-referencing arbitrary locations
 -------------------------------------
 
-To support cross-referencing to arbitrary locations in the documentation, the
-standard reST labels used. Of course, for this to work label names must be
-unique throughout the entire documentation. There are two ways in which you can
-refer to labels:
+.. index:: pair: ref; role
+
+To support cross-referencing to arbitrary locations in any document, the
+standard reST labels are used. For this to work label names must be unique
+throughout the entire documentation. There are two ways in which you can refer
+to labels:
 
 * If you place a label directly before a section title, you can reference to it
   with ``:ref:`label-name```. Example::
@@ -200,12 +202,17 @@ refer to labels:
    It refers to the section itself, see :ref:`my-reference-label`.
 
 The ``:ref:`` role would then generate a link to the section, with the link
-title being "Section to cross-reference".
+title being "Section to cross-reference". This works just as well when
+section and reference are in different source files.
 
 * Labels that aren't placed before a section title can still be referenced to,
   but you must give the link an explicit title, using this syntax: ``:ref:`Link
   title <label-name>```.
 
+Using :role:`ref` is advised over standard reStructuredText links to sections
+(like ```Section title`_``) because it works across files, when section headings
+are changed, and for all builders that support cross-references.
+
 
 Other semantic markup
 ---------------------
@@ -26,7 +26,7 @@ from docutils.frontend import OptionParser
 from docutils.readers.doctree import Reader as DoctreeReader
 
 from sphinx import addnodes, locale, __version__
-from sphinx.util import ensuredir, relative_uri, SEP, os_path, json
+from sphinx.util import ensuredir, relative_uri, SEP, os_path, json, texescape
 from sphinx.htmlhelp import build_hhx
 from sphinx.htmlwriter import HTMLWriter, HTMLTranslator, SmartyPantsHTMLTranslator
 from sphinx.textwriter import TextWriter
@@ -899,6 +899,7 @@ class LaTeXBuilder(Builder):
     def init(self):
         self.docnames = []
         self.document_data = []
+        texescape.init()
 
     def get_outdated_docs(self):
         return 'all documents' # for now
@@ -418,6 +418,11 @@ class BuildEnvironment:
                     break
             else:
                 msg = ''
+            # this value is not covered by the above loop because it is handled
+            # specially by the config class
+            if self.config.extensions != config.extensions:
+                msg = '[extensions changed] '
+                config_changed = True
         # the source and doctree directories may have been relocated
         self.srcdir = srcdir
         self.doctreedir = doctreedir
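For context, the net effect of this hunk is that changing the extensions list in conf.py now invalidates the saved environment like any other config change, forcing a full rebuild. A minimal, hedged sketch of the check; the FakeConfig class below is a hypothetical stand-in, not a Sphinx class:

# Hypothetical stand-ins for the pickled (old) and freshly read (new) config;
# just enough structure to show the comparison added in the hunk above.
class FakeConfig(object):
    def __init__(self, extensions):
        self.extensions = extensions

old = FakeConfig(['sphinx.ext.autodoc'])
new = FakeConfig(['sphinx.ext.autodoc', 'sphinx.ext.doctest'])

msg, config_changed = '', False
# 'extensions' is handled specially by the config class, so the generic
# value-by-value loop never sees it; it needs this explicit comparison:
if old.extensions != new.extensions:
    msg = '[extensions changed] '
    config_changed = True
print(msg, config_changed)   # the extensions change is detected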
@@ -14,6 +14,8 @@ import cgi
 import re
 import parser
 
+from sphinx.util.texescape import tex_hl_escape_map
+
 try:
     import pygments
     from pygments import highlight
@@ -56,14 +58,9 @@ else:
     _lexer.add_filter('raiseonerror')
 
 
-def escape_tex(text):
-    return text.replace('@', '\x00'). \
-                replace('[', '\x01'). \
-                replace(']', '\x02'). \
-                replace('\x00', '@at[]').\
-                replace('\x01', '@lb[]').\
-                replace('\x02', '@rb[]')
-
+escape_hl_chars = {ord(u'@'): u'@at[]',
+                   ord(u'['): u'@lb[]',
+                   ord(u']'): u'@rb[]'}
 
 # used if Pygments is not available
 _LATEX_STYLES = r'''
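The removed escape_tex() helper chained str.replace() calls; the new escape_hl_chars dict lets a single unicode translate() pass do the same job and compose with the tex_hl_escape_map built by sphinx.util.texescape. A rough sketch of the two stages; the one-entry tex_hl_escape_map below is an illustrative subset, not the real table:

# -*- coding: utf-8 -*-
# Names mirror the diff above; the tex_hl_escape_map entry is a hand-picked
# subset of what sphinx.util.texescape.init() would build.
escape_hl_chars = {ord(u'@'): u'@at[]',
                   ord(u'['): u'@lb[]',
                   ord(u']'): u'@rb[]'}
tex_hl_escape_map = {ord(u'\u221e'): u'@(@infty@)'}   # u'∞'

source = u'a[i] @ \u221e'
# first, protect the Verbatim command characters the way Pygments does
source = source.translate(escape_hl_chars)
# then, escape characters that plain LaTeX cannot represent
source = source.translate(tex_hl_escape_map)
print(source)   # a@lb[]i@rb[] @at[] @(@infty@)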
@@ -98,15 +95,20 @@ class PygmentsBridge(object):
                        True: LatexFormatter(style=style, linenos=True,
                                             commandprefix='PYG')}
 
-    def highlight_block(self, source, lang, linenos=False):
-        def unhighlighted():
+    def unhighlighted(self, source):
         if self.dest == 'html':
             return '<pre>' + cgi.escape(source) + '</pre>\n'
         else:
+            # first, escape highlighting characters like Pygments does
+            source = source.translate(escape_hl_chars)
+            # then, escape all characters nonrepresentable in LaTeX
+            source = source.translate(tex_hl_escape_map)
             return '\\begin{Verbatim}[commandchars=@\\[\\]]\n' + \
-                   escape_tex(source) + '\\end{Verbatim}\n'
+                   source + '\\end{Verbatim}\n'
+
+    def highlight_block(self, source, lang, linenos=False):
         if not pygments:
-            return unhighlighted()
+            return self.unhighlighted(source)
         if lang == 'python':
             if source.startswith('>>>'):
                 # interactive session
@@ -138,7 +140,7 @@ class PygmentsBridge(object):
                 try:
                     parser.suite(src)
                 except parsing_exceptions:
-                    return unhighlighted()
+                    return self.unhighlighted(source)
             else:
                 lexer = lexers['python']
         else:
@@ -148,12 +150,15 @@ class PygmentsBridge(object):
             lexer = lexers[lang] = get_lexer_by_name(lang)
             lexer.add_filter('raiseonerror')
         try:
-            fmter = (self.dest == 'html' and self.hfmter or self.lfmter)[bool(linenos)]
-            return highlight(source, lexer, fmter)
+            if self.dest == 'html':
+                return highlight(source, lexer, self.hfmter[bool(linenos)])
+            else:
+                hlsource = highlight(source, lexer, self.lfmter[bool(linenos)])
+                return hlsource.translate(tex_hl_escape_map)
         except ErrorToken:
             # this is most probably not the selected language,
             # so let it pass unhighlighted
-            return unhighlighted()
+            return self.unhighlighted(source)
 
     def get_stylesheet(self):
         if not pygments:
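With Pygments available, the rewritten dispatch selects the HTML or LaTeX formatter explicitly and, on the LaTeX path, post-processes the highlighted output. A hedged sketch using plain Pygments objects; it assumes Pygments is installed, and hfmter/lfmter simply mirror the attribute names in PygmentsBridge:

# Assumes Pygments is installed; the PYG command prefix matches the bridge.
from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter, LatexFormatter

hfmter = {False: HtmlFormatter(), True: HtmlFormatter(linenos=True)}
lfmter = {False: LatexFormatter(commandprefix='PYG'),
          True: LatexFormatter(linenos=True, commandprefix='PYG')}

source = u"print('highlighted either way')"
dest, linenos = 'latex', False
if dest == 'html':
    out = highlight(source, PythonLexer(), hfmter[bool(linenos)])
else:
    out = highlight(source, PythonLexer(), lfmter[bool(linenos)])
    # in Sphinx the LaTeX output would then be passed through
    # tex_hl_escape_map from sphinx.util.texescape
print(out)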
@@ -23,6 +23,7 @@ from docutils.writers.latex2e import Babel
 from sphinx import addnodes
 from sphinx import highlighting
 from sphinx.locale import admonitionlabels, versionlabels
+from sphinx.util.texescape import tex_escape_map
 from sphinx.util.smartypants import educateQuotesLatex
 
 HEADER = r'''%% Generated by Sphinx.
@@ -42,7 +43,7 @@ HEADER = r'''%% Generated by Sphinx.
 
 BEGIN_DOC = r'''
 \begin{document}
-\shorthandoff{"}
+%(shorthandoff)s
 \maketitle
 \tableofcontents
 '''
@@ -87,6 +88,13 @@ class LaTeXWriter(writers.Writer):
 
 # Helper classes
 
+class ExtBabel(Babel):
+    def get_shorthandoff(self):
+        if self.language == 'de':
+            return '\\shorthandoff{"}'
+        return ''
+
+
 class Table(object):
     def __init__(self):
         self.col = 0
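The new ExtBabel helper feeds the %(shorthandoff)s placeholder added to BEGIN_DOC above, so \shorthandoff{"} is emitted only for German documents rather than unconditionally. A minimal sketch of that substitution; BEGIN_DOC is trimmed to the relevant lines and the language value is a made-up config setting:

# Illustrative only, not the builder code itself.
BEGIN_DOC = '\\begin{document}\n%(shorthandoff)s\n\\maketitle\n'

options = {'shorthandoff': ''}
language = 'de'                       # hypothetical builder.config.language
if language == 'de':
    options['shorthandoff'] = '\\shorthandoff{"}'

print(BEGIN_DOC % options)
# \begin{document}
# \shorthandoff{"}
# \maketitle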
@@ -120,7 +128,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
             paper = 'letterpaper'
         date = time.strftime(builder.config.today_fmt or _('%B %d, %Y'))
         logo = (builder.config.latex_logo and
-                "\\includegraphics{%s}\\par" % path.basename(builder.config.latex_logo)
+                '\\includegraphics{%s}\\par' % path.basename(builder.config.latex_logo)
                 or '')
         self.options = {'docclass': docclass,
                         'papersize': paper,
@@ -135,11 +143,13 @@ class LaTeXTranslator(nodes.NodeVisitor):
                         'releasename': _('Release'),
                         'logo': logo,
                         'date': date,
-                        'classoptions': '',
+                        'classoptions': ',english',
+                        'shorthandoff': '',
                         }
         if builder.config.language:
-            babel = Babel(builder.config.language)
+            babel = ExtBabel(builder.config.language)
             self.options['classoptions'] += ',' + babel.get_language()
+            self.shorthandoff = babel.get_shorthandoff()
         self.highlighter = highlighting.PygmentsBridge(
             'latex', builder.config.pygments_style)
         self.context = []
@@ -1045,42 +1055,12 @@ class LaTeXTranslator(nodes.NodeVisitor):
 
     # text handling
 
-    replacements = [
-        (u"\\", u"\x00"),
-        (u"$", ur"\$"),
-        (r"%", ur"\%"),
-        (u"&", ur"\&"),
-        (u"#", ur"\#"),
-        (u"_", ur"\_"),
-        (u"{", ur"\{"),
-        (u"}", ur"\}"),
-        (u"[", ur"{[}"),
-        (u"]", ur"{]}"),
-        (u"¶", ur"\P{}"),
-        (u"§", ur"\S{}"),
-        (u"∞", ur"$\infty$"),
-        (u"±", ur"$\pm$"),
-        (u"‣", ur"$\rightarrow$"),
-        (u"Ω", ur"$\Omega$"),
-        (u"Ω", ur"$\Omega$"),
-        (u"φ", ur"$\phi$"),
-        (u"π", ur"$\pi$"),
-        (u"~", ur"\textasciitilde{}"),
-        (u"€", ur"\texteuro{}"),
-        (u"<", ur"\textless{}"),
-        (u">", ur"\textgreater{}"),
-        (u"^", ur"\textasciicircum{}"),
-        (u"\x00", ur"\textbackslash{}"),
-        (u"\N{RIGHTWARDS ARROW}", ur"$\rightarrow$"),
-    ]
-
     def encode(self, text):
-        for x, y in self.replacements:
-            text = text.replace(x, y)
+        text = unicode(text).translate(tex_escape_map)
         if self.literal_whitespace:
             # Insert a blank before the newline, to avoid
             # ! LaTeX Error: There's no line here to end.
-            text = text.replace("\n", '~\\\\\n').replace(" ", "~")
+            text = text.replace(u'\n', u'~\\\\\n').replace(u' ', u'~')
         return text
 
     def visit_Text(self, node):
@@ -1101,4 +1081,4 @@ class LaTeXTranslator(nodes.NodeVisitor):
         self.body.append('\n')
 
     def unknown_visit(self, node):
-        raise NotImplementedError("Unknown node: " + node.__class__.__name__)
+        raise NotImplementedError('Unknown node: ' + node.__class__.__name__)
sphinx/util/texescape.py (new file, 107 additions)
@@ -0,0 +1,107 @@
+# -*- coding: utf-8 -*-
+"""
+    sphinx.util.texescape
+    ~~~~~~~~~~~~~~~~~~~~~
+
+    TeX escaping helper.
+
+    :copyright: 2008 by Georg Brandl.
+    :license: BSD.
+"""
+
+tex_replacements = [
+    # map TeX special chars
+    (u'$', ur'\$'),
+    (u'%', ur'\%'),
+    (u'&', ur'\&'),
+    (u'#', ur'\#'),
+    (u'_', ur'\_'),
+    (u'{', ur'\{'),
+    (u'}', ur'\}'),
+    (u'[', ur'{[}'),
+    (u']', ur'{]}'),
+    (u'\\', ur'\textbackslash{}'),
+    (u'~', ur'\textasciitilde{}'),
+    (u'<', ur'\textless{}'),
+    (u'>', ur'\textgreater{}'),
+    (u'^', ur'\textasciicircum{}'),
+    # map special Unicode characters to TeX commands
+    (u'¶', ur'\P{}'),
+    (u'§', ur'\S{}'),
+    (u'€', ur'\texteuro{}'),
+    (u'∞', ur'\(\infty\)'),
+    (u'±', ur'\(\pm\)'),
+    (u'→', ur'\(\rightarrow\)'),
+    (u'‣', ur'\(\rightarrow\)'),
+    # map some special Unicode characters to similar ASCII ones
+    (u'─', ur'-'),
+    (u'⎽', ur'\_'),
+    (u'╲', ur'\textbackslash{}'),
+    (u'│', ur'|'),
+    (u'ℯ', ur'e'),
+    (u'ⅈ', ur'i'),
+    (u'₁', ur'1'),
+    (u'₂', ur'2'),
+    # map Greek alphabet
+    (u'α', ur'\(\alpha\)'),
+    (u'β', ur'\(\beta\)'),
+    (u'γ', ur'\(\gamma\)'),
+    (u'δ', ur'\(\delta\)'),
+    (u'ε', ur'\(\epsilon\)'),
+    (u'ζ', ur'\(\zeta\)'),
+    (u'η', ur'\(\eta\)'),
+    (u'θ', ur'\(\theta\)'),
+    (u'ι', ur'\(\iota\)'),
+    (u'κ', ur'\(\kappa\)'),
+    (u'λ', ur'\(\lambda\)'),
+    (u'μ', ur'\(\mu\)'),
+    (u'ν', ur'\(\nu\)'),
+    (u'ξ', ur'\(\xi\)'),
+    (u'ο', ur'o'),
+    (u'π', ur'\(\pi\)'),
+    (u'ρ', ur'\(\rho\)'),
+    (u'σ', ur'\(\sigma\)'),
+    (u'τ', ur'\(\tau\)'),
+    (u'υ', u'\\(\\upsilon\\)'),
+    (u'φ', ur'\(\phi\)'),
+    (u'χ', ur'\(\chi\)'),
+    (u'ψ', ur'\(\psi\)'),
+    (u'ω', ur'\(\omega\)'),
+    (u'Α', ur'A'),
+    (u'Β', ur'B'),
+    (u'Γ', ur'\(\Gamma\)'),
+    (u'Δ', ur'\(\Delta\)'),
+    (u'Ε', ur'E'),
+    (u'Ζ', ur'Z'),
+    (u'Η', ur'H'),
+    (u'Θ', ur'\(\Theta\)'),
+    (u'Ι', ur'I'),
+    (u'Κ', ur'K'),
+    (u'Λ', ur'\(\Lambda\)'),
+    (u'Μ', ur'M'),
+    (u'Ν', ur'N'),
+    (u'Ξ', ur'\(\Xi\)'),
+    (u'Ο', ur'O'),
+    (u'Π', ur'\(\Pi\)'),
+    (u'Ρ', ur'P'),
+    (u'Σ', ur'\(\Sigma\)'),
+    (u'Τ', ur'T'),
+    (u'Υ', u'\\(\\Upsilon\\)'),
+    (u'Φ', ur'\(\Phi\)'),
+    (u'Χ', ur'X'),
+    (u'Ψ', ur'\(\Psi\)'),
+    (u'Ω', ur'\(\Omega\)'),
+    (u'Ω', ur'\(\Omega\)'),
+]
+
+tex_escape_map = {}
+tex_hl_escape_map = {}
+_new_cmd_chars = {ord(u'\\'): u'@', ord(u'{'): u'[', ord(u'}'): u']'}
+
+def init():
+    for a, b in tex_replacements:
+        tex_escape_map[ord(a)] = b
+
+    for a, b in tex_replacements:
+        if a in u'[]{}\\': continue
+        tex_hl_escape_map[ord(a)] = b.translate(_new_cmd_chars)
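A hedged usage sketch for the new module, assuming the sphinx package from this changeset is importable (Python 2 era code): init() must run once to populate the maps, after which a single translate() call escapes a unicode string; tex_hl_escape_map is the variant for Verbatim blocks, where backslash, { and } are remapped to @, [ and ].

# -*- coding: utf-8 -*-
# Usage sketch, not part of the commit.
from sphinx.util import texescape

texescape.init()   # populates tex_escape_map / tex_hl_escape_map

text = u'50% of Ω → ∞'
print(text.translate(texescape.tex_escape_map))
# 50\% of \(\Omega\) \(\rightarrow\) \(\infty\)
print(text.translate(texescape.tex_hl_escape_map))
# 50@% of @(@Omega@) @(@rightarrow@) @(@infty@)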
@@ -63,10 +63,14 @@ def verify_re(rst, html_expected, latex_expected):
     latex_translator.first_document = -1 # don't write \begin{document}
     document.walkabout(latex_translator)
     latex_translated = ''.join(latex_translator.body).strip()
-    assert re.match(latex_expected, latex_translated), 'from ' + rst
+    assert re.match(latex_expected, latex_translated), 'from ' + repr(rst)
 
 def verify(rst, html_expected, latex_expected):
-    verify_re(rst, re.escape(html_expected) + '$', re.escape(latex_expected) + '$')
+    if html_expected:
+        html_expected = re.escape(html_expected) + '$'
+    if latex_expected:
+        latex_expected = re.escape(latex_expected) + '$'
+    verify_re(rst, html_expected, latex_expected)
 
 
 def test_inline():
@@ -85,7 +89,7 @@ def test_inline():
     # interpolation of arrows in menuselection
     verify(':menuselection:`a --> b`',
            u'<p><em>a \N{TRIANGULAR BULLET} b</em></p>',
-           '\\emph{a $\\rightarrow$ b}')
+           '\\emph{a \\(\\rightarrow\\) b}')
 
     # non-interpolation of dashes in option role
     verify_re(':option:`--with-option`',
@@ -99,3 +103,12 @@ def test_inline():
            '<p><tt class="docutils literal"><span class="pre">'
            '"John"</span></tt></p>',
            '\\code{"John"}')
+
+def test_latex_escaping():
+    # correct escaping in normal mode
+    verify(u'Γ\\\\∞$', None, ur'\(\Gamma\)\textbackslash{}\(\infty\)\$')
+    # in verbatim code fragments
+    verify(u'::\n\n @Γ\\∞$[]', None,
+           u'\\begin{Verbatim}[commandchars=@\\[\\]]\n'
+           u'@at[]@(@Gamma@)\\@(@infty@)@$@lb[]@rb[]\n'
+           u'\\end{Verbatim}')