mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
Merged revisions 65138,65145-65146,65268-65273 via svnmerge from
svn+ssh://pythondev@svn.python.org/doctools/branches/0.4.x

........
r65138 | georg.brandl | 2008-07-19 15:42:35 +0200 (Sat, 19 Jul 2008) | 2 lines
#3416: fix missing parameter.
........
r65145 | georg.brandl | 2008-07-19 20:01:25 +0200 (Sat, 19 Jul 2008) | 2 lines
Now that we don't ship Jinja anymore by default the comment can go.
........
r65146 | georg.brandl | 2008-07-19 20:01:51 +0200 (Sat, 19 Jul 2008) | 2 lines
Reread documents with globbed toctrees when files are removed/added.
........
r65268 | georg.brandl | 2008-07-29 10:21:33 +0200 (Tue, 29 Jul 2008) | 2 lines
Fix by Markus Gritsch to place correct links to headings.
........
r65269 | georg.brandl | 2008-07-29 10:21:59 +0200 (Tue, 29 Jul 2008) | 2 lines
Make the writer's settings public.
........
r65270 | georg.brandl | 2008-07-29 10:22:28 +0200 (Tue, 29 Jul 2008) | 2 lines
Export test_root.
........
r65271 | georg.brandl | 2008-07-29 10:22:47 +0200 (Tue, 29 Jul 2008) | 2 lines
Add a markup test.
........
r65272 | georg.brandl | 2008-07-29 10:27:19 +0200 (Tue, 29 Jul 2008) | 2 lines
Bump version number.
........
r65273 | georg.brandl | 2008-07-29 11:05:37 +0200 (Tue, 29 Jul 2008) | 2 lines
Correct rendering of ``samp``.
........
This commit is contained in:
parent e3158eb653
commit 6c2f991be0

Makefile (2 changed lines):
@@ -27,4 +27,4 @@ reindent:
 	@$(PYTHON) utils/reindent.py -r -B .
 
 test:
-	@cd tests; $(PYTHON) run.py
+	@cd tests; $(PYTHON) run.py -d
@@ -20,7 +20,7 @@ from sphinx.util import format_exception_cut_frames, save_traceback
 from sphinx.util.console import darkred, nocolor
 
 __revision__ = '$Revision$'
-__version__ = '0.4'
+__version__ = '0.5'
 
 
 def usage(argv, msg=None):
@@ -737,6 +737,7 @@ class SerializingHTMLBuilder(StandaloneHTMLBuilder):
         sidebarfile = self.config.html_sidebars.get(pagename)
         if sidebarfile:
             ctx['customsidebar'] = sidebarfile
 
         if not outfilename:
             outfilename = path.join(self.outdir, os_path(pagename) + self.out_suffix)
 
@@ -69,6 +69,7 @@ def toctree_directive(name, arguments, options, content, lineno,
     subnode['includefiles'] = includefiles
     subnode['includetitles'] = includetitles
     subnode['maxdepth'] = options.get('maxdepth', -1)
+    subnode['glob'] = glob
     ret.append(subnode)
     return ret
 
@@ -63,7 +63,7 @@ default_settings = {
 
 # This is increased every time an environment attribute is added
 # or changed to properly invalidate pickle files.
-ENV_VERSION = 24
+ENV_VERSION = 25
 
 
 default_substitutions = set([
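The comment is the whole contract: whenever a pickled environment attribute is added or changed (here, the new glob_toctrees set below), ENV_VERSION is bumped so that older cached environments are discarded instead of being loaded with missing attributes. A rough sketch of that invalidation check, using a hypothetical loader and a version attribute rather than the real Sphinx code:

import pickle

ENV_VERSION = 25

def load_cached_env(path):
    # hypothetical helper: any read error or version mismatch means "rebuild from scratch"
    try:
        with open(path, 'rb') as f:
            env = pickle.load(f)
    except Exception:
        return None
    if getattr(env, 'version', None) != ENV_VERSION:
        return None  # pickled under ENV_VERSION 24 or older: attribute set differs, discard
    return env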
@@ -242,6 +242,7 @@ class BuildEnvironment:
         self.toctree_includes = {}  # docname -> list of toctree includefiles
         self.files_to_rebuild = {}  # docname -> set of files (containing its TOCs)
                                     # to rebuild too
+        self.glob_toctrees = set()  # docnames that have :glob: toctrees
 
         # X-ref target inventory
         self.descrefs = {}  # fullname -> docname, desctype
@@ -296,6 +297,7 @@ class BuildEnvironment:
         self.toctree_includes.pop(docname, None)
         self.filemodules.pop(docname, None)
         self.indexentries.pop(docname, None)
+        self.glob_toctrees.discard(docname)
 
         for subfn, fnset in self.files_to_rebuild.items():
             fnset.discard(docname)
@@ -420,7 +422,14 @@ class BuildEnvironment:
         self.srcdir = srcdir
         self.doctreedir = doctreedir
         self.find_files(config)
 
         added, changed, removed = self.get_outdated_files(config_changed)
 
+        # if files were added or removed, all documents with globbed toctrees
+        # must be reread
+        if added or removed:
+            changed.update(self.glob_toctrees)
+
         msg += '%s added, %s changed, %s removed' % (len(added), len(changed),
                                                      len(removed))
         yield msg
@@ -641,6 +650,8 @@ class BuildEnvironment:
     def note_toctree(self, docname, toctreenode):
         """Note a TOC tree directive in a document and gather information about
         file relations from it."""
+        if toctreenode['glob']:
+            self.glob_toctrees.add(docname)
         includefiles = toctreenode['includefiles']
         for includefile in includefiles:
             # note that if the included file is rebuilt, this one must be
@@ -56,6 +56,7 @@ class HTMLTranslator(BaseTranslator):
         self.builder = builder
         self.highlightlang = 'python'
         self.highlightlinenothreshold = sys.maxint
+        self.protect_literal_text = 0
 
     def visit_desc(self, node):
         self.body.append(self.starttag(node, 'dl', CLASS=node['desctype']))
@@ -206,7 +207,11 @@ class HTMLTranslator(BaseTranslator):
         if len(node.children) == 1 and \
                node.children[0] in ('None', 'True', 'False'):
             node['classes'].append('xref')
-        BaseTranslator.visit_literal(self, node)
+        self.body.append(self.starttag(node, 'tt', '', CLASS='docutils literal'))
+        self.protect_literal_text += 1
+
+    def depart_literal(self, node):
+        self.protect_literal_text -= 1
+        self.body.append('</tt>')
 
     def visit_productionlist(self, node):
         self.body.append(self.starttag(node, 'pre'))
@@ -285,6 +290,33 @@ class HTMLTranslator(BaseTranslator):
     def depart_module(self, node):
         pass
 
+    def bulk_text_processor(self, text):
+        return text
+
+    # overwritten
+    def visit_Text(self, node):
+        text = node.astext()
+        encoded = self.encode(text)
+        if self.protect_literal_text:
+            # moved here from base class's visit_literal to support
+            # more formatting in literal nodes
+            for token in self.words_and_spaces.findall(encoded):
+                if token.strip():
+                    # protect literal text from line wrapping
+                    self.body.append('<span class="pre">%s</span>' % token)
+                elif token in ' \n':
+                    # allow breaks at whitespace
+                    self.body.append(token)
+                else:
+                    # protect runs of multiple spaces; the last one can wrap
+                    self.body.append('&nbsp;' * (len(token)-1) + ' ')
+        else:
+            if self.in_mailto and self.settings.cloak_email_addresses:
+                encoded = self.cloak_email(encoded)
+            else:
+                encoded = self.bulk_text_processor(encoded)
+            self.body.append(encoded)
+
     # these are all for docutils 0.5 compatibility
 
     def visit_note(self, node):
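The token loop above is what the corrected ``samp``/literal rendering relies on: each word is wrapped in <span class="pre"> so it cannot be broken across lines, single spaces may wrap, and longer runs of spaces are preserved with non-breaking spaces. A standalone sketch of that loop, assuming the words_and_spaces pattern re.compile(r'\S+| +|\n') inherited from the docutils HTML writer:

import re

# assumed to match HTMLTranslator.words_and_spaces from the docutils base class
words_and_spaces = re.compile(r'\S+| +|\n')

def protect_literal(encoded):
    out = []
    for token in words_and_spaces.findall(encoded):
        if token.strip():
            # protect literal text from line wrapping
            out.append('<span class="pre">%s</span>' % token)
        elif token in ' \n':
            # allow breaks at single spaces and newlines
            out.append(token)
        else:
            # protect runs of multiple spaces; the last one can wrap
            out.append('&nbsp;' * (len(token) - 1) + ' ')
    return ''.join(out)

print(protect_literal('code   sample'))
# <span class="pre">code</span>&nbsp;&nbsp; <span class="pre">sample</span>

The expected strings in the new tests/test_markup.py further down are exactly this output wrapped in the <tt class="docutils literal"> element that visit_literal now emits.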
@@ -333,7 +365,6 @@ class HTMLTranslator(BaseTranslator):
     def depart_tip(self, node):
         self.depart_admonition()
 
-
     # these are only handled specially in the SmartyPantsHTMLTranslator
     def visit_literal_emphasis(self, node):
         return self.visit_emphasis(node)
@@ -397,11 +428,7 @@ class SmartyPantsHTMLTranslator(HTMLTranslator):
         finally:
             self.no_smarty -= 1
 
-    def visit_Text(self, node):
-        text = node.astext()
-        encoded = self.encode(text)
-        if self.in_mailto and self.settings.cloak_email_addresses:
-            encoded = self.cloak_email(encoded)
-        elif self.no_smarty <= 0:
-            encoded = sphinx_smarty_pants(encoded)
-        self.body.append(encoded)
+    def bulk_text_processor(self, text):
+        if self.no_smarty <= 0:
+            return sphinx_smarty_pants(text)
+        return text
@@ -23,7 +23,6 @@ from sphinx import addnodes
 from sphinx import highlighting
 from sphinx.util.smartypants import educateQuotesLatex
 
-# XXX: Move to a template?
 HEADER = r'''%% Generated by Sphinx.
 \documentclass[%(papersize)s,%(pointsize)s]{%(docclass)s}
 \usepackage[utf8]{inputenc}
@@ -56,7 +55,11 @@ class LaTeXWriter(writers.Writer):
 
     supported = ('sphinxlatex',)
 
-    settings_spec = ('No options here.', '', ())
+    settings_spec = ('LaTeX writer options', '', (
+        ('Document name', ['--docname'], {'default': ''}),
+        ('Document class', ['--docclass'], {'default': 'manual'}),
+        ('Author', ['--author'], {'default': ''}),
+        ))
     settings_defaults = {}
 
     output = None
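Declaring real option entries here (r65269, "Make the writer's settings public") is what lets docutils build default settings for the LaTeX writer outside a normal Sphinx build; the new tests/test_markup.py below depends on exactly that. A small sketch, assuming the 0.5-era module layout that the test imports:

from docutils import frontend
from docutils.parsers import rst

from sphinx.htmlwriter import HTMLWriter
from sphinx.latexwriter import LaTeXWriter

# frontend.OptionParser collects settings_spec from every component, so the
# writer's --docname/--docclass/--author options now contribute defaults.
optparser = frontend.OptionParser(components=(rst.Parser, HTMLWriter, LaTeXWriter))
settings = optparser.get_default_values()
print(settings.docclass)  # expected to be 'manual', the default declared above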
@@ -336,14 +336,14 @@
 
 % Augment the sectioning commands used to get our own font family in place,
 % and reset some internal data items:
-\titleformat{\section}{\Large\py@HeaderFamily\py@TitleColor}%
-            {\thesection}{0.5em}{}{\py@NormalColor}
-\titleformat{\subsection}{\large\py@HeaderFamily\py@TitleColor}%
-            {\thesubsection}{0.5em}{}{\py@NormalColor}
-\titleformat{\subsubsection}{\py@HeaderFamily\py@TitleColor}%
-            {\thesubsubsection}{0.5em}{}{\py@NormalColor}
-\titleformat{\paragraph}{\large\py@HeaderFamily\py@TitleColor}%
-            {}{0em}{}{\py@NormalColor}
+\titleformat{\section}{\Large\py@HeaderFamily}%
+            {\py@TitleColor\thesection}{0.5em}{\py@TitleColor}{\py@NormalColor}
+\titleformat{\subsection}{\large\py@HeaderFamily}%
+            {\py@TitleColor\thesubsection}{0.5em}{\py@TitleColor}{\py@NormalColor}
+\titleformat{\subsubsection}{\py@HeaderFamily}%
+            {\py@TitleColor\thesubsubsection}{0.5em}{\py@TitleColor}{\py@NormalColor}
+\titleformat{\paragraph}{\large\py@HeaderFamily}%
+            {\py@TitleColor}{0em}{\py@TitleColor}{\py@NormalColor}
 
 
 % Now for a lot of semantically-loaded environments that do a ton of magical
@@ -43,6 +43,7 @@ def test_core_config():
     # "contains" gives True both for set and unset values
     assert 'project' in cfg
     assert 'html_title' in cfg
+    assert 'nonexisting_value' not in cfg
 
     # invalid values
     raises(AttributeError, getattr, cfg, '_value')
tests/test_markup.py (new file, 91 lines)
@@ -0,0 +1,91 @@
+# -*- coding: utf-8 -*-
+"""
+    test_markup
+    ~~~~~~~~~~~
+
+    Test various Sphinx-specific markup extensions.
+
+    :copyright: 2008 by Georg Brandl.
+    :license: BSD.
+"""
+
+from util import *
+
+from docutils import frontend, utils, nodes
+from docutils.parsers import rst
+
+from sphinx import addnodes
+from sphinx.htmlwriter import HTMLWriter, SmartyPantsHTMLTranslator
+from sphinx.latexwriter import LaTeXWriter, LaTeXTranslator
+
+app = TestApp()
+optparser = frontend.OptionParser(components=(rst.Parser, HTMLWriter, LaTeXWriter))
+settings = optparser.get_default_values()
+settings.env = app.builder.env
+parser = rst.Parser()
+
+# since we're not resolving the markup afterwards, these nodes may remain
+class ForgivingTranslator:
+    def visit_pending_xref(self, node):
+        pass
+    def depart_pending_xref(self, node):
+        pass
+
+class ForgivingHTMLTranslator(SmartyPantsHTMLTranslator, ForgivingTranslator):
+    pass
+
+class ForgivingLaTeXTranslator(LaTeXTranslator, ForgivingTranslator):
+    pass
+
+
+def verify(rst, html_expected, latex_expected):
+    document = utils.new_document('test data', settings)
+    parser.parse(rst, document)
+    for msg in document.traverse(nodes.system_message):
+        if msg['level'] == 1:
+            msg.replace_self([])
+
+    if html_expected:
+        html_translator = ForgivingHTMLTranslator(app.builder, document)
+        document.walkabout(html_translator)
+        html_translated = ''.join(html_translator.fragment).strip()
+        assert html_translated == html_expected, 'from ' + rst
+
+    if latex_expected:
+        latex_translator = ForgivingLaTeXTranslator(document, app.builder)
+        latex_translator.first_document = -1 # don't write \begin{document}
+        document.walkabout(latex_translator)
+        latex_translated = ''.join(latex_translator.body).strip()
+        assert latex_translated == latex_expected, 'from ' + rst
+
+
+def test_inline():
+    # correct interpretation of code with whitespace
+    _html = ('<p><tt class="docutils literal"><span class="pre">'
+             'code</span>&nbsp;&nbsp; <span class="pre">sample</span></tt></p>')
+    verify('``code   sample``', _html, '\\code{code   sample}')
+    verify(':samp:`code   sample`', _html, '\\samp{code   sample}')
+
+    # interpolation of braces in samp and file roles (HTML only)
+    verify(':samp:`a{b}c`',
+           '<p><tt class="docutils literal"><span class="pre">a</span>'
+           '<em><span class="pre">b</span></em><span class="pre">c</span></tt></p>',
+           '\\samp{abc}')
+
+    # interpolation of arrows in menuselection
+    verify(':menuselection:`a --> b`',
+           u'<p><em>a \N{TRIANGULAR BULLET} b</em></p>',
+           '\\emph{a $\\rightarrow$ b}')
+
+    # non-interpolation of dashes in option role
+    verify(':option:`--with-option`',
+           '<p><em>--with-option</em></p>',
+           r'\emph{\texttt{--with-option}}')
+
+    # verify smarty-pants quotes
+    verify('"John"', '<p>&#8220;John&#8221;</p>', "``John''")
+    # ... but not in literal text
+    verify('``"John"``',
+           '<p><tt class="docutils literal"><span class="pre">'
+           '&quot;John&quot;</span></tt></p>',
+           '\\code{"John"}')
@@ -17,12 +17,16 @@ from path import path
 
 
 __all__ = [
+    'test_root',
     'raises', 'raises_msg',
     'ErrorOutput', 'TestApp',
     'path', 'with_tempdir', 'write_file',
 ]
 
 
+test_root = path(__file__).parent.joinpath('root').abspath()
+
+
 def _excstr(exc):
     if type(exc) is tuple:
         return str(tuple(map(_excstr, exc)))
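With test_root exported (r65270), individual test modules can point at the shared fixture project under tests/root instead of rebuilding the path themselves; a hypothetical use:

from util import test_root

# illustrative only: locate a file inside the shared test project
conf_py = test_root.joinpath('conf.py')
print(conf_py)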
@@ -79,7 +83,7 @@ class TestApp(application.Sphinx):
         application.CONFIG_FILENAME = confname
 
         if srcdir is None:
-            srcdir = path(__file__).parent.joinpath('root').abspath()
+            srcdir = test_root
         else:
             srcdir = path(srcdir)
         if confdir is None: