Mirror of https://github.com/sphinx-doc/sphinx.git
Reformat to EOL80.

commit: b068e91803
parent: d0e0acaaa1
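
The change is mechanical: statements that ran past 80 columns are wrapped,
mostly by splitting argument lists and long string literals across
continuation lines, without changing behaviour. A minimal sketch of the two
wrapping patterns (the helper, names and strings below are illustrative, not
copied from the Sphinx sources):

    # Illustrative only: the two wrapping patterns applied throughout the diff.
    def warn(message):
        # stand-in for the real warning helper
        print('WARNING: %s' % message)

    extension = 'sphinx.ext.autodoc'

    # Pattern 1 -- break an argument list after a comma:
    warn('Could not import extension %s' %
         extension)

    # Pattern 2 -- split a long string literal; adjacent literals concatenate,
    # so the emitted message is unchanged:
    warn('Could not import extension %s, '
         'the build will continue without it' % extension)
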
@@ -22,7 +22,8 @@ import sphinx
 from sphinx.roles import xfileref_role, innernodetypes
 from sphinx.config import Config
 from sphinx.builders import BUILTIN_BUILDERS
-from sphinx.directives import desc_directive, target_directive, additional_xref_types
+from sphinx.directives import desc_directive, target_directive, \
+additional_xref_types
 from sphinx.environment import SphinxStandaloneReader
 from sphinx.util.console import bold
 
@@ -152,7 +153,8 @@ class Sphinx(object):
 self._warning.write('WARNING: %s\n' % message)
 except UnicodeEncodeError:
 encoding = getattr(self._warning, 'encoding', 'ascii')
-self._warning.write(('WARNING: %s\n' % message).encode(encoding, 'replace'))
+self._warning.write(('WARNING: %s\n' % message).encode(encoding,
+'replace'))
 
 def info(self, message='', nonl=False):
 try:
@@ -171,7 +173,8 @@ class Sphinx(object):
 try:
 mod = __import__(extension, None, None, ['setup'])
 except ImportError, err:
-raise ExtensionError('Could not import extension %s' % extension, err)
+raise ExtensionError('Could not import extension %s' % extension,
+err)
 if hasattr(mod, 'setup'):
 mod.setup(self)
 
@@ -181,15 +184,18 @@ class Sphinx(object):
 module, name = objname.rsplit('.', 1)
 except ValueError, err:
 raise ExtensionError('Invalid full object name %s' % objname +
-(source and ' (needed for %s)' % source or ''), err)
+(source and ' (needed for %s)' % source or ''),
+err)
 try:
 return getattr(__import__(module, None, None, [name]), name)
 except ImportError, err:
 raise ExtensionError('Could not import %s' % module +
-(source and ' (needed for %s)' % source or ''), err)
+(source and ' (needed for %s)' % source or ''),
+err)
 except AttributeError, err:
 raise ExtensionError('Could not find %s' % objname +
-(source and ' (needed for %s)' % source or ''), err)
+(source and ' (needed for %s)' % source or ''),
+err)
 
 # event interface
 
@@ -229,13 +235,15 @@ class Sphinx(object):
 
 def add_builder(self, builder):
 if not hasattr(builder, 'name'):
-raise ExtensionError('Builder class %s has no "name" attribute' % builder)
+raise ExtensionError('Builder class %s has no "name" attribute'
+% builder)
 if builder.name in self.builderclasses:
 if isinstance(self.builderclasses[builder.name], tuple):
 raise ExtensionError('Builder %r is a builtin builder' %
 builder.name)
 else:
-raise ExtensionError('Builder %r already exists (in module %s)' % (
+raise ExtensionError(
+'Builder %r already exists (in module %s)' % (
 builder.name, self.builderclasses[builder.name].__module__))
 self.builderclasses[builder.name] = builder
 
@@ -255,8 +263,8 @@ class Sphinx(object):
 try:
 visit, depart = val
 except ValueError:
-raise ExtensionError('Value for key %r must be a (visit, depart) '
-'function tuple' % key)
+raise ExtensionError('Value for key %r must be a '
+'(visit, depart) function tuple' % key)
 if key == 'html':
 from sphinx.writers.html import HTMLTranslator as translator
 elif key == 'latex':
@@ -281,7 +289,8 @@ class Sphinx(object):
 
 def add_description_unit(self, directivename, rolename, indextemplate='',
 parse_node=None, ref_nodeclass=None):
-additional_xref_types[directivename] = (rolename, indextemplate, parse_node)
+additional_xref_types[directivename] = (rolename, indextemplate,
+parse_node)
 directives.register_directive(directivename, desc_directive)
 roles.register_canonical_role(rolename, xfileref_role)
 if ref_nodeclass is not None:
@@ -97,20 +97,20 @@ class Builder(object):
 
 def get_relative_uri(self, from_, to, typ=None):
 """
-Return a relative URI between two source filenames. May raise environment.NoUri
-if there's no way to return a sensible URI.
+Return a relative URI between two source filenames. May raise
+environment.NoUri if there's no way to return a sensible URI.
 """
 return relative_uri(self.get_target_uri(from_),
 self.get_target_uri(to, typ))
 
 def get_outdated_docs(self):
 """
-Return an iterable of output files that are outdated, or a string describing
-what an update build will build.
+Return an iterable of output files that are outdated, or a string
+describing what an update build will build.
 
-If the builder does not output individual files corresponding to source files,
-return a string here. If it does, return an iterable of those files that need
-to be written.
+If the builder does not output individual files corresponding to
+source files, return a string here. If it does, return an iterable
+of those files that need to be written.
 """
 raise NotImplementedError
 
@@ -142,7 +142,8 @@ class Builder(object):
 break
 else:
 self.warn('%s:%s: no matching candidate for image URI %r' %
-(node.source, getattr(node, 'lineno', ''), node['uri']))
+(node.source, getattr(node, 'lineno', ''),
+node['uri']))
 continue
 node['uri'] = candidate
 else:
@@ -161,8 +162,8 @@ class Builder(object):
 """
 self.translator = None
 if self.config.language is not None:
-self.info(bold('loading translations [%s]... ' % self.config.language),
-nonl=True)
+self.info(bold('loading translations [%s]... ' %
+self.config.language), nonl=True)
 locale_dirs = [path.join(package_dir, 'locale')] + \
 [path.join(self.srcdir, x) for x in self.config.locale_dirs]
 for dir_ in locale_dirs:
@@ -200,10 +201,12 @@ class Builder(object):
 self.info('not found')
 else:
 self.info('failed: %s' % err)
-self.env = BuildEnvironment(self.srcdir, self.doctreedir, self.config)
+self.env = BuildEnvironment(self.srcdir, self.doctreedir,
+self.config)
 self.env.find_files(self.config)
 else:
-self.env = BuildEnvironment(self.srcdir, self.doctreedir, self.config)
+self.env = BuildEnvironment(self.srcdir, self.doctreedir,
+self.config)
 self.env.find_files(self.config)
 self.env.set_warnfunc(self.warn)
 
@@ -241,7 +244,8 @@ class Builder(object):
 
 def build(self, docnames, summary=None, method='update'):
 """
-Main build method. First updates the environment, and then calls :meth:`write`.
+Main build method. First updates the environment, and then
+calls :meth:`write`.
 """
 if summary:
 self.info(bold('building [%s]: ' % self.name), nonl=1)
@@ -252,12 +256,15 @@ class Builder(object):
 warnings = []
 self.env.set_warnfunc(warnings.append)
 self.info(bold('updating environment: '), nonl=1)
-iterator = self.env.update(self.config, self.srcdir, self.doctreedir, self.app)
+iterator = self.env.update(self.config, self.srcdir,
+self.doctreedir, self.app)
 # the first item in the iterator is a summary message
 self.info(iterator.next())
-for docname in self.status_iterator(iterator, 'reading sources... ', purple):
+for docname in self.status_iterator(iterator, 'reading sources... ',
+purple):
 updated_docnames.append(docname)
-# nothing further to do, the environment has already done the reading
+# nothing further to do, the environment has already
+# done the reading
 for warning in warnings:
 if warning.strip():
 self.warn(warning)
@@ -278,12 +285,14 @@ class Builder(object):
 self.info(bold('no targets are out of date.'))
 return
 
-# another indirection to support builders that don't build files individually
+# another indirection to support builders that don't build
+# files individually
 self.write(docnames, updated_docnames, method)
 
 # finish (write static files etc.)
 self.finish()
-status = self.app.statuscode == 0 and 'succeeded' or 'finished with problems'
+status = (self.app.statuscode == 0 and 'succeeded'
+or 'finished with problems')
 if self.app._warncount:
 self.info(bold('build %s, %s warning%s.' %
 (status, self.app._warncount,
@@ -55,7 +55,8 @@ class ChangesBuilder(Builder):
 if not descname:
 continue
 if context:
-entry = '<b>%s</b>: <i>%s:</i> %s' % (descname, ttext, context)
+entry = '<b>%s</b>: <i>%s:</i> %s' % (descname, ttext,
+context)
 else:
 entry = '<b>%s</b>: <i>%s</i>.' % (descname, ttext)
 apichanges.append((entry, docname, lineno))
@@ -65,10 +66,12 @@ class ChangesBuilder(Builder):
 if not descname:
 descname = _('Module level')
 if context:
-entry = '<b>%s</b>: <i>%s:</i> %s' % (descname, ttext, context)
+entry = '<b>%s</b>: <i>%s:</i> %s' % (descname, ttext,
+context)
 else:
 entry = '<b>%s</b>: <i>%s</i>.' % (descname, ttext)
-libchanges.setdefault(module, []).append((entry, docname, lineno))
+libchanges.setdefault(module, []).append((entry, docname,
+lineno))
 else:
 if not context:
 continue
@@ -119,7 +122,10 @@ class ChangesBuilder(Builder):
 f = codecs.open(targetfn, 'w', 'latin1')
 try:
 text = ''.join(hl(i+1, line) for (i, line) in enumerate(lines))
-ctx = {'filename': self.env.doc2path(docname, None), 'text': text}
+ctx = {
+'filename': self.env.doc2path(docname, None),
+'text': text
+}
 f.write(self.templates.render('changes/rstsource.html', ctx))
 finally:
 f.close()
@@ -27,7 +27,8 @@ from sphinx.search import js_index
 from sphinx.builders import Builder, ENV_PICKLE_FILENAME
 from sphinx.highlighting import PygmentsBridge
 from sphinx.util.console import bold
-from sphinx.writers.html import HTMLWriter, HTMLTranslator, SmartyPantsHTMLTranslator
+from sphinx.writers.html import HTMLWriter, HTMLTranslator, \
+SmartyPantsHTMLTranslator
 
 try:
 import json
@@ -88,7 +89,8 @@ class StandaloneHTMLBuilder(Builder):
 def init_translator_class(self):
 if self.config.html_translator_class:
 self.translator_class = self.app.import_object(
-self.config.html_translator_class, 'html_translator_class setting')
+self.config.html_translator_class,
+'html_translator_class setting')
 elif self.config.html_use_smartypants:
 self.translator_class = SmartyPantsHTMLTranslator
 else:
@@ -141,7 +143,8 @@ class StandaloneHTMLBuilder(Builder):
 if self.config.html_use_index:
 rellinks.append(('genindex', _('General Index'), 'I', _('index')))
 if self.config.html_use_modindex and self.env.modules:
-rellinks.append(('modindex', _('Global Module Index'), 'M', _('modules')))
+rellinks.append(('modindex', _('Global Module Index'),
+'M', _('modules')))
 
 if self.config.html_style is not None:
 stylename = self.config.html_style
@@ -184,18 +187,23 @@ class StandaloneHTMLBuilder(Builder):
 titles = self.env.titles
 if related and related[2]:
 try:
-next = {'link': self.get_relative_uri(docname, related[2]),
-'title': self.render_partial(titles[related[2]])['title']}
+next = {
+'link': self.get_relative_uri(docname, related[2]),
+'title': self.render_partial(titles[related[2]])['title']
+}
 rellinks.append((related[2], next['title'], 'N', _('next')))
 except KeyError:
 next = None
 if related and related[1]:
 try:
-prev = {'link': self.get_relative_uri(docname, related[1]),
-'title': self.render_partial(titles[related[1]])['title']}
+prev = {
+'link': self.get_relative_uri(docname, related[1]),
+'title': self.render_partial(titles[related[1]])['title']
+}
 rellinks.append((related[1], prev['title'], 'P', _('previous')))
 except KeyError:
-# the relation is (somehow) not in the TOC tree, handle that gracefully
+# the relation is (somehow) not in the TOC tree, handle
+# that gracefully
 prev = None
 while related and related[0]:
 try:
@@ -219,6 +227,9 @@ class StandaloneHTMLBuilder(Builder):
 # metadata for the document
 meta = self.env.metadata.get(docname)
 
+# TOC
+toc = self.render_partial(self.env.get_toc_for(docname))['fragment']
+
 return dict(
 parents = parents,
 prev = prev,
@@ -229,7 +240,7 @@ class StandaloneHTMLBuilder(Builder):
 metatags = metatags,
 rellinks = rellinks,
 sourcename = sourcename,
-toc = self.render_partial(self.env.get_toc_for(docname))['fragment'],
+toc = toc,
 # only display a TOC if there's more than one item to show
 display_toc = (self.env.toc_num_entries[docname] > 1),
 )
@@ -272,12 +283,15 @@ class StandaloneHTMLBuilder(Builder):
 self.info(' genindex', nonl=1)
 
 if self.config.html_split_index:
-self.handle_page('genindex', genindexcontext, 'genindex-split.html')
-self.handle_page('genindex-all', genindexcontext, 'genindex.html')
+self.handle_page('genindex', genindexcontext,
+'genindex-split.html')
+self.handle_page('genindex-all', genindexcontext,
+'genindex.html')
 for (key, entries), count in zip(genindex, indexcounts):
 ctx = {'key': key, 'entries': entries, 'count': count,
 'genindexentries': genindex}
-self.handle_page('genindex-' + key, ctx, 'genindex-single.html')
+self.handle_page('genindex-' + key, ctx,
+'genindex-single.html')
 else:
 self.handle_page('genindex', genindexcontext, 'genindex.html')
 
@@ -318,11 +332,13 @@ class StandaloneHTMLBuilder(Builder):
 elif not pmn.startswith(tn):
 # submodule without parent in list, add dummy entry
 cg += 1
-modindexentries.append([tn, True, cg, False, '', '', [], False])
+modindexentries.append([tn, True, cg,
+False, '', '', [], False])
 else:
 num_toplevels += 1
 cg += 1
-modindexentries.append([mn, False, cg, (tn != mn), fn, sy, pl, dep])
+modindexentries.append([mn, False, cg, (tn != mn),
+fn, sy, pl, dep])
 pmn = mn
 fl = mn[0].lower()
 platforms = sorted(platforms)
@@ -430,7 +446,8 @@ class StandaloneHTMLBuilder(Builder):
 if docname not in self.env.all_docs:
 yield docname
 continue
-targetname = self.env.doc2path(docname, self.outdir, self.out_suffix)
+targetname = self.env.doc2path(docname, self.outdir,
+self.out_suffix)
 try:
 targetmtime = path.getmtime(targetname)
 except Exception:
@@ -454,8 +471,9 @@ class StandaloneHTMLBuilder(Builder):
 f.close()
 except (IOError, OSError, ValueError):
 if keep:
-self.warn("search index couldn't be loaded, but not all documents "
-"will be built: the index will be incomplete.")
+self.warn('search index couldn\'t be loaded, but not all '
+'documents will be built: the index will be '
+'incomplete.')
 # delete all entries for files that will be rebuilt
 self.indexer.prune(keep)
 
@@ -485,12 +503,15 @@ class StandaloneHTMLBuilder(Builder):
 ctx['customsidebar'] = self.config.html_sidebars.get(pagename)
 ctx.update(addctx)
 
-self.app.emit('html-page-context', pagename, templatename, ctx, event_arg)
+self.app.emit('html-page-context', pagename, templatename,
+ctx, event_arg)
 
 output = self.templates.render(templatename, ctx)
 if not outfilename:
-outfilename = path.join(self.outdir, os_path(pagename) + self.out_suffix)
-ensuredir(path.dirname(outfilename)) # normally different from self.outdir
+outfilename = path.join(self.outdir,
+os_path(pagename) + self.out_suffix)
+# outfilename's path is in general different from self.outdir
+ensuredir(path.dirname(outfilename))
 try:
 f = codecs.open(outfilename, 'w', 'utf-8')
 try:
@@ -501,7 +522,8 @@ class StandaloneHTMLBuilder(Builder):
 self.warn("Error writing file %s: %s" % (outfilename, err))
 if self.copysource and ctx.get('sourcename'):
 # copy the source file for the "show source" link
-source_name = path.join(self.outdir, '_sources', os_path(ctx['sourcename']))
+source_name = path.join(self.outdir, '_sources',
+os_path(ctx['sourcename']))
 ensuredir(path.dirname(source_name))
 shutil.copyfile(self.env.doc2path(pagename), source_name)
 
@@ -509,8 +531,8 @@ class StandaloneHTMLBuilder(Builder):
 self.info(bold('dumping search index... '), nonl=True)
 self.indexer.prune(self.env.all_docs)
 searchindexfn = path.join(self.outdir, self.searchindex_filename)
-# first write to a temporary file, so that if dumping fails, the existing
-# index won't be overwritten
+# first write to a temporary file, so that if dumping fails,
+# the existing index won't be overwritten
 f = open(searchindexfn + '.tmp', 'wb')
 try:
 self.indexer.dump(f, self.indexer_format)
@@ -528,7 +550,8 @@ class StandaloneHTMLBuilder(Builder):
 for modname, info in self.env.modules.iteritems():
 f.write('%s mod %s\n' % (modname, self.get_target_uri(info[0])))
 for refname, (docname, desctype) in self.env.descrefs.iteritems():
-f.write('%s %s %s\n' % (refname, desctype, self.get_target_uri(docname)))
+f.write('%s %s %s\n' % (refname, desctype,
+self.get_target_uri(docname)))
 finally:
 f.close()
 self.info('done')
@@ -568,9 +591,11 @@ class SerializingHTMLBuilder(StandaloneHTMLBuilder):
 ctx['customsidebar'] = sidebarfile
 
 if not outfilename:
-outfilename = path.join(self.outdir, os_path(pagename) + self.out_suffix)
+outfilename = path.join(self.outdir,
+os_path(pagename) + self.out_suffix)
 
-self.app.emit('html-page-context', pagename, templatename, ctx, event_arg)
+self.app.emit('html-page-context', pagename, templatename,
+ctx, event_arg)
 
 ensuredir(path.dirname(outfilename))
 f = open(outfilename, 'wb')
@@ -638,7 +663,7 @@ class JSONHTMLBuilder(SerializingHTMLBuilder):
 def init(self):
 if json is None:
 from sphinx.application import SphinxError
-raise SphinxError('The module simplejson (or json in Python >= 2.6) '
-'is not available. The JSONHTMLBuilder builder '
-'will not work.')
+raise SphinxError(
+'The module simplejson (or json in Python >= 2.6) '
+'is not available. The JSONHTMLBuilder builder will not work.')
 SerializingHTMLBuilder.init(self)
@@ -122,8 +122,8 @@ was will with
 
 class HTMLHelpBuilder(StandaloneHTMLBuilder):
 """
-Builder that also outputs Windows HTML help project, contents and index files.
-Adapted from the original Doc/tools/prechm.py.
+Builder that also outputs Windows HTML help project, contents and
+index files. Adapted from the original Doc/tools/prechm.py.
 """
 name = 'htmlhelp'
 
@@ -166,8 +166,10 @@ class HTMLHelpBuilder(StandaloneHTMLBuilder):
 for root, dirs, files in os.walk(outdir):
 staticdir = (root == path.join(outdir, '_static'))
 for fn in files:
-if (staticdir and not fn.endswith('.js')) or fn.endswith('.html'):
-print >>f, path.join(root, fn)[olen:].replace(os.sep, '\\')
+if (staticdir and not fn.endswith('.js')) or \
+fn.endswith('.html'):
+print >>f, path.join(root, fn)[olen:].replace(os.sep,
+'\\')
 finally:
 f.close()
 
@@ -182,8 +184,8 @@ class HTMLHelpBuilder(StandaloneHTMLBuilder):
 f.write('<LI> ' + object_sitemap % (_('Global Module Index'),
 'modindex.html'))
 # the TOC
-tocdoc = self.env.get_and_resolve_doctree(self.config.master_doc, self,
-prune_toctrees=False)
+tocdoc = self.env.get_and_resolve_doctree(
+self.config.master_doc, self, prune_toctrees=False)
 def write_toc(node, ullevel=0):
 if isinstance(node, nodes.list_item):
 f.write('<LI> ')
@@ -204,7 +206,8 @@ class HTMLHelpBuilder(StandaloneHTMLBuilder):
 elif isinstance(node, addnodes.compact_paragraph):
 for subnode in node:
 write_toc(subnode, ullevel)
-istoctree = lambda node: isinstance(node, addnodes.compact_paragraph) and \
+def istoctree(node):
+return isinstance(node, addnodes.compact_paragraph) and \
 node.has_key('toctree')
 for node in tocdoc.traverse(istoctree):
 write_toc(node)
@@ -230,7 +233,8 @@ class HTMLHelpBuilder(StandaloneHTMLBuilder):
 write_param('Local', refs[0])
 else:
 for i, ref in enumerate(refs):
-write_param('Name', '[%d] %s' % (i, ref)) # XXX: better title?
+# XXX: better title?
+write_param('Name', '[%d] %s' % (i, ref))
 write_param('Local', ref)
 f.write('</OBJECT>\n')
 if subitems:
@@ -116,12 +116,12 @@ class LaTeXBuilder(Builder):
 for includefile in includefiles:
 try:
 self.info(darkgreen(includefile) + " ", nonl=1)
-subtree = process_tree(includefile,
-self.env.get_doctree(includefile))
+subtree = process_tree(
+includefile, self.env.get_doctree(includefile))
 self.docnames.add(includefile)
 except Exception:
-self.warn('%s: toctree contains ref to nonexisting file %r' %
-(docname, includefile))
+self.warn('%s: toctree contains ref to nonexisting '
+'file %r' % (docname, includefile))
 else:
 sof = addnodes.start_of_file(docname=includefile)
 sof.children = subtree.children
@@ -131,10 +131,12 @@ class LaTeXBuilder(Builder):
 tree = self.env.get_doctree(indexfile)
 tree['docname'] = indexfile
 if toctree_only:
-# extract toctree nodes from the tree and put them in a fresh document
+# extract toctree nodes from the tree and put them in a
+# fresh document
 new_tree = new_document('<latex output>')
 new_sect = nodes.section()
-new_sect += nodes.title(u'<Set title in conf.py>', u'<Set title in conf.py>')
+new_sect += nodes.title(u'<Set title in conf.py>',
+u'<Set title in conf.py>')
 new_tree += new_sect
 for node in tree.traverse(addnodes.toctree):
 new_sect += node
@@ -90,7 +90,8 @@ class CheckExternalLinksBuilder(Builder):
 self.warn('%s:%s: broken link: %s' % (docname, lineno, uri))
 else:
 self.info(' - ' + purple('redirected') + ' to ' + s)
-self.write_entry('redirected', docname, lineno, uri + ' to ' + s)
+self.write_entry('redirected', docname,
+lineno, uri + ' to ' + s)
 self.redirected[uri] = (r, s)
 elif len(uri) == 0 or uri[0:7] == 'mailto:' or uri[0:4] == 'ftp:':
 return
@@ -19,7 +19,8 @@ from docutils import nodes
 from sphinx import addnodes
 from sphinx.builders.html import StandaloneHTMLBuilder
 
-_idpattern = re.compile('(?P<title>.+) (\((?P<id>[\w\.]+)( (?P<descr>\w+))?\))$')
+_idpattern = re.compile(
+r'(?P<title>.+) (\((?P<id>[\w\.]+)( (?P<descr>\w+))?\))$')
 
 
 # Qt Help Collection Project (.qhcp).
@@ -149,7 +150,8 @@ class QtHelpBuilder(StandaloneHTMLBuilder):
 for root, dirs, files in os.walk(outdir):
 staticdir = (root == path.join(outdir, '_static'))
 for fn in files:
-if (staticdir and not fn.endswith('.js')) or fn.endswith('.html'):
+if (staticdir and not fn.endswith('.js')) or \
+fn.endswith('.html'):
 filename = path.join(root, fn)[olen:]
 #filename = filename.replace(os.sep, '\\') # XXX
 projectfiles.append(file_template % {'filename': filename})
@@ -31,7 +31,8 @@ class TextBuilder(Builder):
 if docname not in self.env.all_docs:
 yield docname
 continue
-targetname = self.env.doc2path(docname, self.outdir, self.out_suffix)
+targetname = self.env.doc2path(docname, self.outdir,
+self.out_suffix)
 try:
 targetmtime = path.getmtime(targetname)
 except Exception:
@@ -54,7 +55,7 @@ class TextBuilder(Builder):
 destination = StringOutput(encoding='utf-8')
 self.writer.write(doctree, destination)
 outfilename = path.join(self.outdir, os_path(docname) + self.out_suffix)
-ensuredir(path.dirname(outfilename)) # normally different from self.outdir
+ensuredir(path.dirname(outfilename))
 try:
 f = codecs.open(outfilename, 'w', 'utf-8')
 try:
@@ -31,7 +31,8 @@ def usage(argv, msg=None):
 Sphinx v%s
 Usage: %s [options] sourcedir outdir [filenames...]
 Options: -b <builder> -- builder to use; default is html
--a -- write all files; default is to only write new and changed files
+-a -- write all files; default is to only write \
+new and changed files
 -E -- don't use a saved environment, always read all files
 -d <path> -- path for the cached environment and doctree files
 (default: outdir/.doctrees)
@@ -64,7 +65,8 @@ def main(argv):
 return 1
 if not path.isfile(path.join(srcdir, 'conf.py')) and \
 '-c' not in allopts and '-C' not in allopts:
-print >>sys.stderr, 'Error: Source directory doesn\'t contain conf.py file.'
+print >>sys.stderr, ('Error: Source directory doesn\'t '
+'contain conf.py file.')
 return 1
 outdir = path.abspath(args[1])
 if not path.isdir(outdir):
@@ -103,8 +105,8 @@ def main(argv):
 elif opt == '-c':
 confdir = path.abspath(val)
 if not path.isfile(path.join(confdir, 'conf.py')):
-print >>sys.stderr, \
-'Error: Configuration directory doesn\'t contain conf.py file.'
+print >>sys.stderr, ('Error: Configuration directory '
+'doesn\'t contain conf.py file.')
 return 1
 elif opt == '-C':
 confdir = None
@@ -112,8 +114,8 @@ def main(argv):
 try:
 key, val = val.split('=')
 except ValueError:
-print >>sys.stderr, \
-'Error: -D option argument must be in the form name=value.'
+print >>sys.stderr, ('Error: -D option argument must be '
+'in the form name=value.')
 return 1
 try:
 val = int(val)
@@ -124,8 +126,8 @@ def main(argv):
 try:
 key, val = val.split('=')
 except ValueError:
-print >>sys.stderr, \
-'Error: -A option argument must be in the form name=value.'
+print >>sys.stderr, ('Error: -A option argument must be '
+'in the form name=value.')
 return 1
 try:
 val = int(val)
@@ -153,7 +155,8 @@ def main(argv):
 except KeyboardInterrupt:
 if use_pdb:
 import pdb
-print >>sys.stderr, darkred('Interrupted while building, starting debugger:')
+print >>sys.stderr, darkred('Interrupted while building, '
+'starting debugger:')
 traceback.print_exc()
 pdb.post_mortem(sys.exc_info()[2])
 return 1
@@ -167,7 +170,8 @@ def main(argv):
 else:
 if isinstance(err, SystemMessage):
 print >>sys.stderr, darkred('reST markup error:')
-print >>sys.stderr, err.args[0].encode('ascii', 'backslashreplace')
+print >>sys.stderr, err.args[0].encode('ascii',
+'backslashreplace')
 elif isinstance(err, SphinxError):
 print >>sys.stderr, darkred('%s:' % err.category)
 print >>sys.stderr, err
@@ -181,6 +185,6 @@ def main(argv):
 print >>sys.stderr, ('Please also report this if it was a user '
 'error, so that a better error message '
 'can be provided next time.')
-print >>sys.stderr, ('Send reports to sphinx-dev@googlegroups.com. '
-'Thanks!')
+print >>sys.stderr, ('Send reports to '
+'sphinx-dev@googlegroups.com. Thanks!')
 return 1
@@ -63,7 +63,8 @@ class Config(object):
 html_logo = (None, False),
 html_favicon = (None, False),
 html_static_path = ([], False),
-html_last_updated_fmt = (None, False), # the real default is locale-dependent
+# the real default is locale-dependent
+html_last_updated_fmt = (None, False),
 html_use_smartypants = (True, False),
 html_translator_class = (None, False),
 html_sidebars = ({}, False),
@@ -18,7 +18,7 @@ from sphinx import addnodes
 from sphinx.util import parselinenos
 
 
-# ------ highlight directive --------------------------------------------------------
+# ------ highlight directive ---------------------------------------------------
 
 def highlightlang_directive(name, arguments, options, content, lineno,
 content_offset, block_text, state, state_machine):
@@ -36,10 +36,11 @@ highlightlang_directive.content = 0
 highlightlang_directive.arguments = (1, 0, 0)
 highlightlang_directive.options = {'linenothreshold': directives.unchanged}
 directives.register_directive('highlight', highlightlang_directive)
-directives.register_directive('highlightlang', highlightlang_directive) # old name
+# old name
+directives.register_directive('highlightlang', highlightlang_directive)
 
 
-# ------ code-block directive -------------------------------------------------------
+# ------ code-block directive --------------------------------------------------
 
 def codeblock_directive(name, arguments, options, content, lineno,
 content_offset, block_text, state, state_machine):
@@ -56,13 +57,15 @@ directives.register_directive('code-block', codeblock_directive)
 directives.register_directive('sourcecode', codeblock_directive)
 
 
-# ------ literalinclude directive ---------------------------------------------------
+# ------ literalinclude directive ----------------------------------------------
 
 def literalinclude_directive(name, arguments, options, content, lineno,
 content_offset, block_text, state, state_machine):
-"""Like .. include:: :literal:, but only warns if the include file is not found."""
+"""Like .. include:: :literal:, but only warns if the include file is
+not found."""
 if not state.document.settings.file_insertion_enabled:
-return [state.document.reporter.warning('File insertion disabled', line=lineno)]
+return [state.document.reporter.warning('File insertion disabled',
+line=lineno)]
 env = state.document.settings.env
 rel_fn = arguments[0]
 source_dir = path.dirname(path.abspath(state_machine.input_lines.source(
@@ -133,14 +136,15 @@ def literalinclude_directive(name, arguments, options, content, lineno,
 state.document.settings.env.note_dependency(rel_fn)
 return [retnode]
 
-literalinclude_directive.options = {'linenos': directives.flag,
+literalinclude_directive.options = {
+'linenos': directives.flag,
 'language': directives.unchanged_required,
 'encoding': directives.encoding,
 'pyobject': directives.unchanged_required,
 'lines': directives.unchanged_required,
 'start-after': directives.unchanged_required,
 'end-before': directives.unchanged_required,
 }
 literalinclude_directive.content = 0
 literalinclude_directive.arguments = (1, 0, 0)
 directives.register_directive('literalinclude', literalinclude_directive)
@@ -17,7 +17,7 @@ from sphinx import addnodes
 from sphinx.util import ws_re
 
 
-# ------ information units ---------------------------------------------------------
+# ------ information units -----------------------------------------------------
 
 def desc_index_text(desctype, module, name, add_modules):
 if desctype == 'function':
@@ -342,7 +342,8 @@ def parse_c_type(node, ctype):
 tnode = nodes.Text(part, part)
 if part[0] in string.ascii_letters+'_' and part not in stopwords:
 pnode = addnodes.pending_xref(
-'', reftype='ctype', reftarget=part, modname=None, classname=None)
+'', reftype='ctype', reftarget=part,
+modname=None, classname=None)
 pnode += tnode
 node += pnode
 else:
@@ -449,8 +450,10 @@ def desc_directive(desctype, arguments, options, content, lineno,
 if desctype in ('function', 'data', 'class', 'exception',
 'method', 'staticmethod', 'classmethod',
 'attribute'):
-name, clsname = parse_py_signature(signode, sig, desctype, module, env)
-elif desctype in ('cfunction', 'cmember', 'cmacro', 'ctype', 'cvar'):
+name, clsname = parse_py_signature(signode, sig,
+desctype, module, env)
+elif desctype in ('cfunction', 'cmember', 'cmacro',
+'ctype', 'cvar'):
 name = parse_c_signature(signode, sig, desctype)
 elif desctype == 'cmdoption':
 optname = parse_option_desc(signode, sig)
@@ -463,7 +466,8 @@ def desc_directive(desctype, arguments, options, content, lineno,
 state.document.note_explicit_target(signode)
 inode['entries'].append(
 ('pair', _('%scommand line option; %s') %
-((env.currprogram and env.currprogram + ' ' or ''), sig),
+((env.currprogram and env.currprogram + ' ' or ''),
+sig),
 targetname, targetname))
 env.note_progoption(optname, targetname)
 continue
@@ -473,7 +477,8 @@ def desc_directive(desctype, arguments, options, content, lineno,
 continue
 else:
 # another registered generic x-ref directive
-rolename, indextemplate, parse_node = additional_xref_types[desctype]
+rolename, indextemplate, parse_node = \
+additional_xref_types[desctype]
 if parse_node:
 fullname = parse_node(env, sig, signode)
 else:
@@ -502,8 +507,8 @@ def desc_directive(desctype, arguments, options, content, lineno,
 signode.clear()
 signode += addnodes.desc_name(sig, sig)
 continue # we don't want an index entry here
-# only add target and index entry if this is the first description of the
-# function name in this desc block
+# only add target and index entry if this is the first description
+# of the function name in this desc block
 if not noindex and name not in names:
 fullname = (module and module + '.' or '') + name
 # note target
@@ -583,7 +588,7 @@ additional_xref_types = {
 del _
 
 
-# ------ target --------------------------------------------------------------------
+# ------ target ----------------------------------------------------------------
 
 def target_directive(targettype, arguments, options, content, lineno,
 content_offset, block_text, state, state_machine):
@@ -603,7 +608,8 @@ def target_directive(targettype, arguments, options, content, lineno,
 if colon != -1:
 indextype = indexentry[:colon].strip()
 indexentry = indexentry[colon+1:].strip()
-inode = addnodes.index(entries=[(indextype, indexentry, targetname, targetname)])
+inode = addnodes.index(entries=[(indextype, indexentry,
+targetname, targetname)])
 ret.insert(0, inode)
 env.note_reftarget(rolename, fullname, targetname)
 return ret
@@ -611,5 +617,5 @@ def target_directive(targettype, arguments, options, content, lineno,
 target_directive.content = 0
 target_directive.arguments = (1, 0, 1)
 
-# note, the target directive is not registered here, it is used by the application
-# when registering additional xref types
+# note, the target directive is not registered here, it is used by the
+# application when registering additional xref types
@ -18,7 +18,7 @@ from sphinx.util import patfilter, ws_re, caption_ref_re, docname_join
|
|||||||
from sphinx.util.compat import make_admonition
|
from sphinx.util.compat import make_admonition
|
||||||
|
|
||||||
|
|
||||||
# ------ the TOC tree ---------------------------------------------------------------
|
# ------ the TOC tree ----------------------------------------------------------
|
||||||
|
|
||||||
def toctree_directive(name, arguments, options, content, lineno,
|
def toctree_directive(name, arguments, options, content, lineno,
|
||||||
content_offset, block_text, state, state_machine):
|
content_offset, block_text, state, state_machine):
|
||||||
@ -50,7 +50,8 @@ def toctree_directive(name, arguments, options, content, lineno,
|
|||||||
docname = docname_join(env.docname, docname)
|
docname = docname_join(env.docname, docname)
|
||||||
if docname not in env.found_docs:
|
if docname not in env.found_docs:
|
||||||
ret.append(state.document.reporter.warning(
|
ret.append(state.document.reporter.warning(
|
||||||
'toctree references unknown document %r' % docname, line=lineno))
|
'toctree references unknown document %r' % docname,
|
||||||
|
line=lineno))
|
||||||
else:
|
else:
|
||||||
includefiles.append(docname)
|
includefiles.append(docname)
|
||||||
else:
|
else:
|
||||||
@ -61,8 +62,8 @@ def toctree_directive(name, arguments, options, content, lineno,
|
|||||||
includefiles.append(docname)
|
includefiles.append(docname)
|
||||||
if not docnames:
|
if not docnames:
|
||||||
ret.append(state.document.reporter.warning(
|
ret.append(state.document.reporter.warning(
|
||||||
'toctree glob pattern %r didn\'t match any documents' % entry,
|
'toctree glob pattern %r didn\'t match any documents'
|
||||||
line=lineno))
|
% entry, line=lineno))
|
||||||
subnode = addnodes.toctree()
|
subnode = addnodes.toctree()
|
||||||
subnode['includefiles'] = includefiles
|
subnode['includefiles'] = includefiles
|
||||||
subnode['includetitles'] = includetitles
|
subnode['includetitles'] = includetitles
|
||||||
@ -78,7 +79,7 @@ toctree_directive.options = {'maxdepth': int, 'glob': directives.flag,
|
|||||||
directives.register_directive('toctree', toctree_directive)
|
directives.register_directive('toctree', toctree_directive)
|
||||||
|
|
||||||
|
|
||||||
# ------ section metadata ----------------------------------------------------------
|
# ------ section metadata ------------------------------------------------------
|
||||||
|
|
||||||
def module_directive(name, arguments, options, content, lineno,
|
def module_directive(name, arguments, options, content, lineno,
|
||||||
content_offset, block_text, state, state_machine):
|
content_offset, block_text, state, state_machine):
|
||||||
@ -101,7 +102,8 @@ def module_directive(name, arguments, options, content, lineno,
|
|||||||
node += nodes.emphasis('', _('Platforms: '))
|
node += nodes.emphasis('', _('Platforms: '))
|
||||||
node += nodes.Text(options['platform'], options['platform'])
|
node += nodes.Text(options['platform'], options['platform'])
|
||||||
ret.append(node)
|
ret.append(node)
|
||||||
# the synopsis isn't printed; in fact, it is only used in the modindex currently
|
# the synopsis isn't printed; in fact, it is only used in the
|
||||||
|
# modindex currently
|
||||||
if not noindex:
|
if not noindex:
|
||||||
indextext = _('%s (module)') % modname
|
indextext = _('%s (module)') % modname
|
||||||
inode = addnodes.index(entries=[('single', indextext,
|
inode = addnodes.index(entries=[('single', indextext,
|
||||||
@@ -172,7 +174,7 @@ program_directive.arguments = (1, 0, 1)
directives.register_directive('program', program_directive)


-# ------ index markup --------------------------------------------------------------
+# ------ index markup ----------------------------------------------------------

indextypes = [
'single', 'pair', 'triple',
@@ -214,7 +216,7 @@ def index_directive(name, arguments, options, content, lineno,
index_directive.arguments = (1, 0, 1)
directives.register_directive('index', index_directive)

-# ------ versionadded/versionchanged -----------------------------------------------
+# ------ versionadded/versionchanged -------------------------------------------

def version_directive(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
@@ -241,7 +243,7 @@ directives.register_directive('versionadded', version_directive)
directives.register_directive('versionchanged', version_directive)


-# ------ see also ------------------------------------------------------------------
+# ------ see also --------------------------------------------------------------

def seealso_directive(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
@@ -261,7 +263,7 @@ seealso_directive.arguments = (0, 1, 1)
directives.register_directive('seealso', seealso_directive)


-# ------ production list (for the reference) ---------------------------------------
+# ------ production list (for the reference) -----------------------------------

token_re = re.compile('`([a-z_]+)`')

@@ -317,7 +319,7 @@ productionlist_directive.arguments = (1, 0, 1)
directives.register_directive('productionlist', productionlist_directive)


-# ------ glossary directive ---------------------------------------------------------
+# ------ glossary directive ----------------------------------------------------

def glossary_directive(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
@@ -354,7 +356,7 @@ glossary_directive.arguments = (0, 0, 0)
directives.register_directive('glossary', glossary_directive)


-# ------ miscellaneous markup -------------------------------------------------------
+# ------ miscellaneous markup --------------------------------------------------

def centered_directive(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
@@ -373,7 +375,8 @@ def acks_directive(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
node = addnodes.acks()
state.nested_parse(content, content_offset, node)
-if len(node.children) != 1 or not isinstance(node.children[0], nodes.bullet_list):
+if len(node.children) != 1 or not isinstance(node.children[0],
+nodes.bullet_list):
return [state.document.reporter.warning('.. acks content is not a list',
line=lineno)]
return [node]
@@ -388,9 +391,10 @@ def hlist_directive(name, arguments, options, content, lineno,
ncolumns = options.get('columns', 2)
node = nodes.paragraph()
state.nested_parse(content, content_offset, node)
-if len(node.children) != 1 or not isinstance(node.children[0], nodes.bullet_list):
-return [state.document.reporter.warning('.. hlist content is not a list',
-line=lineno)]
+if len(node.children) != 1 or not isinstance(node.children[0],
+nodes.bullet_list):
+return [state.document.reporter.warning(
+'.. hlist content is not a list', line=lineno)]
fulllist = node.children[0]
# create a hlist node where the items are distributed
npercol, nmore = divmod(len(fulllist), ncolumns)
@@ -205,8 +205,8 @@ class BuildEnvironment:
self.set_warnfunc(None)
values = self.config.values
del self.config.values
-# first write to a temporary file, so that if dumping fails, the existing
-# environment won't be overwritten
+# first write to a temporary file, so that if dumping fails,
+# the existing environment won't be overwritten
picklefile = open(filename + '.tmp', 'wb')
# remove potentially pickling-problematic values from config
for key, val in vars(self.config).items():
@@ -244,13 +244,14 @@ class BuildEnvironment:
# this is to invalidate old pickles
self.version = ENV_VERSION

-# All "docnames" here are /-separated and relative and exclude the source suffix.
+# All "docnames" here are /-separated and relative and exclude
+# the source suffix.

self.found_docs = set() # contains all existing docnames
self.all_docs = {} # docname -> mtime at the time of build
# contains all built docnames
-self.dependencies = {} # docname -> set of dependent file names, relative to
-# documentation root
+self.dependencies = {} # docname -> set of dependent file
+# names, relative to documentation root

# File metadata
self.metadata = {} # docname -> dict of metadata items
@@ -259,30 +260,34 @@ class BuildEnvironment:
self.titles = {} # docname -> title node
self.tocs = {} # docname -> table of contents nodetree
self.toc_num_entries = {} # docname -> number of real entries
-# used to determine when to show the TOC in a sidebar
-# (don't show if it's only one item)
+# used to determine when to show the TOC
+# in a sidebar (don't show if it's only one item)

self.toctree_includes = {} # docname -> list of toctree includefiles
-self.files_to_rebuild = {} # docname -> set of files (containing its TOCs)
-# to rebuild too
+self.files_to_rebuild = {} # docname -> set of files
+# (containing its TOCs) to rebuild too
self.glob_toctrees = set() # docnames that have :glob: toctrees

# X-ref target inventory
self.descrefs = {} # fullname -> docname, desctype
self.filemodules = {} # docname -> [modules]
-self.modules = {} # modname -> docname, synopsis, platform, deprecated
+self.modules = {} # modname -> docname, synopsis,
+# platform, deprecated
self.labels = {} # labelname -> docname, labelid, sectionname
self.anonlabels = {} # labelname -> docname, labelid
self.progoptions = {} # (program, name) -> docname, labelid
self.reftargets = {} # (type, name) -> docname, labelid
-# where type is term, token, envvar, citation
+# type: term, token, envvar, citation

# Other inventories
self.indexentries = {} # docname -> list of
# (type, string, target, aliasname)
-self.versionchanges = {} # version -> list of
-# (type, docname, lineno, module, descname, content)
-self.images = FilenameUniqDict() # absolute path -> (docnames, unique filename)
-self.dlfiles = FilenameUniqDict() # absolute path -> (docnames, unique filename)
+self.versionchanges = {} # version -> list of (type, docname,
+# lineno, module, descname, content)
+# these map absolute path -> (docnames, unique filename)
+self.images = FilenameUniqDict()
+self.dlfiles = FilenameUniqDict()

# These are set while parsing a file
self.docname = None # current document name
@@ -362,7 +367,8 @@ class BuildEnvironment:
"""
suffix = suffix or self.config.source_suffix
if base is True:
-return path.join(self.srcdir, docname.replace(SEP, path.sep)) + suffix
+return path.join(self.srcdir,
+docname.replace(SEP, path.sep)) + suffix
elif base is None:
return docname.replace(SEP, path.sep) + suffix
else:
@@ -375,8 +381,10 @@ class BuildEnvironment:
exclude_dirs = [d.replace(SEP, path.sep) for d in config.exclude_dirs]
exclude_trees = [d.replace(SEP, path.sep) for d in config.exclude_trees]
self.found_docs = set(get_matching_docs(
-self.srcdir, config.source_suffix, exclude_docs=set(config.unused_docs),
-exclude_dirs=exclude_dirs, exclude_trees=exclude_trees,
+self.srcdir, config.source_suffix,
+exclude_docs=set(config.unused_docs),
+exclude_dirs=exclude_dirs,
+exclude_trees=exclude_trees,
exclude_dirnames=['_sources'] + config.exclude_dirnames))

def get_outdated_files(self, config_changed):
@@ -428,16 +436,17 @@ class BuildEnvironment:
return added, changed, removed

def update(self, config, srcdir, doctreedir, app=None):
-"""(Re-)read all files new or changed since last update. Yields a summary
-and then docnames as it processes them. Store all environment docnames
-in the canonical format (ie using SEP as a separator in place of
-os.path.sep)."""
+"""(Re-)read all files new or changed since last update.
+Yields a summary and then docnames as it processes them.
+Store all environment docnames in the canonical format
+(ie using SEP as a separator in place of os.path.sep)."""
config_changed = False
if self.config is None:
msg = '[new config] '
config_changed = True
else:
-# check if a config value was changed that affects how doctrees are read
+# check if a config value was changed that affects how
+# doctrees are read
for key, descr in config.config_values.iteritems():
if not descr[1]:
continue
@@ -577,7 +586,8 @@ class BuildEnvironment:

if save_parsed:
# save the parsed doctree
-doctree_filename = self.doc2path(docname, self.doctreedir, '.doctree')
+doctree_filename = self.doc2path(docname, self.doctreedir,
+'.doctree')
dirname = path.dirname(doctree_filename)
if not path.isdir(dirname):
os.makedirs(dirname)
@@ -638,7 +648,8 @@ class BuildEnvironment:
node['candidates'] = candidates = {}
imguri = node['uri']
if imguri.find('://') != -1:
-self.warn(docname, 'Nonlocal image URI found: %s' % imguri, node.line)
+self.warn(docname, 'Nonlocal image URI found: %s' % imguri,
+node.line)
candidates['?'] = imguri
continue
# imgpath is the image path *from srcdir*
@@ -660,7 +671,8 @@ class BuildEnvironment:
finally:
f.close()
except (OSError, IOError):
-self.warn(docname, 'Image file %s not readable' % filename)
+self.warn(docname,
+'Image file %s not readable' % filename)
if imgtype:
candidates['image/' + imgtype] = new_imgpath
else:
@@ -725,7 +737,8 @@ class BuildEnvironment:
continue
if name in self.labels:
self.warn(docname, 'duplicate label %s, ' % name +
-'other instance in %s' % self.doc2path(self.labels[name][0]),
+'other instance in ' +
+self.doc2path(self.labels[name][0]),
node.line)
self.anonlabels[name] = docname, labelid
if node.tagname == 'section':
@@ -835,7 +848,8 @@ class BuildEnvironment:
if fullname in self.descrefs:
self.warn(self.docname,
'duplicate canonical description name %s, ' % fullname +
-'other instance in %s' % self.doc2path(self.descrefs[fullname][0]),
+'other instance in ' +
+self.doc2path(self.descrefs[fullname][0]),
line)
self.descrefs[fullname] = (self.docname, desctype)

@@ -851,7 +865,8 @@ class BuildEnvironment:

def note_versionchange(self, type, version, node, lineno):
self.versionchanges.setdefault(version, []).append(
-(type, self.docname, lineno, self.currmodule, self.currdesc, node.astext()))
+(type, self.docname, lineno, self.currmodule, self.currdesc,
+node.astext()))

def note_dependency(self, filename):
basename = path.dirname(self.doc2path(self.docname, base=None))
@@ -915,7 +930,8 @@ class BuildEnvironment:
def _walk_depth(node, depth, maxdepth, titleoverrides):
"""Utility: Cut a TOC at a specified depth."""
for subnode in node.children[:]:
-if isinstance(subnode, (addnodes.compact_paragraph, nodes.list_item)):
+if isinstance(subnode, (addnodes.compact_paragraph,
+nodes.list_item)):
subnode['classes'].append('toctree-l%d' % (depth-1))
_walk_depth(subnode, depth, maxdepth, titleoverrides)
elif isinstance(subnode, nodes.bullet_list):
@@ -934,27 +950,30 @@ class BuildEnvironment:
toc = self.tocs[includefile].deepcopy()
if not toc.children:
# empty toc means: no titles will show up in the toctree
-self.warn(docname, 'toctree contains reference to document '
-'%r that doesn\'t have a title: no link will be '
-'generated' % includefile)
+self.warn(docname,
+'toctree contains reference to document '
+'%r that doesn\'t have a title: no link '
+'will be generated' % includefile)
except KeyError:
# this is raised if the included file does not exist
-self.warn(docname, 'toctree contains reference to nonexisting '
-'document %r' % includefile)
+self.warn(docname, 'toctree contains reference to '
+'nonexisting document %r' % includefile)
else:
# if titles_only is given, only keep the main title and
# sub-toctrees
if titles_only:
-# delete everything but the toplevel title(s) and toctrees
+# delete everything but the toplevel title(s)
+# and toctrees
for toplevel in toc:
# nodes with length 1 don't have any children anyway
if len(toplevel) > 1:
-subtoctrees = toplevel.traverse(addnodes.toctree)
-toplevel[1][:] = subtoctrees
+subtrees = toplevel.traverse(addnodes.toctree)
+toplevel[1][:] = subtrees
# resolve all sub-toctrees
for toctreenode in toc.traverse(addnodes.toctree):
i = toctreenode.parent.index(toctreenode) + 1
-for item in _entries_from_toctree(toctreenode, subtree=True):
+for item in _entries_from_toctree(toctreenode,
+subtree=True):
toctreenode.parent.insert(i, item)
i += 1
toctreenode.parent.remove(toctreenode)
@@ -993,8 +1012,9 @@ class BuildEnvironment:
refnode.children = [nodes.Text(newtitle)]
return newnode

-descroles = frozenset(('data', 'exc', 'func', 'class', 'const', 'attr', 'obj',
-'meth', 'cfunc', 'cmember', 'cdata', 'ctype', 'cmacro'))
+descroles = frozenset(('data', 'exc', 'func', 'class', 'const',
+'attr', 'obj', 'meth', 'cfunc', 'cmember',
+'cdata', 'ctype', 'cmacro'))

def resolve_references(self, doctree, fromdocname, builder):
reftarget_roles = set(('token', 'term', 'citation'))
@@ -1011,30 +1031,32 @@ class BuildEnvironment:
try:
if typ == 'ref':
if node['refcaption']:
-# reference to anonymous label; the reference uses the supplied
-# link caption
+# reference to anonymous label; the reference uses
+# the supplied link caption
docname, labelid = self.anonlabels.get(target, ('',''))
sectname = node.astext()
if not docname:
newnode = doctree.reporter.system_message(
2, 'undefined label: %s' % target)
else:
-# reference to the named label; the final node will contain the
-# section name after the label
-docname, labelid, sectname = self.labels.get(target, ('','',''))
+# reference to the named label; the final node will
+# contain the section name after the label
+docname, labelid, sectname = self.labels.get(target,
+('','',''))
if not docname:
newnode = doctree.reporter.system_message(
-2, 'undefined label: %s -- if you don\'t ' % target +
-'give a link caption the label must precede a section '
-'header.')
+2, 'undefined label: %s' % target +
+' -- if you don\'t give a link caption '
+'the label must precede a section header.')
if docname:
newnode = nodes.reference('', '')
innernode = nodes.emphasis(sectname, sectname)
if docname == fromdocname:
newnode['refid'] = labelid
else:
-# set more info in contnode in case the get_relative_uri call
-# raises NoUri, the builder will then have to resolve these
+# set more info in contnode; in case the
+# get_relative_uri call raises NoUri,
+# the builder will then have to resolve these
contnode = addnodes.pending_xref('')
contnode['refdocname'] = docname
contnode['refsectname'] = sectname
@@ -1044,8 +1066,8 @@ class BuildEnvironment:
newnode['refuri'] += '#' + labelid
newnode.append(innernode)
elif typ == 'doc':
-# directly reference to document by source name; can be absolute
-# or relative
+# directly reference to document by source name;
+# can be absolute or relative
docname = docname_join(fromdocname, target)
if docname not in self.all_docs:
newnode = doctree.reporter.system_message(
@@ -1077,7 +1099,8 @@ class BuildEnvironment:
newnode.append(contnode)
elif typ == 'option':
progname = node['refprogram']
-docname, labelid = self.progoptions.get((progname, target), ('', ''))
+docname, labelid = self.progoptions.get((progname, target),
+('', ''))
if not docname:
newnode = contnode
else:
@@ -1089,13 +1112,16 @@ class BuildEnvironment:
fromdocname, docname) + '#' + labelid
newnode.append(contnode)
elif typ in reftarget_roles:
-docname, labelid = self.reftargets.get((typ, target), ('', ''))
+docname, labelid = self.reftargets.get((typ, target),
+('', ''))
if not docname:
if typ == 'term':
-self.warn(fromdocname, 'term not in glossary: %s' % target,
+self.warn(fromdocname,
+'term not in glossary: %s' % target,
node.line)
elif typ == 'citation':
-self.warn(fromdocname, 'citation not found: %s' % target,
+self.warn(fromdocname,
+'citation not found: %s' % target,
node.line)
newnode = contnode
else:
@@ -1110,8 +1136,8 @@ class BuildEnvironment:
docname, synopsis, platform, deprecated = \
self.modules.get(target, ('','','', ''))
if not docname:
-newnode = builder.app.emit_firstresult('missing-reference',
-self, node, contnode)
+newnode = builder.app.emit_firstresult(
+'missing-reference', self, node, contnode)
if not newnode:
newnode = contnode
elif docname == fromdocname:
@@ -1133,8 +1159,8 @@ class BuildEnvironment:
name, desc = self.find_desc(modname, clsname,
target, typ, searchorder)
if not desc:
-newnode = builder.app.emit_firstresult('missing-reference',
-self, node, contnode)
+newnode = builder.app.emit_firstresult(
+'missing-reference', self, node, contnode)
if not newnode:
newnode = contnode
else:
@@ -1148,7 +1174,8 @@ class BuildEnvironment:
newnode['reftitle'] = name
newnode.append(contnode)
else:
-raise RuntimeError('unknown xfileref node encountered: %s' % node)
+raise RuntimeError('unknown xfileref node encountered: %s'
+% node)
except NoUri:
newnode = contnode
if newnode:
@@ -1232,8 +1259,10 @@ class BuildEnvironment:
m = _fixre.match(key)
if m:
if oldkey == m.group(1):
-# prefixes match: add entry as subitem of the previous entry
-oldsubitems.setdefault(m.group(2), [[], {}])[0].extend(targets)
+# prefixes match: add entry as subitem of the
+# previous entry
+oldsubitems.setdefault(m.group(2), [[], {}])[0].\
+extend(targets)
del newlist[i]
continue
oldkey = m.group(1)
@@ -1253,7 +1282,8 @@ class BuildEnvironment:
else:
# get all other symbols under one heading
return 'Symbols'
-return [(key, list(group)) for (key, group) in groupby(newlist, keyfunc)]
+return [(key, list(group))
+for (key, group) in groupby(newlist, keyfunc)]

def collect_relations(self):
relations = {}
@@ -1292,7 +1322,8 @@ class BuildEnvironment:
# else it will stay None
# same for children
if includes:
-for subindex, args in enumerate(izip(includes, [None] + includes,
+for subindex, args in enumerate(izip(includes,
+[None] + includes,
includes[1:] + [None])):
collect([(docname, subindex)] + parents, *args)
relations[docname] = [parents[0][0], previous, next]
@@ -1360,14 +1391,16 @@ class BuildEnvironment:

def find_keyword(self, keyword, avoid_fuzzy=False, cutoff=0.6, n=20):
"""
-Find keyword matches for a keyword. If there's an exact match, just return
-it, else return a list of fuzzy matches if avoid_fuzzy isn't True.
+Find keyword matches for a keyword. If there's an exact match,
+just return it, else return a list of fuzzy matches if avoid_fuzzy
+isn't True.

Keywords searched are: first modules, then descrefs.

Returns: None if nothing found
(type, docname, anchorname) if exact match found
-list of (quality, type, docname, anchorname, description) if fuzzy
+list of (quality, type, docname, anchorname, description)
+if fuzzy
"""

if keyword in self.modules:
@@ -193,7 +193,8 @@ class RstGenerator(object):
docstrings.append(obj.__doc__)

# skip some lines in module docstrings if configured (deprecated!)
-if what == 'module' and self.env.config.automodule_skip_lines and docstrings:
+if what == 'module' and self.env.config.automodule_skip_lines \
+and docstrings:
docstrings[0] = '\n'.join(docstrings[0].splitlines()
[self.env.config.automodule_skip_lines:])

@@ -212,7 +213,8 @@ class RstGenerator(object):
docstrings.append(initdocstring)
# the default is only the class docstring

-# make sure we have Unicode docstrings, then sanitize and split into lines
+# make sure we have Unicode docstrings, then sanitize and split
+# into lines
return [prepare_docstring(force_decode(docstring, encoding))
for docstring in docstrings]

@@ -233,8 +235,9 @@ class RstGenerator(object):
Returns a tuple of: the full name, the module name, a path of
names to get via getattr, the signature and return annotation.
"""
-# first, parse the definition -- auto directives for classes and functions
-# can contain a signature which is then used instead of an autogenerated one
+# first, parse the definition -- auto directives for classes and
+# functions can contain a signature which is then used instead of
+# an autogenerated one
try:
mod, path, base, args, retann = py_ext_sig_re.match(name).groups()
except:
@@ -261,8 +264,8 @@ class RstGenerator(object):
if path:
mod = path.rstrip('.')
else:
-# if documenting a toplevel object without explicit module, it can
-# be contained in another auto directive ...
+# if documenting a toplevel object without explicit module,
+# it can be contained in another auto directive ...
if hasattr(self.env, 'autodoc_current_module'):
mod = self.env.autodoc_current_module
# ... or in the scope of a module directive
@@ -276,8 +279,9 @@ class RstGenerator(object):
mod_cls = path.rstrip('.')
else:
mod_cls = None
-# if documenting a class-level object without path, there must be a
-# current class, either from a parent auto directive ...
+# if documenting a class-level object without path,
+# there must be a current class, either from a parent
+# auto directive ...
if hasattr(self.env, 'autodoc_current_class'):
mod_cls = self.env.autodoc_current_class
# ... or from a class directive
@@ -313,7 +317,8 @@ class RstGenerator(object):
args = None
getargs = True
if what == 'class':
-# for classes, the relevant signature is the __init__ method's
+# for classes, the relevant signature is the
+# __init__ method's
obj = getattr(obj, '__init__', None)
# classes without __init__ method, default __init__ or
# __init__ written in C?
@@ -334,8 +339,9 @@ class RstGenerator(object):
args = None
err = e

-result = self.env.app.emit_firstresult('autodoc-process-signature', what,
-name, obj, self.options, args, retann)
+result = self.env.app.emit_firstresult(
+'autodoc-process-signature', what, name, obj,
+self.options, args, retann)
if result:
args, retann = result

@@ -347,22 +353,24 @@ class RstGenerator(object):
else:
return ''

-def generate(self, what, name, members, add_content, indent=u'', check_module=False,
-no_docstring=False):
+def generate(self, what, name, members, add_content, indent=u'',
+check_module=False, no_docstring=False):
"""
Generate reST for the object in self.result.
"""
mod, objpath, args, retann = self.resolve_name(what, name)
if not mod:
# need a module to import
-self.warn('don\'t know which module to import for autodocumenting %r '
-'(try placing a "module" or "currentmodule" directive in the '
-'document, or giving an explicit module name)' % name)
+self.warn('don\'t know which module to import for autodocumenting '
+'%r (try placing a "module" or "currentmodule" directive '
+'in the document, or giving an explicit module name)'
+% name)
return
# fully-qualified name
fullname = mod + (objpath and '.' + '.'.join(objpath) or '')

-# the name to put into the generated directive -- doesn't contain the module
+# the name to put into the generated directive -- doesn't contain
+# the module
name_in_directive = '.'.join(objpath) or mod

# now, import the module and get object to document
@@ -372,8 +380,8 @@ class RstGenerator(object):
for part in objpath:
todoc = getattr(todoc, part)
except (ImportError, AttributeError), err:
-self.warn('autodoc can\'t import/find %s %r, it reported error: "%s", '
-'please check your spelling and sys.path' %
+self.warn('autodoc can\'t import/find %s %r, it reported error: '
+'"%s", please check your spelling and sys.path' %
(what, str(fullname), err))
return

@@ -388,7 +396,7 @@ class RstGenerator(object):
else:
self.filename_set.add(analyzer.srcname)

-# check __module__ of object if wanted (for members not given explicitly)
+# check __module__ of object for members not given explicitly
if check_module:
if hasattr(todoc, '__module__'):
if todoc.__module__ != mod:
@@ -417,16 +425,16 @@ class RstGenerator(object):
if what == 'module':
# Add some module-specific options
if self.options.synopsis:
-self.result.append(indent + u' :synopsis: ' + self.options.synopsis,
-'<autodoc>')
+self.result.append(indent + u' :synopsis: ' +
+self.options.synopsis, '<autodoc>')
if self.options.platform:
-self.result.append(indent + u' :platform: ' + self.options.platform,
-'<autodoc>')
+self.result.append(indent + u' :platform: ' +
+self.options.platform, '<autodoc>')
if self.options.deprecated:
self.result.append(indent + u' :deprecated:', '<autodoc>')
else:
-# Be explicit about the module, this is necessary since .. class:: doesn't
-# support a prepended module name
+# Be explicit about the module, this is necessary since .. class::
+# doesn't support a prepended module name
self.result.append(indent + u' :module: %s' % mod, '<autodoc>')
if self.options.noindex:
self.result.append(indent + u' :noindex:', '<autodoc>')
@@ -439,7 +447,8 @@ class RstGenerator(object):
u':class:`%s`' % b.__name__ or
u':class:`%s.%s`' % (b.__module__, b.__name__)
for b in todoc.__bases__]
-self.result.append(indent + _(u' Bases: %s') % ', '.join(bases),
+self.result.append(indent + _(u' Bases: %s') %
+', '.join(bases),
'<autodoc>')
self.result.append(u'', '<autodoc>')

@@ -513,7 +522,8 @@ class RstGenerator(object):
# base classes
all_members = inspect.getmembers(todoc)
else:
-# __dict__ contains only the members directly defined in the class
+# __dict__ contains only the members directly defined
+# in the class
all_members = sorted(todoc.__dict__.iteritems())
else:
all_members = [(mname, getattr(todoc, mname)) for mname in members]
@@ -524,7 +534,8 @@ class RstGenerator(object):
for (membername, member) in all_members:
# if isattr is True, the member is documented as an attribute
isattr = False
-# if content is not None, no extra content from docstrings will be added
+# if content is not None, no extra content from docstrings
+# will be added
content = None

if want_all_members and membername.startswith('_'):
@@ -536,15 +547,18 @@ class RstGenerator(object):
skip = False
isattr = True
else:
-# ignore undocumented members if :undoc-members: is not given
+# ignore undocumented members if :undoc-members:
+# is not given
doc = getattr(member, '__doc__', None)
skip = not self.options.undoc_members and not doc

-# give the user a chance to decide whether this member should be skipped
+# give the user a chance to decide whether this member
+# should be skipped
if self.env.app:
# let extensions preprocess docstrings
skip_user = self.env.app.emit_firstresult(
-'autodoc-skip-member', what, membername, member, skip, self.options)
+'autodoc-skip-member', what, membername, member,
+skip, self.options)
if skip_user is not None:
skip = skip_user
if skip:
@@ -560,7 +574,8 @@ class RstGenerator(object):
if member.__name__ != membername:
# assume it's aliased
memberwhat = 'data'
-content = ViewList([_('alias of :class:`%s`') % member.__name__],
+content = ViewList(
+[_('alias of :class:`%s`') % member.__name__],
source='')
elif issubclass(member, base_exception):
memberwhat = 'exception'
@@ -577,7 +592,8 @@ class RstGenerator(object):
if member.__name__ != membername:
# assume it's aliased
memberwhat = 'attribute'
-content = ViewList([_('alias of :class:`%s`') % member.__name__],
+content = ViewList(
+[_('alias of :class:`%s`') % member.__name__],
source='')
else:
memberwhat = 'class'
@@ -586,8 +602,8 @@ class RstGenerator(object):
else:
continue

-# give explicitly separated module name, so that members of inner classes
-# can be documented
+# give explicitly separated module name, so that members
+# of inner classes can be documented
full_membername = mod + '::' + '.'.join(objpath + [membername])
self.generate(memberwhat, full_membername, ['__all__'],
add_content=content, no_docstring=bool(content),
@@ -653,13 +669,17 @@ def members_option(arg):


def setup(app):
-mod_options = {'members': members_option, 'undoc-members': directives.flag,
+mod_options = {
+'members': members_option, 'undoc-members': directives.flag,
'noindex': directives.flag, 'inherited-members': directives.flag,
'show-inheritance': directives.flag, 'synopsis': lambda x: x,
-'platform': lambda x: x, 'deprecated': directives.flag}
-cls_options = {'members': members_option, 'undoc-members': directives.flag,
+'platform': lambda x: x, 'deprecated': directives.flag
+}
+cls_options = {
+'members': members_option, 'undoc-members': directives.flag,
'noindex': directives.flag, 'inherited-members': directives.flag,
-'show-inheritance': directives.flag}
+'show-inheritance': directives.flag
+}
app.add_directive('automodule', automodule_directive,
1, (1, 0, 1), **mod_options)
app.add_directive('autoclass', autoclass_directive,
@@ -53,16 +53,16 @@ class CoverageBuilder(Builder):

self.c_ignorexps = {}
for (name, exps) in self.config.coverage_ignore_c_items.iteritems():
-self.c_ignorexps[name] = compile_regex_list('coverage_ignore_c_items',
-exps, self.warn)
-self.mod_ignorexps = compile_regex_list('coverage_ignore_modules',
-self.config.coverage_ignore_modules,
+self.c_ignorexps[name] = compile_regex_list(
+'coverage_ignore_c_items', exps, self.warn)
+self.mod_ignorexps = compile_regex_list(
+'coverage_ignore_modules', self.config.coverage_ignore_modules,
self.warn)
-self.cls_ignorexps = compile_regex_list('coverage_ignore_classes',
-self.config.coverage_ignore_classes,
+self.cls_ignorexps = compile_regex_list(
+'coverage_ignore_classes', self.config.coverage_ignore_classes,
self.warn)
-self.fun_ignorexps = compile_regex_list('coverage_ignore_functions',
-self.config.coverage_ignore_functions,
+self.fun_ignorexps = compile_regex_list(
+'coverage_ignore_functions', self.config.coverage_ignore_functions,
self.warn)

def get_outdated_docs(self):
@@ -128,7 +128,8 @@ class CoverageBuilder(Builder):
try:
mod = __import__(mod_name, fromlist=['foo'])
except ImportError, err:
-self.warn('module %s could not be imported: %s' % (mod_name, err))
+self.warn('module %s could not be imported: %s' %
+(mod_name, err))
self.py_undoc[mod_name] = {'error': err}
continue

@@ -168,7 +169,8 @@ class CoverageBuilder(Builder):

attrs = []

-for attr_name, attr in inspect.getmembers(obj, inspect.ismethod):
+for attr_name, attr in inspect.getmembers(
+obj, inspect.ismethod):
if attr_name[0] == '_':
# starts with an underscore, ignore it
continue
@@ -262,13 +262,15 @@ Doctest summary
for group in groups.itervalues():
group.add_code(code)
if self.config.doctest_global_setup:
-code = TestCode(self.config.doctest_global_setup, 'testsetup', lineno=0)
+code = TestCode(self.config.doctest_global_setup,
+'testsetup', lineno=0)
for group in groups.itervalues():
group.add_code(code, prepend=True)
if not groups:
return

-self._out('\nDocument: %s\n----------%s\n' % (docname, '-'*len(docname)))
+self._out('\nDocument: %s\n----------%s\n' %
+(docname, '-'*len(docname)))
for group in groups.itervalues():
self.test_group(group, self.env.doc2path(docname, base=None))
# Separately count results from setup code
@@ -287,7 +289,8 @@ Doctest summary
ns = {}
examples = []
for setup in group.setup:
-examples.append(doctest.Example(setup.code, '', lineno=setup.lineno))
+examples.append(doctest.Example(setup.code, '',
+lineno=setup.lineno))
if examples:
# simulate a doctest with the setup code
setup_doctest = doctest.DocTest(examples, {},
@@ -13,8 +13,8 @@
This stuff is only included in the built docs for unstable versions.

The argument for ``ifconfig`` is a plain Python expression, evaluated in the
-namespace of the project configuration (that is, all variables from ``conf.py``
-are available.)
+namespace of the project configuration (that is, all variables from
+``conf.py`` are available.)

:copyright: Copyright 2007-2009 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
@@ -32,7 +32,8 @@ def html_visit_displaymath(self, node):
if i == 0:
# necessary to e.g. set the id property correctly
if node['number']:
-self.body.append('<span class="eqno">(%s)</span>' % node['number'])
+self.body.append('<span class="eqno">(%s)</span>' %
+node['number'])
self.body.append(self.starttag(node, 'div', CLASS='math'))
else:
# but only once!
@@ -117,9 +117,9 @@ def render_math(self, math):
if err.errno != 2: # No such file or directory
raise
if not hasattr(self.builder, '_mathpng_warned_latex'):
-self.builder.warn('LaTeX command %r cannot be run (needed for math '
-'display), check the pngmath_latex setting' %
-self.builder.config.pngmath_latex)
+self.builder.warn('LaTeX command %r cannot be run (needed for '
+'math display), check the pngmath_latex '
+'setting' % self.builder.config.pngmath_latex)
self.builder._mathpng_warned_latex = True
return relfn, None
finally:
@@ -127,8 +127,8 @@ def render_math(self, math):

stdout, stderr = p.communicate()
if p.returncode != 0:
-raise MathExtError('latex exited with error:\n[stderr]\n%s\n[stdout]\n%s'
-% (stderr, stdout))
+raise MathExtError('latex exited with error:\n[stderr]\n%s\n'
+'[stdout]\n%s' % (stderr, stdout))

ensuredir(path.dirname(outfn))
# use some standard dvipng arguments
@@ -146,15 +146,15 @@ def render_math(self, math):
if err.errno != 2: # No such file or directory
raise
if not hasattr(self.builder, '_mathpng_warned_dvipng'):
-self.builder.warn('dvipng command %r cannot be run (needed for math '
-'display), check the pngmath_dvipng setting' %
-self.builder.config.pngmath_dvipng)
+self.builder.warn('dvipng command %r cannot be run (needed for '
+'math display), check the pngmath_dvipng setting'
+% self.builder.config.pngmath_dvipng)
self.builder._mathpng_warned_dvipng = True
return relfn, None
stdout, stderr = p.communicate()
if p.returncode != 0:
-raise MathExtError('dvipng exited with error:\n[stderr]\n%s\n[stdout]\n%s'
-% (stderr, stdout))
+raise MathExtError('dvipng exited with error:\n[stderr]\n%s\n'
+'[stdout]\n%s' % (stderr, stdout))
depth = None
if use_preview:
for line in stdout.splitlines():
@@ -187,7 +187,8 @@ def html_visit_math(self, node):
raise nodes.SkipNode
self.body.append('<img class="math" src="%s" alt="%s" %s/>' %
(fname, self.encode(node['latex']).strip(),
-depth and 'style="vertical-align: %dpx" ' % (-depth) or ''))
+depth and 'style="vertical-align: %dpx" ' %
+(-depth) or ''))
raise nodes.SkipNode

def html_visit_displaymath(self, node):
@@ -55,7 +55,8 @@ class Refcounts(dict):
refcount = None
else:
refcount = int(refcount)
-# Update the entry with the new parameter or the result information.
+# Update the entry with the new parameter or the result
+# information.
if arg:
entry.args.append((arg, type, refcount))
else:
@@ -81,7 +82,8 @@ class Refcounts(dict):
if entry.result_refs is None:
rc += "Always NULL."
else:
-rc += (entry.result_refs and "New" or "Borrowed") + " reference."
+rc += (entry.result_refs and "New" or "Borrowed") + \
+" reference."
node.insert(0, refcount(rc, rc))


@@ -72,8 +72,8 @@ def process_todo_nodes(app, doctree, fromdocname):
para = nodes.paragraph()
filename = env.doc2path(todo_info['docname'], base=None)
description = (
-_('(The original entry is located in %s, line %d and can be found ') %
-(filename, todo_info['lineno']))
+_('(The original entry is located in %s, line %d and '
+'can be found ') % (filename, todo_info['lineno']))
para += nodes.Text(description, description)

# Create a reference
@@ -101,7 +101,8 @@ class PygmentsBridge(object):
style = NoneStyle
elif '.' in stylename:
module, stylename = stylename.rsplit('.', 1)
-style = getattr(__import__(module, None, None, ['__name__']), stylename)
+style = getattr(__import__(module, None, None, ['__name__']),
+stylename)
else:
style = get_style_by_name(stylename)
if dest == 'html':
@ -91,7 +91,8 @@ class AttrDocVisitor(nodes.NodeVisitor):
|
|||||||
return
|
return
|
||||||
if prev.type == sym.simple_stmt and \
|
if prev.type == sym.simple_stmt and \
|
||||||
prev[0].type == sym.expr_stmt and _eq in prev[0].children:
|
prev[0].type == sym.expr_stmt and _eq in prev[0].children:
|
||||||
# need to "eval" the string because it's returned in its original form
|
# need to "eval" the string because it's returned in its
|
||||||
|
# original form
|
||||||
docstring = literals.evalString(node[0].value, self.encoding)
|
docstring = literals.evalString(node[0].value, self.encoding)
|
||||||
docstring = prepare_docstring(docstring)
|
docstring = prepare_docstring(docstring)
|
||||||
self.add_docstring(prev[0], docstring)
|
self.add_docstring(prev[0], docstring)
|
||||||
@ -159,7 +160,8 @@ class ModuleAnalyzer(object):
|
|||||||
try:
|
try:
|
||||||
source = mod.__loader__.get_source(modname)
|
source = mod.__loader__.get_source(modname)
|
||||||
except Exception, err:
|
except Exception, err:
|
||||||
raise PycodeError('error getting source for %r' % modname, err)
|
raise PycodeError('error getting source for %r' % modname,
|
||||||
|
err)
|
||||||
obj = cls.for_string(source, modname)
|
obj = cls.for_string(source, modname)
|
||||||
cls.cache['module', modname] = obj
|
cls.cache['module', modname] = obj
|
||||||
return obj
|
return obj
|
||||||
@ -279,8 +281,9 @@ class ModuleAnalyzer(object):
|
|||||||
namespace.pop()
|
namespace.pop()
|
||||||
result[fullname] = (dtype, startline, endline)
|
result[fullname] = (dtype, startline, endline)
|
||||||
elif type == token.NEWLINE:
|
elif type == token.NEWLINE:
|
||||||
# if this line contained a definition, expect an INDENT to start the
|
# if this line contained a definition, expect an INDENT
|
||||||
# suite; if there is no such INDENT it's a one-line definition
|
# to start the suite; if there is no such INDENT
|
||||||
|
# it's a one-line definition
|
||||||
if defline:
|
if defline:
|
||||||
defline = False
|
defline = False
|
||||||
expect_indent = True
|
expect_indent = True
|
||||||
@@ -292,7 +295,8 @@ if __name__ == '__main__':
     import time, pprint
     x0 = time.time()
     #ma = ModuleAnalyzer.for_file(__file__.rstrip('c'), 'sphinx.builders.html')
-    ma = ModuleAnalyzer.for_file('sphinx/builders/html.py', 'sphinx.builders.html')
+    ma = ModuleAnalyzer.for_file('sphinx/builders/html.py',
+                                 'sphinx.builders.html')
     ma.tokenize()
     x1 = time.time()
     ma.parse()
@@ -102,7 +102,8 @@ class Node(BaseNode):
             ch.parent = self

     def __repr__(self):
-        return '%s(%s, %r)' % (self.__class__.__name__, self.type, self.children)
+        return '%s(%s, %r)' % (self.__class__.__name__,
+                               self.type, self.children)

     def __str__(self):
         """This reproduces the input source exactly."""
@@ -174,7 +175,8 @@ def nice_repr(node, number2name, prefix=False):
                                ', '.join(map(_repr, node.children)))
     def _prepr(node):
         if isinstance(node, Leaf):
-            return "%s(%r, %r)" % (number2name[node.type], node.prefix, node.value)
+            return "%s(%r, %r)" % (number2name[node.type],
+                                   node.prefix, node.value)
         else:
             return "%s(%s)" % (number2name[node.type],
                                ', '.join(map(_prepr, node.children)))
@@ -15,7 +15,8 @@ from os import path
 TERM_ENCODING = getattr(sys.stdin, 'encoding', None)

 from sphinx.util import make_filename
-from sphinx.util.console import purple, bold, red, turquoise, nocolor, color_terminal
+from sphinx.util.console import purple, bold, red, turquoise, \
+     nocolor, color_terminal
 from sphinx.util import texescape


@@ -29,9 +30,6 @@ QUICKSTART_CONF = '''\
 #
 # This file is execfile()d with the current directory set to its containing dir.
 #
-# The contents of this file are pickled, so don't put values in the namespace
-# that aren't pickleable (module imports are okay, they're removed automatically).
-#
 # Note that not all possible configuration values are present in this
 # autogenerated file.
 #
@@ -45,8 +43,7 @@ import sys, os
 # absolute, like shown here.
 #sys.path.append(os.path.abspath('.'))

-# General configuration
-# ---------------------
+# -- General configuration -----------------------------------------------------

 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
@@ -112,8 +109,7 @@ exclude_trees = [%(exclude_trees)s]
 pygments_style = 'sphinx'


-# Options for HTML output
-# -----------------------
+# -- Options for HTML output ---------------------------------------------------

 # The style sheet to use for HTML and HTML Help pages. A file of that name
 # must exist either in Sphinx' static/ path, or in one of the custom paths
@@ -180,8 +176,7 @@ html_static_path = ['%(dot)sstatic']
 htmlhelp_basename = '%(project_fn)sdoc'


-# Options for LaTeX output
-# ------------------------
+# -- Options for LaTeX output --------------------------------------------------

 # The paper size ('letter' or 'a4').
 #latex_paper_size = 'letter'
@@ -190,7 +185,7 @@ htmlhelp_basename = '%(project_fn)sdoc'
 #latex_font_size = '10pt'

 # Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title, author, document class [howto/manual]).
+# (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
   ('%(master)s', '%(project_fn)s.tex', ur'%(project_doc_texescaped)s',
    ur'%(author_texescaped)s', 'manual'),
@@ -217,11 +212,12 @@ latex_documents = [
 INTERSPHINX_CONFIG = '''

 # Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {'http://docs.python.org/dev': None}
+intersphinx_mapping = {'http://docs.python.org/': None}
 '''

 MASTER_FILE = '''\
-.. %(project)s documentation master file, created by sphinx-quickstart on %(now)s.
+.. %(project)s documentation master file, created by
+   sphinx-quickstart on %(now)s.
    You can adapt this file completely to your liking, but it should at least
    contain the root `toctree` directive.

@@ -267,7 +263,7 @@ help:
 \t@echo " htmlhelp to make HTML files and a HTML help project"
 \t@echo " qthelp to make HTML files and a qthelp project"
 \t@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
-\t@echo " changes to make an overview over all changed/added/deprecated items"
+\t@echo " changes to make an overview of all changed/added/deprecated items"
 \t@echo " linkcheck to check all external links for integrity"

 clean:
@@ -466,8 +462,9 @@ def do_prompt(d, key, text, default=None, validator=nonempty):
             if TERM_ENCODING:
                 x = x.decode(TERM_ENCODING)
             else:
-                print turquoise('* Note: non-ASCII characters entered and terminal '
-                                'encoding unknown -- assuming UTF-8 or Latin-1.')
+                print turquoise('* Note: non-ASCII characters entered '
+                                'and terminal encoding unknown -- assuming '
+                                'UTF-8 or Latin-1.')
                 try:
                     x = x.decode('utf-8')
                 except UnicodeDecodeError:
@@ -502,8 +499,8 @@ Enter the root path for documentation.'''
                    'selected root path.')
         print 'sphinx-quickstart will not overwrite existing Sphinx projects.'
         print
-        do_prompt(d, 'path', 'Please enter a new root path (or just Enter to exit)',
-                  '', is_path)
+        do_prompt(d, 'path', 'Please enter a new root path (or just Enter '
+                  'to exit)', '', is_path)
         if not d['path']:
             sys.exit(1)

@@ -516,8 +513,8 @@ Either, you use a directory "_build" within the root path, or you separate

     print '''
 Inside the root directory, two more directories will be created; "_templates"
-for custom HTML templates and "_static" for custom stylesheets and other
-static files. You can enter another prefix (such as ".") to replace the underscore.'''
+for custom HTML templates and "_static" for custom stylesheets and other static
+files. You can enter another prefix (such as ".") to replace the underscore.'''
     do_prompt(d, 'dot', 'Name prefix for templates and static dir', '_', ok)

     print '''
@@ -549,26 +546,29 @@ Please indicate if you want to use one of the following Sphinx extensions:'''
               'from modules (y/N)', 'n', boolean)
     do_prompt(d, 'ext_doctest', 'doctest: automatically test code snippets '
               'in doctest blocks (y/N)', 'n', boolean)
-    do_prompt(d, 'ext_intersphinx', 'intersphinx: link between Sphinx documentation '
-              'of different projects (y/N)', 'n', boolean)
+    do_prompt(d, 'ext_intersphinx', 'intersphinx: link between Sphinx '
+              'documentation of different projects (y/N)', 'n', boolean)
     print '''
 A Makefile and a Windows command file can be generated for you so that you
 only have to run e.g. `make html' instead of invoking sphinx-build
 directly.'''
     do_prompt(d, 'makefile', 'Create Makefile? (Y/n)', 'y', boolean)
-    do_prompt(d, 'batchfile', 'Create Windows command file? (Y/n)', 'y', boolean)
+    do_prompt(d, 'batchfile', 'Create Windows command file? (Y/n)',
+              'y', boolean)

     d['project_fn'] = make_filename(d['project'])
     d['now'] = time.asctime()
     d['underline'] = len(d['project']) * '='
     d['extensions'] = ', '.join(
-        repr('sphinx.ext.' + name) for name in ('autodoc', 'doctest', 'intersphinx')
+        repr('sphinx.ext.' + name) for name in ('autodoc', 'doctest',
+                                                'intersphinx')
         if d['ext_' + name].upper() in ('Y', 'YES'))
     d['copyright'] = time.strftime('%Y') + ', ' + d['author']
-    d['author_texescaped'] = unicode(d['author']).translate(texescape.tex_escape_map)
+    d['author_texescaped'] = unicode(d['author']).\
+                             translate(texescape.tex_escape_map)
     d['project_doc'] = d['project'] + ' Documentation'
-    d['project_doc_texescaped'] = \
-        unicode(d['project'] + ' Documentation').translate(texescape.tex_escape_map)
+    d['project_doc_texescaped'] = unicode(d['project'] + ' Documentation').\
+                                  translate(texescape.tex_escape_map)

     if not path.isdir(d['path']):
         mkdir_p(d['path'])
@@ -36,7 +36,8 @@ for rolename, nodeclass in generic_docroles.iteritems():
     roles.register_generic_role(rolename, nodeclass)


-def indexmarkup_role(typ, rawtext, etext, lineno, inliner, options={}, content=[]):
+def indexmarkup_role(typ, rawtext, etext, lineno, inliner,
+                     options={}, content=[]):
     env = inliner.document.settings.env
     if not typ:
         typ = env.config.default_role
@@ -56,13 +57,14 @@ def indexmarkup_role(typ, rawtext, etext, lineno, inliner, options={}, content=[
                                    options, content)[0]
         return [indexnode, targetnode] + xref_nodes, []
     elif typ == 'pep':
-        indexnode['entries'] = [('single',
-                                 _('Python Enhancement Proposals!PEP %s') % text,
-                                 targetid, 'PEP %s' % text)]
+        indexnode['entries'] = [
+            ('single', _('Python Enhancement Proposals!PEP %s') % text,
+             targetid, 'PEP %s' % text)]
         try:
             pepnum = int(text)
         except ValueError:
-            msg = inliner.reporter.error('invalid PEP number %s' % text, line=lineno)
+            msg = inliner.reporter.error('invalid PEP number %s' % text,
+                                         line=lineno)
             prb = inliner.problematic(rawtext, rawtext, msg)
             return [prb], [msg]
         ref = inliner.document.settings.pep_base_url + 'pep-%04d' % pepnum
@@ -76,7 +78,8 @@ def indexmarkup_role(typ, rawtext, etext, lineno, inliner, options={}, content=[
         try:
             rfcnum = int(text)
         except ValueError:
-            msg = inliner.reporter.error('invalid RFC number %s' % text, line=lineno)
+            msg = inliner.reporter.error('invalid RFC number %s' % text,
+                                         line=lineno)
             prb = inliner.problematic(rawtext, rawtext, msg)
             return [prb], [msg]
         ref = inliner.document.settings.rfc_base_url + inliner.rfc_url % rfcnum
@@ -129,7 +132,8 @@ def xfileref_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
                                   modname=env.currmodule, classname=env.currclass)
     # we may need the line number for warnings
     pnode.line = lineno
-    # the link title may differ from the target, but by default they are the same
+    # the link title may differ from the target, but by default
+    # they are the same
     title = target = text
     titleistarget = True
     # look if explicit title and target are given with `foo <bar>` syntax
@@ -146,7 +150,8 @@ def xfileref_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
             target = text[brace+1:]
             title = text[:brace]
     # special target for Python object cross-references
-    if typ in ('data', 'exc', 'func', 'class', 'const', 'attr', 'meth', 'mod', 'obj'):
+    if typ in ('data', 'exc', 'func', 'class', 'const', 'attr',
+               'meth', 'mod', 'obj'):
         # fix-up parentheses in link title
         if titleistarget:
             title = title.lstrip('.')  # only has a meaning for the target
@@ -171,7 +176,8 @@ def xfileref_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
     elif typ == 'option':
         program = env.currprogram
         if titleistarget:
-            if ' ' in title and not (title.startswith('/') or title.startswith('-')):
+            if ' ' in title and not (title.startswith('/') or
+                                     title.startswith('-')):
                 program, target = re.split(' (?=-|--|/)', title, 1)
                 program = ws_re.sub('-', program)
                 target = target.strip()
@@ -190,18 +196,21 @@ def xfileref_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
     # remove all whitespace to avoid referencing problems
     target = ws_re.sub('', target)
     pnode['reftarget'] = target
-    pnode += innernodetypes.get(typ, nodes.literal)(rawtext, title, classes=['xref'])
+    pnode += innernodetypes.get(typ, nodes.literal)(rawtext, title,
+                                                    classes=['xref'])
     return [pnode], []


 def menusel_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
     return [nodes.emphasis(
-        rawtext, utils.unescape(text).replace('-->', u'\N{TRIANGULAR BULLET}'))], []
+        rawtext,
+        utils.unescape(text).replace('-->', u'\N{TRIANGULAR BULLET}'))], []


 _litvar_re = re.compile('{([^}]+)}')

-def emph_literal_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
+def emph_literal_role(typ, rawtext, text, lineno, inliner,
+                      options={}, content=[]):
     text = utils.unescape(text)
     pos = 0
     retnode = nodes.literal(role=typ.lower())
@@ -84,6 +84,7 @@ class BuildDoc(Command):
             from docutils.utils import SystemMessage
             if isinstance(err, SystemMessage):
                 print >>sys.stderr, darkred('reST markup error:')
-                print >>sys.stderr, err.args[0].encode('ascii', 'backslashreplace')
+                print >>sys.stderr, err.args[0].encode('ascii',
+                                                       'backslashreplace')
             else:
                 raise
@@ -162,7 +162,8 @@ def educateQuotes(s):
     """

     # Special case if the very first character is a quote
-    # followed by punctuation at a non-word-break. Close the quotes by brute force:
+    # followed by punctuation at a non-word-break. Close the quotes
+    # by brute force:
     s = single_quote_start_re.sub("’", s)
     s = double_quote_start_re.sub("”", s)

@@ -200,7 +201,8 @@ def educateQuotesLatex(s, dquotes=("``", "''")):
     """

     # Special case if the very first character is a quote
-    # followed by punctuation at a non-word-break. Close the quotes by brute force:
+    # followed by punctuation at a non-word-break. Close the quotes
+    # by brute force:
     s = single_quote_start_re.sub("\x04", s)
     s = double_quote_start_re.sub("\x02", s)

@@ -300,4 +302,5 @@ __author__ = "Chad Miller <smartypantspy@chad.org>"
 __version__ = "1.5_1.5: Sat, 13 Aug 2005 15:50:24 -0400"
 __url__ = "http://wiki.chad.org/SmartyPantsPy"
 __description__ = \
-    "Smart-quotes, smart-ellipses, and smart-dashes for weblog entries in pyblosxom"
+    "Smart-quotes, smart-ellipses, and smart-dashes for weblog entries" \
+    " in pyblosxom"
@@ -111,14 +111,16 @@ class PorterStemmer(object):
             return self.cons(j)

     def cvc(self, i):
-        """cvc(i) is TRUE <=> i-2,i-1,i has the form consonant - vowel - consonant
+        """cvc(i) is TRUE <=> i-2,i-1,i has the form
+        consonant - vowel - consonant
         and also if the second c is not w,x or y. this is used when trying to
         restore an e at the end of a short e.g.

            cav(e), lov(e), hop(e), crim(e), but
            snow, box, tray.
         """
-        if i < (self.k0 + 2) or not self.cons(i) or self.cons(i-1) or not self.cons(i-2):
+        if i < (self.k0 + 2) or not self.cons(i) or self.cons(i-1) \
+           or not self.cons(i-2):
             return 0
         ch = self.b[i]
         if ch == 'w' or ch == 'x' or ch == 'y':
@@ -138,7 +140,8 @@ class PorterStemmer(object):
         return 1

     def setto(self, s):
-        """setto(s) sets (j+1),...k to the characters in the string s, readjusting k."""
+        """setto(s) sets (j+1),...k to the characters in the string s,
+        readjusting k."""
         length = len(s)
         self.b = self.b[:self.j+1] + s + self.b[self.j+length+1:]
         self.k = self.j + length
@@ -193,7 +196,8 @@ class PorterStemmer(object):
                 self.setto("e")

     def step1c(self):
-        """step1c() turns terminal y to i when there is another vowel in the stem."""
+        """step1c() turns terminal y to i when there is another vowel in
+        the stem."""
         if (self.ends("y") and self.vowelinstem()):
             self.b = self.b[:self.k] + 'i' + self.b[self.k+1:]

@@ -236,7 +240,8 @@ class PorterStemmer(object):
                 # To match the published algorithm, delete this phrase

     def step3(self):
-        """step3() dels with -ic-, -full, -ness etc. similar strategy to step2."""
+        """step3() dels with -ic-, -full, -ness etc. similar strategy
+        to step2."""
         if self.b[self.k] == 'e':
             if self.ends("icate"): self.r("ic")
             elif self.ends("ative"): self.r("")
@@ -68,13 +68,15 @@ class HTMLTranslator(BaseTranslator):
         # the id is set automatically
         self.body.append(self.starttag(node, 'dt'))
         # anchor for per-desc interactive data
-        if node.parent['desctype'] != 'describe' and node['ids'] and node['first']:
+        if node.parent['desctype'] != 'describe' \
+               and node['ids'] and node['first']:
             self.body.append('<!--[%s]-->' % node['ids'][0])
         if node.parent['desctype'] in ('class', 'exception'):
             self.body.append('%s ' % node.parent['desctype'])
     def depart_desc_signature(self, node):
         if node['ids'] and self.add_permalinks and self.builder.add_permalinks:
-            self.body.append(u'<a class="headerlink" href="#%s" ' % node['ids'][0] +
+            self.body.append(u'<a class="headerlink" href="#%s" '
+                             % node['ids'][0] +
                              u'title="%s">\u00B6</a>' %
                              _('Permalink to this definition'))
         self.body.append('</dt>\n')
@@ -192,14 +194,17 @@ class HTMLTranslator(BaseTranslator):
             # most probably a parsed-literal block -- don't highlight
             return BaseTranslator.visit_literal_block(self, node)
         lang = self.highlightlang
-        linenos = node.rawsource.count('\n') >= self.highlightlinenothreshold - 1
+        linenos = node.rawsource.count('\n') >= \
+                  self.highlightlinenothreshold - 1
         if node.has_key('language'):
             # code-block directives
             lang = node['language']
         if node.has_key('linenos'):
             linenos = node['linenos']
-        highlighted = self.highlighter.highlight_block(node.rawsource, lang, linenos)
-        starttag = self.starttag(node, 'div', suffix='', CLASS='highlight-%s' % lang)
+        highlighted = self.highlighter.highlight_block(node.rawsource,
+                                                       lang, linenos)
+        starttag = self.starttag(node, 'div', suffix='',
+                                 CLASS='highlight-%s' % lang)
         self.body.append(starttag + highlighted + '</div>\n')
         raise nodes.SkipNode

@@ -211,7 +216,8 @@ class HTMLTranslator(BaseTranslator):
         if len(node.children) == 1 and \
                node.children[0] in ('None', 'True', 'False'):
             node['classes'].append('xref')
-        self.body.append(self.starttag(node, 'tt', '', CLASS='docutils literal'))
+        self.body.append(self.starttag(node, 'tt', '',
+                                       CLASS='docutils literal'))
         self.protect_literal_text += 1
     def depart_literal(self, node):
         self.protect_literal_text -= 1
@@ -243,7 +249,8 @@ class HTMLTranslator(BaseTranslator):
         pass

     def visit_centered(self, node):
-        self.body.append(self.starttag(node, 'p', CLASS="centered") + '<strong>')
+        self.body.append(self.starttag(node, 'p', CLASS="centered")
+                         + '<strong>')
     def depart_centered(self, node):
         self.body.append('</strong></p>')

@@ -167,7 +167,8 @@ class LaTeXTranslator(nodes.NodeVisitor):
             'pointsize': builder.config.latex_font_size,
             # if empty, the title is set to the first section title
             'title': document.settings.title,
-            'date': ustrftime(builder.config.today_fmt or _('%B %d, %Y')),
+            'date': ustrftime(builder.config.today_fmt
+                              or _('%B %d, %Y')),
             'release': builder.config.release,
             'author': document.settings.author,
             'releasename': _('Release'),
@@ -253,7 +254,8 @@ class LaTeXTranslator(nodes.NodeVisitor):
             for bi in self.bibitems:
                 # cite_key: underscores must not be escaped
                 cite_key = bi[0].replace(r"\_", "_")
-                self.body.append('\\bibitem[%s]{%s}{%s}\n' % (bi[0], cite_key, bi[1]))
+                self.body.append('\\bibitem[%s]{%s}{%s}\n' %
+                                 (bi[0], cite_key, bi[1]))
             self.body.append('\\end{thebibliography}\n')
             self.bibitems = []

@@ -304,7 +306,8 @@ class LaTeXTranslator(nodes.NodeVisitor):
         #     self.body.append(r'\hypertarget{%s}{}' % id)
         #     self.written_ids.add(id)
     def depart_section(self, node):
-        self.sectionlevel = max(self.sectionlevel - 1, self.top_sectionlevel - 1)
+        self.sectionlevel = max(self.sectionlevel - 1,
+                                self.top_sectionlevel - 1)

     def visit_problematic(self, node):
         self.body.append(r'{\color{red}\bfseries{}')
@@ -335,7 +338,8 @@ class LaTeXTranslator(nodes.NodeVisitor):

     def visit_production(self, node):
         if node['tokenname']:
-            self.body.append('\\production{%s}{' % self.encode(node['tokenname']))
+            self.body.append('\\production{%s}{' %
+                             self.encode(node['tokenname']))
         else:
             self.body.append('\\productioncont{')
     def depart_production(self, node):
@@ -352,7 +356,8 @@ class LaTeXTranslator(nodes.NodeVisitor):
             # the environment already handles this
             raise nodes.SkipNode
         elif self.this_is_the_title:
-            if len(node.children) != 1 and not isinstance(node.children[0], nodes.Text):
+            if len(node.children) != 1 and not isinstance(node.children[0],
+                                                          nodes.Text):
                 self.builder.warn('document title is not a single Text node')
             if not self.elements['title']:
                 # text needs to be escaped since it is inserted into
@@ -585,7 +590,8 @@ class LaTeXTranslator(nodes.NodeVisitor):
         else:
             if self.table.has_verbatim:
                 colwidth = 0.95 / self.table.colcount
-                colspec = ('p{%.3f\\textwidth}|' % colwidth) * self.table.colcount
+                colspec = ('p{%.3f\\textwidth}|' % colwidth) * \
+                          self.table.colcount
                 self.body.append('{|' + colspec + '}\n')
             else:
                 self.body.append('{|' + ('L|' * self.table.colcount) + '}\n')
@@ -648,7 +654,8 @@ class LaTeXTranslator(nodes.NodeVisitor):
         # this is a list in the source, but should be rendered as a
         # comma-separated list here
         self.body.append('\n\n')
-        self.body.append(', '.join(n.astext() for n in node.children[0].children) + '.')
+        self.body.append(', '.join(n.astext()
+                                   for n in node.children[0].children) + '.')
         self.body.append('\n\n')
         raise nodes.SkipNode

@@ -743,9 +750,10 @@ class LaTeXTranslator(nodes.NodeVisitor):

     def visit_module(self, node):
         modname = node['modname']
-        self.body.append('\n\\declaremodule[%s]{}{%s}' % (modname.replace('_', ''),
-                                                          self.encode(modname)))
-        self.body.append('\n\\modulesynopsis{%s}' % self.encode(node['synopsis']))
+        self.body.append('\n\\declaremodule[%s]{}{%s}' % (
+            modname.replace('_', ''), self.encode(modname)))
+        self.body.append('\n\\modulesynopsis{%s}' %
+                         self.encode(node['synopsis']))
         if node.has_key('platform'):
             self.body.append('\\platform{%s}' % self.encode(node['platform']))
     def depart_module(self, node):
@@ -922,15 +930,18 @@ class LaTeXTranslator(nodes.NodeVisitor):
         entries = node['entries']
         for type, string, tid, _ in entries:
             if type == 'single':
-                self.body.append(r'\index{%s}' % scre.sub('!', self.encode(string)))
+                self.body.append(r'\index{%s}' %
+                                 scre.sub('!', self.encode(string)))
             elif type == 'pair':
-                parts = tuple(self.encode(x.strip()) for x in string.split(';', 1))
+                parts = tuple(self.encode(x.strip())
+                              for x in string.split(';', 1))
                 try:
                     self.body.append(r'\indexii{%s}{%s}' % parts)
                 except TypeError:
                     self.builder.warn('invalid pair index entry %r' % string)
             elif type == 'triple':
-                parts = tuple(self.encode(x.strip()) for x in string.split(';', 2))
+                parts = tuple(self.encode(x.strip())
+                              for x in string.split(';', 2))
                 try:
                     self.body.append(r'\indexiii{%s}{%s}{%s}' % parts)
                 except TypeError:
@@ -957,7 +968,8 @@ class LaTeXTranslator(nodes.NodeVisitor):
             self.context.append('}')
         elif uri.startswith('%'):
             hashindex = uri.find('#')
-            targetname = (hashindex == -1) and '--doc-' + uri[1:] or uri[hashindex+1:]
+            targetname = (hashindex == -1) and '--doc-' + uri[1:] \
+                         or uri[hashindex+1:]
             self.body.append('\\hyperlink{%s}{' % targetname)
             self.context.append('}')
         elif uri.startswith('@token'):
@@ -419,7 +419,8 @@ class TextTranslator(nodes.NodeVisitor):

     def visit_acks(self, node):
         self.new_state(0)
-        self.add_text(', '.join(n.astext() for n in node.children[0].children) + '.')
+        self.add_text(', '.join(n.astext() for n in node.children[0].children)
+                      + '.')
         self.end_state()
         raise nodes.SkipNode

@@ -56,7 +56,7 @@ def check_syntax(fn, lines):
 def check_style_and_encoding(fn, lines):
     encoding = 'ascii'
     for lno, line in enumerate(lines):
-        if len(line) > 90:
+        if len(line) > 81:
             yield lno+1, "line too long"
         if lno < 2:
             co = coding_re.search(line)