mirror of
https://github.com/sphinx-doc/sphinx.git
synced 2025-02-25 18:55:22 -06:00
Move builders and writers into new packages.
This commit is contained in:
@@ -10,11 +10,14 @@
|
||||
"""
|
||||
|
||||
import sys
|
||||
from os import path
|
||||
|
||||
__revision__ = '$Revision$'
|
||||
__version__ = '0.5'
|
||||
__released__ = '0.5'
|
||||
|
||||
package_dir = path.abspath(path.dirname(__file__))
|
||||
|
||||
|
||||
def main(argv=sys.argv):
|
||||
if sys.version_info[:3] < (2, 4, 0):
|
||||
|
||||
@@ -12,6 +12,7 @@
|
||||
import codecs
|
||||
from os import path
|
||||
|
||||
from sphinx import package_dir
|
||||
from sphinx.util import mtimes_of_files
|
||||
from sphinx.application import TemplateBridge
|
||||
|
||||
@@ -88,7 +89,7 @@ class TranslatorEnvironment(Environment):
|
||||
class BuiltinTemplates(TemplateBridge):
|
||||
def init(self, builder):
|
||||
self.templates = {}
|
||||
base_templates_path = path.join(path.dirname(__file__), 'templates')
|
||||
base_templates_path = path.join(package_dir, 'templates')
|
||||
ext_templates_path = [path.join(builder.confdir, dir)
|
||||
for dir in builder.config.templates_path]
|
||||
self.templates_path = [base_templates_path] + ext_templates_path
|
||||
|
||||
@@ -22,7 +22,7 @@ from docutils.parsers.rst import directives, roles
|
||||
import sphinx
|
||||
from sphinx.roles import xfileref_role, innernodetypes
|
||||
from sphinx.config import Config
|
||||
from sphinx.builder import builtin_builders, StandaloneHTMLBuilder
|
||||
from sphinx.builders import BUILTIN_BUILDERS
|
||||
from sphinx.directives import desc_directive, target_directive, additional_xref_types
|
||||
from sphinx.environment import SphinxStandaloneReader
|
||||
from sphinx.util.console import bold
|
||||
@@ -77,7 +77,7 @@ class Sphinx(object):
|
||||
confoverrides, status, warning=sys.stderr, freshenv=False):
|
||||
self.next_listener_id = 0
|
||||
self._listeners = {}
|
||||
self.builderclasses = builtin_builders.copy()
|
||||
self.builderclasses = BUILTIN_BUILDERS.copy()
|
||||
self.builder = None
|
||||
|
||||
self.srcdir = srcdir
|
||||
@@ -125,6 +125,11 @@ class Sphinx(object):
|
||||
buildername)))
|
||||
|
||||
builderclass = self.builderclasses[buildername]
|
||||
if isinstance(builderclass, tuple):
|
||||
# builtin builder
|
||||
mod, cls = builderclass
|
||||
builderclass = getattr(
|
||||
__import__('sphinx.builders.' + mod, None, None, [cls]), cls)
|
||||
self.builder = builderclass(self, freshenv=freshenv)
|
||||
self.emit('builder-inited')
|
||||
|
||||
@@ -220,8 +225,12 @@ class Sphinx(object):
|
||||
if not hasattr(builder, 'name'):
|
||||
raise ExtensionError('Builder class %s has no "name" attribute' % builder)
|
||||
if builder.name in self.builderclasses:
|
||||
raise ExtensionError('Builder %r already exists (in module %s)' % (
|
||||
builder.name, self.builderclasses[builder.name].__module__))
|
||||
if isinstance(self.builderclasses[builder.name], tuple):
|
||||
raise ExtensionError('Builder %r is a builtin builder' %
|
||||
builder.name)
|
||||
else:
|
||||
raise ExtensionError('Builder %r already exists (in module %s)' % (
|
||||
builder.name, self.builderclasses[builder.name].__module__))
|
||||
self.builderclasses[builder.name] = builder
|
||||
|
||||
def add_config_value(self, name, default, rebuild_env):
|
||||
@@ -243,11 +252,11 @@ class Sphinx(object):
|
||||
raise ExtensionError('Value for key %r must be a (visit, depart) '
|
||||
'function tuple' % key)
|
||||
if key == 'html':
|
||||
from sphinx.htmlwriter import HTMLTranslator as translator
|
||||
from sphinx.writers.html import HTMLTranslator as translator
|
||||
elif key == 'latex':
|
||||
from sphinx.latexwriter import LaTeXTranslator as translator
|
||||
from sphinx.writers.latex import LaTeXTranslator as translator
|
||||
elif key == 'text':
|
||||
from sphinx.textwriter import TextTranslator as translator
|
||||
from sphinx.writers.text import TextTranslator as translator
|
||||
else:
|
||||
# ignore invalid keys for compatibility
|
||||
continue
|
||||
@@ -284,6 +293,7 @@ class Sphinx(object):
|
||||
SphinxStandaloneReader.transforms.append(transform)
|
||||
|
||||
def add_javascript(self, filename):
|
||||
from sphinx.builders.html import StandaloneHTMLBuilder
|
||||
StandaloneHTMLBuilder.script_files.append(
|
||||
posixpath.join('_static', filename))
|
||||
|
||||
|
||||
1274
sphinx/builder.py
1274
sphinx/builder.py
File diff suppressed because it is too large
Load Diff
328
sphinx/builders/__init__.py
Normal file
328
sphinx/builders/__init__.py
Normal file
@@ -0,0 +1,328 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sphinx.builders
|
||||
~~~~~~~~~~~~~~~
|
||||
|
||||
Builder superclass for all builders.
|
||||
|
||||
:copyright: 2007-2008 by Georg Brandl, Sebastian Wiesner, Horst Gutmann.
|
||||
:license: BSD.
|
||||
"""
|
||||
|
||||
import os
|
||||
import gettext
|
||||
from os import path
|
||||
|
||||
from docutils import nodes
|
||||
|
||||
from sphinx import package_dir, locale
|
||||
from sphinx.util import SEP, relative_uri
|
||||
from sphinx.environment import BuildEnvironment
|
||||
from sphinx.util.console import bold, purple, darkgreen
|
||||
|
||||
# side effect: registers roles and directives
|
||||
from sphinx import roles
|
||||
from sphinx import directives
|
||||
|
||||
|
||||
ENV_PICKLE_FILENAME = 'environment.pickle'
|
||||
|
||||
|
||||
class Builder(object):
|
||||
"""
|
||||
Builds target formats from the reST sources.
|
||||
"""
|
||||
|
||||
# builder's name, for the -b command line options
|
||||
name = ''
|
||||
|
||||
def __init__(self, app, env=None, freshenv=False):
|
||||
self.srcdir = app.srcdir
|
||||
self.confdir = app.confdir
|
||||
self.outdir = app.outdir
|
||||
self.doctreedir = app.doctreedir
|
||||
if not path.isdir(self.doctreedir):
|
||||
os.makedirs(self.doctreedir)
|
||||
|
||||
self.app = app
|
||||
self.warn = app.warn
|
||||
self.info = app.info
|
||||
self.config = app.config
|
||||
|
||||
self.load_i18n()
|
||||
|
||||
# images that need to be copied over (source -> dest)
|
||||
self.images = {}
|
||||
|
||||
# if None, this is set in load_env()
|
||||
self.env = env
|
||||
self.freshenv = freshenv
|
||||
|
||||
self.init()
|
||||
self.load_env()
|
||||
|
||||
# helper methods
|
||||
|
||||
def init(self):
|
||||
"""Load necessary templates and perform initialization."""
|
||||
raise NotImplementedError
|
||||
|
||||
def init_templates(self):
|
||||
# Call this from init() if you need templates.
|
||||
if self.config.template_bridge:
|
||||
self.templates = self.app.import_object(
|
||||
self.config.template_bridge, 'template_bridge setting')()
|
||||
else:
|
||||
from sphinx._jinja import BuiltinTemplates
|
||||
self.templates = BuiltinTemplates()
|
||||
self.templates.init(self)
|
||||
|
||||
def get_target_uri(self, docname, typ=None):
|
||||
"""
|
||||
Return the target URI for a document name (typ can be used to qualify
|
||||
the link characteristic for individual builders).
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_relative_uri(self, from_, to, typ=None):
|
||||
"""
|
||||
Return a relative URI between two source filenames. May raise environment.NoUri
|
||||
if there's no way to return a sensible URI.
|
||||
"""
|
||||
return relative_uri(self.get_target_uri(from_),
|
||||
self.get_target_uri(to, typ))
|
||||
|
||||
def get_outdated_docs(self):
|
||||
"""
|
||||
Return an iterable of output files that are outdated, or a string describing
|
||||
what an update build will build.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def status_iterator(self, iterable, summary, colorfunc=darkgreen):
|
||||
l = -1
|
||||
for item in iterable:
|
||||
if l == -1:
|
||||
self.info(bold(summary), nonl=1)
|
||||
l = 0
|
||||
self.info(colorfunc(item) + ' ', nonl=1)
|
||||
yield item
|
||||
if l == 0:
|
||||
self.info()
|
||||
|
||||
supported_image_types = []
|
||||
|
||||
def post_process_images(self, doctree):
|
||||
"""
|
||||
Pick the best candidate for all image URIs.
|
||||
"""
|
||||
for node in doctree.traverse(nodes.image):
|
||||
if '?' in node['candidates']:
|
||||
# don't rewrite nonlocal image URIs
|
||||
continue
|
||||
if '*' not in node['candidates']:
|
||||
for imgtype in self.supported_image_types:
|
||||
candidate = node['candidates'].get(imgtype, None)
|
||||
if candidate:
|
||||
break
|
||||
else:
|
||||
self.warn('%s:%s: no matching candidate for image URI %r' %
|
||||
(node.source, getattr(node, 'lineno', ''), node['uri']))
|
||||
continue
|
||||
node['uri'] = candidate
|
||||
else:
|
||||
candidate = node['uri']
|
||||
if candidate not in self.env.images:
|
||||
# non-existing URI; let it alone
|
||||
continue
|
||||
self.images[candidate] = self.env.images[candidate][1]
|
||||
|
||||
# build methods
|
||||
|
||||
def load_i18n(self):
|
||||
"""
|
||||
Load translated strings from the configured localedirs if
|
||||
enabled in the configuration.
|
||||
"""
|
||||
self.translator = None
|
||||
if self.config.language is not None:
|
||||
self.info(bold('loading translations [%s]... ' % self.config.language),
|
||||
nonl=True)
|
||||
locale_dirs = [path.join(package_dir, 'locale')] + \
|
||||
[path.join(self.srcdir, x) for x in self.config.locale_dirs]
|
||||
for dir_ in locale_dirs:
|
||||
try:
|
||||
trans = gettext.translation('sphinx', localedir=dir_,
|
||||
languages=[self.config.language])
|
||||
if self.translator is None:
|
||||
self.translator = trans
|
||||
else:
|
||||
self.translator._catalog.update(trans.catalog)
|
||||
except Exception:
|
||||
# Language couldn't be found in the specified path
|
||||
pass
|
||||
if self.translator is not None:
|
||||
self.info('done')
|
||||
else:
|
||||
self.info('locale not available')
|
||||
if self.translator is None:
|
||||
self.translator = gettext.NullTranslations()
|
||||
self.translator.install(unicode=True)
|
||||
locale.init() # translate common labels
|
||||
|
||||
def load_env(self):
|
||||
"""Set up the build environment."""
|
||||
if self.env:
|
||||
return
|
||||
if not self.freshenv:
|
||||
try:
|
||||
self.info(bold('loading pickled environment... '), nonl=True)
|
||||
self.env = BuildEnvironment.frompickle(self.config,
|
||||
path.join(self.doctreedir, ENV_PICKLE_FILENAME))
|
||||
self.info('done')
|
||||
except Exception, err:
|
||||
if type(err) is IOError and err.errno == 2:
|
||||
self.info('not found')
|
||||
else:
|
||||
self.info('failed: %s' % err)
|
||||
self.env = BuildEnvironment(self.srcdir, self.doctreedir, self.config)
|
||||
self.env.find_files(self.config)
|
||||
else:
|
||||
self.env = BuildEnvironment(self.srcdir, self.doctreedir, self.config)
|
||||
self.env.find_files(self.config)
|
||||
self.env.set_warnfunc(self.warn)
|
||||
|
||||
def build_all(self):
|
||||
"""Build all source files."""
|
||||
self.build(None, summary='all source files', method='all')
|
||||
|
||||
def build_specific(self, filenames):
|
||||
"""Only rebuild as much as needed for changes in the source_filenames."""
|
||||
# bring the filenames to the canonical format, that is,
|
||||
# relative to the source directory and without source_suffix.
|
||||
dirlen = len(self.srcdir) + 1
|
||||
to_write = []
|
||||
suffix = self.config.source_suffix
|
||||
for filename in filenames:
|
||||
filename = path.abspath(filename)[dirlen:]
|
||||
if filename.endswith(suffix):
|
||||
filename = filename[:-len(suffix)]
|
||||
filename = filename.replace(os.path.sep, SEP)
|
||||
to_write.append(filename)
|
||||
self.build(to_write, method='specific',
|
||||
summary='%d source files given on command '
|
||||
'line' % len(to_write))
|
||||
|
||||
def build_update(self):
|
||||
"""Only rebuild files changed or added since last build."""
|
||||
to_build = self.get_outdated_docs()
|
||||
if isinstance(to_build, str):
|
||||
self.build(['__all__'], to_build)
|
||||
else:
|
||||
to_build = list(to_build)
|
||||
self.build(to_build,
|
||||
summary='targets for %d source files that are '
|
||||
'out of date' % len(to_build))
|
||||
|
||||
def build(self, docnames, summary=None, method='update'):
|
||||
if summary:
|
||||
self.info(bold('building [%s]: ' % self.name), nonl=1)
|
||||
self.info(summary)
|
||||
|
||||
updated_docnames = []
|
||||
# while reading, collect all warnings from docutils
|
||||
warnings = []
|
||||
self.env.set_warnfunc(warnings.append)
|
||||
self.info(bold('updating environment: '), nonl=1)
|
||||
iterator = self.env.update(self.config, self.srcdir, self.doctreedir, self.app)
|
||||
# the first item in the iterator is a summary message
|
||||
self.info(iterator.next())
|
||||
for docname in self.status_iterator(iterator, 'reading sources... ', purple):
|
||||
updated_docnames.append(docname)
|
||||
# nothing further to do, the environment has already done the reading
|
||||
for warning in warnings:
|
||||
if warning.strip():
|
||||
self.warn(warning)
|
||||
self.env.set_warnfunc(self.warn)
|
||||
|
||||
if updated_docnames:
|
||||
# save the environment
|
||||
self.info(bold('pickling environment... '), nonl=True)
|
||||
self.env.topickle(path.join(self.doctreedir, ENV_PICKLE_FILENAME))
|
||||
self.info('done')
|
||||
|
||||
# global actions
|
||||
self.info(bold('checking consistency... '), nonl=True)
|
||||
self.env.check_consistency()
|
||||
self.info('done')
|
||||
else:
|
||||
if method == 'update' and not docnames:
|
||||
self.info(bold('no targets are out of date.'))
|
||||
return
|
||||
|
||||
# another indirection to support methods which don't build files
|
||||
# individually
|
||||
self.write(docnames, updated_docnames, method)
|
||||
|
||||
# finish (write static files etc.)
|
||||
self.finish()
|
||||
if self.app._warncount:
|
||||
self.info(bold('build succeeded, %s warning%s.' %
|
||||
(self.app._warncount,
|
||||
self.app._warncount != 1 and 's' or '')))
|
||||
else:
|
||||
self.info(bold('build succeeded.'))
|
||||
|
||||
def write(self, build_docnames, updated_docnames, method='update'):
|
||||
if build_docnames is None or build_docnames == ['__all__']:
|
||||
# build_all
|
||||
build_docnames = self.env.found_docs
|
||||
if method == 'update':
|
||||
# build updated ones as well
|
||||
docnames = set(build_docnames) | set(updated_docnames)
|
||||
else:
|
||||
docnames = set(build_docnames)
|
||||
|
||||
# add all toctree-containing files that may have changed
|
||||
for docname in list(docnames):
|
||||
for tocdocname in self.env.files_to_rebuild.get(docname, []):
|
||||
docnames.add(tocdocname)
|
||||
docnames.add(self.config.master_doc)
|
||||
|
||||
self.info(bold('preparing documents... '), nonl=True)
|
||||
self.prepare_writing(docnames)
|
||||
self.info('done')
|
||||
|
||||
# write target files
|
||||
warnings = []
|
||||
self.env.set_warnfunc(warnings.append)
|
||||
for docname in self.status_iterator(sorted(docnames),
|
||||
'writing output... ', darkgreen):
|
||||
doctree = self.env.get_and_resolve_doctree(docname, self)
|
||||
self.write_doc(docname, doctree)
|
||||
for warning in warnings:
|
||||
if warning.strip():
|
||||
self.warn(warning)
|
||||
self.env.set_warnfunc(self.warn)
|
||||
|
||||
def prepare_writing(self, docnames):
|
||||
raise NotImplementedError
|
||||
|
||||
def write_doc(self, docname, doctree):
|
||||
raise NotImplementedError
|
||||
|
||||
def finish(self):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
BUILTIN_BUILDERS = {
|
||||
'html': ('html', 'StandaloneHTMLBuilder'),
|
||||
'pickle': ('html', 'PickleHTMLBuilder'),
|
||||
'json': ('html', 'JSONHTMLBuilder'),
|
||||
'web': ('html', 'PickleHTMLBuilder'),
|
||||
'htmlhelp': ('htmlhelp', 'HTMLHelpBuilder'),
|
||||
'latex': ('latex', 'LaTeXBuilder'),
|
||||
'text': ('text', 'TextBuilder'),
|
||||
'changes': ('changes', 'ChangesBuilder'),
|
||||
'linkcheck': ('linkcheck', 'CheckExternalLinksBuilder'),
|
||||
}
|
||||
137
sphinx/builders/changes.py
Normal file
137
sphinx/builders/changes.py
Normal file
@@ -0,0 +1,137 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sphinx.builders.changes
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Changelog builder.
|
||||
|
||||
:copyright: 2008 by Georg Brandl.
|
||||
:license: BSD.
|
||||
"""
|
||||
|
||||
import codecs
|
||||
import shutil
|
||||
from os import path
|
||||
from cgi import escape
|
||||
|
||||
from sphinx import package_dir
|
||||
from sphinx.util import ensuredir, os_path
|
||||
from sphinx.builders import Builder
|
||||
from sphinx.util.console import bold
|
||||
|
||||
|
||||
class ChangesBuilder(Builder):
|
||||
"""
|
||||
Write a summary with all versionadded/changed directives.
|
||||
"""
|
||||
name = 'changes'
|
||||
|
||||
def init(self):
|
||||
self.init_templates()
|
||||
|
||||
def get_outdated_docs(self):
|
||||
return self.outdir
|
||||
|
||||
typemap = {
|
||||
'versionadded': 'added',
|
||||
'versionchanged': 'changed',
|
||||
'deprecated': 'deprecated',
|
||||
}
|
||||
|
||||
def write(self, *ignored):
|
||||
version = self.config.version
|
||||
libchanges = {}
|
||||
apichanges = []
|
||||
otherchanges = {}
|
||||
if version not in self.env.versionchanges:
|
||||
self.info(bold('no changes in this version.'))
|
||||
return
|
||||
self.info(bold('writing summary file...'))
|
||||
for type, docname, lineno, module, descname, content in \
|
||||
self.env.versionchanges[version]:
|
||||
ttext = self.typemap[type]
|
||||
context = content.replace('\n', ' ')
|
||||
if descname and docname.startswith('c-api'):
|
||||
if not descname:
|
||||
continue
|
||||
if context:
|
||||
entry = '<b>%s</b>: <i>%s:</i> %s' % (descname, ttext, context)
|
||||
else:
|
||||
entry = '<b>%s</b>: <i>%s</i>.' % (descname, ttext)
|
||||
apichanges.append((entry, docname, lineno))
|
||||
elif descname or module:
|
||||
if not module:
|
||||
module = _('Builtins')
|
||||
if not descname:
|
||||
descname = _('Module level')
|
||||
if context:
|
||||
entry = '<b>%s</b>: <i>%s:</i> %s' % (descname, ttext, context)
|
||||
else:
|
||||
entry = '<b>%s</b>: <i>%s</i>.' % (descname, ttext)
|
||||
libchanges.setdefault(module, []).append((entry, docname, lineno))
|
||||
else:
|
||||
if not context:
|
||||
continue
|
||||
entry = '<i>%s:</i> %s' % (ttext.capitalize(), context)
|
||||
title = self.env.titles[docname].astext()
|
||||
otherchanges.setdefault((docname, title), []).append(
|
||||
(entry, docname, lineno))
|
||||
|
||||
ctx = {
|
||||
'project': self.config.project,
|
||||
'version': version,
|
||||
'docstitle': self.config.html_title,
|
||||
'shorttitle': self.config.html_short_title,
|
||||
'libchanges': sorted(libchanges.iteritems()),
|
||||
'apichanges': sorted(apichanges),
|
||||
'otherchanges': sorted(otherchanges.iteritems()),
|
||||
'show_sphinx': self.config.html_show_sphinx,
|
||||
}
|
||||
f = open(path.join(self.outdir, 'index.html'), 'w')
|
||||
try:
|
||||
f.write(self.templates.render('changes/frameset.html', ctx))
|
||||
finally:
|
||||
f.close()
|
||||
f = open(path.join(self.outdir, 'changes.html'), 'w')
|
||||
try:
|
||||
f.write(self.templates.render('changes/versionchanges.html', ctx))
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
hltext = ['.. versionadded:: %s' % version,
|
||||
'.. versionchanged:: %s' % version,
|
||||
'.. deprecated:: %s' % version]
|
||||
|
||||
def hl(no, line):
|
||||
line = '<a name="L%s"> </a>' % no + escape(line)
|
||||
for x in hltext:
|
||||
if x in line:
|
||||
line = '<span class="hl">%s</span>' % line
|
||||
break
|
||||
return line
|
||||
|
||||
self.info(bold('copying source files...'))
|
||||
for docname in self.env.all_docs:
|
||||
f = open(self.env.doc2path(docname))
|
||||
lines = f.readlines()
|
||||
targetfn = path.join(self.outdir, 'rst', os_path(docname)) + '.html'
|
||||
ensuredir(path.dirname(targetfn))
|
||||
f = codecs.open(targetfn, 'w', 'utf8')
|
||||
try:
|
||||
text = ''.join(hl(i+1, line) for (i, line) in enumerate(lines))
|
||||
ctx = {'filename': self.env.doc2path(docname, None), 'text': text}
|
||||
f.write(self.templates.render('changes/rstsource.html', ctx))
|
||||
finally:
|
||||
f.close()
|
||||
shutil.copyfile(path.join(package_dir, 'static', 'default.css'),
|
||||
path.join(self.outdir, 'default.css'))
|
||||
|
||||
def hl(self, text, version):
|
||||
text = escape(text)
|
||||
for directive in ['versionchanged', 'versionadded', 'deprecated']:
|
||||
text = text.replace('.. %s:: %s' % (directive, version),
|
||||
'<b>.. %s:: %s</b>' % (directive, version))
|
||||
return text
|
||||
|
||||
def finish(self):
|
||||
pass
|
||||
607
sphinx/builders/html.py
Normal file
607
sphinx/builders/html.py
Normal file
@@ -0,0 +1,607 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sphinx.builders.html
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Several HTML builders.
|
||||
|
||||
:copyright: 2007-2008 by Georg Brandl, Armin Ronacher.
|
||||
:license: BSD.
|
||||
"""
|
||||
|
||||
import os
|
||||
import codecs
|
||||
import shutil
|
||||
import cPickle as pickle
|
||||
from os import path
|
||||
|
||||
from docutils.io import DocTreeInput, StringOutput
|
||||
from docutils.core import publish_parts
|
||||
from docutils.utils import new_document
|
||||
from docutils.frontend import OptionParser
|
||||
from docutils.readers.doctree import Reader as DoctreeReader
|
||||
|
||||
from sphinx import package_dir, __version__
|
||||
from sphinx.util import SEP, os_path, relative_uri, ensuredir, ustrftime
|
||||
from sphinx.search import js_index
|
||||
from sphinx.builders import Builder, ENV_PICKLE_FILENAME
|
||||
from sphinx.highlighting import PygmentsBridge
|
||||
from sphinx.util.console import bold
|
||||
from sphinx.writers.html import HTMLWriter, HTMLTranslator, SmartyPantsHTMLTranslator
|
||||
|
||||
try:
|
||||
import json
|
||||
except ImportError:
|
||||
try:
|
||||
import simplejson as json
|
||||
except ImportError:
|
||||
json = None
|
||||
|
||||
|
||||
INVENTORY_FILENAME = 'objects.inv'
|
||||
LAST_BUILD_FILENAME = 'last_build'
|
||||
|
||||
|
||||
class StandaloneHTMLBuilder(Builder):
|
||||
"""
|
||||
Builds standalone HTML docs.
|
||||
"""
|
||||
name = 'html'
|
||||
copysource = True
|
||||
out_suffix = '.html'
|
||||
indexer_format = js_index
|
||||
supported_image_types = ['image/svg+xml', 'image/png', 'image/gif',
|
||||
'image/jpeg']
|
||||
searchindex_filename = 'searchindex.js'
|
||||
add_header_links = True
|
||||
add_definition_links = True
|
||||
|
||||
# This is a class attribute because it is mutated by Sphinx.add_javascript.
|
||||
script_files = ['_static/jquery.js', '_static/doctools.js']
|
||||
|
||||
def init(self):
|
||||
"""Load templates."""
|
||||
self.init_templates()
|
||||
self.init_translator_class()
|
||||
if self.config.html_file_suffix:
|
||||
self.out_suffix = self.config.html_file_suffix
|
||||
|
||||
if self.config.language is not None:
|
||||
jsfile = path.join(package_dir, 'locale', self.config.language,
|
||||
'LC_MESSAGES', 'sphinx.js')
|
||||
if path.isfile(jsfile):
|
||||
self.script_files.append('_static/translations.js')
|
||||
|
||||
def init_translator_class(self):
|
||||
if self.config.html_translator_class:
|
||||
self.translator_class = self.app.import_object(
|
||||
self.config.html_translator_class, 'html_translator_class setting')
|
||||
elif self.config.html_use_smartypants:
|
||||
self.translator_class = SmartyPantsHTMLTranslator
|
||||
else:
|
||||
self.translator_class = HTMLTranslator
|
||||
|
||||
def render_partial(self, node):
|
||||
"""Utility: Render a lone doctree node."""
|
||||
doc = new_document('<partial node>')
|
||||
doc.append(node)
|
||||
return publish_parts(
|
||||
doc,
|
||||
source_class=DocTreeInput,
|
||||
reader=DoctreeReader(),
|
||||
writer=HTMLWriter(self),
|
||||
settings_overrides={'output_encoding': 'unicode'}
|
||||
)
|
||||
|
||||
def prepare_writing(self, docnames):
|
||||
from sphinx.search import IndexBuilder
|
||||
|
||||
self.indexer = IndexBuilder(self.env)
|
||||
self.load_indexer(docnames)
|
||||
self.docwriter = HTMLWriter(self)
|
||||
self.docsettings = OptionParser(
|
||||
defaults=self.env.settings,
|
||||
components=(self.docwriter,)).get_default_values()
|
||||
|
||||
# format the "last updated on" string, only once is enough since it
|
||||
# typically doesn't include the time of day
|
||||
lufmt = self.config.html_last_updated_fmt
|
||||
if lufmt is not None:
|
||||
self.last_updated = ustrftime(lufmt or _('%b %d, %Y'))
|
||||
else:
|
||||
self.last_updated = None
|
||||
|
||||
logo = self.config.html_logo and \
|
||||
path.basename(self.config.html_logo) or ''
|
||||
|
||||
favicon = self.config.html_favicon and \
|
||||
path.basename(self.config.html_favicon) or ''
|
||||
if favicon and os.path.splitext(favicon)[1] != '.ico':
|
||||
self.warn('html_favicon is not an .ico file')
|
||||
|
||||
if not isinstance(self.config.html_use_opensearch, basestring):
|
||||
self.warn('html_use_opensearch config value must now be a string')
|
||||
|
||||
self.relations = self.env.collect_relations()
|
||||
|
||||
rellinks = []
|
||||
if self.config.html_use_index:
|
||||
rellinks.append(('genindex', _('General Index'), 'I', _('index')))
|
||||
if self.config.html_use_modindex and self.env.modules:
|
||||
rellinks.append(('modindex', _('Global Module Index'), 'M', _('modules')))
|
||||
|
||||
self.globalcontext = dict(
|
||||
project = self.config.project,
|
||||
release = self.config.release,
|
||||
version = self.config.version,
|
||||
last_updated = self.last_updated,
|
||||
copyright = self.config.copyright,
|
||||
master_doc = self.config.master_doc,
|
||||
style = self.config.html_style,
|
||||
use_opensearch = self.config.html_use_opensearch,
|
||||
docstitle = self.config.html_title,
|
||||
shorttitle = self.config.html_short_title,
|
||||
show_sphinx = self.config.html_show_sphinx,
|
||||
file_suffix = self.out_suffix,
|
||||
script_files = self.script_files,
|
||||
sphinx_version = __version__,
|
||||
rellinks = rellinks,
|
||||
builder = self.name,
|
||||
parents = [],
|
||||
logo = logo,
|
||||
favicon = favicon,
|
||||
)
|
||||
self.globalcontext.update(self.config.html_context)
|
||||
|
||||
def get_doc_context(self, docname, body, metatags):
|
||||
"""Collect items for the template context of a page."""
|
||||
# find out relations
|
||||
prev = next = None
|
||||
parents = []
|
||||
rellinks = self.globalcontext['rellinks'][:]
|
||||
related = self.relations.get(docname)
|
||||
titles = self.env.titles
|
||||
if related and related[2]:
|
||||
try:
|
||||
next = {'link': self.get_relative_uri(docname, related[2]),
|
||||
'title': self.render_partial(titles[related[2]])['title']}
|
||||
rellinks.append((related[2], next['title'], 'N', _('next')))
|
||||
except KeyError:
|
||||
next = None
|
||||
if related and related[1]:
|
||||
try:
|
||||
prev = {'link': self.get_relative_uri(docname, related[1]),
|
||||
'title': self.render_partial(titles[related[1]])['title']}
|
||||
rellinks.append((related[1], prev['title'], 'P', _('previous')))
|
||||
except KeyError:
|
||||
# the relation is (somehow) not in the TOC tree, handle that gracefully
|
||||
prev = None
|
||||
while related and related[0]:
|
||||
try:
|
||||
parents.append(
|
||||
{'link': self.get_relative_uri(docname, related[0]),
|
||||
'title': self.render_partial(titles[related[0]])['title']})
|
||||
except KeyError:
|
||||
pass
|
||||
related = self.relations.get(related[0])
|
||||
if parents:
|
||||
parents.pop() # remove link to the master file; we have a generic
|
||||
# "back to index" link already
|
||||
parents.reverse()
|
||||
|
||||
# title rendered as HTML
|
||||
title = titles.get(docname)
|
||||
title = title and self.render_partial(title)['title'] or ''
|
||||
# the name for the copied source
|
||||
sourcename = self.config.html_copy_source and docname + '.txt' or ''
|
||||
|
||||
# metadata for the document
|
||||
meta = self.env.metadata.get(docname)
|
||||
|
||||
return dict(
|
||||
parents = parents,
|
||||
prev = prev,
|
||||
next = next,
|
||||
title = title,
|
||||
meta = meta,
|
||||
body = body,
|
||||
metatags = metatags,
|
||||
rellinks = rellinks,
|
||||
sourcename = sourcename,
|
||||
toc = self.render_partial(self.env.get_toc_for(docname))['fragment'],
|
||||
# only display a TOC if there's more than one item to show
|
||||
display_toc = (self.env.toc_num_entries[docname] > 1),
|
||||
)
|
||||
|
||||
def write_doc(self, docname, doctree):
|
||||
self.post_process_images(doctree)
|
||||
destination = StringOutput(encoding='utf-8')
|
||||
doctree.settings = self.docsettings
|
||||
|
||||
self.imgpath = relative_uri(self.get_target_uri(docname), '_images')
|
||||
self.docwriter.write(doctree, destination)
|
||||
self.docwriter.assemble_parts()
|
||||
body = self.docwriter.parts['fragment']
|
||||
metatags = self.docwriter.clean_meta
|
||||
|
||||
ctx = self.get_doc_context(docname, body, metatags)
|
||||
self.index_page(docname, doctree, ctx.get('title', ''))
|
||||
self.handle_page(docname, ctx, event_arg=doctree)
|
||||
|
||||
def finish(self):
|
||||
self.info(bold('writing additional files...'), nonl=1)
|
||||
|
||||
# the global general index
|
||||
|
||||
if self.config.html_use_index:
|
||||
# the total count of lines for each index letter, used to distribute
|
||||
# the entries into two columns
|
||||
genindex = self.env.create_index(self)
|
||||
indexcounts = []
|
||||
for _, entries in genindex:
|
||||
indexcounts.append(sum(1 + len(subitems)
|
||||
for _, (_, subitems) in entries))
|
||||
|
||||
genindexcontext = dict(
|
||||
genindexentries = genindex,
|
||||
genindexcounts = indexcounts,
|
||||
split_index = self.config.html_split_index,
|
||||
)
|
||||
self.info(' genindex', nonl=1)
|
||||
|
||||
if self.config.html_split_index:
|
||||
self.handle_page('genindex', genindexcontext, 'genindex-split.html')
|
||||
self.handle_page('genindex-all', genindexcontext, 'genindex.html')
|
||||
for (key, entries), count in zip(genindex, indexcounts):
|
||||
ctx = {'key': key, 'entries': entries, 'count': count,
|
||||
'genindexentries': genindex}
|
||||
self.handle_page('genindex-' + key, ctx, 'genindex-single.html')
|
||||
else:
|
||||
self.handle_page('genindex', genindexcontext, 'genindex.html')
|
||||
|
||||
# the global module index
|
||||
|
||||
if self.config.html_use_modindex and self.env.modules:
|
||||
# the sorted list of all modules, for the global module index
|
||||
modules = sorted(((mn, (self.get_relative_uri('modindex', fn) +
|
||||
'#module-' + mn, sy, pl, dep))
|
||||
for (mn, (fn, sy, pl, dep)) in
|
||||
self.env.modules.iteritems()),
|
||||
key=lambda x: x[0].lower())
|
||||
# collect all platforms
|
||||
platforms = set()
|
||||
# sort out collapsable modules
|
||||
modindexentries = []
|
||||
letters = []
|
||||
pmn = ''
|
||||
num_toplevels = 0
|
||||
num_collapsables = 0
|
||||
cg = 0 # collapse group
|
||||
fl = '' # first letter
|
||||
for mn, (fn, sy, pl, dep) in modules:
|
||||
pl = pl and pl.split(', ') or []
|
||||
platforms.update(pl)
|
||||
if fl != mn[0].lower() and mn[0] != '_':
|
||||
# heading
|
||||
modindexentries.append(['', False, 0, False,
|
||||
mn[0].upper(), '', [], False])
|
||||
letters.append(mn[0].upper())
|
||||
tn = mn.split('.')[0]
|
||||
if tn != mn:
|
||||
# submodule
|
||||
if pmn == tn:
|
||||
# first submodule - make parent collapsable
|
||||
modindexentries[-1][1] = True
|
||||
num_collapsables += 1
|
||||
elif not pmn.startswith(tn):
|
||||
# submodule without parent in list, add dummy entry
|
||||
cg += 1
|
||||
modindexentries.append([tn, True, cg, False, '', '', [], False])
|
||||
else:
|
||||
num_toplevels += 1
|
||||
cg += 1
|
||||
modindexentries.append([mn, False, cg, (tn != mn), fn, sy, pl, dep])
|
||||
pmn = mn
|
||||
fl = mn[0].lower()
|
||||
platforms = sorted(platforms)
|
||||
|
||||
# apply heuristics when to collapse modindex at page load:
|
||||
# only collapse if number of toplevel modules is larger than
|
||||
# number of submodules
|
||||
collapse = len(modules) - num_toplevels < num_toplevels
|
||||
|
||||
modindexcontext = dict(
|
||||
modindexentries = modindexentries,
|
||||
platforms = platforms,
|
||||
letters = letters,
|
||||
collapse_modindex = collapse,
|
||||
)
|
||||
self.info(' modindex', nonl=1)
|
||||
self.handle_page('modindex', modindexcontext, 'modindex.html')
|
||||
|
||||
# the search page
|
||||
if self.name != 'htmlhelp':
|
||||
self.info(' search', nonl=1)
|
||||
self.handle_page('search', {}, 'search.html')
|
||||
|
||||
# additional pages from conf.py
|
||||
for pagename, template in self.config.html_additional_pages.items():
|
||||
self.info(' '+pagename, nonl=1)
|
||||
self.handle_page(pagename, {}, template)
|
||||
|
||||
if self.config.html_use_opensearch and self.name != 'htmlhelp':
|
||||
self.info(' opensearch', nonl=1)
|
||||
fn = path.join(self.outdir, '_static', 'opensearch.xml')
|
||||
self.handle_page('opensearch', {}, 'opensearch.xml', outfilename=fn)
|
||||
|
||||
self.info()
|
||||
|
||||
# copy image files
|
||||
if self.images:
|
||||
self.info(bold('copying images...'), nonl=True)
|
||||
ensuredir(path.join(self.outdir, '_images'))
|
||||
for src, dest in self.images.iteritems():
|
||||
self.info(' '+src, nonl=1)
|
||||
shutil.copyfile(path.join(self.srcdir, src),
|
||||
path.join(self.outdir, '_images', dest))
|
||||
self.info()
|
||||
|
||||
# copy static files
|
||||
self.info(bold('copying static files... '), nonl=True)
|
||||
ensuredir(path.join(self.outdir, '_static'))
|
||||
# first, create pygments style file
|
||||
f = open(path.join(self.outdir, '_static', 'pygments.css'), 'w')
|
||||
f.write(PygmentsBridge('html', self.config.pygments_style).get_stylesheet())
|
||||
f.close()
|
||||
# then, copy translations JavaScript file
|
||||
if self.config.language is not None:
|
||||
jsfile = path.join(package_dir, 'locale', self.config.language,
|
||||
'LC_MESSAGES', 'sphinx.js')
|
||||
if path.isfile(jsfile):
|
||||
shutil.copyfile(jsfile, path.join(self.outdir, '_static',
|
||||
'translations.js'))
|
||||
# then, copy over all user-supplied static files
|
||||
staticdirnames = [path.join(package_dir, 'static')] + \
|
||||
[path.join(self.confdir, spath)
|
||||
for spath in self.config.html_static_path]
|
||||
for staticdirname in staticdirnames:
|
||||
for filename in os.listdir(staticdirname):
|
||||
if filename.startswith('.'):
|
||||
continue
|
||||
fullname = path.join(staticdirname, filename)
|
||||
targetname = path.join(self.outdir, '_static', filename)
|
||||
if path.isfile(fullname):
|
||||
shutil.copyfile(fullname, targetname)
|
||||
elif path.isdir(fullname):
|
||||
if filename in self.config.exclude_dirnames:
|
||||
continue
|
||||
if path.exists(targetname):
|
||||
shutil.rmtree(targetname)
|
||||
shutil.copytree(fullname, targetname)
|
||||
# last, copy logo file (handled differently)
|
||||
if self.config.html_logo:
|
||||
logobase = path.basename(self.config.html_logo)
|
||||
shutil.copyfile(path.join(self.confdir, self.config.html_logo),
|
||||
path.join(self.outdir, '_static', logobase))
|
||||
self.info('done')
|
||||
|
||||
# dump the search index
|
||||
self.handle_finish()
|
||||
|
||||
def get_outdated_docs(self):
|
||||
if self.templates:
|
||||
template_mtime = self.templates.newest_template_mtime()
|
||||
else:
|
||||
template_mtime = 0
|
||||
for docname in self.env.found_docs:
|
||||
if docname not in self.env.all_docs:
|
||||
yield docname
|
||||
continue
|
||||
targetname = self.env.doc2path(docname, self.outdir, self.out_suffix)
|
||||
try:
|
||||
targetmtime = path.getmtime(targetname)
|
||||
except Exception:
|
||||
targetmtime = 0
|
||||
try:
|
||||
srcmtime = max(path.getmtime(self.env.doc2path(docname)),
|
||||
template_mtime)
|
||||
if srcmtime > targetmtime:
|
||||
yield docname
|
||||
except EnvironmentError:
|
||||
# source doesn't exist anymore
|
||||
pass
|
||||
|
||||
def load_indexer(self, docnames):
|
||||
keep = set(self.env.all_docs) - set(docnames)
|
||||
try:
|
||||
f = open(path.join(self.outdir, self.searchindex_filename), 'rb')
|
||||
try:
|
||||
self.indexer.load(f, self.indexer_format)
|
||||
finally:
|
||||
f.close()
|
||||
except (IOError, OSError, ValueError):
|
||||
if keep:
|
||||
self.warn("search index couldn't be loaded, but not all documents "
|
||||
"will be built: the index will be incomplete.")
|
||||
# delete all entries for files that will be rebuilt
|
||||
self.indexer.prune(keep)
|
||||
|
||||
def index_page(self, pagename, doctree, title):
|
||||
# only index pages with title
|
||||
if self.indexer is not None and title:
|
||||
self.indexer.feed(pagename, title, doctree)
|
||||
|
||||
# --------- these are overwritten by the serialization builder
|
||||
|
||||
    def get_target_uri(self, docname, typ=None):
        """Return the output URI for *docname*: the docname plus the
        builder's file suffix (e.g. ``.html``).  *typ* is unused here."""
        return docname + self.out_suffix
|
||||
|
||||
    def handle_page(self, pagename, addctx, templatename='page.html',
                    outfilename=None, event_arg=None):
        """Render one page through the template bridge and write it out.

        *addctx* is merged into a copy of the global template context;
        the 'html-page-context' event lets extensions mutate the context
        before rendering.  If *outfilename* is not given, it is derived
        from *pagename* and the builder's output suffix.
        """
        ctx = self.globalcontext.copy()
        # current_page_name is backwards compatibility
        ctx['pagename'] = ctx['current_page_name'] = pagename

        # relative-link helper for templates; baseuri is bound at
        # definition time to this page's own URI
        def pathto(otheruri, resource=False,
                   baseuri=self.get_target_uri(pagename)):
            if not resource:
                otheruri = self.get_target_uri(otheruri)
            return relative_uri(baseuri, otheruri)
        ctx['pathto'] = pathto
        ctx['hasdoc'] = lambda name: name in self.env.all_docs
        ctx['customsidebar'] = self.config.html_sidebars.get(pagename)
        ctx.update(addctx)

        # let extensions adjust the context before rendering
        self.app.emit('html-page-context', pagename, templatename, ctx, event_arg)

        output = self.templates.render(templatename, ctx)
        if not outfilename:
            outfilename = path.join(self.outdir, os_path(pagename) + self.out_suffix)
        ensuredir(path.dirname(outfilename)) # normally different from self.outdir
        try:
            f = codecs.open(outfilename, 'w', 'utf-8')
            try:
                f.write(output)
            finally:
                f.close()
        except (IOError, OSError), err:
            # a write failure is reported but does not abort the build
            self.warn("Error writing file %s: %s" % (outfilename, err))
        if self.copysource and ctx.get('sourcename'):
            # copy the source file for the "show source" link
            source_name = path.join(self.outdir, '_sources', os_path(ctx['sourcename']))
            ensuredir(path.dirname(source_name))
            shutil.copyfile(self.env.doc2path(pagename), source_name)
|
||||
|
||||
    def handle_finish(self):
        """Dump the search index and the object inventory at the end of
        the build.

        The inventory (version-1 text format) maps module and description
        reference names to their target URIs, for use by intersphinx.
        """
        self.info(bold('dumping search index... '), nonl=True)
        # drop entries for documents that no longer exist
        self.indexer.prune(self.env.all_docs)
        f = open(path.join(self.outdir, self.searchindex_filename), 'wb')
        try:
            self.indexer.dump(f, self.indexer_format)
        finally:
            f.close()
        self.info('done')

        self.info(bold('dumping object inventory... '), nonl=True)
        f = open(path.join(self.outdir, INVENTORY_FILENAME), 'w')
        try:
            # header lines of the v1 inventory format
            f.write('# Sphinx inventory version 1\n')
            f.write('# Project: %s\n' % self.config.project.encode('utf-8'))
            f.write('# Version: %s\n' % self.config.version)
            # one line per module: "<name> mod <uri>"
            for modname, info in self.env.modules.iteritems():
                f.write('%s mod %s\n' % (modname, self.get_target_uri(info[0])))
            # one line per description unit: "<name> <type> <uri>"
            for refname, (docname, desctype) in self.env.descrefs.iteritems():
                f.write('%s %s %s\n' % (refname, desctype, self.get_target_uri(docname)))
        finally:
            f.close()
        self.info('done')
|
||||
|
||||
|
||||
class SerializingHTMLBuilder(StandaloneHTMLBuilder):
    """
    An abstract builder that serializes the generated HTML page contexts
    instead of rendering them through templates.
    """
    #: the serializing implementation to use.  Set this to a module that
    #: implements `dump`, `load`, `dumps` and `loads` functions
    #: (pickle, simplejson etc.)
    implementation = None

    #: the filename for the global context file
    globalcontext_filename = None

    supported_image_types = ('image/svg+xml', 'image/png', 'image/gif',
                             'image/jpeg')

    def init(self):
        # set up the HTML translator, but skip the template machinery
        self.init_translator_class()
        self.templates = None   # no template bridge necessary

    def get_target_uri(self, docname, typ=None):
        """Return directory-style URIs: '' for the root index, the bare
        directory for sub-indexes, and docname + separator otherwise."""
        if docname == 'index':
            return ''
        if docname.endswith(SEP + 'index'):
            return docname[:-5] # up to sep
        return docname + SEP

    def handle_page(self, pagename, ctx, templatename='page.html',
                    outfilename=None, event_arg=None):
        """Serialize the page context *ctx* to a file instead of
        rendering it; *templatename* is only passed on to the
        'html-page-context' event."""
        ctx['current_page_name'] = pagename
        sidebarfile = self.config.html_sidebars.get(pagename)
        if sidebarfile:
            ctx['customsidebar'] = sidebarfile

        if not outfilename:
            outfilename = path.join(self.outdir, os_path(pagename) + self.out_suffix)

        self.app.emit('html-page-context', pagename, templatename, ctx, event_arg)

        ensuredir(path.dirname(outfilename))
        f = open(outfilename, 'wb')
        try:
            # the trailing 2 selects the serializer's protocol/indent
            # argument (pickle protocol 2 resp. JSON indent=2)
            self.implementation.dump(ctx, f, 2)
        finally:
            f.close()

        # if there is a source file, copy the source file for the
        # "show source" link
        if ctx.get('sourcename'):
            source_name = path.join(self.outdir, '_sources',
                                    os_path(ctx['sourcename']))
            ensuredir(path.dirname(source_name))
            shutil.copyfile(self.env.doc2path(pagename), source_name)

    def handle_finish(self):
        """Serialize the global context, then finish like the standalone
        builder and copy the pickled environment for the web app."""
        # dump the global context
        outfilename = path.join(self.outdir, self.globalcontext_filename)
        f = open(outfilename, 'wb')
        try:
            self.implementation.dump(self.globalcontext, f, 2)
        finally:
            f.close()

        # super here to dump the search index
        StandaloneHTMLBuilder.handle_finish(self)

        # copy the environment file from the doctree dir to the output dir
        # as needed by the web app
        shutil.copyfile(path.join(self.doctreedir, ENV_PICKLE_FILENAME),
                        path.join(self.outdir, ENV_PICKLE_FILENAME))

        # touch 'last build' file, used by the web application to determine
        # when to reload its environment and clear the cache
        open(path.join(self.outdir, LAST_BUILD_FILENAME), 'w').close()
|
||||
|
||||
|
||||
class PickleHTMLBuilder(SerializingHTMLBuilder):
    """
    A Builder that dumps the generated HTML into pickle files.
    """
    # serialize page contexts and the search index with the pickle module
    implementation = pickle
    indexer_format = pickle
    name = 'pickle'
    out_suffix = '.fpickle'
    globalcontext_filename = 'globalcontext.pickle'
    searchindex_filename = 'searchindex.pickle'

# compatibility alias for the pre-0.5 builder name
WebHTMLBuilder = PickleHTMLBuilder
|
||||
|
||||
|
||||
class JSONHTMLBuilder(SerializingHTMLBuilder):
    """
    A builder that dumps the generated HTML into JSON files.
    """
    implementation = json
    indexer_format = json
    name = 'json'
    out_suffix = '.fjson'
    globalcontext_filename = 'globalcontext.json'
    searchindex_filename = 'searchindex.json'

    def init(self):
        # `json` is resolved at import time (simplejson or the stdlib json
        # module, presumably -- see the module's imports); it is None when
        # neither is available, so fail early with a clear message.
        if json is None:
            from sphinx.application import SphinxError
            raise SphinxError('The module simplejson (or json in Python >= 2.6) '
                              'is not available. The JSONHTMLBuilder builder '
                              'will not work.')
        SerializingHTMLBuilder.init(self)
|
||||
245
sphinx/builders/htmlhelp.py
Normal file
245
sphinx/builders/htmlhelp.py
Normal file
@@ -0,0 +1,245 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sphinx.builders.htmlhelp
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Build HTML help support files.
|
||||
Parts adapted from Python's Doc/tools/prechm.py.
|
||||
|
||||
:copyright: 2007-2008 by Georg Brandl.
|
||||
:license: BSD.
|
||||
"""
|
||||
|
||||
import os
|
||||
import cgi
|
||||
from os import path
|
||||
|
||||
from docutils import nodes
|
||||
|
||||
from sphinx import addnodes
|
||||
from sphinx.builders.html import StandaloneHTMLBuilder
|
||||
|
||||
|
||||
# Project file (*.hhp) template. 'outname' is the file basename (like
|
||||
# the pythlp in pythlp.hhp); 'version' is the doc version number (like
|
||||
# the 2.2 in Python 2.2).
|
||||
# The magical numbers in the long line under [WINDOWS] set most of the
|
||||
# user-visible features (visible buttons, tabs, etc).
|
||||
# About 0x10384e: This defines the buttons in the help viewer. The
|
||||
# following defns are taken from htmlhelp.h. Not all possibilities
|
||||
# actually work, and not all those that work are available from the Help
|
||||
# Workshop GUI. In particular, the Zoom/Font button works and is not
|
||||
# available from the GUI. The ones we're using are marked with 'x':
|
||||
#
|
||||
# 0x000002 Hide/Show x
|
||||
# 0x000004 Back x
|
||||
# 0x000008 Forward x
|
||||
# 0x000010 Stop
|
||||
# 0x000020 Refresh
|
||||
# 0x000040 Home x
|
||||
# 0x000080 Forward
|
||||
# 0x000100 Back
|
||||
# 0x000200 Notes
|
||||
# 0x000400 Contents
|
||||
# 0x000800 Locate x
|
||||
# 0x001000 Options x
|
||||
# 0x002000 Print x
|
||||
# 0x004000 Index
|
||||
# 0x008000 Search
|
||||
# 0x010000 History
|
||||
# 0x020000 Favorites
|
||||
# 0x040000 Jump 1
|
||||
# 0x080000 Jump 2
|
||||
# 0x100000 Zoom/Font x
|
||||
# 0x200000 TOC Next
|
||||
# 0x400000 TOC Prev
|
||||
|
||||
# Template for the HTML Help project file (*.hhp).  Only %(outname)s and
# %(title)s are interpolated; the [WINDOWS] magic numbers are explained in
# the comment block above.
project_template = '''\
[OPTIONS]
Binary TOC=Yes
Binary Index=No
Compiled file=%(outname)s.chm
Contents file=%(outname)s.hhc
Default Window=%(outname)s
Default topic=index.html
Display compile progress=No
Full text search stop list file=%(outname)s.stp
Full-text search=Yes
Index file=%(outname)s.hhk
Language=0x409
Title=%(title)s

[WINDOWS]
%(outname)s="%(title)s","%(outname)s.hhc","%(outname)s.hhk",\
"index.html","index.html",,,,,0x63520,220,0x10384e,[0,0,1024,768],,,,,,,0

[FILES]
'''

# Boilerplate opening of the contents (*.hhc) file.
# NOTE: the generator string must be plain ASCII with the HTML entity
# "&reg;" -- the literal "(R)" character here was an artifact of entity
# decoding and would not round-trip through the ASCII encode below.
contents_header = '''\
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
<HTML>
<HEAD>
<meta name="GENERATOR" content="Microsoft&reg; HTML Help Workshop 4.1">
<!-- Sitemap 1.0 -->
</HEAD><BODY>
<OBJECT type="text/site properties">
        <param name="Window Styles" value="0x801227">
        <param name="ImageType" value="Folder">
</OBJECT>
<UL>
'''

contents_footer = '''\
</UL></BODY></HTML>
'''

# One TOC/index entry: interpolated with (display name, target file).
object_sitemap = '''\
<OBJECT type="text/sitemap">
    <param name="Name" value="%s">
    <param name="Local" value="%s">
</OBJECT>
'''

# List of words the full text search facility shouldn't index.  This
# becomes file outname.stp.  Note that this list must be pretty small!
# Different versions of the MS docs claim the file has a maximum size of
# 256 or 512 bytes (including \r\n at the end of each line).
# Note that "and", "or", "not" and "near" are operators in the search
# language, so no point indexing them even if we wanted to.
stopwords = """
a and are as at
be but by
for
if in into is it
near no not
of on or
such
that the their then there these they this to
was will with
""".split()
|
||||
|
||||
|
||||
class HTMLHelpBuilder(StandaloneHTMLBuilder):
    """
    Builder that also outputs Windows HTML help project, contents and index
    files.  Adapted from the original Doc/tools/prechm.py.
    """
    name = 'htmlhelp'

    # don't copy the reST source
    copysource = False
    supported_image_types = ['image/png', 'image/gif', 'image/jpeg']

    # don't add links
    add_header_links = False
    add_definition_links = False

    def init(self):
        StandaloneHTMLBuilder.init(self)
        # the output files for HTML help must be .html only
        self.out_suffix = '.html'

    def handle_finish(self):
        """Write the HTML Help support files after the HTML pages."""
        # BUG FIX: build_hhx is a bound method, so ``self`` is supplied
        # implicitly; the previous ``self.build_hhx(self, ...)`` call
        # passed four arguments to a three-parameter method and raised
        # TypeError.
        self.build_hhx(self.outdir, self.config.htmlhelp_basename)

    def build_hhx(self, outdir, outname):
        """Generate the HTML Help Workshop files in *outdir*.

        *outname* is the base name (``foo`` for ``foo.chm``) used for the
        .stp (stopwords), .hhp (project), .hhc (contents/TOC) and
        .hhk (index) files.
        """
        self.info('dumping stopword list...')
        f = open(path.join(outdir, outname+'.stp'), 'w')
        try:
            for word in sorted(stopwords):
                print >>f, word
        finally:
            f.close()

        self.info('writing project file...')
        f = open(path.join(outdir, outname+'.hhp'), 'w')
        try:
            f.write(project_template % {'outname': outname,
                                        'title': self.config.html_title,
                                        'version': self.config.version,
                                        'project': self.config.project})
            if not outdir.endswith(os.sep):
                outdir += os.sep
            olen = len(outdir)
            # the [FILES] section lists every generated .html file plus the
            # .js files from _static, with backslash-separated paths
            # relative to the project file
            for root, dirs, files in os.walk(outdir):
                staticdir = (root == path.join(outdir, '_static'))
                for fn in files:
                    if (staticdir and not fn.endswith('.js')) or fn.endswith('.html'):
                        print >>f, path.join(root, fn)[olen:].replace(os.sep, '\\')
        finally:
            f.close()

        self.info('writing TOC file...')
        f = open(path.join(outdir, outname+'.hhc'), 'w')
        try:
            f.write(contents_header)
            # special books
            f.write('<LI> ' + object_sitemap % (self.config.html_short_title,
                                                'index.html'))
            if self.config.html_use_modindex:
                f.write('<LI> ' + object_sitemap % (_('Global Module Index'),
                                                    'modindex.html'))
            # the TOC, taken from the fully resolved master toctree
            tocdoc = self.env.get_and_resolve_doctree(self.config.master_doc, self,
                                                      prune_toctrees=False)
            def write_toc(node, ullevel=0):
                # recursively render the toctree as nested <UL>/<LI> lists
                if isinstance(node, nodes.list_item):
                    f.write('<LI> ')
                    for subnode in node:
                        write_toc(subnode, ullevel)
                elif isinstance(node, nodes.reference):
                    link = node['refuri']
                    # BUG FIX: the quote must be escaped to the &quot;
                    # entity; a literal-quote replacement was a no-op left
                    # over from entity decoding and produced invalid
                    # ``value="..."`` attributes for titles with quotes.
                    title = cgi.escape(node.astext()).replace('"', '&quot;')
                    item = object_sitemap % (title, link)
                    f.write(item.encode('ascii', 'xmlcharrefreplace'))
                elif isinstance(node, nodes.bullet_list):
                    if ullevel != 0:
                        f.write('<UL>\n')
                    for subnode in node:
                        write_toc(subnode, ullevel+1)
                    if ullevel != 0:
                        f.write('</UL>\n')
                elif isinstance(node, addnodes.compact_paragraph):
                    for subnode in node:
                        write_toc(subnode, ullevel)
            istoctree = lambda node: isinstance(node, addnodes.compact_paragraph) and \
                        node.has_key('toctree')
            for node in tocdoc.traverse(istoctree):
                write_toc(node)
            f.write(contents_footer)
        finally:
            f.close()

        self.info('writing index file...')
        index = self.env.create_index(self)
        f = open(path.join(outdir, outname+'.hhk'), 'w')
        try:
            f.write('<UL>\n')
            def write_index(title, refs, subitems):
                def write_param(name, value):
                    item = '    <param name="%s" value="%s">\n' % (name, value)
                    f.write(item.encode('ascii', 'xmlcharrefreplace'))
                title = cgi.escape(title)
                f.write('<LI> <OBJECT type="text/sitemap">\n')
                write_param('Keyword', title)
                if len(refs) == 0:
                    write_param('See Also', title)
                elif len(refs) == 1:
                    write_param('Local', refs[0])
                else:
                    for i, ref in enumerate(refs):
                        write_param('Name', '[%d] %s' % (i, ref))  # XXX: better title?
                        write_param('Local', ref)
                f.write('</OBJECT>\n')
                if subitems:
                    f.write('<UL> ')
                    for subitem in subitems:
                        write_index(subitem[0], subitem[1], [])
                    f.write('</UL>')
            for (key, group) in index:
                for title, (refs, subitems) in group:
                    write_index(title, refs, subitems)
            f.write('</UL>\n')
        finally:
            f.close()
|
||||
185
sphinx/builders/latex.py
Normal file
185
sphinx/builders/latex.py
Normal file
@@ -0,0 +1,185 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sphinx.builders.latex
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
LaTeX builder.
|
||||
|
||||
:copyright: 2008 by Georg Brandl.
|
||||
:license: BSD.
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
from os import path
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.io import FileOutput
|
||||
from docutils.utils import new_document
|
||||
from docutils.frontend import OptionParser
|
||||
|
||||
from sphinx import package_dir, addnodes
|
||||
from sphinx.util import SEP, texescape
|
||||
from sphinx.builders import Builder
|
||||
from sphinx.environment import NoUri
|
||||
from sphinx.util.console import bold, darkgreen
|
||||
from sphinx.writers.latex import LaTeXWriter
|
||||
|
||||
|
||||
class LaTeXBuilder(Builder):
    """
    Builds LaTeX output to create PDF.
    """
    name = 'latex'
    supported_image_types = ['application/pdf', 'image/png', 'image/gif',
                             'image/jpeg']

    def init(self):
        # docnames of the document currently being assembled; used by
        # get_target_uri to decide whether a reference stays in-document
        self.docnames = []
        self.document_data = []
        texescape.init()

    def get_outdated_docs(self):
        # LaTeX output is always rebuilt as a whole
        return 'all documents' # for now

    def get_target_uri(self, docname, typ=None):
        if typ == 'token':
            # token references are always inside production lists and must be
            # replaced by \token{} in LaTeX
            return '@token'
        if docname not in self.docnames:
            # reference leaves the current LaTeX document; the caller
            # handles this by appending the target document's title
            raise NoUri
        else:
            return ''

    def init_document_data(self):
        """Validate config.latex_documents and fill self.document_data
        and self.titles (docname-prefix -> title pairs)."""
        preliminary_document_data = map(list, self.config.latex_documents)
        if not preliminary_document_data:
            self.warn('No "latex_documents" config value found; no documents '
                      'will be written.')
            return
        # assign subdirs to titles
        self.titles = []
        for entry in preliminary_document_data:
            docname = entry[0]
            if docname not in self.env.all_docs:
                self.warn('"latex_documents" config value references unknown '
                          'document %s' % docname)
                continue
            self.document_data.append(entry)
            if docname.endswith(SEP+'index'):
                docname = docname[:-5]
            self.titles.append((docname, entry[2]))

    def write(self, *ignored):
        """Assemble and write one .tex file per latex_documents entry."""
        # first, assemble the "appendix" docs that are in every PDF
        appendices = []
        for fname in self.config.latex_appendices:
            appendices.append(self.env.get_doctree(fname))

        docwriter = LaTeXWriter(self)
        docsettings = OptionParser(
            defaults=self.env.settings,
            components=(docwriter,)).get_default_values()

        self.init_document_data()

        for entry in self.document_data:
            # entry: (docname, targetname, title, author, docclass[, toctree_only])
            docname, targetname, title, author, docclass = entry[:5]
            toctree_only = False
            if len(entry) > 5:
                toctree_only = entry[5]
            destination = FileOutput(
                destination_path=path.join(self.outdir, targetname),
                encoding='utf-8')
            self.info("processing " + targetname + "... ", nonl=1)
            # appendices are only included in 'manual' (not 'howto') docs
            doctree = self.assemble_doctree(docname, toctree_only,
                appendices=(docclass == 'manual') and appendices or [])
            self.post_process_images(doctree)
            self.info("writing... ", nonl=1)
            doctree.settings = docsettings
            doctree.settings.author = author
            doctree.settings.title = title
            doctree.settings.docname = docname
            doctree.settings.docclass = docclass
            docwriter.write(doctree, destination)
            self.info("done")

    def assemble_doctree(self, indexfile, toctree_only, appendices):
        """Inline all toctree-referenced documents of *indexfile* into one
        large doctree, append *appendices*, and resolve references."""
        # NOTE(review): appendices are doctrees, not docnames, so adding
        # them to self.docnames looks dubious -- confirm intent
        self.docnames = set([indexfile] + appendices)
        self.info(darkgreen(indexfile) + " ", nonl=1)
        def process_tree(docname, tree):
            # recursively replace toctree nodes by the referenced
            # documents' contents, wrapped in start_of_file nodes
            tree = tree.deepcopy()
            for toctreenode in tree.traverse(addnodes.toctree):
                newnodes = []
                includefiles = map(str, toctreenode['includefiles'])
                for includefile in includefiles:
                    try:
                        self.info(darkgreen(includefile) + " ", nonl=1)
                        subtree = process_tree(includefile,
                                               self.env.get_doctree(includefile))
                        self.docnames.add(includefile)
                    except Exception:
                        self.warn('%s: toctree contains ref to nonexisting file %r' %
                                  (docname, includefile))
                    else:
                        sof = addnodes.start_of_file()
                        sof.children = subtree.children
                        newnodes.append(sof)
                toctreenode.parent.replace(toctreenode, newnodes)
            return tree
        tree = self.env.get_doctree(indexfile)
        if toctree_only:
            # extract toctree nodes from the tree and put them in a fresh document
            new_tree = new_document('<latex output>')
            new_sect = nodes.section()
            new_sect += nodes.title(u'<Set title in conf.py>', u'<Set title in conf.py>')
            new_tree += new_sect
            for node in tree.traverse(addnodes.toctree):
                new_sect += node
            tree = new_tree
        largetree = process_tree(indexfile, tree)
        largetree.extend(appendices)
        self.info()
        self.info("resolving references...")
        self.env.resolve_references(largetree, indexfile, self)
        # resolve :ref:s to distant tex files -- we can't add a cross-reference,
        # but append the document name
        for pendingnode in largetree.traverse(addnodes.pending_xref):
            docname = pendingnode['refdocname']
            sectname = pendingnode['refsectname']
            newnodes = [nodes.emphasis(sectname, sectname)]
            for subdir, title in self.titles:
                if docname.startswith(subdir):
                    newnodes.append(nodes.Text(_(' (in '), _(' (in ')))
                    newnodes.append(nodes.emphasis(title, title))
                    newnodes.append(nodes.Text(')', ')'))
                    break
            else:
                pass
            pendingnode.replace_self(newnodes)
        return largetree

    def finish(self):
        """Copy images, the logo and the TeX support files to outdir."""
        # copy image files
        if self.images:
            self.info(bold('copying images...'), nonl=1)
            for src, dest in self.images.iteritems():
                self.info(' '+src, nonl=1)
                shutil.copyfile(path.join(self.srcdir, src),
                                path.join(self.outdir, dest))
            self.info()

        # the logo is handled differently
        if self.config.latex_logo:
            logobase = path.basename(self.config.latex_logo)
            shutil.copyfile(path.join(self.confdir, self.config.latex_logo),
                            path.join(self.outdir, logobase))

        self.info(bold('copying TeX support files... '), nonl=True)
        staticdirname = path.join(package_dir, 'texinputs')
        for filename in os.listdir(staticdirname):
            if not filename.startswith('.'):
                shutil.copyfile(path.join(staticdirname, filename),
                                path.join(self.outdir, filename))
        self.info('done')
|
||||
@@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sphinx.linkcheck
|
||||
~~~~~~~~~~~~~~~~
|
||||
sphinx.builders.linkcheck
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The CheckExternalLinksBuilder class.
|
||||
|
||||
@@ -15,7 +15,7 @@ from urllib2 import build_opener, HTTPError
|
||||
|
||||
from docutils import nodes
|
||||
|
||||
from sphinx.builder import Builder
|
||||
from sphinx.builders import Builder
|
||||
from sphinx.util.console import purple, red, darkgreen
|
||||
|
||||
# create an opener that will simulate a browser user-agent
|
||||
68
sphinx/builders/text.py
Normal file
68
sphinx/builders/text.py
Normal file
@@ -0,0 +1,68 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sphinx.builders.text
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Plain-text Sphinx builder.
|
||||
|
||||
:copyright: 2008 by Georg Brandl.
|
||||
:license: BSD.
|
||||
"""
|
||||
|
||||
import codecs
|
||||
from os import path
|
||||
|
||||
from docutils.io import StringOutput
|
||||
|
||||
from sphinx.util import ensuredir, os_path
|
||||
from sphinx.builders import Builder
|
||||
from sphinx.writers.text import TextWriter
|
||||
|
||||
|
||||
class TextBuilder(Builder):
|
||||
name = 'text'
|
||||
out_suffix = '.txt'
|
||||
|
||||
def init(self):
|
||||
pass
|
||||
|
||||
def get_outdated_docs(self):
|
||||
for docname in self.env.found_docs:
|
||||
if docname not in self.env.all_docs:
|
||||
yield docname
|
||||
continue
|
||||
targetname = self.env.doc2path(docname, self.outdir, self.out_suffix)
|
||||
try:
|
||||
targetmtime = path.getmtime(targetname)
|
||||
except Exception:
|
||||
targetmtime = 0
|
||||
try:
|
||||
srcmtime = path.getmtime(self.env.doc2path(docname))
|
||||
if srcmtime > targetmtime:
|
||||
yield docname
|
||||
except EnvironmentError:
|
||||
# source doesn't exist anymore
|
||||
pass
|
||||
|
||||
def get_target_uri(self, docname, typ=None):
|
||||
return ''
|
||||
|
||||
def prepare_writing(self, docnames):
|
||||
self.writer = TextWriter(self)
|
||||
|
||||
def write_doc(self, docname, doctree):
|
||||
destination = StringOutput(encoding='utf-8')
|
||||
self.writer.write(doctree, destination)
|
||||
outfilename = path.join(self.outdir, os_path(docname) + self.out_suffix)
|
||||
ensuredir(path.dirname(outfilename)) # normally different from self.outdir
|
||||
try:
|
||||
f = codecs.open(outfilename, 'w', 'utf-8')
|
||||
try:
|
||||
f.write(self.writer.output)
|
||||
finally:
|
||||
f.close()
|
||||
except (IOError, OSError), err:
|
||||
self.warn("Error writing file %s: %s" % (outfilename, err))
|
||||
|
||||
def finish(self):
|
||||
pass
|
||||
@@ -31,7 +31,7 @@ from os import path
|
||||
|
||||
from docutils import nodes
|
||||
|
||||
from sphinx.builder import INVENTORY_FILENAME
|
||||
from sphinx.builders import INVENTORY_FILENAME
|
||||
|
||||
|
||||
def fetch_inventory(app, uri, inv):
|
||||
|
||||
@@ -1,220 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sphinx.htmlhelp
|
||||
~~~~~~~~~~~~~~~
|
||||
|
||||
Build HTML help support files.
|
||||
Adapted from the original Doc/tools/prechm.py.
|
||||
|
||||
:copyright: 2007-2008 by Georg Brandl.
|
||||
:license: BSD.
|
||||
"""
|
||||
|
||||
import os
|
||||
import cgi
|
||||
from os import path
|
||||
|
||||
from docutils import nodes
|
||||
|
||||
from sphinx import addnodes
|
||||
|
||||
# Project file (*.hhp) template. 'outname' is the file basename (like
|
||||
# the pythlp in pythlp.hhp); 'version' is the doc version number (like
|
||||
# the 2.2 in Python 2.2).
|
||||
# The magical numbers in the long line under [WINDOWS] set most of the
|
||||
# user-visible features (visible buttons, tabs, etc).
|
||||
# About 0x10384e: This defines the buttons in the help viewer. The
|
||||
# following defns are taken from htmlhelp.h. Not all possibilities
|
||||
# actually work, and not all those that work are available from the Help
|
||||
# Workshop GUI. In particular, the Zoom/Font button works and is not
|
||||
# available from the GUI. The ones we're using are marked with 'x':
|
||||
#
|
||||
# 0x000002 Hide/Show x
|
||||
# 0x000004 Back x
|
||||
# 0x000008 Forward x
|
||||
# 0x000010 Stop
|
||||
# 0x000020 Refresh
|
||||
# 0x000040 Home x
|
||||
# 0x000080 Forward
|
||||
# 0x000100 Back
|
||||
# 0x000200 Notes
|
||||
# 0x000400 Contents
|
||||
# 0x000800 Locate x
|
||||
# 0x001000 Options x
|
||||
# 0x002000 Print x
|
||||
# 0x004000 Index
|
||||
# 0x008000 Search
|
||||
# 0x010000 History
|
||||
# 0x020000 Favorites
|
||||
# 0x040000 Jump 1
|
||||
# 0x080000 Jump 2
|
||||
# 0x100000 Zoom/Font x
|
||||
# 0x200000 TOC Next
|
||||
# 0x400000 TOC Prev
|
||||
|
||||
project_template = '''\
|
||||
[OPTIONS]
|
||||
Binary TOC=Yes
|
||||
Binary Index=No
|
||||
Compiled file=%(outname)s.chm
|
||||
Contents file=%(outname)s.hhc
|
||||
Default Window=%(outname)s
|
||||
Default topic=index.html
|
||||
Display compile progress=No
|
||||
Full text search stop list file=%(outname)s.stp
|
||||
Full-text search=Yes
|
||||
Index file=%(outname)s.hhk
|
||||
Language=0x409
|
||||
Title=%(title)s
|
||||
|
||||
[WINDOWS]
|
||||
%(outname)s="%(title)s","%(outname)s.hhc","%(outname)s.hhk",\
|
||||
"index.html","index.html",,,,,0x63520,220,0x10384e,[0,0,1024,768],,,,,,,0
|
||||
|
||||
[FILES]
|
||||
'''
|
||||
|
||||
contents_header = '''\
|
||||
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
|
||||
<HTML>
|
||||
<HEAD>
|
||||
<meta name="GENERATOR" content="Microsoft® HTML Help Workshop 4.1">
|
||||
<!-- Sitemap 1.0 -->
|
||||
</HEAD><BODY>
|
||||
<OBJECT type="text/site properties">
|
||||
<param name="Window Styles" value="0x801227">
|
||||
<param name="ImageType" value="Folder">
|
||||
</OBJECT>
|
||||
<UL>
|
||||
'''
|
||||
|
||||
contents_footer = '''\
|
||||
</UL></BODY></HTML>
|
||||
'''
|
||||
|
||||
object_sitemap = '''\
|
||||
<OBJECT type="text/sitemap">
|
||||
<param name="Name" value="%s">
|
||||
<param name="Local" value="%s">
|
||||
</OBJECT>
|
||||
'''
|
||||
|
||||
# List of words the full text search facility shouldn't index. This
|
||||
# becomes file outname.stp. Note that this list must be pretty small!
|
||||
# Different versions of the MS docs claim the file has a maximum size of
|
||||
# 256 or 512 bytes (including \r\n at the end of each line).
|
||||
# Note that "and", "or", "not" and "near" are operators in the search
|
||||
# language, so no point indexing them even if we wanted to.
|
||||
stopwords = """
|
||||
a and are as at
|
||||
be but by
|
||||
for
|
||||
if in into is it
|
||||
near no not
|
||||
of on or
|
||||
such
|
||||
that the their then there these they this to
|
||||
was will with
|
||||
""".split()
|
||||
|
||||
|
||||
def build_hhx(builder, outdir, outname):
    """Write the HTML Help Workshop support files into *outdir*.

    Produces, next to the already-built HTML files:
      <outname>.stp -- full-text-search stopword list
      <outname>.hhp -- project file (options + list of input files)
      <outname>.hhc -- table of contents, derived from the master doctree
      <outname>.hhk -- keyword index, from the environment's index entries

    ``builder`` is the HTML builder instance (supplies config, env and
    the ``info`` logger); ``outname`` is the basename shared by all
    generated files.  Returns nothing; all results are side effects on
    disk.
    """
    builder.info('dumping stopword list...')
    f = open(path.join(outdir, outname+'.stp'), 'w')
    try:
        for word in sorted(stopwords):
            print >>f, word
    finally:
        f.close()

    builder.info('writing project file...')
    f = open(path.join(outdir, outname+'.hhp'), 'w')
    try:
        f.write(project_template % {'outname': outname,
                                    'title': builder.config.html_title,
                                    'version': builder.config.version,
                                    'project': builder.config.project})
        if not outdir.endswith(os.sep):
            outdir += os.sep
        olen = len(outdir)
        for root, dirs, files in os.walk(outdir):
            staticdir = (root == path.join(outdir, '_static'))
            for fn in files:
                # include every HTML page, but from _static only the
                # JavaScript helpers
                if (staticdir and not fn.endswith('.js')) or fn.endswith('.html'):
                    # the .hhp [FILES] section wants outdir-relative
                    # paths with Windows backslashes
                    print >>f, path.join(root, fn)[olen:].replace(os.sep, '\\')
    finally:
        f.close()

    builder.info('writing TOC file...')
    f = open(path.join(outdir, outname+'.hhc'), 'w')
    try:
        f.write(contents_header)
        # special books
        f.write('<LI> ' + object_sitemap % (builder.config.html_short_title,
                                            'index.html'))
        if builder.config.html_use_modindex:
            f.write('<LI> ' + object_sitemap % (_('Global Module Index'),
                                                'modindex.html'))
        # the TOC
        tocdoc = builder.env.get_and_resolve_doctree(builder.config.master_doc, builder,
                                                     prune_toctrees=False)
        def write_toc(node, ullevel=0):
            # recursively render one doctree TOC node as sitemap HTML
            if isinstance(node, nodes.list_item):
                f.write('<LI> ')
                for subnode in node:
                    write_toc(subnode, ullevel)
            elif isinstance(node, nodes.reference):
                link = node['refuri']
                # BUGFIX: the replacement string had degraded to a plain
                # '"' (a no-op); double quotes must become &quot; since
                # the title lands inside a value="..." attribute of
                # object_sitemap
                title = cgi.escape(node.astext()).replace('"', '&quot;')
                item = object_sitemap % (title, link)
                f.write(item.encode('ascii', 'xmlcharrefreplace'))
            elif isinstance(node, nodes.bullet_list):
                # the outermost <UL> already comes from contents_header
                if ullevel != 0:
                    f.write('<UL>\n')
                for subnode in node:
                    write_toc(subnode, ullevel+1)
                if ullevel != 0:
                    f.write('</UL>\n')
            elif isinstance(node, addnodes.compact_paragraph):
                for subnode in node:
                    write_toc(subnode, ullevel)
        istoctree = lambda node: isinstance(node, addnodes.compact_paragraph) and \
                    node.has_key('toctree')
        for node in tocdoc.traverse(istoctree):
            write_toc(node)
        f.write(contents_footer)
    finally:
        f.close()

    builder.info('writing index file...')
    index = builder.env.create_index(builder)
    f = open(path.join(outdir, outname+'.hhk'), 'w')
    try:
        f.write('<UL>\n')
        def write_index(title, refs, subitems):
            # one keyword entry; a single ref is a direct link, several
            # refs become numbered "Name"/"Local" pairs, and none at all
            # degrades to a "See Also" self-reference
            def write_param(name, value):
                item = ' <param name="%s" value="%s">\n' % (name, value)
                f.write(item.encode('ascii', 'xmlcharrefreplace'))
            title = cgi.escape(title)
            f.write('<LI> <OBJECT type="text/sitemap">\n')
            write_param('Keyword', title)
            if len(refs) == 0:
                write_param('See Also', title)
            elif len(refs) == 1:
                write_param('Local', refs[0])
            else:
                for i, ref in enumerate(refs):
                    write_param('Name', '[%d] %s' % (i, ref))  # XXX: better title?
                    write_param('Local', ref)
            f.write('</OBJECT>\n')
            if subitems:
                f.write('<UL> ')
                for subitem in subitems:
                    write_index(subitem[0], subitem[1], [])
                f.write('</UL>')
        for (key, group) in index:
            for title, (refs, subitems) in group:
                write_index(title, refs, subitems)
        f.write('</UL>\n')
    finally:
        f.close()
|
||||
10
sphinx/writers/__init__.py
Normal file
10
sphinx/writers/__init__.py
Normal file
@@ -0,0 +1,10 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sphinx.writers
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
Custom docutils writers.
|
||||
|
||||
:copyright: 2008 by Georg Brandl.
|
||||
:license: BSD.
|
||||
"""
|
||||
@@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sphinx.htmlwriter
|
||||
~~~~~~~~~~~~~~~~~
|
||||
sphinx.writers.html
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
docutils writers handling Sphinx' custom nodes.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sphinx.latexwriter
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
sphinx.writers.latex
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Custom docutils writer for LaTeX.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
sphinx.textwriter
|
||||
~~~~~~~~~~~~~~~~~
|
||||
sphinx.writers.text
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Custom docutils writer for plain text.
|
||||
|
||||
@@ -20,7 +20,7 @@ from util import *
|
||||
from etree13 import ElementTree as ET
|
||||
|
||||
from sphinx.builder import StandaloneHTMLBuilder, LaTeXBuilder
|
||||
from sphinx.latexwriter import LaTeXTranslator
|
||||
from sphinx.writers.latex import LaTeXTranslator
|
||||
|
||||
|
||||
html_warnfile = StringIO()
|
||||
|
||||
@@ -17,8 +17,8 @@ from docutils import frontend, utils, nodes
|
||||
from docutils.parsers import rst
|
||||
|
||||
from sphinx import addnodes
|
||||
from sphinx.htmlwriter import HTMLWriter, SmartyPantsHTMLTranslator
|
||||
from sphinx.latexwriter import LaTeXWriter, LaTeXTranslator
|
||||
from sphinx.writers.html import HTMLWriter, SmartyPantsHTMLTranslator
|
||||
from sphinx.writers.latex import LaTeXWriter, LaTeXTranslator
|
||||
|
||||
def setup_module():
|
||||
global app, settings, parser
|
||||
|
||||
Reference in New Issue
Block a user