Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
Add single-file HTML builder. Closes #151.
This commit is contained in:
parent f3fc36b9f0
commit 744a519c92
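In practice the new builder is selected like any other Sphinx builder: through the singlehtml targets this commit adds to the generated Makefile and make.bat (make singlehtml), or by passing -b singlehtml to sphinx-build. As the documentation hunk below notes, the output is a single HTML file named after the master document, with no generated indices.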
@@ -1,6 +1,8 @@
Release 1.0 (in development)
============================

* Added single-file HTML builder.

* Added ``tab-width`` option to ``literalinclude`` directive.

* The ``html_sidebars`` config value can now contain patterns as

@@ -37,6 +37,11 @@ dirhtml:
@echo
@echo "Build finished. The HTML pages are in _build/dirhtml."

singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) _build/singlehtml
@echo
@echo "Build finished. The HTML page is in _build/singlehtml."

text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) _build/text
@echo

@@ -36,6 +36,16 @@ The builder's "name" must be given to the **-b** command-line option of

.. versionadded:: 0.6

.. class:: SingleFileHTMLBuilder

This is an HTML builder that combines the whole project in one output file.
(Obviously this only works with smaller projects.) The file is named like
the master document. No indices will be generated.

Its name is ``singlehtml``.

.. versionadded:: 1.0

.. module:: sphinx.builders.htmlhelp
.. class:: HTMLHelpBuilder

@@ -380,17 +380,18 @@ class Builder(object):


BUILTIN_BUILDERS = {
'html': ('html', 'StandaloneHTMLBuilder'),
'dirhtml': ('html', 'DirectoryHTMLBuilder'),
'pickle': ('html', 'PickleHTMLBuilder'),
'json': ('html', 'JSONHTMLBuilder'),
'web': ('html', 'PickleHTMLBuilder'),
'htmlhelp': ('htmlhelp', 'HTMLHelpBuilder'),
'devhelp': ('devhelp', 'DevhelpBuilder'),
'qthelp': ('qthelp', 'QtHelpBuilder'),
'epub': ('epub', 'EpubBuilder'),
'latex': ('latex', 'LaTeXBuilder'),
'text': ('text', 'TextBuilder'),
'changes': ('changes', 'ChangesBuilder'),
'linkcheck': ('linkcheck', 'CheckExternalLinksBuilder'),
'html': ('html', 'StandaloneHTMLBuilder'),
'dirhtml': ('html', 'DirectoryHTMLBuilder'),
'singlehtml': ('html', 'SingleFileHTMLBuilder'),
'pickle': ('html', 'PickleHTMLBuilder'),
'json': ('html', 'JSONHTMLBuilder'),
'web': ('html', 'PickleHTMLBuilder'),
'htmlhelp': ('htmlhelp', 'HTMLHelpBuilder'),
'devhelp': ('devhelp', 'DevhelpBuilder'),
'qthelp': ('qthelp', 'QtHelpBuilder'),
'epub': ('epub', 'EpubBuilder'),
'latex': ('latex', 'LaTeXBuilder'),
'text': ('text', 'TextBuilder'),
'changes': ('changes', 'ChangesBuilder'),
'linkcheck': ('linkcheck', 'CheckExternalLinksBuilder'),
}

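For orientation (this note is editorial, not part of the commit): each BUILTIN_BUILDERS entry maps a builder name to a (module, class) pair under sphinx.builders, so registering the new builder only needs the single 'singlehtml' line above. A minimal sketch of how such an entry could be resolved by hand, assuming the module lives under sphinx.builders; Sphinx's own loading code may differ:

    # Illustrative only: resolve a BUILTIN_BUILDERS entry to its builder class.
    def resolve_builder(name, registry):
        modname, classname = registry[name]        # e.g. ('html', 'SingleFileHTMLBuilder')
        module = __import__('sphinx.builders.' + modname,
                            fromlist=[classname])  # imports sphinx.builders.html
        return getattr(module, classname)

    # resolve_builder('singlehtml', BUILTIN_BUILDERS) would yield SingleFileHTMLBuilder.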
@@ -28,14 +28,16 @@ from docutils.frontend import OptionParser
from docutils.readers.doctree import Reader as DoctreeReader

from sphinx import package_dir, __version__
from sphinx import addnodes
from sphinx.util import SEP, os_path, relative_uri, ensuredir, patmatch, \
movefile, ustrftime, copy_static_entry, copyfile, compile_matchers, any
movefile, ustrftime, copy_static_entry, copyfile, compile_matchers, any, \
inline_all_toctrees
from sphinx.errors import SphinxError
from sphinx.search import js_index
from sphinx.theming import Theme
from sphinx.builders import Builder, ENV_PICKLE_FILENAME
from sphinx.highlighting import PygmentsBridge
from sphinx.util.console import bold
from sphinx.util.console import bold, darkgreen
from sphinx.writers.html import HTMLWriter, HTMLTranslator, \
SmartyPantsHTMLTranslator

@@ -379,127 +381,12 @@ class StandaloneHTMLBuilder(Builder):
self.info(bold('writing additional files...'), nonl=1)

# the global general index

if self.config.html_use_index:
# the total count of lines for each index letter, used to distribute
# the entries into two columns
genindex = self.env.create_index(self)
indexcounts = []
for _, entries in genindex:
indexcounts.append(sum(1 + len(subitems)
for _, (_, subitems) in entries))

genindexcontext = dict(
genindexentries = genindex,
genindexcounts = indexcounts,
split_index = self.config.html_split_index,
)
self.info(' genindex', nonl=1)

if self.config.html_split_index:
self.handle_page('genindex', genindexcontext,
'genindex-split.html')
self.handle_page('genindex-all', genindexcontext,
'genindex.html')
for (key, entries), count in zip(genindex, indexcounts):
ctx = {'key': key, 'entries': entries, 'count': count,
'genindexentries': genindex}
self.handle_page('genindex-' + key, ctx,
'genindex-single.html')
else:
self.handle_page('genindex', genindexcontext, 'genindex.html')
self.write_genindex()

# the global module index

if self.config.html_use_modindex and self.env.modules:
# the sorted list of all modules, for the global module index
modules = sorted(((mn, (self.get_relative_uri('modindex', fn) +
'#module-' + mn, sy, pl, dep))
for (mn, (fn, sy, pl, dep)) in
self.env.modules.iteritems()),
key=lambda x: x[0].lower())
# collect all platforms
platforms = set()
# sort out collapsable modules
modindexentries = []
letters = []
pmn = ''
num_toplevels = 0
num_collapsables = 0
cg = 0 # collapse group
fl = '' # first letter
for mn, (fn, sy, pl, dep) in modules:
pl = pl and pl.split(', ') or []
platforms.update(pl)

ignore = self.env.config['modindex_common_prefix']
ignore = sorted(ignore, key=len, reverse=True)
for i in ignore:
if mn.startswith(i):
mn = mn[len(i):]
stripped = i
break
else:
stripped = ''

# we stripped the whole module name
if not mn:
continue

if fl != mn[0].lower() and mn[0] != '_':
# heading
letter = mn[0].upper()
if letter not in letters:
modindexentries.append(['', False, 0, False,
letter, '', [], False, ''])
letters.append(letter)
tn = mn.split('.')[0]
if tn != mn:
# submodule
if pmn == tn:
# first submodule - make parent collapsable
modindexentries[-1][1] = True
num_collapsables += 1
elif not pmn.startswith(tn):
# submodule without parent in list, add dummy entry
cg += 1
modindexentries.append([tn, True, cg, False, '', '',
[], False, stripped])
else:
num_toplevels += 1
cg += 1
modindexentries.append([mn, False, cg, (tn != mn), fn, sy, pl,
dep, stripped])
pmn = mn
fl = mn[0].lower()
platforms = sorted(platforms)

# apply heuristics when to collapse modindex at page load:
# only collapse if number of toplevel modules is larger than
# number of submodules
collapse = len(modules) - num_toplevels < num_toplevels

# As some parts of the module names may have been stripped, those
# names have changed, thus it is necessary to sort the entries.
if ignore:
def sorthelper(entry):
name = entry[0]
if name == '':
# heading
name = entry[4]
return name.lower()

modindexentries.sort(key=sorthelper)
letters.sort()

modindexcontext = dict(
modindexentries = modindexentries,
platforms = platforms,
letters = letters,
collapse_modindex = collapse,
)
self.info(' modindex', nonl=1)
self.handle_page('modindex', modindexcontext, 'modindex.html')
self.write_modindex()

# the search page
if self.name != 'htmlhelp':

@@ -518,6 +405,134 @@ class StandaloneHTMLBuilder(Builder):

self.info()

self.copy_image_files()
self.copy_download_files()
self.copy_static_files()
self.write_buildinfo()

# dump the search index
self.handle_finish()

def write_genindex(self):
# the total count of lines for each index letter, used to distribute
# the entries into two columns
genindex = self.env.create_index(self)
indexcounts = []
for _, entries in genindex:
indexcounts.append(sum(1 + len(subitems)
for _, (_, subitems) in entries))

genindexcontext = dict(
genindexentries = genindex,
genindexcounts = indexcounts,
split_index = self.config.html_split_index,
)
self.info(' genindex', nonl=1)

if self.config.html_split_index:
self.handle_page('genindex', genindexcontext,
'genindex-split.html')
self.handle_page('genindex-all', genindexcontext,
'genindex.html')
for (key, entries), count in zip(genindex, indexcounts):
ctx = {'key': key, 'entries': entries, 'count': count,
'genindexentries': genindex}
self.handle_page('genindex-' + key, ctx,
'genindex-single.html')
else:
self.handle_page('genindex', genindexcontext, 'genindex.html')

def write_modindex(self):
# the sorted list of all modules, for the global module index
modules = sorted(((mn, (self.get_relative_uri('modindex', fn) +
'#module-' + mn, sy, pl, dep))
for (mn, (fn, sy, pl, dep)) in
self.env.modules.iteritems()),
key=lambda x: x[0].lower())
# collect all platforms
platforms = set()
# sort out collapsable modules
modindexentries = []
letters = []
pmn = ''
num_toplevels = 0
num_collapsables = 0
cg = 0 # collapse group
fl = '' # first letter
for mn, (fn, sy, pl, dep) in modules:
pl = pl and pl.split(', ') or []
platforms.update(pl)

ignore = self.env.config['modindex_common_prefix']
ignore = sorted(ignore, key=len, reverse=True)
for i in ignore:
if mn.startswith(i):
mn = mn[len(i):]
stripped = i
break
else:
stripped = ''

# we stripped the whole module name
if not mn:
continue

if fl != mn[0].lower() and mn[0] != '_':
# heading
letter = mn[0].upper()
if letter not in letters:
modindexentries.append(['', False, 0, False,
letter, '', [], False, ''])
letters.append(letter)
tn = mn.split('.')[0]
if tn != mn:
# submodule
if pmn == tn:
# first submodule - make parent collapsable
modindexentries[-1][1] = True
num_collapsables += 1
elif not pmn.startswith(tn):
# submodule without parent in list, add dummy entry
cg += 1
modindexentries.append([tn, True, cg, False, '', '',
[], False, stripped])
else:
num_toplevels += 1
cg += 1
modindexentries.append([mn, False, cg, (tn != mn), fn, sy, pl,
dep, stripped])
pmn = mn
fl = mn[0].lower()
platforms = sorted(platforms)

# apply heuristics when to collapse modindex at page load:
# only collapse if number of toplevel modules is larger than
# number of submodules
collapse = len(modules) - num_toplevels < num_toplevels

# As some parts of the module names may have been stripped, those
# names have changed, thus it is necessary to sort the entries.
if ignore:
def sorthelper(entry):
name = entry[0]
if name == '':
# heading
name = entry[4]
return name.lower()

modindexentries.sort(key=sorthelper)
letters.sort()

modindexcontext = dict(
modindexentries = modindexentries,
platforms = platforms,
letters = letters,
collapse_modindex = collapse,
)
self.info(' modindex', nonl=1)
self.handle_page('modindex', modindexcontext, 'modindex.html')

def copy_image_files(self):
# copy image files
if self.images:
self.info(bold('copying images...'), nonl=True)

@@ -532,6 +547,7 @@ class StandaloneHTMLBuilder(Builder):
(path.join(self.srcdir, src), err))
self.info()

def copy_download_files(self):
# copy downloadable files
if self.env.dlfiles:
self.info(bold('copying downloadable files...'), nonl=True)

@@ -546,6 +562,7 @@ class StandaloneHTMLBuilder(Builder):
(path.join(self.srcdir, src), err))
self.info()

def copy_static_files(self):
# copy static files
self.info(bold('copying static files... '), nonl=True)
ensuredir(path.join(self.outdir, '_static'))

@@ -593,7 +610,9 @@ class StandaloneHTMLBuilder(Builder):
if not path.isfile(icontarget):
copyfile(path.join(self.confdir, self.config.html_favicon),
icontarget)
self.info('done')

def write_buildinfo(self):
# write build info file
fp = open(path.join(self.outdir, '.buildinfo'), 'w')
try:

@@ -605,11 +624,6 @@ class StandaloneHTMLBuilder(Builder):
finally:
fp.close()

self.info('done')

# dump the search index
self.handle_finish()

def cleanup(self):
# clean up theme stuff
if self.theme:

@@ -751,19 +765,10 @@ class StandaloneHTMLBuilder(Builder):
copyfile(self.env.doc2path(pagename), source_name)

def handle_finish(self):
self.info(bold('dumping search index... '), nonl=True)
self.indexer.prune(self.env.all_docs)
searchindexfn = path.join(self.outdir, self.searchindex_filename)
# first write to a temporary file, so that if dumping fails,
# the existing index won't be overwritten
f = open(searchindexfn + '.tmp', 'wb')
try:
self.indexer.dump(f, self.indexer_format)
finally:
f.close()
movefile(searchindexfn + '.tmp', searchindexfn)
self.info('done')
self.dump_search_index()
self.dump_inventory()

def dump_inventory(self):
self.info(bold('dumping object inventory... '), nonl=True)
f = open(path.join(self.outdir, INVENTORY_FILENAME), 'w')
try:

@@ -779,6 +784,20 @@ class StandaloneHTMLBuilder(Builder):
f.close()
self.info('done')

def dump_search_index(self):
self.info(bold('dumping search index... '), nonl=True)
self.indexer.prune(self.env.all_docs)
searchindexfn = path.join(self.outdir, self.searchindex_filename)
# first write to a temporary file, so that if dumping fails,
# the existing index won't be overwritten
f = open(searchindexfn + '.tmp', 'wb')
try:
self.indexer.dump(f, self.indexer_format)
finally:
f.close()
movefile(searchindexfn + '.tmp', searchindexfn)
self.info('done')


class DirectoryHTMLBuilder(StandaloneHTMLBuilder):
"""

@@ -806,6 +825,110 @@ class DirectoryHTMLBuilder(StandaloneHTMLBuilder):
return outfilename


class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
"""
A StandaloneHTMLBuilder subclass that puts the whole document tree on one
HTML page.
"""
name = 'singlehtml'
copysource = False

def get_outdated_docs(self):
return 'all documents'

def get_target_uri(self, docname, typ=None):
if docname in self.env.all_docs:
# all references are on the same page...
return self.config.master_doc + self.out_suffix + \
'#document-' + docname
else:
# chances are this is a html_additional_page
return docname + self.out_suffix

def get_relative_uri(self, from_, to, typ=None):
# ignore source
return self.get_target_uri(to, typ)

def fix_refuris(self, tree):
# fix refuris with double anchor
fname = self.config.master_doc + self.out_suffix
for refnode in tree.traverse(nodes.reference):
if 'refuri' not in refnode:
continue
refuri = refnode['refuri']
hashindex = refuri.find('#')
if hashindex < 0:
continue
hashindex = refuri.find('#', hashindex+1)
if hashindex >= 0:
refnode['refuri'] = fname + refuri[hashindex:]

def assemble_doctree(self):
master = self.config.master_doc
tree = self.env.get_doctree(master)
tree = inline_all_toctrees(self, set(), master, tree, darkgreen)
tree['docname'] = master
self.env.resolve_references(tree, master, self)
self.fix_refuris(tree)
return tree

def get_doc_context(self, docname, body, metatags):
# no relation links...
toc = self.env.get_toctree_for(self.config.master_doc, self, False)
self.fix_refuris(toc)
toc = self.render_partial(toc)['fragment']
return dict(
parents = [],
prev = None,
next = None,
docstitle = None,
title = self.config.html_title,
meta = None,
body = body,
metatags = metatags,
rellinks = [],
sourcename = '',
toc = toc,
display_toc = True,
)

def write(self, *ignored):
docnames = self.env.all_docs

self.info(bold('preparing documents... '), nonl=True)
self.prepare_writing(docnames)
self.info('done')

self.info(bold('assembling single document... '), nonl=True)
doctree = self.assemble_doctree()
self.info()
self.info(bold('writing... '), nonl=True)
self.write_doc(self.config.master_doc, doctree)
self.info('done')

def finish(self):
# no indices or search pages are supported
self.info(bold('writing additional files...'), nonl=1)

# additional pages from conf.py
for pagename, template in self.config.html_additional_pages.items():
self.info(' '+pagename, nonl=1)
self.handle_page(pagename, {}, template)

if self.config.html_use_opensearch:
self.info(' opensearch', nonl=1)
fn = path.join(self.outdir, '_static', 'opensearch.xml')
self.handle_page('opensearch', {}, 'opensearch.xml', outfilename=fn)

self.info()

self.copy_image_files()
self.copy_download_files()
self.copy_static_files()
self.write_buildinfo()
self.dump_inventory()


class SerializingHTMLBuilder(StandaloneHTMLBuilder):
"""
An abstract builder that serializes the generated HTML.

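Editorial note on the mechanism above (not part of the commit): get_target_uri points every document at the master file plus a '#document-<docname>' anchor, and fix_refuris then collapses the double anchor that reference resolution produces for targets inside a document. A standalone sketch of that string rewrite, using hypothetical names ('index' as master_doc, a link to anchor 'usage' in document 'intro'):

    # Illustrative sketch of the rewrite performed by fix_refuris.
    fname = 'index' + '.html'                    # master_doc + out_suffix
    refuri = 'index.html#document-intro#usage'   # refuri after reference resolution
    hashindex = refuri.find('#')                 # first '#'
    hashindex = refuri.find('#', hashindex + 1)  # second '#', if any
    if hashindex >= 0:
        refuri = fname + refuri[hashindex:]      # -> 'index.html#usage'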
@@ -14,6 +14,7 @@ from os import path

TERM_ENCODING = getattr(sys.stdin, 'encoding', None)

from sphinx import __version__
from sphinx.util import make_filename
from sphinx.util.console import purple, bold, red, turquoise, \
nocolor, color_terminal

@@ -301,24 +302,25 @@ PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) \
$(SPHINXOPTS) %(rsrcdir)s

.PHONY: help clean html dirhtml pickle json htmlhelp qthelp epub \
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp epub \
latex changes linkcheck doctest

help:
\t@echo "Please use \\`make <target>' where <target> is one of"
\t@echo " html to make standalone HTML files"
\t@echo " dirhtml to make HTML files named index.html in directories"
\t@echo " pickle to make pickle files"
\t@echo " json to make JSON files"
\t@echo " htmlhelp to make HTML files and a HTML help project"
\t@echo " qthelp to make HTML files and a qthelp project"
\t@echo " devhelp to make HTML files and a Devhelp project"
\t@echo " epub to make an epub"
\t@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
\t@echo " latexpdf to make LaTeX files and run them through pdflatex"
\t@echo " changes to make an overview of all changed/added/deprecated items"
\t@echo " linkcheck to check all external links for integrity"
\t@echo " doctest to run all doctests embedded in the documentation \
\t@echo " html to make standalone HTML files"
\t@echo " dirhtml to make HTML files named index.html in directories"
\t@echo " singlehtml to make a single large HTML file"
\t@echo " pickle to make pickle files"
\t@echo " json to make JSON files"
\t@echo " htmlhelp to make HTML files and a HTML help project"
\t@echo " qthelp to make HTML files and a qthelp project"
\t@echo " devhelp to make HTML files and a Devhelp project"
\t@echo " epub to make an epub"
\t@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
\t@echo " latexpdf to make LaTeX files and run them through pdflatex"
\t@echo " changes to make an overview of all changed/added/deprecated items"
\t@echo " linkcheck to check all external links for integrity"
\t@echo " doctest to run all doctests embedded in the documentation \
(if enabled)"

clean:

@@ -334,6 +336,11 @@ dirhtml:
\t@echo
\t@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

singlehtml:
\t$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
\t@echo
\t@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

pickle:
\t$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
\t@echo

@@ -421,18 +428,19 @@ if "%%1" == "" goto help
if "%%1" == "help" (
\t:help
\techo.Please use `make ^<target^>` where ^<target^> is one of
\techo. html to make standalone HTML files
\techo. dirhtml to make HTML files named index.html in directories
\techo. pickle to make pickle files
\techo. json to make JSON files
\techo. htmlhelp to make HTML files and a HTML help project
\techo. qthelp to make HTML files and a qthelp project
\techo. devhelp to make HTML files and a Devhelp project
\techo. epub to make an epub
\techo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
\techo. changes to make an overview over all changed/added/deprecated items
\techo. linkcheck to check all external links for integrity
\techo. doctest to run all doctests embedded in the documentation if enabled
\techo. html to make standalone HTML files
\techo. dirhtml to make HTML files named index.html in directories
\techo. singlehtml to make a single large HTML file
\techo. pickle to make pickle files
\techo. json to make JSON files
\techo. htmlhelp to make HTML files and a HTML help project
\techo. qthelp to make HTML files and a qthelp project
\techo. devhelp to make HTML files and a Devhelp project
\techo. epub to make an epub
\techo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
\techo. changes to make an overview over all changed/added/deprecated items
\techo. linkcheck to check all external links for integrity
\techo. doctest to run all doctests embedded in the documentation if enabled
\tgoto end
)

@@ -456,6 +464,13 @@ if "%%1" == "dirhtml" (
\tgoto end
)

if "%%1" == "singlehtml" (
\t%%SPHINXBUILD%% -b singlehtml %%ALLSPHINXOPTS%% %%BUILDDIR%%/singlehtml
\techo.
\techo.Build finished. The HTML pages are in %%BUILDDIR%%/singlehtml.
\tgoto end
)

if "%%1" == "pickle" (
\t%%SPHINXBUILD%% -b pickle %%ALLSPHINXOPTS%% %%BUILDDIR%%/pickle
\techo.

@@ -614,7 +629,7 @@ def inner_main(args):
if not color_terminal():
nocolor()

print bold('Welcome to the Sphinx quickstart utility.')
print bold('Welcome to the Sphinx %s quickstart utility.') % __version__
print '''
Please enter values for the following settings (just press Enter to
accept a default value, if one is given in brackets).'''

@@ -60,6 +60,12 @@ class HTMLTranslator(BaseTranslator):
self.protect_literal_text = 0
self.add_permalinks = builder.config.html_add_permalinks

def visit_start_of_file(self, node):
# only occurs in the single-file builder
self.body.append('<span id="document-%s"></span>' % node['docname'])
def depart_start_of_file(self, node):
pass

def visit_desc(self, node):
self.body.append(self.starttag(node, 'dl', CLASS=node['desctype']))
def depart_desc(self, node):

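With the hunk above, every document inlined into the single page contributes an anchor of the form <span id="document-NAME"></span>, where NAME is the docname; this is exactly what the '#document-<docname>' URIs produced by SingleFileHTMLBuilder.get_target_uri resolve to.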
@@ -128,6 +128,10 @@ def test_qthelp(app):
def test_epub(app):
app.builder.build_all()

@with_app(buildername='changes', cleanenv=True)
@with_app(buildername='changes')
def test_changes(app):
app.builder.build_all()

@with_app(buildername='singlehtml', cleanenv=True)
def test_singlehtml(app):
app.builder.build_all()