Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

commit b09e628b0f (parent 1fbdc410b7)

A few refactorings in Sphinx.
@@ -3,7 +3,7 @@
Sphinx
~~~~~~

The Python documentation toolchain.
The Sphinx documentation toolchain.

:copyright: 2007-2008 by Georg Brandl.
:license: BSD.
@@ -14,8 +14,8 @@ import getopt
from os import path
from cStringIO import StringIO

from .builder import builders
from .util.console import nocolor
from sphinx.builder import builders
from sphinx.util.console import nocolor

__version__ = '$Revision: 5369 $'

@@ -31,7 +31,6 @@ options: -b <builder> -- builder to use (one of %s)
-E -- don't use a saved environment, always read all files
-d <path> -- path for the cached environment and doctree files
(default outdir/.doctrees)
-O <option[=value]> -- give option to to the builder (-O help for list)
-D <setting=value> -- override a setting in sourcedir/conf.py
-N -- do not do colored output
-q -- no output on stdout, just warnings on stderr
@@ -44,7 +43,7 @@ modi:

def main(argv):
try:
opts, args = getopt.getopt(argv[1:], 'ab:d:O:D:NEqP')
opts, args = getopt.getopt(argv[1:], 'ab:d:D:NEqP')
srcdirname = path.abspath(args[0])
if not path.isdir(srcdirname):
print >>sys.stderr, 'Error: Cannot find source directory.'
@@ -70,9 +69,8 @@ def main(argv):
return 1

builder = all_files = None
opt_help = freshenv = use_pdb = False
freshenv = use_pdb = False
status = sys.stdout
options = {}
confoverrides = {}
doctreedir = path.join(outdirname, '.doctrees')
for opt, val in opts:
@@ -88,18 +86,6 @@ def main(argv):
all_files = True
elif opt == '-d':
doctreedir = val
elif opt == '-O':
if val == 'help':
opt_help = True
continue
if '=' in val:
key, val = val.split('=')
try:
val = int(val)
except: pass
else:
key, val = val, True
options[key] = val
elif opt == '-D':
key, val = val.split('=')
try:
@@ -125,14 +111,8 @@ def main(argv):

builderobj = builders[builder]

if opt_help:
print 'Options recognized by the %s builder:' % builder
for optname, description in builderobj.option_spec.iteritems():
print ' * %s: %s' % (optname, description)
return 0

try:
builderobj = builderobj(srcdirname, outdirname, doctreedir, options,
builderobj = builderobj(srcdirname, outdirname, doctreedir,
status_stream=status,
warning_stream=sys.stderr,
confoverrides=confoverrides,
@@ -146,7 +126,8 @@ def main(argv):
except:
if not use_pdb:
raise
import pdb
import pdb, traceback
traceback.print_exc()
pdb.post_mortem(sys.exc_info()[2])
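The error handling in the last hunk now prints a traceback before dropping into the debugger. A minimal stand-alone sketch of that post-mortem pattern; run() and use_pdb are toy stand-ins for the real command-line handling:

import sys

def run():
    raise RuntimeError('something went wrong during the build')

use_pdb = True  # corresponds to the -P command-line switch
try:
    run()
except Exception:
    if not use_pdb:
        raise
    import pdb, traceback
    # print the traceback first, then inspect the failing frame interactively
    traceback.print_exc()
    pdb.post_mortem(sys.exc_info()[2])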
@@ -8,11 +8,41 @@
:copyright: 2007-2008 by Georg Brandl.
:license: BSD.
"""
from __future__ import absolute_import

import sys
import codecs
from os import path

sys.path.insert(0, path.dirname(__file__))

from jinja import Environment, FileSystemLoader
from jinja import Environment
from jinja.loaders import BaseLoader
from jinja.exceptions import TemplateNotFound

class SphinxFileSystemLoader(BaseLoader):
"""
A loader that loads templates either relative to one of a list of given
paths, or from an absolute path.
"""

def __init__(self, paths):
self.searchpaths = map(path.abspath, paths)

def get_source(self, environment, name, parent):
name = name.replace('/', path.sep)
if path.isabs(name):
if not path.exists(name):
raise TemplateNotFound(name)
filename = name
else:
for searchpath in self.searchpaths:
if path.exists(path.join(searchpath, name)):
filename = path.join(searchpath, name)
break
else:
raise TemplateNotFound(name)
f = codecs.open(filename, 'r', environment.template_charset)
try:
return f.read()
finally:
f.close()
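For context, a sketch of how the new loader is meant to be plugged in. It uses only calls that appear elsewhere in this commit (Environment(loader=..., friendly_traceback=False), get_template, render), but the search paths and template name below are made up for illustration, and the keyword arguments belong to the Jinja 1 API of that era:

from sphinx._jinja import Environment, SphinxFileSystemLoader

# search a project-local template directory first, then Sphinx's built-in one
# (hypothetical paths, for illustration only)
loader = SphinxFileSystemLoader(['/project/templates', '/sphinx/templates'])
env = Environment(loader=loader,
                  # disable traceback rewriting, as the builders do
                  friendly_traceback=False)

template = env.get_template('page.html')   # resolved against the search paths
print(template.render({'title': 'Demo'}))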
@@ -3,6 +3,8 @@
sphinx.addnodes
~~~~~~~~~~~~~~~

Additional docutils nodes.

:copyright: 2007-2008 by Georg Brandl.
:license: BSD.
"""
@@ -8,7 +8,6 @@
:copyright: 2007-2008 by Georg Brandl.
:license: BSD.
"""
from __future__ import with_statement

import os
import sys
@@ -21,26 +20,27 @@ import cStringIO as StringIO
from os import path
from cgi import escape

from docutils import nodes
from docutils.io import StringOutput, FileOutput, DocTreeInput
from docutils.core import publish_parts
from docutils.utils import new_document
from docutils.readers import doctree
from docutils.frontend import OptionParser

from .util import (get_matching_files, attrdict, status_iterator, ensuredir,
get_category, relative_uri, os_path, SEP)
from .htmlhelp import build_hhx
from .patchlevel import get_version_info, get_sys_version_info
from .htmlwriter import HTMLWriter
from .latexwriter import LaTeXWriter
from .environment import BuildEnvironment, NoUri
from .highlighting import pygments, highlight_block, get_stylesheet
from .util.console import bold, purple, green
from sphinx import addnodes
from sphinx.util import (get_matching_files, attrdict, status_iterator,
ensuredir, relative_uri, os_path, SEP)
from sphinx.htmlhelp import build_hhx
from sphinx.patchlevel import get_version_info, get_sys_version_info
from sphinx.htmlwriter import HTMLWriter
from sphinx.latexwriter import LaTeXWriter
from sphinx.environment import BuildEnvironment, NoUri
from sphinx.highlighting import pygments, highlight_block, get_stylesheet
from sphinx.util.console import bold, purple, green

from . import addnodes
# side effect: registers roles and directives
from . import roles
from . import directives
from sphinx import roles
from sphinx import directives

ENV_PICKLE_FILENAME = 'environment.pickle'
LAST_BUILD_FILENAME = 'last_build'
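The import hunks above carry the main theme of the commit: every package-relative import, which relies on PEP 328 syntax available only from Python 2.5, is spelled out as an absolute import, presumably so the package also runs on Python 2.4. Shown schematically; these lines only make sense inside the sphinx package itself:

# removed form: explicit relative imports (Python 2.5+, PEP 328),
# paired with "from __future__ import absolute_import" where needed
from .builder import builders
from .util.console import nocolor

# added form: plain absolute imports, equally valid on Python 2.4
from sphinx.builder import builders
from sphinx.util.console import nocolor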
@@ -53,33 +53,18 @@ class relpath_to(object):
self.builder = builder
def __call__(self, otheruri, resource=False):
if not resource:
otheruri = self.builder.get_target_uri(otheruri)
otheruri = self.builder.get_target_uri(otheruri + '.rst')
return relative_uri(self.baseuri, otheruri)


class collect_env_warnings(object):
def __init__(self, builder):
self.builder = builder
self.warnings = []
def __enter__(self):
self.builder.env.set_warnfunc(self.warnings.append)
def __exit__(self, *args):
self.builder.env.set_warnfunc(self.builder.warn)
for warning in self.warnings:
self.builder.warn(warning)


class Builder(object):
"""
Builds target formats from the reST sources.
"""

option_spec = {}

def __init__(self, srcdirname, outdirname, doctreedirname,
options, confoverrides=None, env=None,
status_stream=None, warning_stream=None,
freshenv=False):
confoverrides=None, env=None, freshenv=False,
status_stream=None, warning_stream=None):
self.srcdir = srcdirname
self.outdir = outdirname
self.doctreedir = doctreedirname
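The removed collect_env_warnings class was a with-statement context manager; later hunks in this file unroll its call sites into explicit set_warnfunc calls, since the with statement is another 2.5-only feature. A self-contained sketch of the same buffer-then-replay idea, with toy stand-ins for BuildEnvironment and Builder:

class Env(object):
    """Toy stand-in for BuildEnvironment: it only reports warnings."""
    def set_warnfunc(self, func):
        self._warnfunc = func
    def update(self):
        self._warnfunc('toy warning emitted while reading')

class Builder(object):
    def __init__(self):
        self.env = Env()
    def warn(self, message):
        print('WARNING: ' + message)
    def read(self):
        # buffer warnings while the environment is read ...
        warnings = []
        self.env.set_warnfunc(warnings.append)
        self.env.update()
        # ... then replay them and restore the normal warning function
        for warning in warnings:
            self.warn(warning)
        self.env.set_warnfunc(self.warn)

Builder().read()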
@ -87,9 +72,6 @@ class Builder(object):
|
||||
os.mkdir(doctreedirname)
|
||||
self.freshenv = freshenv
|
||||
|
||||
self.options = attrdict(options)
|
||||
self.validate_options()
|
||||
|
||||
self.status_stream = status_stream or sys.stdout
|
||||
self.warning_stream = warning_stream or sys.stderr
|
||||
|
||||
@ -97,7 +79,12 @@ class Builder(object):
|
||||
self.env = env
|
||||
|
||||
self.config = {}
|
||||
execfile(path.join(srcdirname, 'conf.py'), self.config)
|
||||
olddir = os.getcwd()
|
||||
try:
|
||||
os.chdir(srcdirname)
|
||||
execfile(path.join(srcdirname, 'conf.py'), self.config)
|
||||
finally:
|
||||
os.chdir(olddir)
|
||||
# remove potentially pickling-problematic values
|
||||
del self.config['__builtins__']
|
||||
for key, val in self.config.items():
|
||||
@ -105,31 +92,23 @@ class Builder(object):
|
||||
del self.config[key]
|
||||
if confoverrides:
|
||||
self.config.update(confoverrides)
|
||||
# replace version info if 'auto'
|
||||
if self.config['version'] == 'auto' or self.config['release'] == 'auto':
|
||||
# replace version info if '<auto>'
|
||||
if self.config['version'] == '<auto>' or self.config['release'] == '<auto>':
|
||||
try:
|
||||
version, release = get_version_info(srcdirname)
|
||||
except (IOError, OSError):
|
||||
version, release = get_sys_version_info()
|
||||
self.warn('Can\'t get version info from Include/patchlevel.h, '
|
||||
'using version of this interpreter (%s).' % release)
|
||||
if self.config['version'] == 'auto':
|
||||
if self.config['version'] == '<auto>':
|
||||
self.config['version'] = version
|
||||
if self.config['release'] == 'auto':
|
||||
if self.config['release'] == '<auto>':
|
||||
self.config['release'] = release
|
||||
|
||||
self.init()
|
||||
|
||||
# helper methods
|
||||
|
||||
def validate_options(self):
|
||||
for option in self.options:
|
||||
if option not in self.option_spec:
|
||||
raise ValueError('Got unexpected option %s' % option)
|
||||
for option in self.option_spec:
|
||||
if option not in self.options:
|
||||
self.options[option] = False
|
||||
|
||||
def msg(self, message='', nonl=False, nobold=False):
|
||||
if not nobold: message = bold(message)
|
||||
if nonl:
|
||||
@ -214,16 +193,20 @@ class Builder(object):
|
||||
|
||||
updated_filenames = []
|
||||
# while reading, collect all warnings from docutils
|
||||
with collect_env_warnings(self):
|
||||
self.msg('reading, updating environment:', nonl=1)
|
||||
iterator = self.env.update(self.config)
|
||||
self.msg(iterator.next(), nonl=1, nobold=1)
|
||||
for filename in iterator:
|
||||
if not updated_filenames:
|
||||
self.msg('')
|
||||
updated_filenames.append(filename)
|
||||
self.msg(purple(filename), nonl=1, nobold=1)
|
||||
self.msg()
|
||||
warnings = []
|
||||
self.env.set_warnfunc(warnings.append)
|
||||
self.msg('reading, updating environment:', nonl=1)
|
||||
iterator = self.env.update(self.config)
|
||||
self.msg(iterator.next(), nonl=1, nobold=1)
|
||||
for filename in iterator:
|
||||
if not updated_filenames:
|
||||
self.msg('')
|
||||
updated_filenames.append(filename)
|
||||
self.msg(purple(filename), nonl=1, nobold=1)
|
||||
self.msg()
|
||||
for warning in warnings:
|
||||
self.warn(warning)
|
||||
self.env.set_warnfunc(self.warn)
|
||||
|
||||
if updated_filenames:
|
||||
# save the environment
|
||||
@ -260,12 +243,16 @@ class Builder(object):
|
||||
self.prepare_writing(filenames)
|
||||
|
||||
# write target files
|
||||
with collect_env_warnings(self):
|
||||
self.msg('writing output...')
|
||||
for filename in status_iterator(sorted(filenames), green,
|
||||
stream=self.status_stream):
|
||||
doctree = self.env.get_and_resolve_doctree(filename, self)
|
||||
self.write_file(filename, doctree)
|
||||
warnings = []
|
||||
self.env.set_warnfunc(warnings.append)
|
||||
self.msg('writing output...')
|
||||
for filename in status_iterator(sorted(filenames), green,
|
||||
stream=self.status_stream):
|
||||
doctree = self.env.get_and_resolve_doctree(filename, self)
|
||||
self.write_file(filename, doctree)
|
||||
for warning in warnings:
|
||||
self.warn(warning)
|
||||
self.env.set_warnfunc(self.warn)
|
||||
|
||||
def prepare_writing(self, filenames):
|
||||
raise NotImplementedError
|
||||
@ -288,22 +275,28 @@ class StandaloneHTMLBuilder(Builder):
|
||||
def init(self):
|
||||
"""Load templates."""
|
||||
# lazily import this, maybe other builders won't need it
|
||||
from ._jinja import Environment, FileSystemLoader
|
||||
from sphinx._jinja import Environment, SphinxFileSystemLoader
|
||||
|
||||
# load templates
|
||||
self.templates = {}
|
||||
templates_path = path.join(path.dirname(__file__), 'templates')
|
||||
jinja_env = Environment(loader=FileSystemLoader(templates_path),
|
||||
# disable traceback, more likely that something in the
|
||||
# application is broken than in the templates
|
||||
friendly_traceback=False)
|
||||
self.jinja_env = Environment(loader=SphinxFileSystemLoader([templates_path]),
|
||||
# disable traceback, more likely that something
|
||||
# in the application is broken than in the templates
|
||||
friendly_traceback=False)
|
||||
# pre-load built-in templates
|
||||
for fname in os.listdir(templates_path):
|
||||
if fname.endswith('.html'):
|
||||
self.templates[fname[:-5]] = jinja_env.get_template(fname)
|
||||
self.templates[fname] = self.jinja_env.get_template(fname)
|
||||
|
||||
def get_template(self, name):
|
||||
if name in self.templates:
|
||||
return self.templates[name]
|
||||
return self.jinja_env.get_template(name)
|
||||
|
||||
def render_partial(self, node):
|
||||
"""Utility: Render a lone doctree node."""
|
||||
doc = new_document('foo')
|
||||
doc = new_document('<partial node>')
|
||||
doc.append(node)
|
||||
return publish_parts(
|
||||
doc,
|
||||
@ -314,7 +307,7 @@ class StandaloneHTMLBuilder(Builder):
|
||||
)
|
||||
|
||||
def prepare_writing(self, filenames):
|
||||
from .search import IndexBuilder
|
||||
from sphinx.search import IndexBuilder
|
||||
self.indexer = IndexBuilder()
|
||||
self.load_indexer(filenames)
|
||||
self.docwriter = HTMLWriter(self)
|
||||
@ -331,13 +324,15 @@ class StandaloneHTMLBuilder(Builder):
|
||||
self.last_updated = None
|
||||
|
||||
self.globalcontext = dict(
|
||||
last_updated = self.last_updated,
|
||||
builder = self.name,
|
||||
project = self.config.get('project', 'Python'),
|
||||
copyright = self.config.get('copyright', ''),
|
||||
release = self.config['release'],
|
||||
version = self.config['version'],
|
||||
last_updated = self.last_updated,
|
||||
builder = self.name,
|
||||
parents = [],
|
||||
len = len,
|
||||
titles = {},
|
||||
len = len, # the built-in
|
||||
)
|
||||
|
||||
def write_file(self, filename, doctree):
|
||||
@ -375,7 +370,6 @@ class StandaloneHTMLBuilder(Builder):
|
||||
context = dict(
|
||||
title = title,
|
||||
sourcename = sourcename,
|
||||
pathto = relpath_to(self, self.get_target_uri(filename)),
|
||||
body = self.docwriter.parts['fragment'],
|
||||
toc = self.render_partial(self.env.get_toc_for(filename))['fragment'],
|
||||
# only display a TOC if there's more than one item to show
|
||||
@ -386,7 +380,7 @@ class StandaloneHTMLBuilder(Builder):
|
||||
)
|
||||
|
||||
self.index_file(filename, doctree, title)
|
||||
self.handle_file(filename, context)
|
||||
self.handle_page(filename[:-4], context)
|
||||
|
||||
def finish(self):
|
||||
self.msg('writing additional files...')
|
||||
@ -402,10 +396,8 @@ class StandaloneHTMLBuilder(Builder):
|
||||
genindexcontext = dict(
|
||||
genindexentries = self.env.index,
|
||||
genindexcounts = indexcounts,
|
||||
current_page_name = 'genindex',
|
||||
pathto = relpath_to(self, self.get_target_uri('genindex.rst')),
|
||||
)
|
||||
self.handle_file('genindex.rst', genindexcontext, 'genindex')
|
||||
self.handle_page('genindex', genindexcontext, 'genindex.html')
|
||||
|
||||
# the global module index
|
||||
|
||||
@ -422,12 +414,12 @@ class StandaloneHTMLBuilder(Builder):
|
||||
cg = 0 # collapse group
|
||||
fl = '' # first letter
|
||||
for mn, (fn, sy, pl, dep) in modules:
|
||||
pl = pl.split(', ') if pl else []
|
||||
pl = pl and pl.split(', ') or []
|
||||
platforms.update(pl)
|
||||
if fl != mn[0].lower() and mn[0] != '_':
|
||||
modindexentries.append(['', False, 0, False,
|
||||
mn[0].upper(), '', [], False])
|
||||
tn = mn.partition('.')[0]
|
||||
tn = mn.split('.')[0]
|
||||
if tn != mn:
|
||||
# submodule
|
||||
if pmn == tn:
|
||||
@ -447,32 +439,22 @@ class StandaloneHTMLBuilder(Builder):
|
||||
modindexcontext = dict(
|
||||
modindexentries = modindexentries,
|
||||
platforms = platforms,
|
||||
current_page_name = 'modindex',
|
||||
pathto = relpath_to(self, self.get_target_uri('modindex.rst')),
|
||||
)
|
||||
self.handle_file('modindex.rst', modindexcontext, 'modindex')
|
||||
|
||||
# the download page
|
||||
downloadcontext = dict(
|
||||
pathto = relpath_to(self, self.get_target_uri('download.rst')),
|
||||
current_page_name = 'download',
|
||||
download_base_url = self.config['html_download_base_url'],
|
||||
)
|
||||
self.handle_file('download.rst', downloadcontext, 'download')
|
||||
|
||||
# the index page
|
||||
indexcontext = dict(
|
||||
pathto = relpath_to(self, self.get_target_uri('index.rst')),
|
||||
current_page_name = 'index',
|
||||
)
|
||||
self.handle_file('index.rst', indexcontext, 'index')
|
||||
self.handle_page('modindex', modindexcontext, 'modindex.html')
|
||||
|
||||
# the search page
|
||||
searchcontext = dict(
|
||||
pathto = relpath_to(self, self.get_target_uri('search.rst')),
|
||||
current_page_name = 'search',
|
||||
)
|
||||
self.handle_file('search.rst', searchcontext, 'search')
|
||||
self.handle_page('search', {}, 'search.html')
|
||||
|
||||
# additional pages from conf.py
|
||||
for pagename, template in self.config.get('html_additional_pages', {}).items():
|
||||
template = path.join(self.srcdir, template)
|
||||
self.handle_page(pagename, {}, template)
|
||||
|
||||
# the index page
|
||||
indextemplate = self.config.get('html_index')
|
||||
if indextemplate:
|
||||
indextemplate = path.join(self.srcdir, indextemplate)
|
||||
self.handle_page('index', {'indextemplate': indextemplate}, 'index.html')
|
||||
|
||||
# copy style files
|
||||
self.msg('copying style files...')
|
||||
@ -512,8 +494,11 @@ class StandaloneHTMLBuilder(Builder):
|
||||
|
||||
def load_indexer(self, filenames):
|
||||
try:
|
||||
with open(path.join(self.outdir, 'searchindex.json'), 'r') as f:
|
||||
f = open(path.join(self.outdir, 'searchindex.json'), 'r')
|
||||
try:
|
||||
self.indexer.load(f, 'json')
|
||||
finally:
|
||||
f.close()
|
||||
except (IOError, OSError):
|
||||
pass
|
||||
# delete all entries for files that will be rebuilt
|
||||
@ -522,32 +507,43 @@ class StandaloneHTMLBuilder(Builder):
|
||||
def index_file(self, filename, doctree, title):
|
||||
# only index pages with title
|
||||
if self.indexer is not None and title:
|
||||
category = get_category(filename)
|
||||
if category is not None:
|
||||
self.indexer.feed(self.get_target_uri(filename)[:-5], # strip '.html'
|
||||
category, title, doctree)
|
||||
self.indexer.feed(self.get_target_uri(filename)[:-5], # strip '.html'
|
||||
title, doctree)
|
||||
|
||||
def handle_file(self, filename, context, templatename='page'):
|
||||
def handle_page(self, pagename, context, templatename='page.html'):
|
||||
ctx = self.globalcontext.copy()
|
||||
ctx['current_page_name'] = pagename
|
||||
ctx['pathto'] = relpath_to(self, self.get_target_uri(pagename+'.rst'))
|
||||
ctx['hasdoc'] = lambda name: name+'.rst' in self.env.all_files
|
||||
sidebarfile = self.config.get('html_sidebars', {}).get(pagename)
|
||||
if sidebarfile:
|
||||
ctx['customsidebar'] = path.join(self.srcdir, sidebarfile)
|
||||
ctx.update(context)
|
||||
output = self.templates[templatename].render(ctx)
|
||||
outfilename = path.join(self.outdir, os_path(filename)[:-4] + '.html')
|
||||
|
||||
output = self.get_template(templatename).render(ctx)
|
||||
outfilename = path.join(self.outdir, os_path(pagename) + '.html')
|
||||
ensuredir(path.dirname(outfilename)) # normally different from self.outdir
|
||||
try:
|
||||
with codecs.open(outfilename, 'w', 'utf-8') as fp:
|
||||
fp.write(output)
|
||||
f = codecs.open(outfilename, 'w', 'utf-8')
|
||||
try:
|
||||
f.write(output)
|
||||
finally:
|
||||
f.close()
|
||||
except (IOError, OSError), err:
|
||||
self.warn("Error writing file %s: %s" % (outfilename, err))
|
||||
if self.copysource and context.get('sourcename'):
|
||||
# copy the source file for the "show source" link
|
||||
shutil.copyfile(path.join(self.srcdir, os_path(filename)),
|
||||
shutil.copyfile(path.join(self.srcdir, os_path(pagename+'.rst')),
|
||||
path.join(self.outdir, os_path(context['sourcename'])))
|
||||
|
||||
def handle_finish(self):
|
||||
self.msg('dumping search index...')
|
||||
self.indexer.prune([self.get_target_uri(fn)[:-5] for fn in self.env.all_files])
|
||||
with open(path.join(self.outdir, 'searchindex.json'), 'w') as f:
|
||||
f = open(path.join(self.outdir, 'searchindex.json'), 'w')
|
||||
try:
|
||||
self.indexer.dump(f, 'json')
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
|
||||
class WebHTMLBuilder(StandaloneHTMLBuilder):
|
||||
@ -581,44 +577,57 @@ class WebHTMLBuilder(StandaloneHTMLBuilder):
|
||||
|
||||
def load_indexer(self, filenames):
|
||||
try:
|
||||
with open(path.join(self.outdir, 'searchindex.pickle'), 'r') as f:
|
||||
f = open(path.join(self.outdir, 'searchindex.pickle'), 'r')
|
||||
try:
|
||||
self.indexer.load(f, 'pickle')
|
||||
finally:
|
||||
f.close()
|
||||
except (IOError, OSError):
|
||||
pass
|
||||
# delete all entries for files that will be rebuilt
|
||||
self.indexer.prune(set(self.env.all_files) - set(filenames))
|
||||
|
||||
def index_file(self, filename, doctree, title):
|
||||
# only index pages with title and category
|
||||
# only index pages with title
|
||||
if self.indexer is not None and title:
|
||||
category = get_category(filename)
|
||||
if category is not None:
|
||||
self.indexer.feed(filename, category, title, doctree)
|
||||
self.indexer.feed(filename, title, doctree)
|
||||
|
||||
def handle_file(self, filename, context, templatename='page'):
|
||||
outfilename = path.join(self.outdir, os_path(filename)[:-4] + '.fpickle')
|
||||
def handle_page(self, pagename, context, templatename='page.html'):
|
||||
context['current_page_name'] = pagename
|
||||
sidebarfile = self.config.get('html_sidebars', {}).get(pagename, '')
|
||||
if sidebarfile:
|
||||
context['customsidebar'] = path.join(self.srcdir, sidebarfile)
|
||||
outfilename = path.join(self.outdir, os_path(pagename) + '.fpickle')
|
||||
ensuredir(path.dirname(outfilename))
|
||||
context.pop('pathto', None) # can't be pickled
|
||||
with file(outfilename, 'wb') as fp:
|
||||
pickle.dump(context, fp, 2)
|
||||
f = open(outfilename, 'wb')
|
||||
try:
|
||||
pickle.dump(context, f, 2)
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
# if there is a source file, copy the source file for the "show source" link
|
||||
if context.get('sourcename'):
|
||||
source_name = path.join(self.outdir, 'sources',
|
||||
os_path(context['sourcename']))
|
||||
ensuredir(path.dirname(source_name))
|
||||
shutil.copyfile(path.join(self.srcdir, os_path(filename)), source_name)
|
||||
shutil.copyfile(path.join(self.srcdir, os_path(pagename)+'.rst'), source_name)
|
||||
|
||||
def handle_finish(self):
|
||||
# dump the global context
|
||||
outfilename = path.join(self.outdir, 'globalcontext.pickle')
|
||||
with file(outfilename, 'wb') as fp:
|
||||
pickle.dump(self.globalcontext, fp, 2)
|
||||
f = open(outfilename, 'wb')
|
||||
try:
|
||||
pickle.dump(self.globalcontext, f, 2)
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
self.msg('dumping search index...')
|
||||
self.indexer.prune(self.env.all_files)
|
||||
with open(path.join(self.outdir, 'searchindex.pickle'), 'wb') as f:
|
||||
f = open(path.join(self.outdir, 'searchindex.pickle'), 'wb')
|
||||
try:
|
||||
self.indexer.dump(f, 'pickle')
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
# copy the environment file from the doctree dir to the output dir
|
||||
# as needed by the web app
|
||||
@ -641,15 +650,11 @@ class HTMLHelpBuilder(StandaloneHTMLBuilder):
|
||||
"""
|
||||
name = 'htmlhelp'
|
||||
|
||||
option_spec = {
|
||||
'outname': 'Output file base name (default "pydoc")'
|
||||
}
|
||||
|
||||
# don't copy the reST source
|
||||
copysource = False
|
||||
|
||||
def handle_finish(self):
|
||||
build_hhx(self, self.outdir, self.options.get('outname') or 'pydoc')
|
||||
build_hhx(self, self.outdir, self.config.get('htmlhelp_basename', 'pydoc'))
|
||||
|
||||
|
||||
class LaTeXBuilder(Builder):
|
||||
@ -660,6 +665,17 @@ class LaTeXBuilder(Builder):
|
||||
|
||||
def init(self):
|
||||
self.filenames = []
|
||||
self.document_data = map(list, self.config.get('latex_documents', ()))
|
||||
|
||||
# assign subdirs to titles
|
||||
self.titles = []
|
||||
for entry in self.document_data:
|
||||
# replace version with real version
|
||||
entry[0] = entry[0].replace('<auto>', self.config['version'])
|
||||
sourcename = entry[0]
|
||||
if sourcename.endswith('/index.rst'):
|
||||
sourcename = sourcename[:-9]
|
||||
self.titles.append((sourcename, entry[2]))
|
||||
|
||||
def get_outdated_files(self):
|
||||
return 'all documents' # for now
|
||||
@ -674,49 +690,42 @@ class LaTeXBuilder(Builder):
|
||||
else:
|
||||
return ''
|
||||
|
||||
def get_document_data(self):
|
||||
# Python specific...
|
||||
for toplevel in ["c-api", "distutils", "documenting", "extending",
|
||||
"install", "reference", "tutorial", "using", "library"]:
|
||||
yield (toplevel + SEP + 'index.rst', toplevel+'.tex', 'manual')
|
||||
yield ('whatsnew' + SEP + self.config['version'] + '.rst',
|
||||
'whatsnew.tex', 'howto')
|
||||
for howto in [fn for fn in self.env.all_files
|
||||
if fn.startswith('howto'+SEP)
|
||||
and not fn.endswith('index.rst')]:
|
||||
yield (howto, 'howto-'+howto[6:-4]+'.tex', 'howto')
|
||||
|
||||
def write(self, *ignored):
|
||||
# first, assemble the "special" docs that are in every PDF
|
||||
specials = []
|
||||
for fname in ["glossary", "about", "license", "copyright"]:
|
||||
specials.append(self.env.get_doctree(fname+".rst"))
|
||||
# first, assemble the "appendix" docs that are in every PDF
|
||||
appendices = []
|
||||
for fname in self.config.get('latex_appendices', []):
|
||||
appendices.append(self.env.get_doctree(fname))
|
||||
|
||||
docwriter = LaTeXWriter(self)
|
||||
docsettings = OptionParser(
|
||||
defaults=self.env.settings,
|
||||
components=(docwriter,)).get_default_values()
|
||||
|
||||
for sourcename, targetname, docclass in self.get_document_data():
|
||||
if not self.document_data:
|
||||
self.warn('No "latex_documents" config setting found; no documents '
|
||||
'will be written.')
|
||||
|
||||
for sourcename, targetname, title, author, docclass in self.document_data:
|
||||
destination = FileOutput(
|
||||
destination_path=path.join(self.outdir, targetname),
|
||||
encoding='utf-8')
|
||||
print "processing", targetname + "...",
|
||||
doctree = self.assemble_doctree(
|
||||
sourcename, specials=(docclass == 'manual') and specials or [])
|
||||
sourcename, appendices=(docclass == 'manual') and appendices or [])
|
||||
print "writing...",
|
||||
doctree.settings = docsettings
|
||||
doctree.settings.author = author
|
||||
doctree.settings.filename = sourcename
|
||||
doctree.settings.docclass = docclass
|
||||
output = docwriter.write(doctree, destination)
|
||||
print "done"
|
||||
|
||||
def assemble_doctree(self, indexfile, specials):
|
||||
def assemble_doctree(self, indexfile, appendices):
|
||||
self.filenames = set([indexfile, 'glossary.rst', 'about.rst',
|
||||
'license.rst', 'copyright.rst'])
|
||||
print green(indexfile),
|
||||
def process_tree(filename, tree):
|
||||
#tree = tree.deepcopy() XXX
|
||||
tree = tree.deepcopy()
|
||||
for toctreenode in tree.traverse(addnodes.toctree):
|
||||
newnodes = []
|
||||
includefiles = map(str, toctreenode['includefiles'])
|
||||
@ -734,11 +743,25 @@ class LaTeXBuilder(Builder):
|
||||
toctreenode.parent.replace(toctreenode, newnodes)
|
||||
return tree
|
||||
largetree = process_tree(indexfile, self.env.get_doctree(indexfile))
|
||||
largetree.extend(specials)
|
||||
largetree.extend(appendices)
|
||||
print
|
||||
print "resolving references..."
|
||||
# XXX problem here: :ref:s to distant tex files
|
||||
self.env.resolve_references(largetree, indexfile, self)
|
||||
# resolve :ref:s to distant tex files -- we can't add a cross-reference,
|
||||
# but append the document name
|
||||
for pendingnode in largetree.traverse(addnodes.pending_xref):
|
||||
filename = pendingnode['reffilename']
|
||||
sectname = pendingnode['refsectname']
|
||||
newnodes = [nodes.emphasis(sectname, sectname)]
|
||||
for subdir, title in self.titles:
|
||||
if filename.startswith(subdir):
|
||||
newnodes.append(nodes.Text(' (in ', ' (in '))
|
||||
newnodes.append(nodes.emphasis(title, title))
|
||||
newnodes.append(nodes.Text(')', ')'))
|
||||
break
|
||||
else:
|
||||
pass
|
||||
pendingnode.replace_self(newnodes)
|
||||
return largetree
|
||||
|
||||
def finish(self):
|
||||
@ -757,15 +780,15 @@ class ChangesBuilder(Builder):
|
||||
name = 'changes'
|
||||
|
||||
def init(self):
|
||||
from ._jinja import Environment, FileSystemLoader
|
||||
from sphinx._jinja import Environment, FileSystemLoader
|
||||
templates_path = path.join(path.dirname(__file__), 'templates')
|
||||
jinja_env = Environment(loader=FileSystemLoader(templates_path),
|
||||
jinja_env = Environment(loader=SphinxFileSystemLoader([templates_path]),
|
||||
# disable traceback, more likely that something in the
|
||||
# application is broken than in the templates
|
||||
friendly_traceback=False)
|
||||
self.ftemplate = jinja_env.get_template('versionchanges_frameset.html')
|
||||
self.vtemplate = jinja_env.get_template('versionchanges.html')
|
||||
self.stemplate = jinja_env.get_template('rstsource.html')
|
||||
self.ftemplate = jinja_env.get_template('changes/frameset.html')
|
||||
self.vtemplate = jinja_env.get_template('changes/versionchanges.html')
|
||||
self.stemplate = jinja_env.get_template('changes/rstsource.html')
|
||||
|
||||
def get_outdated_files(self):
|
||||
return self.outdir
|
||||
@ -813,15 +836,22 @@ class ChangesBuilder(Builder):
|
||||
(entry, filename, lineno))
|
||||
|
||||
ctx = {
|
||||
'project': self.config.get('project', 'Python'),
|
||||
'version': ver,
|
||||
'libchanges': sorted(libchanges.iteritems()),
|
||||
'apichanges': sorted(apichanges),
|
||||
'otherchanges': sorted(otherchanges.iteritems()),
|
||||
}
|
||||
with open(path.join(self.outdir, 'index.html'), 'w') as f:
|
||||
f = open(path.join(self.outdir, 'index.html'), 'w')
|
||||
try:
|
||||
f.write(self.ftemplate.render(ctx))
|
||||
with open(path.join(self.outdir, 'changes.html'), 'w') as f:
|
||||
finally:
|
||||
f.close()
|
||||
f = open(path.join(self.outdir, 'changes.html'), 'w')
|
||||
try:
|
||||
f.write(self.vtemplate.render(ctx))
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
hltext = ['.. versionadded:: %s' % ver,
|
||||
'.. versionchanged:: %s' % ver,
|
||||
@ -837,14 +867,17 @@ class ChangesBuilder(Builder):
|
||||
|
||||
self.msg('copying source files...')
|
||||
for filename in self.env.all_files:
|
||||
with open(path.join(self.srcdir, os_path(filename))) as f:
|
||||
lines = f.readlines()
|
||||
f = open(path.join(self.srcdir, os_path(filename)))
|
||||
lines = f.readlines()
|
||||
targetfn = path.join(self.outdir, 'rst', os_path(filename)) + '.html'
|
||||
ensuredir(path.dirname(targetfn))
|
||||
with codecs.open(targetfn, 'w', 'utf8') as f:
|
||||
f = codecs.open(targetfn, 'w', 'utf8')
|
||||
try:
|
||||
text = ''.join(hl(i+1, line) for (i, line) in enumerate(lines))
|
||||
ctx = {'filename': filename, 'text': text}
|
||||
f.write(self.stemplate.render(ctx))
|
||||
finally:
|
||||
f.close()
|
||||
shutil.copyfile(path.join(path.dirname(__file__), 'style', 'default.css'),
|
||||
path.join(self.outdir, 'default.css'))
|
||||
|
||||
|
@@ -8,7 +8,6 @@
:copyright: 2007-2008 by Georg Brandl.
:license: BSD.
"""
from __future__ import with_statement

import re
import string
@@ -19,7 +18,7 @@ from docutils import nodes
from docutils.parsers.rst import directives, roles
from docutils.parsers.rst.directives import admonitions

from . import addnodes
from sphinx import addnodes

# ------ index markup --------------------------------------------------------------

@@ -142,7 +141,7 @@ def parse_py_signature(signode, sig, desctype, env):
else:
fullname = env.currclass + '.' + name
else:
fullname = classname + name if classname else name
fullname = classname and classname + name or name

if classname:
signode += addnodes.desc_classname(classname, classname)
@@ -285,7 +284,7 @@ def add_refcount_annotation(env, node, name):
if entry.result_refs is None:
rc += "Always NULL."
else:
rc += ("New" if entry.result_refs else "Borrowed") + " reference."
rc += (entry.result_refs and "New" or "Borrowed") + " reference."
node += addnodes.refcount(rc, rc)


@@ -347,7 +346,7 @@ def desc_directive(desctype, arguments, options, content, lineno,
# only add target and index entry if this is the first description of the
# function name in this desc block
if not noindex and name not in names:
fullname = (env.currmodule + '.' if env.currmodule else '') + name
fullname = (env.currmodule and env.currmodule + '.' or '') + name
# note target
if fullname not in state.document.ids:
signode['names'].append(fullname)
@@ -612,8 +611,9 @@ def literalinclude_directive(name, arguments, options, content, lineno,
fn = path.normpath(path.join(source_dir, fn))

try:
with open(fn) as f:
text = f.read()
f = open(fn)
text = f.read()
f.close()
except (IOError, OSError):
retnode = state.document.reporter.warning(
'Include file %r not found or reading it failed' % arguments[0], line=lineno)
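Several hunks in this file (and in later files of this commit) rewrite 2.5-style conditional expressions as the older and/or idiom, e.g. "classname + name if classname else name" becomes "classname and classname + name or name". The two are equivalent only while the middle operand is truthy, which holds here because the concatenation is a non-empty string whenever classname is. A quick illustration of both the equivalence and the pitfall:

def fullname_25(classname, name):
    # Python 2.5+ conditional expression (the form being removed)
    return classname + name if classname else name

def fullname_24(classname, name):
    # and/or idiom that also runs on Python 2.4 (the form being added)
    return classname and classname + name or name

assert fullname_25('Foo.', 'bar') == fullname_24('Foo.', 'bar') == 'Foo.bar'
assert fullname_25('', 'bar') == fullname_24('', 'bar') == 'bar'

# the idiom breaks down when the middle operand can itself be falsy:
assert (True and '' or 'fallback') == 'fallback'   # a conditional would give ''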
@@ -8,18 +8,22 @@
:copyright: 2007-2008 by Georg Brandl.
:license: BSD.
"""
from __future__ import with_statement

import re
import os
import time
import heapq
import hashlib
import difflib
import itertools
import cPickle as pickle
from os import path
from string import uppercase
try:
import hashlib
md5 = hashlib.md5
except:
import md5
md5 = md5.new

from docutils import nodes
from docutils.io import FileInput
@@ -37,9 +41,9 @@ Body.enum.converters['loweralpha'] = \
Body.enum.converters['lowerroman'] = \
Body.enum.converters['upperroman'] = lambda x: None

from . import addnodes
from .util import get_matching_files, os_path, SEP
from .refcounting import Refcounts
from sphinx import addnodes
from sphinx.util import get_matching_files, os_path, SEP
from sphinx.refcounting import Refcounts
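The new import block above gives the environment a checksum function that also works on Python 2.4, where hashlib does not exist yet: the name md5 ends up bound either to hashlib.md5 or to the old md5.new constructor, and both take bytes and expose digest()/hexdigest(). A small sketch of how that binding is then used (the checksum helper is illustrative; the real code inlines it where (mtime, md5sum) pairs are recorded):

try:
    import hashlib
    md5 = hashlib.md5
except ImportError:            # Python < 2.5: fall back to the md5 module
    import md5 as _md5
    md5 = _md5.new

def checksum(path):
    # read the file and return its MD5 digest, as the environment does
    # when deciding whether a source file has really changed
    f = open(path, 'rb')
    try:
        return md5(f.read()).digest()
    finally:
        f.close()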
default_settings = {
|
||||
'embed_stylesheet': False,
|
||||
@ -156,8 +160,11 @@ class BuildEnvironment:
|
||||
|
||||
@staticmethod
|
||||
def frompickle(filename):
|
||||
with open(filename, 'rb') as picklefile:
|
||||
picklefile = open(filename, 'rb')
|
||||
try:
|
||||
env = pickle.load(picklefile)
|
||||
finally:
|
||||
picklefile.close()
|
||||
if env.version != ENV_VERSION:
|
||||
raise IOError('env version not current')
|
||||
return env
|
||||
@ -166,8 +173,11 @@ class BuildEnvironment:
|
||||
# remove unpicklable attributes
|
||||
warnfunc = self._warnfunc
|
||||
self.set_warnfunc(None)
|
||||
with open(filename, 'wb') as picklefile:
|
||||
picklefile = open(filename, 'wb')
|
||||
try:
|
||||
pickle.dump(self, picklefile, pickle.HIGHEST_PROTOCOL)
|
||||
finally:
|
||||
picklefile.close()
|
||||
# reset stream
|
||||
self.set_warnfunc(warnfunc)
|
||||
|
||||
@ -178,9 +188,8 @@ class BuildEnvironment:
|
||||
self.srcdir = srcdir
|
||||
self.config = {}
|
||||
|
||||
# read the refcounts file
|
||||
self.refcounts = Refcounts.fromfile(
|
||||
path.join(self.srcdir, 'data', 'refcounts.dat'))
|
||||
# refcount data if present
|
||||
self.refcounts = {}
|
||||
|
||||
# the docutils settings for building
|
||||
self.settings = default_settings.copy()
|
||||
@ -194,7 +203,7 @@ class BuildEnvironment:
|
||||
|
||||
# Build times -- to determine changed files
|
||||
# Also use this as an inventory of all existing and built filenames.
|
||||
self.all_files = {} # filename -> (mtime, md5) at the time of build
|
||||
self.all_files = {} # filename -> (mtime, md5sum) at the time of build
|
||||
|
||||
# File metadata
|
||||
self.metadata = {} # filename -> dict of metadata items
|
||||
@ -291,21 +300,22 @@ class BuildEnvironment:
|
||||
os_path(filename)[:-3] + 'doctree')):
|
||||
changed.append(filename)
|
||||
continue
|
||||
mtime, md5 = self.all_files[filename]
|
||||
mtime, md5sum = self.all_files[filename]
|
||||
newmtime = path.getmtime(path.join(self.srcdir, os_path(filename)))
|
||||
if newmtime == mtime:
|
||||
continue
|
||||
# check the MD5
|
||||
#with file(path.join(self.srcdir, filename), 'rb') as f:
|
||||
# newmd5 = hashlib.md5(f.read()).digest()
|
||||
#if newmd5 != md5:
|
||||
# newmd5sum = md5(f.read()).digest()
|
||||
#if newmd5sum != md5sum:
|
||||
changed.append(filename)
|
||||
|
||||
return added, changed, removed
|
||||
|
||||
# If one of these config values changes, all files need to be re-read.
|
||||
influential_config_values = [
|
||||
'version', 'release', 'today', 'today_fmt', 'unused_files'
|
||||
'version', 'release', 'today', 'today_fmt', 'unused_files',
|
||||
'project', 'refcount_file', 'add_function_parentheses', 'add_module_names'
|
||||
]
|
||||
|
||||
def update(self, config):
|
||||
@ -330,14 +340,15 @@ class BuildEnvironment:
|
||||
|
||||
self.config = config
|
||||
|
||||
# read the refcounts file
|
||||
if self.config.get('refcount_file'):
|
||||
self.refcounts = Refcounts.fromfile(
|
||||
path.join(self.srcdir, self.config['refcount_file']))
|
||||
|
||||
# clear all files no longer present
|
||||
for filename in removed:
|
||||
self.clear_file(filename)
|
||||
|
||||
# re-read the refcount file
|
||||
self.refcounts = Refcounts.fromfile(
|
||||
path.join(self.srcdir, 'data', 'refcounts.dat'))
|
||||
|
||||
# read all new and changed files
|
||||
for filename in added + changed:
|
||||
yield filename
|
||||
@ -364,9 +375,12 @@ class BuildEnvironment:
|
||||
self.build_toc_from(filename, doctree)
|
||||
|
||||
# calculate the MD5 of the file at time of build
|
||||
with file(src_path, 'rb') as f:
|
||||
md5 = hashlib.md5(f.read()).digest()
|
||||
self.all_files[filename] = (path.getmtime(src_path), md5)
|
||||
f = open(src_path, 'rb')
|
||||
try:
|
||||
md5sum = md5(f.read()).digest()
|
||||
finally:
|
||||
f.close()
|
||||
self.all_files[filename] = (path.getmtime(src_path), md5sum)
|
||||
|
||||
# make it picklable
|
||||
doctree.reporter = None
|
||||
@ -388,8 +402,11 @@ class BuildEnvironment:
|
||||
dirname = path.dirname(doctree_filename)
|
||||
if not path.isdir(dirname):
|
||||
os.makedirs(dirname)
|
||||
with file(doctree_filename, 'wb') as f:
|
||||
f = open(doctree_filename, 'wb')
|
||||
try:
|
||||
pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL)
|
||||
finally:
|
||||
f.close()
|
||||
else:
|
||||
return doctree
|
||||
|
||||
@ -446,9 +463,9 @@ class BuildEnvironment:
|
||||
includefiles_len = len(includefiles)
|
||||
for i, includefile in enumerate(includefiles):
|
||||
# the "previous" file for the first toctree item is the parent
|
||||
previous = includefiles[i-1] if i > 0 else filename
|
||||
previous = i > 0 and includefiles[i-1] or filename
|
||||
# the "next" file for the last toctree item is the parent again
|
||||
next = includefiles[i+1] if i < includefiles_len-1 else filename
|
||||
next = i < includefiles_len-1 and includefiles[i+1] or filename
|
||||
self.toctree_relations[includefile] = [filename, previous, next]
|
||||
# note that if the included file is rebuilt, this one must be
|
||||
# too (since the TOC of the included file could have changed)
|
||||
@ -539,8 +556,11 @@ class BuildEnvironment:
|
||||
def get_doctree(self, filename):
|
||||
"""Read the doctree for a file from the pickle and return it."""
|
||||
doctree_filename = path.join(self.doctreedir, os_path(filename)[:-3] + 'doctree')
|
||||
with file(doctree_filename, 'rb') as f:
|
||||
f = open(doctree_filename, 'rb')
|
||||
try:
|
||||
doctree = pickle.load(f)
|
||||
finally:
|
||||
f.close()
|
||||
doctree.reporter = Reporter(filename, 2, 4, stream=RedirStream(self._warnfunc))
|
||||
return doctree
|
||||
|
||||
@ -617,9 +637,11 @@ class BuildEnvironment:
|
||||
if filename == docfilename:
|
||||
newnode['refid'] = labelid
|
||||
else:
|
||||
# in case the following calls raises NoUri...
|
||||
# else the final node will contain a label name
|
||||
contnode = innernode
|
||||
# set more info in contnode in case the following call
|
||||
# raises NoUri, the builder will have to resolve these
|
||||
contnode = addnodes.pending_xref('')
|
||||
contnode['reffilename'] = filename
|
||||
contnode['refsectname'] = sectname
|
||||
newnode['refuri'] = builder.get_relative_uri(
|
||||
docfilename, filename) + '#' + labelid
|
||||
newnode.append(innernode)
|
||||
@ -669,14 +691,14 @@ class BuildEnvironment:
|
||||
newnode['refuri'] = (
|
||||
builder.get_relative_uri(docfilename, filename) + anchor)
|
||||
newnode['reftitle'] = '%s%s%s' % (
|
||||
('(%s) ' % platform if platform else ''),
|
||||
synopsis, (' (deprecated)' if deprecated else ''))
|
||||
(platform and '(%s) ' % platform),
|
||||
synopsis, (deprecated and ' (deprecated)' or ''))
|
||||
newnode.append(contnode)
|
||||
else:
|
||||
# "descrefs"
|
||||
modname = node['modname']
|
||||
clsname = node['classname']
|
||||
searchorder = 1 if node.hasattr('refspecific') else 0
|
||||
searchorder = node.hasattr('refspecific') and 1 or 0
|
||||
name, desc = self.find_desc(modname, clsname,
|
||||
target, typ, searchorder)
|
||||
if not desc:
|
||||
@ -716,7 +738,10 @@ class BuildEnvironment:
|
||||
# new entry types must be listed in directives.py!
|
||||
for type, string, tid, alias in entries:
|
||||
if type == 'single':
|
||||
entry, _, subentry = string.partition(';')
|
||||
try:
|
||||
entry, subentry = string.split(';', 1)
|
||||
except:
|
||||
entry, subentry = string, ''
|
||||
add_entry(entry.strip(), subentry.strip())
|
||||
elif type == 'pair':
|
||||
first, second = map(lambda x: x.strip(), string.split(';', 1))
|
||||
|
@@ -9,9 +9,9 @@
:license: BSD.
"""

import sys
import cgi
import parser
from collections import defaultdict

try:
import pygments
@@ -42,7 +42,7 @@ else:
Number: '#208050',
})

lexers = defaultdict(TextLexer,
lexers = dict(
none = TextLexer(),
python = PythonLexer(),
pycon = PythonConsoleLexer(),
@@ -71,8 +71,12 @@ def highlight_block(source, lang, dest='html'):
else:
# maybe Python -- try parsing it
try:
parser.suite('from __future__ import with_statement\n' +
source + '\n')
# if we're using 2.5, use the with statement
if sys.version_info >= (2, 5):
parser.suite('from __future__ import with_statement\n' +
source + '\n')
else:
parser.suite(source + '\n')
except (SyntaxError, UnicodeEncodeError):
return unhighlighted()
else:
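The last hunk guards the with_statement future-import trick with a version check, since injecting that __future__ line into parser.suite() is itself only possible on Python 2.5. A stand-alone sketch of the detection idea; the parser module is the stdlib one these Python versions shipped (it was removed in Python 3.10):

import sys
import parser  # stdlib parser module (Python 2.x, removed in 3.10)

def looks_like_python(source):
    """Return True if `source` parses as a Python suite."""
    try:
        if sys.version_info >= (2, 5):
            # allow blocks that already use the `with` statement
            parser.suite('from __future__ import with_statement\n' + source + '\n')
        else:
            parser.suite(source + '\n')
    except (SyntaxError, UnicodeEncodeError):
        return False
    return True

print(looks_like_python('for i in range(3):\n    print(i)'))   # True
print(looks_like_python('<p>not python</p>'))                  # False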
@ -9,7 +9,6 @@
|
||||
:copyright: 2007-2008 by Georg Brandl.
|
||||
:license: BSD.
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
import os
|
||||
import cgi
|
||||
@ -17,7 +16,7 @@ from os import path
|
||||
|
||||
from docutils import nodes
|
||||
|
||||
from . import addnodes
|
||||
from sphinx import addnodes
|
||||
|
||||
# Project file (*.hhp) template. 'outname' is the file basename (like
|
||||
# the pythlp in pythlp.hhp); 'version' is the doc version number (like
|
||||
@ -64,10 +63,10 @@ Full text search stop list file=%(outname)s.stp
|
||||
Full-text search=Yes
|
||||
Index file=%(outname)s.hhk
|
||||
Language=0x409
|
||||
Title=Python %(version)s Documentation
|
||||
Title=%(project)s %(version)s Documentation
|
||||
|
||||
[WINDOWS]
|
||||
%(outname)s="Python %(version)s Documentation","%(outname)s.hhc","%(outname)s.hhk",\
|
||||
%(outname)s="%(project)s %(version)s Documentation","%(outname)s.hhc","%(outname)s.hhk",\
|
||||
"index.html","index.html",,,,,0x63520,220,0x10384e,[0,0,1024,768],,,,,,,0
|
||||
|
||||
[FILES]
|
||||
@ -119,24 +118,32 @@ was will with
|
||||
|
||||
def build_hhx(builder, outdir, outname):
|
||||
builder.msg('dumping stopword list...')
|
||||
with open(path.join(outdir, outname+'.stp'), 'w') as f:
|
||||
f = open(path.join(outdir, outname+'.stp'), 'w')
|
||||
try:
|
||||
for word in sorted(stopwords):
|
||||
print >>f, word
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
builder.msg('writing project file...')
|
||||
with open(path.join(outdir, outname+'.hhp'), 'w') as f:
|
||||
f = open(path.join(outdir, outname+'.hhp'), 'w')
|
||||
try:
|
||||
f.write(project_template % {'outname': outname,
|
||||
'version': builder.config['version']})
|
||||
'version': builder.config['version'],
|
||||
'project': builder.config['project']})
|
||||
if not outdir.endswith(os.sep):
|
||||
outdir += os.sep
|
||||
olen = len(outdir)
|
||||
for root, dirs, files in os.walk(outdir):
|
||||
for fn in files:
|
||||
if fn.endswith(('.html', '.css', '.js')):
|
||||
if fn.endswith('.html') or fn.endswith('.css') or fn.endswith('.js'):
|
||||
print >>f, path.join(root, fn)[olen:].replace('/', '\\')
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
builder.msg('writing TOC file...')
|
||||
with open(path.join(outdir, outname+'.hhc'), 'w') as f:
|
||||
f = open(path.join(outdir, outname+'.hhc'), 'w')
|
||||
try:
|
||||
f.write(contents_header)
|
||||
# special books
|
||||
f.write('<LI> ' + object_sitemap % ('Main page', 'index.html'))
|
||||
@ -167,9 +174,12 @@ def build_hhx(builder, outdir, outname):
|
||||
write_toc(node[0], ullevel)
|
||||
write_toc(toc)
|
||||
f.write(contents_footer)
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
builder.msg('writing index file...')
|
||||
with open(path.join(outdir, outname+'.hhk'), 'w') as f:
|
||||
f = open(path.join(outdir, outname+'.hhk'), 'w')
|
||||
try:
|
||||
f.write('<UL>\n')
|
||||
def write_index(title, refs, subitems):
|
||||
if refs:
|
||||
@ -186,3 +196,5 @@ def build_hhx(builder, outdir, outname):
|
||||
for title, (refs, subitems) in group:
|
||||
write_index(title, refs, subitems)
|
||||
f.write('</UL>\n')
|
||||
finally:
|
||||
f.close()
|
||||
|
@@ -12,7 +12,7 @@
from docutils import nodes
from docutils.writers.html4css1 import Writer, HTMLTranslator as BaseTranslator

from .util.smartypants import sphinx_smarty_pants
from sphinx.util.smartypants import sphinx_smarty_pants


class HTMLWriter(Writer):
@@ -162,7 +162,7 @@ def translator_class(builder):

# overwritten
def visit_literal_block(self, node):
from .highlighting import highlight_block
from sphinx.highlighting import highlight_block
self.body.append(highlight_block(node.rawsource, self.highlightlang))
raise nodes.SkipNode

@@ -246,7 +246,8 @@ def translator_class(builder):
def depart_title(self, node):
close_tag = self.context[-1]
if builder.name != 'htmlhelp' and \
close_tag.startswith(('</h', '</a></h')) and \
(close_tag.startswith('</h') or
close_tag.startswith('</a></h')) and \
node.parent.hasattr('ids') and node.parent['ids']:
aname = node.parent['ids'][0]
# add permalink anchor
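The depart_title hunk is another 2.4-compatibility rewrite: str.startswith only learned to accept a tuple of prefixes in Python 2.5, so the tuple form is expanded into an explicit or. Both spellings behave identically:

close_tags = ['</h1>', '</a></h2>', '</p>']

# Python 2.5+ / 3.x: a tuple of prefixes
new_style = [t for t in close_tags if t.startswith(('</h', '</a></h'))]

# Python 2.4: one startswith call per prefix
old_style = [t for t in close_tags
             if t.startswith('</h') or t.startswith('</a></h')]

assert new_style == old_style == ['</h1>', '</a></h2>']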
@ -18,8 +18,8 @@ import string
|
||||
|
||||
from docutils import frontend, nodes, languages, writers, utils
|
||||
|
||||
from . import addnodes
|
||||
from . import highlighting
|
||||
from sphinx import addnodes
|
||||
from sphinx import highlighting
|
||||
|
||||
|
||||
HEADER = r'''%% Generated by Sphinx.
|
||||
@ -30,12 +30,8 @@ HEADER = r'''%% Generated by Sphinx.
|
||||
\title{%(title)s}
|
||||
\date{%(date)s}
|
||||
\release{%(release)s}
|
||||
\author{Guido van Rossum\\ %% XXX
|
||||
Fred L. Drake, Jr., editor}
|
||||
\authoraddress{
|
||||
\strong{Python Software Foundation}\\
|
||||
Email: \email{docs@python.org}
|
||||
}
|
||||
\author{%(author)s}
|
||||
%(preamble)s
|
||||
\makeindex
|
||||
\makemodindex
|
||||
'''
|
||||
@ -103,6 +99,8 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
self.options = {'docclass': docclass,
|
||||
'papersize': paper,
|
||||
'pointsize': builder.config.get('latex_font_size', '10pt'),
|
||||
'preamble': builder.config['latex_preamble'],
|
||||
'author': document.settings.author,
|
||||
'filename': document.settings.filename,
|
||||
'title': None, # is determined later
|
||||
'release': builder.config['release'],
|
||||
@ -112,7 +110,10 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
self.descstack = []
|
||||
self.highlightlang = 'python'
|
||||
self.written_ids = set()
|
||||
self.top_sectionlevel = 0 if docclass == 'manual' else 1
|
||||
if docclass == 'manual':
|
||||
self.top_sectionlevel = 0
|
||||
else:
|
||||
self.top_sectionlevel = 1
|
||||
# flags
|
||||
self.verbatim = None
|
||||
self.in_title = 0
|
||||
@ -577,7 +578,8 @@ class LaTeXTranslator(nodes.NodeVisitor):
|
||||
uri = node.get('refuri', '')
|
||||
if self.in_title or not uri:
|
||||
self.context.append('')
|
||||
elif uri.startswith(('mailto:', 'http:', 'ftp:')):
|
||||
elif uri.startswith('mailto:') or uri.startswith('http:') or \
|
||||
uri.startswith('ftp:'):
|
||||
self.body.append('\\href{%s}{' % self.encode(uri))
|
||||
self.context.append('}')
|
||||
elif uri.startswith('#'):
|
||||
|
@@ -6,10 +6,11 @@
Extract version info from Include/patchlevel.h.
Adapted from Doc/tools/getversioninfo.

XXX Python specific

:copyright: 2007-2008 by Georg Brandl.
:license: BSD.
"""
from __future__ import with_statement

import os
import re
@@ -23,12 +24,15 @@ def get_version_info(srcdir):
rx = re.compile(r"\s*#define\s+([a-zA-Z][a-zA-Z_0-9]*)\s+([a-zA-Z_0-9]+)")

d = {}
with open(patchlevel_h) as f:
f = open(patchlevel_h)
try:
for line in f:
m = rx.match(line)
if m is not None:
name, value = m.group(1, 2)
d[name] = value
finally:
f.close()

release = version = "%s.%s" % (d["PY_MAJOR_VERSION"], d["PY_MINOR_VERSION"])
micro = int(d["PY_MICRO_VERSION"])
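For reference, what that parsing loop extracts: the regex pulls "#define NAME VALUE" pairs out of CPython's Include/patchlevel.h. A self-contained sketch with a trimmed, made-up header fragment standing in for the real file:

import re

rx = re.compile(r"\s*#define\s+([a-zA-Z][a-zA-Z_0-9]*)\s+([a-zA-Z_0-9]+)")

sample = """\
#define PY_MAJOR_VERSION        2
#define PY_MINOR_VERSION        6
#define PY_MICRO_VERSION        0
"""

d = {}
for line in sample.splitlines():
    m = rx.match(line)
    if m is not None:
        name, value = m.group(1, 2)
        d[name] = value

version = "%s.%s" % (d["PY_MAJOR_VERSION"], d["PY_MINOR_VERSION"])
assert version == "2.6" and int(d["PY_MICRO_VERSION"]) == 0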
@@ -9,8 +9,6 @@
:copyright: 2007-2008 by Georg Brandl.
:license: BSD.
"""
from __future__ import with_statement


class RCEntry:
def __init__(self, name):
@@ -24,7 +22,8 @@ class Refcounts(dict):
@classmethod
def fromfile(cls, filename):
d = cls()
with open(filename, 'r') as fp:
fp = open(filename, 'r')
try:
for line in fp:
line = line.strip()
if line[:1] in ("", "#"):
@@ -49,4 +48,6 @@ class Refcounts(dict):
else:
entry.result_type = type
entry.result_refs = refcount
finally:
fp.close()
return d
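Refcounts.fromfile shows the pattern applied throughout this commit: every "with open(...)" block becomes an explicit try/finally plus close(), because the with statement needs Python 2.5 (and a __future__ import) that the files no longer assume. For plain file handling the two forms are equivalent:

def read_with(path):
    # 2.5+ form being removed: the context manager closes the file for us
    with open(path) as fp:
        return fp.read()

def read_try_finally(path):
    # 2.4-compatible form being added: close explicitly, even on error
    fp = open(path)
    try:
        return fp.read()
    finally:
        fp.close()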
@@ -14,7 +14,7 @@ import re
from docutils import nodes, utils
from docutils.parsers.rst import roles

from . import addnodes
from sphinx import addnodes

ws_re = re.compile(r'\s+')

@@ -128,7 +128,10 @@ def xfileref_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
if typ == 'term':
pnode['reftarget'] = ws_re.sub(' ', text).lower()
elif typ == 'option':
pnode['reftarget'] = text[1:] if text[0] in '-/' else text
if text[0] in '-/':
pnode['reftarget'] = text[1:]
else:
pnode['reftarget'] = text
else:
pnode['reftarget'] = ws_re.sub('', text)
pnode['modname'] = env.currmodule
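Taken together, the branches above normalize a cross-reference target differently per role. A compact stand-alone rendering of that logic, using the same ws_re pattern as the module (the helper function name is made up for illustration):

import re

ws_re = re.compile(r'\s+')

def normalize_target(typ, text):
    # mirrors the branches in xfileref_role above
    if typ == 'term':
        return ws_re.sub(' ', text).lower()      # terms: collapse spaces, lowercase
    elif typ == 'option':
        if text[0] in '-/':
            return text[1:]                      # options: strip the leading - or /
        else:
            return text
    else:
        return ws_re.sub('', text)               # everything else: drop whitespace

assert normalize_target('term', 'Byte  Code') == 'byte code'
assert normalize_target('option', '-q') == 'q'
assert normalize_target('func', 'os.\npath') == 'os.path'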
@ -10,12 +10,11 @@
|
||||
"""
|
||||
import re
|
||||
import pickle
|
||||
from collections import defaultdict
|
||||
|
||||
from docutils.nodes import Text, NodeVisitor
|
||||
|
||||
from .util.stemmer import PorterStemmer
|
||||
from .util.json import dump_json, load_json
|
||||
from sphinx.util.stemmer import PorterStemmer
|
||||
from sphinx.util.json import dump_json, load_json
|
||||
|
||||
|
||||
word_re = re.compile(r'\w+(?u)')
|
||||
@ -61,18 +60,14 @@ class IndexBuilder(object):
|
||||
self._titles = {}
|
||||
# stemmed word -> set(filenames)
|
||||
self._mapping = {}
|
||||
# category -> set(filenames)
|
||||
self._categories = {}
|
||||
|
||||
def load(self, stream, format):
|
||||
"""Reconstruct from frozen data."""
|
||||
frozen = self.formats[format][1](stream.read())
|
||||
index2fn = frozen[0]
|
||||
self._titles = dict(zip(frozen[0], frozen[2]))
|
||||
self._categories = dict((k, set(index2fn[i] for i in v))
|
||||
for (k, v) in frozen[1].iteritems())
|
||||
self._titles = dict(zip(frozen[0], frozen[1]))
|
||||
self._mapping = dict((k, set(index2fn[i] for i in v))
|
||||
for (k, v) in frozen[3].iteritems())
|
||||
for (k, v) in frozen[2].iteritems())
|
||||
|
||||
def dump(self, stream, format):
|
||||
"""Dump the frozen index to a stream."""
|
||||
@ -87,8 +82,6 @@ class IndexBuilder(object):
|
||||
fn2index = dict((f, i) for (i, f) in enumerate(fns))
|
||||
return [
|
||||
fns,
|
||||
dict((k, [fn2index[fn] for fn in v])
|
||||
for (k, v) in self._categories.iteritems()),
|
||||
titles,
|
||||
dict((k, [fn2index[fn] for fn in v])
|
||||
for (k, v) in self._mapping.iteritems()),
|
||||
@ -103,13 +96,10 @@ class IndexBuilder(object):
|
||||
self._titles = new_titles
|
||||
for wordnames in self._mapping.itervalues():
|
||||
wordnames.intersection_update(filenames)
|
||||
for catnames in self._categories.itervalues():
|
||||
catnames.intersection_update(filenames)
|
||||
|
||||
def feed(self, filename, category, title, doctree):
|
||||
def feed(self, filename, title, doctree):
|
||||
"""Feed a doctree to the index."""
|
||||
self._titles[filename] = title
|
||||
self._categories.setdefault(category, set()).add(filename)
|
||||
|
||||
visitor = WordCollector(doctree)
|
||||
doctree.walk(visitor)
|
||||
@ -125,25 +115,24 @@ class SearchFrontend(object):
|
||||
"""
|
||||
|
||||
def __init__(self, index):
|
||||
self.filenames, self.areas, self.titles, self.words = index
|
||||
self.filenames, self.titles, self.words = index
|
||||
self._stemmer = Stemmer()
|
||||
|
||||
def query(self, required, excluded, areas):
|
||||
file_map = defaultdict(set)
|
||||
def query(self, required, excluded):
|
||||
file_map = {}
|
||||
for word in required:
|
||||
if word not in self.words:
|
||||
break
|
||||
for fid in self.words[word]:
|
||||
file_map[fid].add(word)
|
||||
file_map.setdefault(fid, set()).add(word)
|
||||
|
||||
return sorted(((self.filenames[fid], self.titles[fid])
|
||||
for fid, words in file_map.iteritems()
|
||||
if len(words) == len(required) and
|
||||
any(fid in self.areas.get(area, ()) for area in areas) and not
|
||||
if len(words) == len(required) and not
|
||||
any(fid in self.words.get(word, ()) for word in excluded)
|
||||
), key=lambda x: x[1].lower())
|
||||
|
||||
def search(self, searchstring, areas):
|
||||
def search(self, searchstring):
|
||||
required = set()
|
||||
excluded = set()
|
||||
for word in searchstring.split():
|
||||
@ -154,4 +143,4 @@ class SearchFrontend(object):
|
||||
storage = required
|
||||
storage.add(self._stemmer.stem(word.lower()))
|
||||
|
||||
return self.query(required, excluded, areas)
|
||||
return self.query(required, excluded)
|
||||
|
@@ -1,5 +1,5 @@
/**
* Python Doc Design
* Sphinx Doc Design
*/

body {
@@ -1,5 +1,5 @@
/**
* Python Doc Design -- Right Side Bar Overrides
* Sphinx Doc Design -- Right Side Bar Overrides
*/

@ -228,27 +228,15 @@ var Search = {
var params = $.getQueryParameters();
if (params.q) {
var query = params.q[0];
var areas = params.area || [];

// auto default
if (areas.length == 1 && areas[0] == 'default') {
areas = ['tutorial', 'library', 'install', 'distutils'];
}

// update input fields
$('input[@type="checkbox"]').each(function() {
this.checked = $.contains(areas, this.value);
});
$('input[@name="q"]')[0].value = query;

this.performSearch(query, areas);
this.performSearch(query);
}
},

/**
* perform a search for something
*/
performSearch : function(query, areas) {
performSearch : function(query) {
// create the required interface elements
var out = $('#search-results');
var title = $('<h2>Searching</h2>').appendTo(out);
@ -301,14 +289,12 @@ var Search = {
console.debug('SEARCH: searching for:');
console.info('required: ', searchwords);
console.info('excluded: ', excluded);
console.info('areas: ', areas);

// fetch searchindex and perform search
$.getJSON('searchindex.json', function(data) {

// prepare search
var filenames = data[0];
var areaMap = data[1];
var titles = data[2]
var words = data[3];
var fileMap = {};
@ -342,38 +328,25 @@ var Search = {
if (fileMap[file].length != searchwords.length) {
continue;
}
var valid = false;

// check if the file is in one of the searched
// areas.
for (var i = 0; i < areas.length; i++) {
if ($.contains(areaMap[areas[i]] || [], file)) {
valid = true;
break;
}
};

// ensure that none of the excluded words is in the
// search result.
if (valid) {
for (var i = 0; i < excluded.length; i++) {
if ($.contains(words[excluded[i]] || [], file)) {
valid = false;
break;
}
for (var i = 0; i < excluded.length; i++) {
if ($.contains(words[excluded[i]] || [], file)) {
valid = false;
break;
}
}

// if we have still a valid result we can add it
// to the result list
if (valid) {
results.push([filenames[file], titles[file]]);
}
// if we have still a valid result we can add it
// to the result list
if (valid) {
results.push([filenames[file], titles[file]]);
}
}

// delete unused variables in order to not waste
// memory until list is retrieved completely
delete filenames, areaMap, titles, words, data;
delete filenames, titles, words, data;

// now sort the results by title
results.sort(function(a, b) {

@ -1,5 +1,5 @@
/**
* Python Doc Design -- Sticky Sidebar Overrides
* Sphinx Doc Design -- Sticky Sidebar Overrides
*/

div.sidebar {

@ -1,5 +1,5 @@
/**
* Python Doc Design
* Sphinx Doc Design -- traditional python.org style
*/

body {

@ -2,7 +2,7 @@
"http://www.w3.org/TR/html4/frameset.dtd">
<html>
<head>
<title>Changes in Version {{ version }} — Python Documentation</title>
<title>Changes in Version {{ version }} — {{ project }} Documentation</title>
</head>
<frameset cols="45%,*">
<frame name="main" src="changes.html">

@ -2,7 +2,7 @@
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<title>{{ filename }} — Python Documentation</title>
<title>{{ filename }} — {{ project }} Documentation</title>
<style type="text/css">
.hl { background-color: yellow }
</style>

@ -9,7 +9,7 @@
<head>
<link rel="stylesheet" href="default.css">
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>Changes in Version {{ version }} — Python Documentation</title>
<title>Changes in Version {{ version }} — {{ project }} Documentation</title>
</head>
<body>
<div class="document">

@ -1,53 +0,0 @@
{% extends "layout.html" %}
{% set title = 'Download' %}
{% block body %}

<h1>Download Python {{ release }} Documentation
{%- if last_updated %} (last updated on {{ last_updated }}){% endif %}</h1>

<p>To download an archive containing all the documents for this version of
Python in one of various formats, follow one of links in this table. The numbers
in the table are the size of the download files in Kilobytes.</p>

{# XXX download links, relative to download_base_url #}


<p>These archives contain all the content in the documentation section.</p>

<h2>Unpacking</h2>

<p>Unix users should download the .tar.bz2 archives; these are bzipped tar
archives and can be handled in the usual way using tar and the bzip2
program. The <a href="http://www.info-zip.org">InfoZIP</a> unzip program can be
used to handle the ZIP archives if desired. The .tar.bz2 archives provide the
best compression and fastest download times.</p>

<p>Windows users can use the ZIP archives since those are customary on that
platform. These are created on Unix using the InfoZIP zip program. They may be
unpacked using the free WiZ tool (from the InfoZIP developers) or any other
tool for handling ZIP archives; any of them should work.</p>

<p>Note that the .tar.bz2 files are smaller than the other archives; Windows
users may want to install the bzip2 tools on their systems as well. Windows
binaries for a command-line tool are available at <a
href="http://www.bzip.org">The bzip2 and libbzip2 official home page</a>, but
most other archiving utilities support the tar and bzip2 formats as well.</p>


<h2>Problems</h2>

<p><strong>Printing PDFs using Adobe Acrobat Reader 5.0:</strong> Adobe has
reportedly admitted that there is a bug in Acrobat Reader 5.0 which causes it
not to print at least some PDF files generated by pdfTeX. This software is used
to produce the PDF version of the Python documentation, and our documents
definately trigger this bug in Acrobat Reader. To print the PDF files, use
Acrobat Reader 4.x, ghostscript, or xpdf.</p>

<p>Reportedly, Acrobat Reader 6.0 can print these documents without this
problem, but we've not yet had an opportunity to confirm the report. Sadly,
version 6.0 is not yet available on Unix platforms.</p>

<p>If you have comments or suggestions for the Python documentation, please send
email to <a href="docs@python.org">docs@python.org</a>.</p>

{% endblock %}

@ -4,67 +4,27 @@
(pathto('@rss/recent'), 'application/rss+xml', 'Recent Comments')
] %}
{% block body %}
<h1>Python Documentation</h1>
<h1>{{ project }} Documentation</h1>
<p>
Welcome! This is the documentation for Python
Welcome! This is the documentation for {{ project }}
{{ release }}{% if last_updated %}, last updated {{ last_updated }}{% endif %}.
</p>

<p><strong>Parts of the documentation:</strong></p>
<table class="contentstable" align="center"><tr>
<td width="50%">
<p class="biglink"><a class="biglink" href="{{ pathto("whatsnew/" + version + ".rst") }}">What's new in Python {{ version }}?</a><br>
<span class="linkdescr">changes since previous major release</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("tutorial/index.rst") }}">Tutorial</a><br>
<span class="linkdescr">start here</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("using/index.rst") }}">Using Python</a><br>
<span class="linkdescr">how to use Python on different platforms</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("reference/index.rst") }}">Language Reference</a><br>
<span class="linkdescr">describes syntax and language elements</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("library/index.rst") }}">Library Reference</a><br>
<span class="linkdescr">keep this under your pillow</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("howto/index.rst") }}">Python HOWTOs</a><br>
<span class="linkdescr">in-depth documents on specific topics</span></p>
</td><td width="50%">
<p class="biglink"><a class="biglink" href="{{ pathto("extending/index.rst") }}">Extending and Embedding</a><br>
<span class="linkdescr">tutorial for C/C++ programmers</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("c-api/index.rst") }}">Python/C API</a><br>
<span class="linkdescr">reference for C/C++ programmers</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("install/index.rst") }}">Installing Python Modules</a><br>
<span class="linkdescr">information for installers & sys-admins</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("distutils/index.rst") }}">Distributing Python Modules</a><br>
<span class="linkdescr">sharing modules with others</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("documenting/index.rst") }}">Documenting Python</a><br>
<span class="linkdescr">guide for documentation authors</span></p>
</td></tr>
</table>

{% if indextemplate %}
{{ rendertemplate(indextemplate) }}
{% else %}
<p><strong>Indices and tables:</strong></p>
<table class="contentstable" align="center"><tr>
<td width="50%">
<p class="biglink"><a class="biglink" href="{{ pathto("modindex.rst") }}">Global Module Index</a><br>
<span class="linkdescr">quick access to all modules</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("genindex.rst") }}">General Index</a><br>
<span class="linkdescr">all functions, classes, terms</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("glossary.rst") }}">Glossary</a><br>
<span class="linkdescr">the most important terms explained</span></p>
</td><td width="50%">
<p class="biglink"><a class="biglink" href="{{ pathto("search.rst") }}">Search page</a><br>
<span class="linkdescr">search this documentation</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("contents.rst") }}">Complete Table of Contents</a><br>
<p class="biglink"><a class="biglink" href="{{ pathto("contents") }}">Complete Table of Contents</a><br>
<span class="linkdescr">lists all sections and subsections</span></p>
</td></tr>
</table>

<p><strong>Meta information:</strong></p>
<table class="contentstable" align="center"><tr>
<td width="50%">
<p class="biglink"><a class="biglink" href="{{ pathto("bugs.rst") }}">Reporting bugs</a></p>
<p class="biglink"><a class="biglink" href="{{ pathto("about.rst") }}">About the documentation</a></p>
<p class="biglink"><a class="biglink" href="{{ pathto("search") }}">Search page</a><br>
<span class="linkdescr">search this documentation</span></p>
</td><td width="50%">
<p class="biglink"><a class="biglink" href="{{ pathto("license.rst") }}">History and License of Python</a></p>
<p class="biglink"><a class="biglink" href="{{ pathto("copyright.rst") }}">Copyright</a></p>
<p class="biglink"><a class="biglink" href="{{ pathto("modindex") }}">Global Module Index</a><br>
<span class="linkdescr">quick access to all modules</span></p>
<p class="biglink"><a class="biglink" href="{{ pathto("genindex") }}">General Index</a><br>
<span class="linkdescr">all functions, classes, terms</span></p>
</td></tr>
</table>

{% endif %}
{% endblock %}

@ -1,4 +1,4 @@
{% if builder != 'htmlhelp' %}{% set titlesuffix = " — Python Documentation" %}{% endif -%}
{% if builder != 'htmlhelp' %}{% set titlesuffix = " — " + project + " Documentation" %}{% endif -%}
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
@ -6,7 +6,7 @@
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>{{ title|striptags }}{{ titlesuffix }}</title>
{%- if builder == 'web' %}
<link rel="stylesheet" href="{{ pathto('index.rst') }}?do=stylesheet{%
<link rel="stylesheet" href="{{ pathto('index') }}?do=stylesheet{%
if in_admin_panel %}&admin=yes{% endif %}" type="text/css">
{%- for link, type, title in page_links %}
<link rel="alternate" type="{{ type|e(true) }}" title="{{ title|e(true) }}" href="{{ link|e(true) }}">
@ -26,12 +26,16 @@
<script type="text/javascript" src="{{ pathto('style/interface.js', 1) }}"></script>
<script type="text/javascript" src="{{ pathto('style/doctools.js', 1) }}"></script>
{%- endif %}
<link rel="author" title="About these documents" href="{{ pathto('about.rst') }}">
<link rel="contents" title="Global table of contents" href="{{ pathto('contents.rst') }}">
<link rel="index" title="Global index" href="{{ pathto('genindex.rst') }}">
<link rel="search" title="Search" href="{{ pathto('search.rst') }}">
<link rel="copyright" title="Copyright" href="{{ pathto('copyright.rst') }}">
<link rel="top" title="Python Documentation" href="{{ pathto('index.rst') }}">
{%- if hasdoc('about') %}
<link rel="author" title="About these documents" href="{{ pathto('about') }}">
{%- endif %}
<link rel="contents" title="Global table of contents" href="{{ pathto('contents') }}">
<link rel="index" title="Global index" href="{{ pathto('genindex') }}">
<link rel="search" title="Search" href="{{ pathto('search') }}">
{%- if hasdoc('copyright') %}
<link rel="copyright" title="Copyright" href="{{ pathto('copyright') }}">
{%- endif %}
<link rel="top" title="{{ project }} Documentation" href="{{ pathto('index') }}">
{%- if parents %}
<link rel="up" title="{{ parents[-1].title|striptags }}" href="{{ parents[-1].link|e }}">
{%- endif %}
@ -48,8 +52,8 @@
<div class="related">
<h3>Navigation</h3>
<ul>
<li class="right" style="margin-right: 10px"><a href="{{ pathto('genindex.rst') }}" title="General Index" accesskey="I">index</a></li>
<li class="right"><a href="{{ pathto('modindex.rst') }}" title="Global Module Index" accesskey="M">modules</a> |</li>
<li class="right" style="margin-right: 10px"><a href="{{ pathto('genindex') }}" title="General Index" accesskey="I">index</a></li>
<li class="right"><a href="{{ pathto('modindex') }}" title="Global Module Index" accesskey="M">modules</a> |</li>
{%- if next %}
<li class="right"><a href="{{ next.link|e }}" title="{{ next.title|striptags }}" accesskey="N">next</a> |</li>
{%- endif %}
@ -57,10 +61,10 @@
<li class="right"><a href="{{ prev.link|e }}" title="{{ prev.title|striptags }}" accesskey="P">previous</a> |</li>
{%- endif %}
{%- if builder == 'web' %}
<li class="right"><a href="{{ pathto('settings.rst') }}"
<li class="right"><a href="{{ pathto('settings') }}"
title="Customize your viewing settings" accesskey="S">settings</a> |</li>
{%- endif %}
<li><a href="{{ pathto('index.rst') }}">Python v{{ release }} Documentation</a> »</li>
<li><a href="{{ pathto('index') }}">{{ project }} v{{ release }} Documentation</a> »</li>
{%- for parent in parents %}
<li><a href="{{ parent.link|e }}" accesskey="U">{{ parent.title }}</a> »</li>
{%- endfor %}
@ -69,26 +73,72 @@
{% endfilter %}
<div class="document">
<div class="documentwrapper">
{%- if builder != 'htmlhelp' %}
{%- if builder != 'htmlhelp' %}
<div class="bodywrapper">
{%- endif %}
{%- endif %}
<div class="body">
{% block body %}{% endblock %}
</div>
{%- if builder != 'htmlhelp' %}
{%- if builder != 'htmlhelp' %}
</div>
{%- endif %}
{%- endif %}
</div>
{%- if builder != 'htmlhelp' %}
{%- include "sidebar.html" %}
<div class="sidebar">
<div class="sidebarwrapper">
{% if display_toc %}
<h3>Table Of Contents</h3>
{{ toc }}
{% endif %}
{%- if prev %}
<h4>Previous topic</h4>
<p class="topless"><a href="{{ prev.link|e }}" title="previous chapter">{{ prev.title }}</a></p>
{%- endif %}
{%- if next %}
<h4>Next topic</h4>
<p class="topless"><a href="{{ next.link|e }}" title="next chapter">{{ next.title }}</a></p>
{%- endif %}
{% if sourcename %}
<h3>This Page</h3>
<ul class="this-page-menu">
{% if builder == 'web' %}
<li><a href="#comments">Comments ({{ comments|length }} so far)</a></li>
<li><a href="{{ pathto('@edit/' + sourcename)|e }}">Suggest Change</a></li>
<li><a href="{{ pathto('@source/' + sourcename)|e }}">Show Source</a></li>
{% elif builder == 'html' %}
<li><a href="{{ pathto(sourcename, true)|e }}">Show Source</a></li>
{% endif %}
</ul>
{% endif %}
{% if customsidebar %}
{{ rendertemplate(customsidebar) }}
{% endif %}
{% if current_page_name != "search" %}
<h3>{{ builder == 'web' and 'Keyword' or 'Quick' }} search</h3>
<form class="search" action="{{ pathto('search') }}" method="get">
<input type="text" name="q" size="18"> <input type="submit" value="Go">
<input type="hidden" name="check_keywords" value="yes">
<input type="hidden" name="area" value="default">
</form>
{% if builder == 'web' %}
<p style="font-size: 90%">Enter a module, class or function name.</p>
{% endif %}
{% endif %}
</div>
</div>
{%- endif %}
<div class="clearer"></div>
</div>
{{ relbar }}
<div class="footer">
© <a href="{{ pathto('copyright.rst') }}">Copyright</a>
1990-2007, Python Software Foundation.
{% if last_updated %}Last updated on {{ last_updated }}.{% endif %}
{%- if hasdoc('copyright') %}
© <a href="{{ pathto('copyright') }}">Copyright</a> {{ copyright }}.
{%- else %}
© Copyright {{ copyright }}.
{%- endif %}
{%- if last_updated %}
Last updated on {{ last_updated }}.
{%- endif %}
</div>
</body>
</html>

@ -6,39 +6,14 @@
{% block body %}
<h1 id="search-documentation">Search Documentation</h1>
<p>
From here you can search the Python documentation. Enter your search
From here you can search the {{ project }} documentation. Enter your search
words into the box below and click "search". Note that the search
function will automatically search for all of the words. Pages
containing less words won't appear in the result list.
</p>
<p>
In order to speed up the results you can limit your search by
excluding some of the sections listed below.
</p>
<form action="" method="get">
<input type="text" name="q" value="">
<input type="submit" value="search">
<p>
Sections:
</p>
<ul class="fakelist">
{% for id, name, checked in [
('tutorial', 'Python Tutorial', true),
('library', 'Library Reference', true),
('using', 'Using Python', true),
('reference', 'Language Reference', false),
('extending', 'Extending and Embedding', false),
('c-api', 'Python/C API', false),
('install', 'Installing Python Modules', true),
('distutils', 'Distributing Python Modules', true),
('documenting', 'Documenting Python', false),
('whatsnew', 'What\'s new in Python?', false),
] -%}
<li><input type="checkbox" name="area" id="area-{{ id }}" value="{{ id
}}"{% if checked %} checked{% endif %}>
<label for="area-{{ id }}">{{ name }}</label></li>
{% endfor %}
</ul>
</form>
{% if search_performed %}
<h2>Search Results</h2>

@ -1,6 +0,0 @@
{% extends "layout.html" %}
{% set title = 'Page Source' %}
{% block body %}
<h1 id="page-source">Page Source</h1>
{{ highlighted_code }}
{% endblock %}

@ -1,62 +0,0 @@
{# this file is included by layout.html #}
<div class="sidebar">
<div class="sidebarwrapper">
{% if display_toc %}
<h3>Table Of Contents</h3>
{{ toc }}
{% endif %}
{%- if prev %}
<h4>Previous topic</h4>
<p class="topless"><a href="{{ prev.link|e }}" title="previous chapter">{{ prev.title }}</a></p>
{%- endif %}
{%- if next %}
<h4>Next topic</h4>
<p class="topless"><a href="{{ next.link|e }}" title="next chapter">{{ next.title }}</a></p>
{%- endif %}
{% if sourcename %}
<h3>This Page</h3>
<ul class="this-page-menu">
{% if builder == 'web' %}
<li><a href="#comments">Comments ({{ comments|length }} so far)</a></li>
<li><a href="{{ pathto('@edit/' + sourcename)|e }}">Suggest Change</a></li>
<li><a href="{{ pathto('@source/' + sourcename)|e }}">Show Source</a></li>
{% elif builder == 'html' %}
<li><a href="{{ pathto(sourcename, true)|e }}">Show Source</a></li>
{% endif %}
{# <li><a href="http://bugs.python.org/XXX?page={{ sourcename|e }}">Report Bug</a></li> #}
</ul>
{% endif %}
{% if current_page_name == "index" %}
<h3>Download</h3>
<p><a href="{{ pathto('download.rst')|e }}">Download these documents</a></p>
<h3>Other resources</h3>
<ul>
{# XXX: many of these should probably be merged in the main docs #}
<li><a href="http://www.python.org/doc/faq/">FAQs</a></li>
<li><a href="http://www.python.org/doc/intros/">Introductions</a></li>
<li><a href="http://www.python.org/doc/essays/">Guido's Essays</a></li>
<li><a href="http://www.python.org/doc/newstyle/">New-style Classes</a></li>
<li><a href="http://www.python.org/dev/peps/">PEP Index</a></li>
<li><a href="http://wiki.python.org/moin/BeginnersGuide">Beginner's Guide</a></li>
<li><a href="http://www.python.org/topics/">Topic Guides</a></li>
<li><a href="http://wiki.python.org/moin/PythonBooks">Book List</a></li>
<li><a href="http://www.python.org/doc/av/">Audio/Visual Talks</a></li>
<li><a href="http://www.python.org/doc/other/">Other Doc Collections</a></li>
<li> </li>
<li><a href="http://www.python.org/doc/versions/">Previous versions</a></li>
<li> </li>
</ul>
{% endif %}
{% if current_page_name != "search" %}
<h3>{{ builder == 'web' and 'Keyword' or 'Quick' }} search</h3>
<form class="search" action="{{ pathto('search.rst') }}" method="get">
<input type="text" name="q" size="18"> <input type="submit" value="Go">
<input type="hidden" name="check_keywords" value="yes">
<input type="hidden" name="area" value="default">
</form>
{% if builder == 'web' %}
<p style="font-size: 90%">Enter a module, class or function name.</p>
{% endif %}
{% endif %}
</div>
</div>

@ -3,7 +3,7 @@
<h1>Moderate Comments</h1>
<p>
From here you can delete and edit comments. If you want to be
informed about new comments you can use the <a href="{{ pathto('index.rst')
informed about new comments you can use the <a href="{{ pathto('index')
}}?feed=recent_comments">feed</a> provided.
</p>
<form action="" method="post">
@ -71,7 +71,7 @@
<th colspan="4" class="recent_comments">
<a href="{{ pathto('@admin/moderate_comments/recent_comments/', true)
}}">Recent Comments</a>
<span class="meta">(<a href="{{ pathto('index.rst')
<span class="meta">(<a href="{{ pathto('index')
}}?feed=recent_comments">feed</a>)</span>
</th>
</tr>

@ -19,7 +19,7 @@
<p>
HTML is not supported, relative link targets are treated as
quicklinks and code blocks that start with ">>>" are
highlighted as interactive python sessions.
highlighted as interactive Python sessions.
</p>
</div>
</div>

@ -22,9 +22,10 @@
<h1 id="suggest-changes-for-this-page">Suggest changes for this page</h1>
{% if not rendered %}
<p>Here you can edit the source of “{{ doctitle|striptags }}” and
submit the results as a patch to the Python documentation team. If you want
to know more about reST, the markup language used, read
<a href="{{ pathto('documenting/index.rst') }}">Documenting Python</a>.</p>
submit the results as a patch to the {{ project }} documentation team.
{# XXX Python specific #}
If you want to know more about reST, the markup language used, read
<a href="{{ pathto('documenting/index') }}">Documenting Python</a>.</p>
{% endif %}
<form action="{{ submiturl }}" method="post">
<div id="suggest-changes-box">

@ -20,12 +20,12 @@
</ul>
{% endif %}
<p>
If you want to search the entire Python documentation for the string
"{{ keyword|e }}", then <a href="{{ pathto('search.rst') }}?q={{ keyword|e
If you want to search the entire {{ project }} documentation for the string
"{{ keyword|e }}", then <a href="{{ pathto('search') }}?q={{ keyword|e
}}">use the search function</a>.
</p>
<p>
For a quick overview over all documented modules,
<a href="{{ pathto('library/index.rst') }}">click here</a>.
<a href="{{ pathto('modindex') }}">click here</a>.
</p>
{% endblock %}

@ -6,6 +6,6 @@
The page {{ req.path|e }} does not exist on this server.
</p>
<p>
Click here to <a href="{{ pathto('index.rst') }}">return to the index</a>.
Click here to <a href="{{ pathto('index') }}">return to the index</a>.
</p>
{% endblock %}

@ -2,13 +2,13 @@
{% set title = 'Settings' %}
{% set current_page_name = 'settings' %}
{% block body %}
<h1>Python Documentation Settings</h1>
<h1>{{ project }} Documentation Settings</h1>
<p>
Here you can customize how you want to view the Python documentation.
Here you can customize how you want to view the {{ project }} documentation.
These settings are saved using a cookie on your computer.
</p>

<form action="{{ pathto('settings.rst') }}" method="post">
<form action="{{ pathto('settings') }}" method="post">
<p class="subhead">Select your stylesheet:</p>
<p>
{%- for design, (foo, descr) in known_designs %}

@ -5,7 +5,7 @@
{% endblock %}
{% block body %}
<h1>Patch submitted</h1>
<p>Your patch has been submitted to the Python documentation team and will be
<p>Your patch has been submitted to the {{ project }} documentation team and will be
processed shortly.</p>
<p>You will be redirected to the
<a href="{{ backlink|e }}">original documentation page</a> shortly.</p>

@ -77,14 +77,6 @@ def get_matching_files(dirname, pattern, exclude=()):
yield canonical_path(qualified_name)


def get_category(filename):
"""Get the "category" part of a RST filename."""
parts = filename.split(SEP, 1)
if len(parts) < 2:
return
return parts[0]


def shorten_result(text='', keywords=[], maxlen=240, fuzz=60):
if not text:
text = ''

@ -1,4 +1,3 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
sphinx.util.stemmer

@ -9,9 +9,9 @@
:license: BSD.
"""

from .util import render_template
from .wsgiutil import Response, RedirectResponse, NotFound
from .database import Comment
from sphinx.web.util import render_template
from sphinx.web.wsgiutil import Response, RedirectResponse, NotFound
from sphinx.web.database import Comment


class AdminPanel(object):

@ -9,7 +9,7 @@
:copyright: 2007-2008 by Armin Ronacher.
:license: BSD.
"""
from __future__ import with_statement

import re
import urllib
import time
@ -43,14 +43,20 @@ class AntiSpam(object):
else:
lines = [l.strip() for l in data.splitlines()
if not l.startswith('#')]
with file(bad_content_file, 'w') as f:
f = open(bad_content_file, 'w')
try:
f.write('\n'.join(lines))
finally:
f.close()
last_change = int(time.time())

if lines is None:
try:
with file(bad_content_file) as f:
f = open(bad_content_file)
try:
lines = [l.strip() for l in f]
finally:
f.close()
except:
lines = []
self.rules = [re.compile(rule) for rule in lines if rule]

@ -9,7 +9,6 @@
:copyright: 2007-2008 by Georg Brandl, Armin Ronacher.
:license: BSD.
"""
from __future__ import with_statement

import os
import re
@ -24,24 +23,23 @@ import cPickle as pickle
import cStringIO as StringIO
from os import path
from itertools import groupby
from collections import defaultdict

from .feed import Feed
from .mail import Email
from .util import render_template, get_target_uri, blackhole_dict, striptags
from .admin import AdminPanel
from .userdb import UserDatabase
from .robots import robots_txt
from .oldurls import handle_html_url
from .antispam import AntiSpam
from .database import connect, set_connection, Comment
from .wsgiutil import Request, Response, RedirectResponse, \
from sphinx.web.feed import Feed
from sphinx.web.mail import Email
from sphinx.web.util import render_template, get_target_uri, blackhole_dict, striptags
from sphinx.web.admin import AdminPanel
from sphinx.web.userdb import UserDatabase
from sphinx.web.robots import robots_txt
from sphinx.web.oldurls import handle_html_url
from sphinx.web.antispam import AntiSpam
from sphinx.web.database import connect, set_connection, Comment
from sphinx.web.wsgiutil import Request, Response, RedirectResponse, \
JSONResponse, SharedDataMiddleware, NotFound, get_base_uri

from ..util import relative_uri
from ..search import SearchFrontend
from ..htmlwriter import HTMLWriter
from ..builder import LAST_BUILD_FILENAME, ENV_PICKLE_FILENAME
from sphinx.util import relative_uri
from sphinx.search import SearchFrontend
from sphinx.htmlwriter import HTMLWriter
from sphinx.builder import LAST_BUILD_FILENAME, ENV_PICKLE_FILENAME

from docutils.io import StringOutput
from docutils.utils import Reporter
@ -123,8 +121,11 @@ class DocumentationApplication(object):
"""

def __init__(self, config):
self.cache = blackhole_dict() if config['debug'] else {}
self.freqmodules = defaultdict(int)
if config['debug']:
self.cache = blackhole_dict()
else:
self.cache = {}
self.freqmodules = {}
self.last_most_frequent = []
self.generated_stylesheets = {}
self.config = config
@ -139,19 +140,31 @@ class DocumentationApplication(object):


def load_env(self, new_mtime):
with env_lock:
env_lock.acquire()
try:
if self.buildmtime == new_mtime:
# happens if another thread already reloaded the env
return
print "* Loading the environment..."
with file(path.join(self.data_root, ENV_PICKLE_FILENAME), 'rb') as f:
f = open(path.join(self.data_root, ENV_PICKLE_FILENAME), 'rb')
try:
self.env = pickle.load(f)
with file(path.join(self.data_root, 'globalcontext.pickle'), 'rb') as f:
finally:
f.close()
f = open(path.join(self.data_root, 'globalcontext.pickle'), 'rb')
try:
self.globalcontext = pickle.load(f)
with file(path.join(self.data_root, 'searchindex.pickle'), 'rb') as f:
finally:
f.close()
f = open(path.join(self.data_root, 'searchindex.pickle'), 'rb')
try:
self.search_frontend = SearchFrontend(pickle.load(f))
finally:
f.close()
self.buildmtime = new_mtime
self.cache.clear()
finally:
env_lock.release()


def search(self, req):
@ -167,12 +180,15 @@ class DocumentationApplication(object):
"""
Get the reST source of a page.
"""
page_id = self.env.get_real_filename(page)
page_id = self.env.get_real_filename(page)[:-4]
if page_id is None:
raise NotFound()
filename = path.join(self.data_root, 'sources', page_id)[:-3] + 'txt'
with file(filename) as f:
filename = path.join(self.data_root, 'sources', page_id) + '.txt'
f = open(filename)
try:
return page_id, f.read()
finally:
f.close()


def show_source(self, req, page):
@ -191,7 +207,7 @@ class DocumentationApplication(object):
return Response(render_template(req, 'edit.html', self.globalcontext, dict(
contents=contents,
pagename=page,
doctitle=self.globalcontext['titles'].get(page_id) or 'this page',
doctitle=self.globalcontext['titles'].get(page_id+'.rst') or 'this page',
submiturl=relative_uri('/@edit/'+page+'/', '/@submit/'+page),
)))

@ -209,11 +225,11 @@ class DocumentationApplication(object):
builder = MockBuilder()
builder.config = env2.config
writer = HTMLWriter(builder)
doctree = env2.read_file(page_id, pathname, save_parsed=False)
doctree = env2.get_and_resolve_doctree(page_id, builder, doctree)
doctree = env2.read_file(page_id+'.rst', pathname, save_parsed=False)
doctree = env2.get_and_resolve_doctree(page_id+'.rst', builder, doctree)
doctree.settings = OptionParser(defaults=env2.settings,
components=(writer,)).get_default_values()
doctree.reporter = Reporter(page_id, 2, 4, stream=warning_stream)
doctree.reporter = Reporter(page_id+'.rst', 2, 4, stream=warning_stream)
output = writer.write(doctree, destination)
writer.assemble_parts()
return writer.parts['fragment']
@ -302,7 +318,7 @@ class DocumentationApplication(object):
referer = ''
else:
referer = referer[len(base):]
referer = referer.rpartition('?')[0] or referer
referer = referer.split('?')[0] or referer

if req.method == 'POST':
if req.form.get('cancel'):
@ -362,8 +378,11 @@ class DocumentationApplication(object):
yield '@modindex'

filename = path.join(self.data_root, 'modindex.fpickle')
with open(filename, 'rb') as f:
f = open(filename, 'rb')
try:
context = pickle.load(f)
finally:
f.close()
if showpf:
entries = context['modindexentries']
i = 0
@ -386,7 +405,7 @@ class DocumentationApplication(object):
"""
Show the "new comment" form.
"""
page_id = self.env.get_real_filename(page)
page_id = self.env.get_real_filename(page)[:-4]
ajax_mode = req.args.get('mode') == 'ajax'
target = req.args.get('target')
page_comment_mode = not target
@ -466,7 +485,7 @@ class DocumentationApplication(object):
return

comment_url = '@comments/%s/' % url
page_id = self.env.get_real_filename(url)
page_id = self.env.get_real_filename(url)[:-4]
tx = context['body']
all_comments = Comment.get_for_page(page_id)
global_comments = []
@ -509,17 +528,17 @@ class DocumentationApplication(object):
Show the requested documentation page or raise an
`NotFound` exception to display a page with close matches.
"""
page_id = self.env.get_real_filename(url)
page_id = self.env.get_real_filename(url)[:-4]
if page_id is None:
raise NotFound(show_keyword_matches=True)
# increment view count of all modules on that page
for modname in self.env.filemodules.get(page_id, ()):
self.freqmodules[modname] += 1
for modname in self.env.filemodules.get(page_id+'.rst', ()):
self.freqmodules[modname] = self.freqmodules.get(modname, 0) + 1
# comments enabled?
comments = self.env.metadata[page_id].get('nocomments', False)
comments = self.env.metadata[page_id+'.rst'].get('nocomments', False)

# how does the user want to view comments?
commentmode = req.session.get('comments', 'inline') if comments else ''
commentmode = comments and req.session.get('comments', 'inline') or ''

# show "old URL" message? -> no caching possible
oldurl = req.args.get('oldurl')
@ -530,9 +549,12 @@ class DocumentationApplication(object):
yield page_id + '|' + commentmode

# cache miss; load the page and render it
filename = path.join(self.data_root, page_id[:-3] + 'fpickle')
with open(filename, 'rb') as f:
filename = path.join(self.data_root, page_id + '.fpickle')
f = open(filename, 'rb')
try:
context = pickle.load(f)
finally:
f.close()

# add comments to paqe text
if commentmode != 'none':
@ -546,8 +568,11 @@ class DocumentationApplication(object):
def get_special_page(self, req, name):
yield '@'+name
filename = path.join(self.data_root, name + '.fpickle')
with open(filename, 'rb') as f:
f = open(filename, 'rb')
try:
context = pickle.load(f)
finally:
f.close()
yield render_template(req, name+'.html',
self.globalcontext, context)

@ -559,8 +584,8 @@ class DocumentationApplication(object):
feed.add_item(comment.title, comment.author, comment.url,
comment.parsed_comment_body, comment.pub_date)
else:
page_id = self.env.get_real_filename(url)
doctitle = striptags(self.globalcontext['titles'].get(page_id, url))
page_id = self.env.get_real_filename(url)[:-4]
doctitle = striptags(self.globalcontext['titles'].get(page_id+'.rst', url))
feed = Feed(req, 'Comments for "%s"' % doctitle,
'List of comments for the topic "%s"' % doctitle, url)
for comment in Comment.get_for_page(page_id):
@ -632,7 +657,7 @@ class DocumentationApplication(object):
'close_matches': close_matches,
'good_matches_count': good_matches,
'keyword': term
}, self.globalcontext), status=404 if is_error_page else 404)
}, self.globalcontext), status=404)


def get_user_stylesheet(self, req):
@ -650,15 +675,21 @@ class DocumentationApplication(object):
else:
stylesheet = []
for filename in known_designs[style][0]:
with file(path.join(self.data_root, 'style', filename)) as f:
f = open(path.join(self.data_root, 'style', filename))
try:
stylesheet.append(f.read())
finally:
f.close()
stylesheet = '\n'.join(stylesheet)
if not self.config.get('debug'):
self.generated_stylesheets[style] = stylesheet

if req.args.get('admin') == 'yes':
with file(path.join(self.data_root, 'style', 'admin.css')) as f:
f = open(path.join(self.data_root, 'style', 'admin.css'))
try:
stylesheet += '\n' + f.read()
finally:
f.close()

# XXX: add timestamp based http caching
return Response(stylesheet, mimetype='text/css')

@ -17,7 +17,7 @@ import sqlite3
from datetime import datetime
from threading import local

from .markup import markup
from sphinx.web.markup import markup


_thread_local = local()
@ -88,14 +88,14 @@ class Comment(object):
@property
def url(self):
return '%s#comment-%s' % (
self.associated_page[:-4],
self.associated_page,
self.comment_id
)

@property
def parsed_comment_body(self):
from .util import get_target_uri
from ..util import relative_uri
from sphinx.web.util import get_target_uri
from sphinx.util import relative_uri
uri = get_target_uri(self.associated_page)
def make_rel_link(keyword):
return relative_uri(uri, 'q/%s/' % keyword)
@ -153,7 +153,7 @@ class Comment(object):
cur = get_cursor()
cur.execute('''select * from comments where associated_page = ?
order by associated_name, comment_id %s''' %
('desc' if reverse else 'asc'),
(reverse and 'desc' or 'asc'),
(associated_page,))
try:
return [Comment._make_comment(row) for row in cur]

@ -42,7 +42,7 @@ import cgi
import re
from urlparse import urlparse

from ..highlighting import highlight_block
from sphinx.highlighting import highlight_block


inline_formatting = {
@ -212,7 +212,7 @@ class MarkupParser(object):
elif protocol == 'javascript':
href = href[11:]
paragraph.append('<a href="%s"%s>%s</a>' % (cgi.escape(href),
' rel="nofollow"' if nofollow else '',
nofollow and ' rel="nofollow"' or '',
cgi.escape(caption)))
elif token == 'code_block':
result.append(highlight_block(data, 'python'))

@ -11,7 +11,7 @@

import re

from .wsgiutil import RedirectResponse, NotFound
from sphinx.web.wsgiutil import RedirectResponse, NotFound


_module_re = re.compile(r'module-(.*)\.html')
@ -78,7 +78,7 @@ def handle_html_url(req, url):
# tutorial
elif url[:4] == 'tut/':
try:
node = int(url[8:].partition('.html')[0])
node = int(url[8:].split('.html')[0])
except ValueError:
pass
else:

@ -63,7 +63,7 @@ def restart_with_reloader():

def run_with_reloader(main_func, extra_watch):
"""
Run the given function in an independent python interpreter.
Run the given function in an independent Python interpreter.
"""
if os.environ.get('RUN_MAIN') == 'true':
thread.start_new_thread(main_func, ())

@ -10,11 +10,10 @@
:copyright: 2007-2008 by Armin Ronacher.
:license: BSD.
"""
from __future__ import with_statement

from os import path
from hashlib import sha1
from random import choice, randrange
from collections import defaultdict


def gen_password(length=8, add_numbers=True, mix_case=True,
@ -56,17 +55,19 @@ class UserDatabase(object):
def __init__(self, filename):
self.filename = filename
self.users = {}
self.privileges = defaultdict(set)
self.privileges = {}
if path.exists(filename):
with file(filename) as f:
f = open(filename)
try:
for line in f:
line = line.strip()
if line and line[0] != '#':
parts = line.split(':')
self.users[parts[0]] = parts[1]
self.privileges[parts[0]].update(x for x in
parts[2].split(',')
if x)
self.privileges.setdefault(parts[0], set()).update(
x for x in parts[2].split(',') if x)
finally:
f.close()

def set_password(self, user, password):
"""Encode the password for a user (also adds users)."""
@ -84,7 +85,10 @@ class UserDatabase(object):
self.users[user] == sha1('%s|%s' % (user, password)).hexdigest()

def save(self):
with file(self.filename, 'w') as f:
f = open(self.filename, 'w')
try:
for username, password in self.users.iteritems():
privileges = ','.join(self.privileges.get(username, ()))
f.write('%s:%s:%s\n' % (username, password, privileges))
finally:
f.close()

@ -8,22 +8,21 @@
:copyright: 2007-2008 by Georg Brandl.
:license: BSD.
"""
from __future__ import with_statement

import re
from os import path

from ..util import relative_uri
from .._jinja import Environment, FileSystemLoader
from sphinx.util import relative_uri
from sphinx._jinja import Environment, FileSystemLoader


def get_target_uri(source_filename):
"""Get the web-URI for a given reST file name."""
if source_filename == 'index.rst':
"""Get the web-URI for a given reST file name (without extension)."""
if source_filename == 'index':
return ''
if source_filename.endswith('/index.rst'):
return source_filename[:-9] # up to /
return source_filename[:-4] + '/'
if source_filename.endswith('/index'):
return source_filename[:-5] # up to /
return source_filename + '/'


# ------------------------------------------------------------------------------

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
# Python documentation web application configuration file
# Sphinx documentation web application configuration file
#

# Where the server listens.

@ -9,7 +9,6 @@
:copyright: 2007-2008 by Armin Ronacher.
:license: BSD.
"""
from __future__ import with_statement

import cgi
import urllib
@ -23,8 +22,8 @@ from hashlib import sha1
from datetime import datetime
from cStringIO import StringIO

from .util import lazy_property
from ..util.json import dump_json
from sphinx.web.util import lazy_property
from sphinx.util.json import dump_json


HTTP_STATUS_CODES = {
@ -371,8 +370,11 @@ class Session(dict):
self.sid = sid
if sid is not None:
if path.exists(self.filename):
with file(self.filename, 'rb') as f:
f = open(self.filename, 'rb')
try:
self.update(pickle.load(f))
finally:
f.close()
self._orig = dict(self)

@property
@ -387,8 +389,11 @@ class Session(dict):
def save(self):
if self.sid is None:
self.sid = sha1('%s|%s' % (time(), random())).hexdigest()
with file(self.filename, 'wb') as f:
f = open(self.filename, 'wb')
try:
pickle.dump(dict(self), f, pickle.HIGHEST_PROTOCOL)
finally:
f.close()
self._orig = dict(self)


@ -669,8 +674,11 @@ class SharedDataMiddleware(object):
start_response('200 OK', [('Content-Type', mime_type),
('Cache-Control', 'public'),
('Expires', expiry)])
with open(filename, 'rb') as f:
f = open(filename, 'rb')
try:
return [f.read()]
finally:
f.close()

def __call__(self, environ, start_response):
p = environ.get('PATH_INFO', '')