Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

Commit b0e2c44dc8: merge with trunk

@@ -15,7 +15,7 @@ from os import path

from docutils import nodes

from sphinx.util import SEP, relative_uri
from sphinx.util.os import SEP, relative_uri
from sphinx.util.console import bold, purple, darkgreen, term_width_line

# side effect: registers roles and directives
@@ -14,7 +14,8 @@ from os import path

from cgi import escape

from sphinx import package_dir
from sphinx.util import ensuredir, os_path, copy_static_entry
from sphinx.util import copy_static_entry
from sphinx.util.os import ensuredir, os_path
from sphinx.theming import Theme
from sphinx.builders import Builder
from sphinx.util.console import bold
@@ -18,6 +18,7 @@ import zipfile

from docutils import nodes

from sphinx.builders.html import StandaloneHTMLBuilder
from sphinx.util.os import EEXIST


# (Fragment) templates from which the metainfo files content.opf, toc.ncx,
@@ -244,7 +245,7 @@ class EpubBuilder(StandaloneHTMLBuilder):
        try:
            os.mkdir(path.dirname(fn))
        except OSError, err:
            if err.errno != os.errno.EEXIST:
            if err.errno != EEXIST:
                raise
        f = codecs.open(path.join(outdir, outname), 'w', 'utf-8')
        try:
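The changed comparison above tests against the EEXIST constant imported at module level instead of reaching through os.errno. A minimal, standalone sketch of the same ignore-if-exists pattern (directory name invented, written with the newer except-as syntax):

import os
import errno

EEXIST = getattr(errno, 'EEXIST', 0)

def mkdir_if_missing(dirname):
    # ignore "already exists", re-raise any other OS error
    try:
        os.mkdir(dirname)
    except OSError as err:
        if err.errno != EEXIST:
            raise

mkdir_if_missing('_epub_tmp')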
@@ -29,10 +29,12 @@ from docutils.frontend import OptionParser
from docutils.readers.doctree import Reader as DoctreeReader

from sphinx import package_dir, __version__
from sphinx import addnodes
from sphinx.util import SEP, os_path, relative_uri, ensuredir, patmatch, \
     movefile, ustrftime, copy_static_entry, copyfile, compile_matchers, any, \
     inline_all_toctrees
from sphinx.util import copy_static_entry
from sphinx.util.os import SEP, os_path, relative_uri, ensuredir, movefile, \
     ustrftime, copyfile
from sphinx.util.nodes import inline_all_toctrees
from sphinx.util.matching import patmatch, compile_matchers
from sphinx.util.pycompat import any
from sphinx.errors import SphinxError
from sphinx.search import js_index
from sphinx.theming import Theme
@@ -18,10 +18,11 @@ from docutils.utils import new_document
from docutils.frontend import OptionParser

from sphinx import package_dir, addnodes
from sphinx.util import SEP, texescape, copyfile
from sphinx.util import texescape
from sphinx.util.os import SEP, copyfile
from sphinx.builders import Builder
from sphinx.environment import NoUri
from sphinx.util import inline_all_toctrees
from sphinx.util.nodes import inline_all_toctrees
from sphinx.util.console import bold, darkgreen
from sphinx.writers.latex import LaTeXWriter
@@ -14,7 +14,7 @@ from os import path

from docutils.io import StringOutput

from sphinx.util import ensuredir, os_path
from sphinx.util.os import ensuredir, os_path
from sphinx.builders import Builder
from sphinx.writers.text import TextWriter
@@ -13,8 +13,8 @@ import os
import re
from os import path

from sphinx.util import make_filename
from sphinx.errors import ConfigError
from sphinx.util.os import make_filename

nonascii_re = re.compile(r'[\x80-\xff]')
@@ -7,16 +7,15 @@
    :license: BSD, see LICENSE for details.
"""

import re

from docutils import nodes
from docutils.parsers.rst import Directive, directives

from sphinx import addnodes
from sphinx.locale import pairindextypes
from sphinx.util import patfilter, ws_re, url_re, docname_join, \
     explicit_title_re
from sphinx.util import url_re, docname_join
from sphinx.util.nodes import explicit_title_re
from sphinx.util.compat import make_admonition
from sphinx.util.matching import patfilter


class TocTree(Directive):
@@ -47,7 +46,6 @@ class TocTree(Directive):
        # and title may be None if the document's title is to be used
        entries = []
        includefiles = []
        includetitles = {}
        all_docnames = env.found_docs.copy()
        # don't add the currently visited file in catch-all patterns
        all_docnames.remove(env.docname)
@@ -34,9 +34,11 @@ from docutils.transforms import Transform
from docutils.transforms.parts import ContentsFilter

from sphinx import addnodes
from sphinx.util import movefile, get_matching_docs, SEP, ustrftime, \
     docname_join, FilenameUniqDict, url_re, make_refnode, clean_astext, \
     compile_matchers
from sphinx.util import url_re, get_matching_docs, docname_join, \
     FilenameUniqDict
from sphinx.util.os import movefile, SEP, ustrftime
from sphinx.util.nodes import clean_astext, make_refnode
from sphinx.util.matching import compile_matchers
from sphinx.errors import SphinxError, ExtensionError
@@ -72,7 +74,6 @@ def lookup_domain_element(env, type, name):
            return element, []
    raise ElementLookupError


default_settings = {
    'embed_stylesheet': False,
    'cloak_email_addresses': True,
@@ -20,9 +20,10 @@ from docutils import nodes
from docutils.utils import assemble_option_dict
from docutils.statemachine import ViewList

from sphinx.util import rpartition, nested_parse_with_titles, force_decode
from sphinx.util import rpartition, force_decode
from sphinx.pycode import ModuleAnalyzer, PycodeError
from sphinx.application import ExtensionError
from sphinx.util.nodes import nested_parse_with_titles
from sphinx.util.compat import Directive
from sphinx.util.inspect import isdescriptor, safe_getmembers, safe_getattr
from sphinx.util.docstrings import prepare_docstring
@@ -58,14 +58,12 @@ import re
import sys
import inspect
import posixpath
from os import path

from docutils.parsers.rst import directives
from docutils.statemachine import ViewList
from docutils import nodes

from sphinx import addnodes, roles
from sphinx.util import patfilter
from sphinx.util.compat import Directive
@@ -20,15 +20,14 @@
import os
import re
import sys
import optparse
import inspect
import pydoc
import optparse

from jinja2 import FileSystemLoader, TemplateNotFound
from jinja2.sandbox import SandboxedEnvironment

from sphinx.ext.autosummary import import_by_name, get_documenter
from sphinx.util import ensuredir
from sphinx.util.os import ensuredir
from sphinx.jinja2glue import BuiltinTemplateLoader

def main(argv=sys.argv):
@@ -26,7 +26,7 @@

from docutils import nodes, utils

from sphinx.util import split_explicit_title
from sphinx.util.nodes import split_explicit_title


def make_link_role(base_url, prefix):
@@ -24,7 +24,7 @@ from docutils import nodes
from docutils.parsers.rst import directives

from sphinx.errors import SphinxError
from sphinx.util import ensuredir, ENOENT, EPIPE
from sphinx.util.os import ensuredir, ENOENT, EPIPE
from sphinx.util.compat import Directive
@@ -23,7 +23,7 @@ except ImportError:
from docutils import nodes

from sphinx.errors import SphinxError
from sphinx.util import ensuredir, ENOENT
from sphinx.util.os import ensuredir, ENOENT
from sphinx.util.png import read_png_depth, write_png_depth
from sphinx.ext.mathbase import setup_math as mathbase_setup, wrap_displaymath
@@ -17,7 +17,7 @@ from jinja2 import FileSystemLoader, BaseLoader, TemplateNotFound, \
from jinja2.utils import open_if_exists
from jinja2.sandbox import SandboxedEnvironment

from sphinx.util import mtimes_of_files
from sphinx.util.os import mtimes_of_files
from sphinx.application import TemplateBridge
@@ -15,7 +15,7 @@ from os import path
TERM_ENCODING = getattr(sys.stdin, 'encoding', None)

from sphinx import __version__
from sphinx.util import make_filename
from sphinx.util.os import make_filename
from sphinx.util.console import purple, bold, red, turquoise, \
     nocolor, color_terminal
from sphinx.util import texescape
@@ -16,7 +16,8 @@ from docutils import nodes, utils
from docutils.parsers.rst import roles

from sphinx import addnodes
from sphinx.util import ws_re, split_explicit_title
from sphinx.util import ws_re
from sphinx.util.nodes import split_explicit_title


generic_docroles = {
@@ -12,9 +12,6 @@
import os
import re
import sys
import time
import errno
import types
import shutil
import fnmatch
import tempfile
@@ -23,91 +20,33 @@ import traceback
from os import path

import docutils
from docutils import nodes
from docutils.utils import relative_path

import jinja2

import sphinx
from sphinx import addnodes
from sphinx.errors import PycodeError

# Errnos that we need.
EEXIST = getattr(errno, 'EEXIST', 0)
ENOENT = getattr(errno, 'ENOENT', 0)
EPIPE = getattr(errno, 'EPIPE', 0)
# import other utilities; partly for backwards compatibility, so don't
# prune unused ones indiscriminately
from sphinx.util.os import SEP, os_path, relative_uri, ensuredir, walk, \
     mtimes_of_files, movefile, copyfile, copytimes, make_filename, ustrftime
from sphinx.util.nodes import nested_parse_with_titles, split_explicit_title, \
     explicit_title_re, caption_ref_re
from sphinx.util.matching import patfilter

# Generally useful regular expressions.
ws_re = re.compile(r'\s+')
explicit_title_re = re.compile('^(.+?)\s*<(.*?)>$', re.DOTALL)
caption_ref_re = explicit_title_re    # b/w compat alias
url_re = re.compile(r'(?P<schema>.+)://.*')

# SEP separates path elements in the canonical file names
#
# Define SEP as a manifest constant, not so much because we expect it to change
# in the future as to avoid the suspicion that a stray "/" in the code is a
# hangover from more *nix-oriented origins.
SEP = "/"

def os_path(canonicalpath):
    return canonicalpath.replace(SEP, os.path.sep)


def relative_uri(base, to):
    """Return a relative URL from ``base`` to ``to``."""
    if to.startswith(SEP):
        return to
    b2 = base.split(SEP)
    t2 = to.split(SEP)
    # remove common segments
    for x, y in zip(b2, t2):
        if x != y:
            break
        b2.pop(0)
        t2.pop(0)
    return ('..' + SEP) * (len(b2)-1) + SEP.join(t2)

# High-level utility functions.

def docname_join(basedocname, docname):
    return posixpath.normpath(
        posixpath.join('/' + basedocname, '..', docname))[1:]


def ensuredir(path):
    """Ensure that a path exists."""
    try:
        os.makedirs(path)
    except OSError, err:
        # 0 for Jython/Win32
        if err.errno not in [0, EEXIST]:
            raise


def walk(top, topdown=True, followlinks=False):
    """
    Backport of os.walk from 2.6, where the followlinks argument was added.
    """
    names = os.listdir(top)

    dirs, nondirs = [], []
    for name in names:
        if path.isdir(path.join(top, name)):
            dirs.append(name)
        else:
            nondirs.append(name)

    if topdown:
        yield top, dirs, nondirs
    for name in dirs:
        fullpath = path.join(top, name)
        if followlinks or not path.islink(fullpath):
            for x in walk(fullpath, topdown, followlinks):
                yield x
    if not topdown:
        yield top, dirs, nondirs


def get_matching_files(dirname, exclude_matchers=()):
    """
    Get all file names in a directory, recursively.
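The block re-imported into sphinx/util/__init__.py above is explicitly kept for backwards compatibility, so code that still does "from sphinx.util import ensuredir" keeps working after the split. A minimal check of that assumption (module layout as in this commit, not later Sphinx releases):

from sphinx.util import ensuredir as ensuredir_compat
from sphinx.util.os import ensuredir

# both names refer to the same function object via the re-export
assert ensuredir_compat is ensuredir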
@@ -149,204 +88,6 @@ def get_matching_docs(dirname, suffix, exclude_matchers=()):
        yield filename[:-len(suffix)]


def mtimes_of_files(dirnames, suffix):
    for dirname in dirnames:
        for root, dirs, files in os.walk(dirname):
            for sfile in files:
                if sfile.endswith(suffix):
                    try:
                        yield path.getmtime(path.join(root, sfile))
                    except EnvironmentError:
                        pass


def shorten_result(text='', keywords=[], maxlen=240, fuzz=60):
    if not text:
        text = ''
    text_low = text.lower()
    beg = -1
    for k in keywords:
        i = text_low.find(k.lower())
        if (i > -1 and i < beg) or beg == -1:
            beg = i
    excerpt_beg = 0
    if beg > fuzz:
        for sep in ('.', ':', ';', '='):
            eb = text.find(sep, beg - fuzz, beg - 1)
            if eb > -1:
                eb += 1
                break
        else:
            eb = beg - fuzz
        excerpt_beg = eb
    if excerpt_beg < 0:
        excerpt_beg = 0
    msg = text[excerpt_beg:beg+maxlen]
    if beg > fuzz:
        msg = '... ' + msg
    if beg < len(text)-maxlen:
        msg = msg + ' ...'
    return msg


class attrdict(dict):
    def __getattr__(self, key):
        return self[key]
    def __setattr__(self, key, val):
        self[key] = val
    def __delattr__(self, key):
        del self[key]


def fmt_ex(ex):
    """Format a single line with an exception description."""
    return traceback.format_exception_only(ex.__class__, ex)[-1].strip()


def rpartition(s, t):
    """Similar to str.rpartition from 2.5, but doesn't return the separator."""
    i = s.rfind(t)
    if i != -1:
        return s[:i], s[i+len(t):]
    return '', s


def format_exception_cut_frames(x=1):
    """
    Format an exception with traceback, but only the last x frames.
    """
    typ, val, tb = sys.exc_info()
    #res = ['Traceback (most recent call last):\n']
    res = []
    tbres = traceback.format_tb(tb)
    res += tbres[-x:]
    res += traceback.format_exception_only(typ, val)
    return ''.join(res)


def save_traceback():
    """
    Save the current exception's traceback in a temporary file.
    """
    exc = traceback.format_exc()
    fd, path = tempfile.mkstemp('.log', 'sphinx-err-')
    os.write(fd, '# Sphinx version: %s\n' % sphinx.__version__)
    os.write(fd, '# Docutils version: %s %s\n' % (docutils.__version__,
                                                  docutils.__version_details__))
    os.write(fd, '# Jinja2 version: %s\n' % jinja2.__version__)
    os.write(fd, exc)
    os.close(fd)
    return path


def _translate_pattern(pat):
    """
    Translate a shell-style glob pattern to a regular expression.

    Adapted from the fnmatch module, but enhanced so that single stars don't
    match slashes.
    """
    i, n = 0, len(pat)
    res = ''
    while i < n:
        c = pat[i]
        i += 1
        if c == '*':
            if i < n and pat[i] == '*':
                # double star matches slashes too
                i += 1
                res = res + '.*'
            else:
                # single star doesn't match slashes
                res = res + '[^/]*'
        elif c == '?':
            # question mark doesn't match slashes too
            res = res + '[^/]'
        elif c == '[':
            j = i
            if j < n and pat[j] == '!':
                j += 1
            if j < n and pat[j] == ']':
                j += 1
            while j < n and pat[j] != ']':
                j += 1
            if j >= n:
                res = res + '\\['
            else:
                stuff = pat[i:j].replace('\\', '\\\\')
                i = j + 1
                if stuff[0] == '!':
                    # negative pattern mustn't match slashes too
                    stuff = '^/' + stuff[1:]
                elif stuff[0] == '^':
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        else:
            res += re.escape(c)
    return res + '$'

def compile_matchers(patterns):
    return [re.compile(_translate_pattern(pat)).match for pat in patterns]


_pat_cache = {}

def patmatch(name, pat):
    """
    Return if name matches pat. Adapted from fnmatch module.
    """
    if pat not in _pat_cache:
        _pat_cache[pat] = re.compile(_translate_pattern(pat))
    return _pat_cache[pat].match(name)

def patfilter(names, pat):
    """
    Return the subset of the list NAMES that match PAT.
    Adapted from fnmatch module.
    """
    if pat not in _pat_cache:
        _pat_cache[pat] = re.compile(_translate_pattern(pat))
    match = _pat_cache[pat].match
    return filter(match, names)


no_fn_re = re.compile(r'[^a-zA-Z0-9_-]')

def make_filename(string):
    return no_fn_re.sub('', string)


def nested_parse_with_titles(state, content, node):
    # hack around title style bookkeeping
    surrounding_title_styles = state.memo.title_styles
    surrounding_section_level = state.memo.section_level
    state.memo.title_styles = []
    state.memo.section_level = 0
    try:
        return state.nested_parse(content, 0, node, match_titles=1)
    finally:
        state.memo.title_styles = surrounding_title_styles
        state.memo.section_level = surrounding_section_level


def ustrftime(format, *args):
    # strftime for unicode strings
    return time.strftime(unicode(format).encode('utf-8'), *args).decode('utf-8')


class Tee(object):
    """
    File-like object writing to two streams.
    """
    def __init__(self, stream1, stream2):
        self.stream1 = stream1
        self.stream2 = stream2

    def write(self, text):
        self.stream1.write(text)
        self.stream2.write(text)


class FilenameUniqDict(dict):
    """
    A dictionary that automatically generates unique names for its keys,
@@ -384,72 +125,12 @@ class FilenameUniqDict(dict):
        self._existing = state


def parselinenos(spec, total):
    """
    Parse a line number spec (such as "1,2,4-6") and return a list of
    wanted line numbers.
    """
    items = list()
    parts = spec.split(',')
    for part in parts:
        try:
            begend = part.strip().split('-')
            if len(begend) > 2:
                raise ValueError
            if len(begend) == 1:
                items.append(int(begend[0])-1)
            else:
                start = (begend[0] == '') and 0 or int(begend[0])-1
                end = (begend[1] == '') and total or int(begend[1])
                items.extend(xrange(start, end))
        except Exception, err:
            raise ValueError('invalid line number spec: %r' % spec)
    return items


def force_decode(string, encoding):
    if isinstance(string, str):
        if encoding:
            string = string.decode(encoding)
        else:
            try:
                # try decoding with utf-8, should only work for real UTF-8
                string = string.decode('utf-8')
            except UnicodeError:
                # last resort -- can't fail
                string = string.decode('latin1')
    return string


def movefile(source, dest):
    """Move a file, removing the destination if it exists."""
    if os.path.exists(dest):
        try:
            os.unlink(dest)
        except OSError:
            pass
    os.rename(source, dest)


def copytimes(source, dest):
    """Copy a file's modification times."""
    st = os.stat(source)
    if hasattr(os, 'utime'):
        os.utime(dest, (st.st_atime, st.st_mtime))


def copyfile(source, dest):
    """Copy a file and its modification times, if possible."""
    shutil.copyfile(source, dest)
    try:
        # don't do full copystat because the source may be read-only
        copytimes(source, dest)
    except OSError:
        pass


def copy_static_entry(source, targetdir, builder, context={},
                      exclude_matchers=(), level=0):
    """Copy a HTML builder static_path entry from source to targetdir.

    Handles all possible cases of files, directories and subdirectories.
    """
    if exclude_matchers:
        relpath = relative_path(builder.srcdir, source)
        for matcher in exclude_matchers:
@@ -481,34 +162,19 @@ def copy_static_entry(source, targetdir, builder, context={},
        shutil.copytree(source, target)


def clean_astext(node):
    """Like node.astext(), but ignore images."""
    node = node.deepcopy()
    for img in node.traverse(docutils.nodes.image):
        img['alt'] = ''
    return node.astext()


def split_explicit_title(text):
    """Split role content into title and target, if given."""
    match = explicit_title_re.match(text)
    if match:
        return True, match.group(1), match.group(2)
    return False, text, text


def make_refnode(builder, fromdocname, todocname, targetid, child, title=None):
    """Shortcut to create a reference node."""
    node = nodes.reference('', '')
    if fromdocname == todocname:
        node['refid'] = targetid
    else:
        node['refuri'] = (builder.get_relative_uri(fromdocname, todocname)
                          + '#' + targetid)
    if title:
        node['reftitle'] = title
    node.append(child)
    return node


def save_traceback():
    """
    Save the current exception's traceback in a temporary file.
    """
    exc = traceback.format_exc()
    fd, path = tempfile.mkstemp('.log', 'sphinx-err-')
    os.write(fd, '# Sphinx version: %s\n' % sphinx.__version__)
    os.write(fd, '# Docutils version: %s %s\n' % (docutils.__version__,
                                                  docutils.__version_details__))
    os.write(fd, '# Jinja2 version: %s\n' % jinja2.__version__)
    os.write(fd, exc)
    os.close(fd)
    return path


def get_module_source(modname):
@@ -543,73 +209,84 @@ def get_module_source(modname):
    return 'file', filename


try:
    any = any
except NameError:
    def any(gen):
        for i in gen:
            if i:
                return True
        return False


def inline_all_toctrees(builder, docnameset, docname, tree, colorfunc):
    """Inline all toctrees in the *tree*.

    Record all docnames in *docnameset*, and output docnames with *colorfunc*.
    """
    tree = tree.deepcopy()
    for toctreenode in tree.traverse(addnodes.toctree):
        newnodes = []
        includefiles = map(str, toctreenode['includefiles'])
        for includefile in includefiles:
            try:
                builder.info(colorfunc(includefile) + " ", nonl=1)
                subtree = inline_all_toctrees(builder, docnameset, includefile,
                    builder.env.get_doctree(includefile), colorfunc)
                docnameset.add(includefile)
            except Exception:
                builder.warn('toctree contains ref to nonexisting '
                             'file %r' % includefile,
                             builder.env.doc2path(docname))
            else:
                sof = addnodes.start_of_file(docname=includefile)
                sof.children = subtree.children
                newnodes.append(sof)
        toctreenode.parent.replace(toctreenode, newnodes)
    return tree


# monkey-patch Node.traverse to get more speed
# traverse() is called so many times during a build that it saves
# on average 20-25% overall build time!

def _all_traverse(self, result):
    """Version of Node.traverse() that doesn't need a condition."""
    result.append(self)
    for child in self.children:
        child._all_traverse(result)
    return result

def _fast_traverse(self, cls, result):
    """Version of Node.traverse() that only supports instance checks."""
    if isinstance(self, cls):
        result.append(self)
    for child in self.children:
        child._fast_traverse(cls, result)
    return result

def _new_traverse(self, condition=None,
                  include_self=1, descend=1, siblings=0, ascend=0):
    if include_self and descend and not siblings and not ascend:
        if condition is None:
            return self._all_traverse([])
        elif isinstance(condition, (types.ClassType, type)):
            return self._fast_traverse(condition, [])
    return self._old_traverse(condition, include_self,
                              descend, siblings, ascend)

nodes.Node._old_traverse = nodes.Node.traverse
nodes.Node._all_traverse = _all_traverse
nodes.Node._fast_traverse = _fast_traverse
nodes.Node.traverse = _new_traverse


# Low-level utility functions and classes.

class Tee(object):
    """
    File-like object writing to two streams.
    """
    def __init__(self, stream1, stream2):
        self.stream1 = stream1
        self.stream2 = stream2

    def write(self, text):
        self.stream1.write(text)
        self.stream2.write(text)


def parselinenos(spec, total):
    """
    Parse a line number spec (such as "1,2,4-6") and return a list of
    wanted line numbers.
    """
    items = list()
    parts = spec.split(',')
    for part in parts:
        try:
            begend = part.strip().split('-')
            if len(begend) > 2:
                raise ValueError
            if len(begend) == 1:
                items.append(int(begend[0])-1)
            else:
                start = (begend[0] == '') and 0 or int(begend[0])-1
                end = (begend[1] == '') and total or int(begend[1])
                items.extend(xrange(start, end))
        except Exception:
            raise ValueError('invalid line number spec: %r' % spec)
    return items


def force_decode(string, encoding):
    """Forcibly get a unicode string out of a bytestring."""
    if isinstance(string, str):
        if encoding:
            string = string.decode(encoding)
        else:
            try:
                # try decoding with utf-8, should only work for real UTF-8
                string = string.decode('utf-8')
            except UnicodeError:
                # last resort -- can't fail
                string = string.decode('latin1')
    return string


class attrdict(dict):
    def __getattr__(self, key):
        return self[key]
    def __setattr__(self, key, val):
        self[key] = val
    def __delattr__(self, key):
        del self[key]


def rpartition(s, t):
    """Similar to str.rpartition from 2.5, but doesn't return the separator."""
    i = s.rfind(t)
    if i != -1:
        return s[:i], s[i+len(t):]
    return '', s


def format_exception_cut_frames(x=1):
    """
    Format an exception with traceback, but only the last x frames.
    """
    typ, val, tb = sys.exc_info()
    #res = ['Traceback (most recent call last):\n']
    res = []
    tbres = traceback.format_tb(tb)
    res += tbres[-x:]
    res += traceback.format_exception_only(typ, val)
    return ''.join(res)
new file: sphinx/util/matching.py (83 lines)
@@ -0,0 +1,83 @@
# -*- coding: utf-8 -*-
"""
    sphinx.util.matching
    ~~~~~~~~~~~~~~~~~~~~

    Pattern-matching utility functions for Sphinx.

    :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re


def _translate_pattern(pat):
    """
    Translate a shell-style glob pattern to a regular expression.

    Adapted from the fnmatch module, but enhanced so that single stars don't
    match slashes.
    """
    i, n = 0, len(pat)
    res = ''
    while i < n:
        c = pat[i]
        i += 1
        if c == '*':
            if i < n and pat[i] == '*':
                # double star matches slashes too
                i += 1
                res = res + '.*'
            else:
                # single star doesn't match slashes
                res = res + '[^/]*'
        elif c == '?':
            # question mark doesn't match slashes too
            res = res + '[^/]'
        elif c == '[':
            j = i
            if j < n and pat[j] == '!':
                j += 1
            if j < n and pat[j] == ']':
                j += 1
            while j < n and pat[j] != ']':
                j += 1
            if j >= n:
                res = res + '\\['
            else:
                stuff = pat[i:j].replace('\\', '\\\\')
                i = j + 1
                if stuff[0] == '!':
                    # negative pattern mustn't match slashes too
                    stuff = '^/' + stuff[1:]
                elif stuff[0] == '^':
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        else:
            res += re.escape(c)
    return res + '$'

def compile_matchers(patterns):
    return [re.compile(_translate_pattern(pat)).match for pat in patterns]


_pat_cache = {}

def patmatch(name, pat):
    """
    Return if name matches pat. Adapted from fnmatch module.
    """
    if pat not in _pat_cache:
        _pat_cache[pat] = re.compile(_translate_pattern(pat))
    return _pat_cache[pat].match(name)

def patfilter(names, pat):
    """
    Return the subset of the list NAMES that match PAT.
    Adapted from fnmatch module.
    """
    if pat not in _pat_cache:
        _pat_cache[pat] = re.compile(_translate_pattern(pat))
    match = _pat_cache[pat].match
    return filter(match, names)
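A short usage sketch of the new matching helpers; the docnames and patterns are invented, and the behaviour follows _translate_pattern above (a single '*' never crosses '/', '**' does):

from sphinx.util.matching import patmatch, patfilter, compile_matchers

print(bool(patmatch('intro/install', 'intro/*')))     # True
print(bool(patmatch('intro/deep/page', 'intro/*')))   # False
print(bool(patmatch('intro/deep/page', 'intro/**')))  # True

# keep only the docnames matching one pattern
print(list(patfilter(['index', 'intro/install', 'api/util'], 'intro/*')))

# precompile several exclude patterns, as the build environment does
matchers = compile_matchers(['_build/**', '**/.svn'])
print(any(m('_build/html/index') for m in matchers))  # True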
new file: sphinx/util/nodes.py (125 lines)
@@ -0,0 +1,125 @@
# -*- coding: utf-8 -*-
"""
    sphinx.util.nodes
    ~~~~~~~~~~~~~~~~~

    Docutils node-related utility functions for Sphinx.

    :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re
import types

from docutils import nodes

from sphinx import addnodes


explicit_title_re = re.compile('^(.+?)\s*<(.*?)>$', re.DOTALL)
caption_ref_re = explicit_title_re    # b/w compat alias


def nested_parse_with_titles(state, content, node):
    # hack around title style bookkeeping
    surrounding_title_styles = state.memo.title_styles
    surrounding_section_level = state.memo.section_level
    state.memo.title_styles = []
    state.memo.section_level = 0
    try:
        return state.nested_parse(content, 0, node, match_titles=1)
    finally:
        state.memo.title_styles = surrounding_title_styles
        state.memo.section_level = surrounding_section_level


def clean_astext(node):
    """Like node.astext(), but ignore images."""
    node = node.deepcopy()
    for img in node.traverse(nodes.image):
        img['alt'] = ''
    return node.astext()


def split_explicit_title(text):
    """Split role content into title and target, if given."""
    match = explicit_title_re.match(text)
    if match:
        return True, match.group(1), match.group(2)
    return False, text, text


def inline_all_toctrees(builder, docnameset, docname, tree, colorfunc):
    """Inline all toctrees in the *tree*.

    Record all docnames in *docnameset*, and output docnames with *colorfunc*.
    """
    tree = tree.deepcopy()
    for toctreenode in tree.traverse(addnodes.toctree):
        newnodes = []
        includefiles = map(str, toctreenode['includefiles'])
        for includefile in includefiles:
            try:
                builder.info(colorfunc(includefile) + " ", nonl=1)
                subtree = inline_all_toctrees(builder, docnameset, includefile,
                    builder.env.get_doctree(includefile), colorfunc)
                docnameset.add(includefile)
            except Exception:
                builder.warn('toctree contains ref to nonexisting '
                             'file %r' % includefile,
                             builder.env.doc2path(docname))
            else:
                sof = addnodes.start_of_file(docname=includefile)
                sof.children = subtree.children
                newnodes.append(sof)
        toctreenode.parent.replace(toctreenode, newnodes)
    return tree


def make_refnode(builder, fromdocname, todocname, targetid, child, title=None):
    """Shortcut to create a reference node."""
    node = nodes.reference('', '')
    if fromdocname == todocname:
        node['refid'] = targetid
    else:
        node['refuri'] = (builder.get_relative_uri(fromdocname, todocname)
                          + '#' + targetid)
    if title:
        node['reftitle'] = title
    node.append(child)
    return node


# monkey-patch Node.traverse to get more speed
# traverse() is called so many times during a build that it saves
# on average 20-25% overall build time!

def _all_traverse(self, result):
    """Version of Node.traverse() that doesn't need a condition."""
    result.append(self)
    for child in self.children:
        child._all_traverse(result)
    return result

def _fast_traverse(self, cls, result):
    """Version of Node.traverse() that only supports instance checks."""
    if isinstance(self, cls):
        result.append(self)
    for child in self.children:
        child._fast_traverse(cls, result)
    return result

def _new_traverse(self, condition=None,
                  include_self=1, descend=1, siblings=0, ascend=0):
    if include_self and descend and not siblings and not ascend:
        if condition is None:
            return self._all_traverse([])
        elif isinstance(condition, (types.ClassType, type)):
            return self._fast_traverse(condition, [])
    return self._old_traverse(condition, include_self,
                              descend, siblings, ascend)

nodes.Node._old_traverse = nodes.Node.traverse
nodes.Node._all_traverse = _all_traverse
nodes.Node._fast_traverse = _fast_traverse
nodes.Node.traverse = _new_traverse
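Two of the helpers above in a minimal sketch, assuming the module layout introduced in this commit; the sample strings and node contents are invented:

from docutils import nodes
from sphinx.util.nodes import split_explicit_title, clean_astext

# the explicit-title form used in roles, e.g. :ref:`Installation <intro/install>`
print(split_explicit_title('Installation <intro/install>'))
# -> (True, 'Installation', 'intro/install')
print(split_explicit_title('intro/install'))
# -> (False, 'intro/install', 'intro/install')

# clean_astext blanks image alt text before flattening a node to a string
title = nodes.title('', 'Overview ')
title += nodes.image(uri='diagram.png', alt='a diagram')
print(clean_astext(title))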
new file: sphinx/util/os.py (130 lines)
@@ -0,0 +1,130 @@
# -*- coding: utf-8 -*-
"""
    sphinx.util.os
    ~~~~~~~~~~~~~~

    Operating system-related utility functions for Sphinx.

    :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import os
import re
import time
import errno
import shutil
from os import path

# Errnos that we need.
EEXIST = getattr(errno, 'EEXIST', 0)
ENOENT = getattr(errno, 'ENOENT', 0)
EPIPE = getattr(errno, 'EPIPE', 0)

# SEP separates path elements in the canonical file names
#
# Define SEP as a manifest constant, not so much because we expect it to change
# in the future as to avoid the suspicion that a stray "/" in the code is a
# hangover from more *nix-oriented origins.
SEP = "/"

def os_path(canonicalpath):
    return canonicalpath.replace(SEP, path.sep)


def relative_uri(base, to):
    """Return a relative URL from ``base`` to ``to``."""
    if to.startswith(SEP):
        return to
    b2 = base.split(SEP)
    t2 = to.split(SEP)
    # remove common segments
    for x, y in zip(b2, t2):
        if x != y:
            break
        b2.pop(0)
        t2.pop(0)
    return ('..' + SEP) * (len(b2)-1) + SEP.join(t2)


def ensuredir(path):
    """Ensure that a path exists."""
    try:
        os.makedirs(path)
    except OSError, err:
        # 0 for Jython/Win32
        if err.errno not in [0, EEXIST]:
            raise


def walk(top, topdown=True, followlinks=False):
    """
    Backport of os.walk from 2.6, where the followlinks argument was added.
    """
    names = os.listdir(top)

    dirs, nondirs = [], []
    for name in names:
        if path.isdir(path.join(top, name)):
            dirs.append(name)
        else:
            nondirs.append(name)

    if topdown:
        yield top, dirs, nondirs
    for name in dirs:
        fullpath = path.join(top, name)
        if followlinks or not path.islink(fullpath):
            for x in walk(fullpath, topdown, followlinks):
                yield x
    if not topdown:
        yield top, dirs, nondirs


def mtimes_of_files(dirnames, suffix):
    for dirname in dirnames:
        for root, dirs, files in os.walk(dirname):
            for sfile in files:
                if sfile.endswith(suffix):
                    try:
                        yield path.getmtime(path.join(root, sfile))
                    except EnvironmentError:
                        pass


def movefile(source, dest):
    """Move a file, removing the destination if it exists."""
    if os.path.exists(dest):
        try:
            os.unlink(dest)
        except OSError:
            pass
    os.rename(source, dest)


def copytimes(source, dest):
    """Copy a file's modification times."""
    st = os.stat(source)
    if hasattr(os, 'utime'):
        os.utime(dest, (st.st_atime, st.st_mtime))


def copyfile(source, dest):
    """Copy a file and its modification times, if possible."""
    shutil.copyfile(source, dest)
    try:
        # don't do full copystat because the source may be read-only
        copytimes(source, dest)
    except OSError:
        pass


no_fn_re = re.compile(r'[^a-zA-Z0-9_-]')

def make_filename(string):
    return no_fn_re.sub('', string)


def ustrftime(format, *args):
    # strftime for unicode strings
    return time.strftime(unicode(format).encode('utf-8'), *args).decode('utf-8')
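A small sketch of the path helpers above, with made-up docnames (module path as in this commit; later Sphinx versions moved these helpers again):

from sphinx.util.os import SEP, os_path, relative_uri, make_filename

# canonical docnames always use '/', regardless of platform
print(os_path('api' + SEP + 'util'))   # 'api\\util' on Windows, 'api/util' elsewhere

# relative link from one generated page to another
print(relative_uri('api/util.html', 'intro/install.html'))   # '../intro/install.html'

# strip characters that are unsafe in generated file names
print(make_filename('My Project 1.0'))   # 'MyProject10'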
@@ -13,6 +13,17 @@ import sys
import codecs
import encodings


try:
    any = any
except NameError:
    def any(gen):
        for i in gen:
            if i:
                return True
        return False


if sys.version_info < (2, 5):
    # Python 2.4 doesn't know the utf-8-sig encoding, so deliver it here
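The `any` fallback above is only defined when the builtin is missing (Python < 2.5); callers import it unconditionally and get the builtin where available. A brief sketch with invented values:

from sphinx.util.pycompat import any

print(any(name.endswith('.rst') for name in ['index.rst', 'conf.py']))   # True
print(any([]))                                                           # False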
@@ -23,7 +23,7 @@ from sphinx import addnodes
from sphinx import highlighting
from sphinx.errors import SphinxError
from sphinx.locale import admonitionlabels, versionlabels
from sphinx.util import ustrftime
from sphinx.util.os import ustrftime
from sphinx.util.texescape import tex_escape_map
from sphinx.util.smartypants import educateQuotesLatex