sphinx-doc/sphinx (mirror of https://github.com/sphinx-doc/sphinx.git)
commit 4abbe95a9c: Merge with 1.0

@@ -112,6 +112,15 @@ Features added
Release 1.0.8 (Sep 23, 2011)
============================

* #627: Fix tracebacks for AttributeErrors in autosummary generation.

* Fix the ``abbr`` role when the abbreviation has newlines in it.

* #727: Fix the links to search results with custom object types.

* #648: Fix line numbers reported in warnings about undefined
  references.

* #696, #666: Fix C++ array definitions and template arguments
  that are not type names.

@@ -15,6 +15,7 @@ from docutils.parsers.rst import Directive, directives

from sphinx import addnodes
from sphinx.util import parselinenos
from sphinx.util.nodes import set_source_info


class Highlight(Directive):
@@ -77,7 +78,7 @@ class CodeBlock(Directive):
        literal['linenos'] = 'linenos' in self.options
        if hl_lines is not None:
            literal['highlight_args'] = {'hl_lines': hl_lines}
        literal.line = self.lineno
        set_source_info(self, literal)
        return [literal]


@@ -197,8 +198,7 @@ class LiteralInclude(Directive):
        if self.options.get('tab-width'):
            text = text.expandtabs(self.options['tab-width'])
        retnode = nodes.literal_block(text, text, source=filename)
        retnode.line = 1
        retnode.attributes['line_number'] = self.lineno
        set_source_info(self, retnode)
        if self.options.get('language', ''):
            retnode['language'] = self.options['language']
        if 'linenos' in self.options:

@@ -17,7 +17,8 @@ from docutils.parsers.rst.directives.misc import Include as BaseInclude
from sphinx import addnodes
from sphinx.locale import _
from sphinx.util import url_re, docname_join
from sphinx.util.nodes import explicit_title_re, process_index_entry
from sphinx.util.nodes import explicit_title_re, set_source_info, \
     process_index_entry
from sphinx.util.compat import make_admonition
from sphinx.util.matching import patfilter

@@ -108,6 +109,7 @@ class TocTree(Directive):
        subnode['hidden'] = 'hidden' in self.options
        subnode['numbered'] = self.options.get('numbered', 0)
        subnode['titlesonly'] = 'titlesonly' in self.options
        set_source_info(self, subnode)
        wrappernode = nodes.compound(classes=['toctree-wrapper'])
        wrappernode.append(subnode)
        ret.append(wrappernode)
@@ -184,6 +186,7 @@ class VersionChange(Directive):
    def run(self):
        node = addnodes.versionmodified()
        node.document = self.state.document
        set_source_info(self, node)
        node['type'] = self.name
        node['version'] = self.arguments[0]
        if len(self.arguments) == 2:
@@ -196,7 +199,8 @@ class VersionChange(Directive):
        else:
            ret = [node]
        env = self.state.document.settings.env
        env.note_versionchange(node['type'], node['version'], node, self.lineno)
        # XXX should record node.source as well
        env.note_versionchange(node['type'], node['version'], node, node.line)
        return ret


@@ -238,7 +242,7 @@ class TabularColumns(Directive):
    def run(self):
        node = addnodes.tabular_col_spec()
        node['spec'] = self.arguments[0]
        node.line = self.lineno
        set_source_info(self, node)
        return [node]


@@ -332,7 +336,7 @@ class Only(Directive):
    def run(self):
        node = addnodes.only()
        node.document = self.state.document
        node.line = self.lineno
        set_source_info(self, node)
        node['expr'] = self.arguments[0]
        self.state.nested_parse(self.content, self.content_offset, node,
                                match_titles=1)

@@ -159,11 +159,10 @@ class CObject(ObjectDescription):
        self.state.document.note_explicit_target(signode)
        inv = self.env.domaindata['c']['objects']
        if name in inv:
            self.env.warn(
                self.env.docname,
            self.state_machine.reporter.warning(
                'duplicate C object description of %s, ' % name +
                'other instance in ' + self.env.doc2path(inv[name][0]),
                self.lineno)
                line=self.lineno)
        inv[name] = (self.env.docname, self.objtype)

        indextext = self.get_index_text(name)

@@ -981,8 +981,7 @@ class CPPObject(ObjectDescription):
            rv = self.parse_definition(parser)
            parser.assert_end()
        except DefinitionError, e:
            self.env.warn(self.env.docname,
                          e.description, self.lineno)
            self.state_machine.reporter.warning(e.description, line=self.lineno)
            raise ValueError
        self.describe_signature(signode, rv)

@@ -1128,8 +1127,8 @@ class CPPCurrentNamespace(Directive):
                prefix = parser.parse_type()
                parser.assert_end()
            except DefinitionError, e:
                self.env.warn(self.env.docname,
                              e.description, self.lineno)
                self.state_machine.reporter.warning(e.description,
                                                    line=self.lineno)
            else:
                env.temp_data['cpp:prefix'] = prefix
        return []
@@ -1203,9 +1202,7 @@ class CPPDomain(Domain):
            if not parser.eof or expr is None:
                raise DefinitionError('')
        except DefinitionError:
            refdoc = node.get('refdoc', fromdocname)
            env.warn(refdoc, 'unparseable C++ definition: %r' % target,
                     node.line)
            env.warn_node('unparseable C++ definition: %r' % target, node)
            return None

        parent = node.get('cpp:parent', None)

@@ -86,12 +86,11 @@ class JSObject(ObjectDescription):
        self.state.document.note_explicit_target(signode)
        objects = self.env.domaindata['js']['objects']
        if fullname in objects:
            self.env.warn(
                self.env.docname,
            self.state_machine.reporter.warning(
                'duplicate object description of %s, ' % fullname +
                'other instance in ' +
                self.env.doc2path(objects[fullname][0]),
                self.lineno)
                line=self.lineno)
        objects[fullname] = self.env.docname, self.objtype

        indextext = self.get_index_text(objectname, name_obj)

@@ -217,13 +217,12 @@ class PyObject(ObjectDescription):
        self.state.document.note_explicit_target(signode)
        objects = self.env.domaindata['py']['objects']
        if fullname in objects:
            self.env.warn(
                self.env.docname,
            self.state_machine.reporter.warning(
                'duplicate object description of %s, ' % fullname +
                'other instance in ' +
                self.env.doc2path(objects[fullname][0]) +
                ', use :noindex: for one of them',
                self.lineno)
                line=self.lineno)
        objects[fullname] = (self.env.docname, self.objtype)

        indextext = self.get_index_text(modname, name_cls)
@@ -677,11 +676,10 @@ class PythonDomain(Domain):
        if not matches:
            return None
        elif len(matches) > 1:
            env.warn(fromdocname,
                     'more than one target found for cross-reference '
                     '%r: %s' % (target,
                                 ', '.join(match[0] for match in matches)),
                     node.line)
            env.warn_node(
                'more than one target found for cross-reference '
                '%r: %s' % (target, ', '.join(match[0] for match in matches)),
                node)
        name, obj = matches[0]

        if obj[1] == 'module':

@@ -38,12 +38,10 @@ class ReSTMarkup(ObjectDescription):
        objects = self.env.domaindata['rst']['objects']
        key = (self.objtype, name)
        if key in objects:
            self.env.warn(self.env.docname,
                          'duplicate description of %s %s, ' %
                          (self.objtype, name) +
                          'other instance in ' +
                          self.env.doc2path(objects[key]),
                          self.lineno)
            self.state_machine.reporter.warning(
                'duplicate description of %s %s, ' % (self.objtype, name) +
                'other instance in ' + self.env.doc2path(objects[key]),
                line=self.lineno)
        objects[key] = self.env.docname
        indextext = self.get_index_text(self.objtype, name)
        if indextext:

@@ -472,9 +472,8 @@ class StandardDomain(Domain):
                # link and object descriptions
                continue
            if name in labels:
                env.warn(docname, 'duplicate label %s, ' % name +
                         'other instance in ' + env.doc2path(labels[name][0]),
                         node.line)
                env.warn_node('duplicate label %s, ' % name + 'other instance '
                              'in ' + env.doc2path(labels[name][0]), node)
            anonlabels[name] = docname, labelid
            if node.tagname == 'section':
                sectname = clean_astext(node[0])  # node[0] == title node

@@ -26,7 +26,8 @@ from itertools import izip, groupby
from docutils import nodes
from docutils.io import FileInput, NullOutput
from docutils.core import Publisher
from docutils.utils import Reporter, relative_path, new_document
from docutils.utils import Reporter, relative_path, new_document, \
     get_source_line
from docutils.readers import standalone
from docutils.parsers.rst import roles, directives, Parser as RSTParser
from docutils.parsers.rst.languages import en as english
@@ -38,7 +39,8 @@ from docutils.transforms.parts import ContentsFilter
from sphinx import addnodes
from sphinx.util import url_re, get_matching_docs, docname_join, split_into, \
     FilenameUniqDict
from sphinx.util.nodes import clean_astext, make_refnode, extract_messages
from sphinx.util.nodes import clean_astext, make_refnode, extract_messages, \
     WarningStream
from sphinx.util.osutil import movefile, SEP, ustrftime
from sphinx.util.matching import compile_matchers
from sphinx.util.pycompat import all, class_types
@@ -86,14 +88,6 @@ versioning_conditions = {
}


class WarningStream(object):
    def __init__(self, warnfunc):
        self.warnfunc = warnfunc
    def write(self, text):
        if text.strip():
            self.warnfunc(text.strip(), None, '')


class NoUri(Exception):
    """Raised by get_relative_uri if there is no URI available."""
    pass
@@ -414,6 +408,9 @@ class BuildEnvironment:
        # strange argument order is due to backwards compatibility
        self._warnfunc(msg, (docname, lineno))

    def warn_node(self, msg, node):
        self._warnfunc(msg, '%s:%s' % get_source_line(node))

    def clear_doc(self, docname):
        """Remove all traces of a source file in the inventory."""
        if docname in self.all_docs:
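
Note (not part of the diff): the hunk above introduces BuildEnvironment.warn_node(),
which takes the warning location from the node itself via docutils'
get_source_line() instead of requiring callers to pass a docname and line number.
A minimal, self-contained illustration of the location string it reports; the
paragraph node and its source/line values are invented:

    from docutils import nodes
    from docutils.utils import get_source_line

    node = nodes.paragraph()
    # what set_source_info()/set_role_source_info() would normally record
    node.source, node.line = 'intro.rst', 42

    print('%s:%s' % get_source_line(node))   # -> intro.rst:42
    # warn_node(msg, node) hands exactly this "source:line" string, plus msg,
    # to the environment's warning callback.
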
@@ -898,8 +895,8 @@ class BuildEnvironment:
            rel_filename, filename = self.relfn2path(targetname, docname)
            self.dependencies.setdefault(docname, set()).add(rel_filename)
            if not os.access(filename, os.R_OK):
                self.warn(docname, 'download file not readable: %s' % filename,
                          getattr(node, 'line', None))
                self.warn_node('download file not readable: %s' % filename,
                               node)
                continue
            uniquename = self.dlfiles.add_file(docname, filename)
            node['filename'] = uniquename
@@ -914,8 +911,7 @@
            node['candidates'] = candidates = {}
            imguri = node['uri']
            if imguri.find('://') != -1:
                self.warn(docname, 'nonlocal image URI found: %s' % imguri,
                          node.line)
                self.warn_node('nonlocal image URI found: %s' % imguri, node)
                candidates['?'] = imguri
                continue
            rel_imgpath, full_imgpath = self.relfn2path(imguri, docname)
@@ -936,9 +932,8 @@
                    finally:
                        f.close()
                except (OSError, IOError), err:
                    self.warn(docname, 'image file %s not '
                              'readable: %s' % (filename, err),
                              node.line)
                    self.warn_node('image file %s not readable: %s' %
                                   (filename, err), node)
                if imgtype:
                    candidates['image/' + imgtype] = new_imgpath
            else:
@@ -948,8 +943,8 @@
            for imgpath in candidates.itervalues():
                self.dependencies.setdefault(docname, set()).add(imgpath)
                if not os.access(path.join(self.srcdir, imgpath), os.R_OK):
                    self.warn(docname, 'image file not readable: %s' % imgpath,
                              node.line)
                    self.warn_node('image file not readable: %s' % imgpath,
                                   node)
                    continue
                self.images.add_file(docname, imgpath)

@@ -1071,9 +1066,9 @@
        for node in document.traverse(nodes.citation):
            label = node[0].astext()
            if label in self.citations:
                self.warn(docname, 'duplicate citation %s, ' % label +
                          'other instance in %s' % self.doc2path(
                          self.citations[label][0]), node.line)
                self.warn_node('duplicate citation %s, ' % label +
                               'other instance in %s' % self.doc2path(
                               self.citations[label][0]), node)
            self.citations[label] = (docname, node['ids'][0])

    def note_toctree(self, docname, toctreenode):
@@ -1353,15 +1348,15 @@
                        refnode.children = [nodes.Text(title)]
                    if not toc.children:
                        # empty toc means: no titles will show up in the toctree
                        self.warn(docname,
                                  'toctree contains reference to document '
                                  '%r that doesn\'t have a title: no link '
                                  'will be generated' % ref, toctreenode.line)
                        self.warn_node(
                            'toctree contains reference to document %r that '
                            'doesn\'t have a title: no link will be generated'
                            % ref, toctreenode)
                except KeyError:
                    # this is raised if the included file does not exist
                    self.warn(docname, 'toctree contains reference to '
                              'nonexisting document %r' % ref,
                              toctreenode.line)
                    self.warn_node(
                        'toctree contains reference to nonexisting document %r'
                        % ref, toctreenode)
                else:
                    # if titles_only is given, only keep the main title and
                    # sub-toctrees
@@ -1486,7 +1481,6 @@
                warn = False
            if not warn:
                return
            refdoc = node.get('refdoc', fromdoc)
            if domain and typ in domain.dangling_warnings:
                msg = domain.dangling_warnings[typ]
            elif typ == 'doc':
@@ -1498,15 +1492,15 @@
                      (node['refdomain'], typ)
            else:
                msg = '%s reference target not found: %%(target)s' % typ
            self.warn(refdoc, msg % {'target': target}, node.line)
            self.warn_node(msg % {'target': target}, node)

    def process_only_nodes(self, doctree, builder, fromdocname=None):
        for node in doctree.traverse(addnodes.only):
            try:
                ret = builder.tags.eval_condition(node['expr'])
            except Exception, err:
                self.warn(fromdocname, 'exception while evaluating only '
                          'directive expression: %s' % err, node.line)
                self.warn_node('exception while evaluating only '
                               'directive expression: %s' % err, node)
                node.replace_self(node.children)
            else:
                if ret:

@@ -30,6 +30,7 @@ from jinja2.sandbox import SandboxedEnvironment
from sphinx.ext.autosummary import import_by_name, get_documenter
from sphinx.jinja2glue import BuiltinTemplateLoader
from sphinx.util.osutil import ensuredir
from sphinx.util.inspect import safe_getattr

def main(argv=sys.argv):
    usage = """%prog [OPTIONS] SOURCEFILE ..."""
@@ -136,10 +137,14 @@ def generate_autosummary_docs(sources, output_dir=None, suffix='.rst',
                template = template_env.get_template('autosummary/base.rst')

        def get_members(obj, typ, include_public=[]):
            items = [
                name for name in dir(obj)
                if get_documenter(getattr(obj, name), obj).objtype == typ
            ]
            items = []
            for name in dir(obj):
                try:
                    documenter = get_documenter(safe_getattr(obj, name), obj)
                except AttributeError:
                    continue
                if documenter.objtype == typ:
                    items.append(name)
            public = [x for x in items
                      if x in include_public or not x.startswith('_')]
            return public, items

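Note (not part of the diff): the rewritten get_members() above is the #627 fix.
Attribute access can itself raise AttributeError (lazy attributes, properties,
some C extension members), and the old dir()-based list comprehension let that
exception escape and abort autosummary generation. A contrived, runnable sketch
of the failure mode; the Broken class is invented and plain getattr() stands in
for sphinx.util.inspect.safe_getattr():

    class Broken(object):
        @property
        def unavailable(self):
            # a property that blows up on access at documentation time
            raise AttributeError('cannot be resolved while autodocumenting')

    obj = Broken()
    members = []
    for name in dir(obj):
        try:
            value = getattr(obj, name)   # new code routes this through safe_getattr()
        except AttributeError:
            continue                     # skipped instead of crashing the run
        members.append(name)
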
@@ -23,6 +23,7 @@ from docutils import nodes
from docutils.parsers.rst import directives

from sphinx.builders import Builder
from sphinx.util.nodes import set_source_info
from sphinx.util.compat import Directive
from sphinx.util.console import bold

@@ -63,7 +64,7 @@ class TestDirective(Directive):
        else:
            groups = ['default']
        node = nodetype(code, code, testnodetype=self.name, groups=groups)
        node.line = self.lineno
        set_source_info(self, node)
        if test is not None:
            # only save if it differs from code
            node['test'] = test

@@ -36,10 +36,10 @@ def make_link_role(base_url, prefix):
        try:
            full_url = base_url % part
        except (TypeError, ValueError):
            env = inliner.document.settings.env
            env.warn(env.docname, 'unable to expand %s extlink with base '
                     'URL %r, please make sure the base contains \'%%s\' '
                     'exactly once' % (typ, base_url))
            inliner.reporter.warning(
                'unable to expand %s extlink with base URL %r, please make '
                'sure the base contains \'%%s\' exactly once'
                % (typ, base_url), line=lineno)
            full_url = base_url + part
        if not has_explicit_title:
            if prefix is None:

@@ -22,6 +22,7 @@

from docutils import nodes

from sphinx.util.nodes import set_source_info
from sphinx.util.compat import Directive


@@ -39,7 +40,7 @@ class IfConfig(Directive):
    def run(self):
        node = ifconfig()
        node.document = self.state.document
        node.line = self.lineno
        set_source_info(self, node)
        node['expr'] = self.arguments[0]
        self.state.nested_parse(self.content, self.content_offset,
                                node, match_titles=1)

@@ -159,7 +159,7 @@ def load_mappings(app):
            # new format
            name, (uri, inv) = key, value
            if not name.isalnum():
                env.warn(docname=None, msg='intersphinx identifier %r is not alphanumeric' % name)
                app.warn('intersphinx identifier %r is not alphanumeric' % name)
        else:
            # old format, no name
            name, uri, inv = None, key, value

@@ -12,6 +12,7 @@
from docutils import nodes, utils
from docutils.parsers.rst import directives

from sphinx.util.nodes import set_source_info
from sphinx.util.compat import Directive


@@ -72,7 +73,7 @@ class MathDirective(Directive):
        node['nowrap'] = 'nowrap' in self.options
        node['docname'] = self.state.document.settings.env.docname
        ret = [node]
        node.line = self.lineno
        set_source_info(self, node)
        if hasattr(self, 'src'):
            node.source = self.src
        if node['label']:

@@ -32,7 +32,7 @@ class OldCDirective(Directive):
    def run(self):
        env = self.state.document.settings.env
        if not env.app._oldcmarkup_warned:
            env.warn(env.docname, WARNING_MSG, self.lineno)
            self.state_machine.reporter.warning(WARNING_MSG, line=self.lineno)
            env.app._oldcmarkup_warned = True
        newname = 'c:' + self.name[1:]
        newdir = env.lookup_domain_element('directive', newname)[0]
@@ -46,7 +46,7 @@ def old_crole(typ, rawtext, text, lineno, inliner, options={}, content=[]):
    if not typ:
        typ = env.config.default_role
    if not env.app._oldcmarkup_warned:
        env.warn(env.docname, WARNING_MSG)
        inliner.reporter.warning(WARNING_MSG, line=lineno)
        env.app._oldcmarkup_warned = True
    newtyp = 'c:' + typ[1:]
    newrole = env.lookup_domain_element('role', newtyp)[0]

@@ -16,6 +16,7 @@ from docutils import nodes

from sphinx.locale import _
from sphinx.environment import NoUri
from sphinx.util.nodes import set_source_info
from sphinx.util.compat import Directive, make_admonition

class todo_node(nodes.Admonition, nodes.Element): pass
@@ -41,7 +42,7 @@ class Todo(Directive):
        ad = make_admonition(todo_node, self.name, [_('Todo')], self.options,
                             self.content, self.lineno, self.content_offset,
                             self.block_text, self.state, self.state_machine)
        ad[0].line = self.lineno
        set_source_info(self, ad[0])
        return [targetnode] + ad


@@ -61,6 +62,7 @@ def process_todos(app, doctree):
            targetnode = None
        env.todo_all_todos.append({
            'docname': env.docname,
            'source': node.source or env.doc2path(env.docname),
            'lineno': node.line,
            'todo': node.deepcopy(),
            'target': targetnode,
@@ -105,9 +107,9 @@ def process_todo_nodes(app, doctree, fromdocname):

    for todo_info in env.todo_all_todos:
        para = nodes.paragraph(classes=['todo-source'])
        filename = env.doc2path(todo_info['docname'], base=None)
        description = _('(The <<original entry>> is located in '
                        ' %s, line %d.)') % (filename, todo_info['lineno'])
                        ' %s, line %d.)') % \
                      (todo_info['source'], todo_info['lineno'])
        desc1 = description[:description.find('<<')]
        desc2 = description[description.find('>>')+2:]
        para += nodes.Text(desc1, desc1)

@@ -18,7 +18,8 @@ from docutils.parsers.rst import roles
from sphinx import addnodes
from sphinx.locale import _
from sphinx.util import ws_re
from sphinx.util.nodes import split_explicit_title, process_index_entry
from sphinx.util.nodes import split_explicit_title, process_index_entry, \
     set_role_source_info


generic_docroles = {
@@ -126,7 +127,7 @@ class XRefRole(object):
        refnode = self.nodeclass(rawtext, reftype=role, refdomain=domain,
                                 refexplicit=has_explicit_title)
        # we may need the line number for warnings
        refnode.line = lineno
        set_role_source_info(inliner, lineno, refnode)
        title, target = self.process_link(
            env, refnode, has_explicit_title, title, target)
        # now that the target and title are finally determined, set them
@@ -257,7 +258,7 @@ def emph_literal_role(typ, rawtext, text, lineno, inliner,
    return [retnode], []


_abbr_re = re.compile('\((.*)\)$')
_abbr_re = re.compile('\((.*)\)$', re.S)

def abbr_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
    text = utils.unescape(text)

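Note (not part of the diff): the re.S flag added to _abbr_re above is the
``abbr`` fix listed in the 1.0.8 notes: with DOTALL the explanation in
parentheses still matches when the role text was wrapped across source lines.
A standalone check (the sample text is invented):

    import re

    _abbr_re = re.compile(r'\((.*)\)$', re.S)

    text = 'LIFO (last-in,\nfirst-out)'
    match = _abbr_re.search(text)
    print(match.group(1))   # -> 'last-in,\nfirst-out'
    # without re.S the pattern finds no match at all, because '.' stops at '\n'
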
@@ -164,7 +164,7 @@ class IndexBuilder(object):
        self._mapping = {}
        # objtype -> index
        self._objtypes = {}
        # objtype index -> objname (localized)
        # objtype index -> (domain, type, objname (localized))
        self._objnames = {}
        # add language-specific SearchLanguage instance
        self.lang = languages[lang](options)
@@ -205,21 +205,27 @@ class IndexBuilder(object):
                    continue
                if prio < 0:
                    continue
                # XXX splitting at dot is kind of Python specific
                prefix, name = rpartition(fullname, '.')
                pdict = rv.setdefault(prefix, {})
                try:
                    i = otypes[domainname, type]
                    typeindex = otypes[domainname, type]
                except KeyError:
                    i = len(otypes)
                    otypes[domainname, type] = i
                    typeindex = len(otypes)
                    otypes[domainname, type] = typeindex
                    otype = domain.object_types.get(type)
                    if otype:
                        # use unicode() to fire translation proxies
                        onames[i] = unicode(domain.get_type_name(otype))
                        onames[typeindex] = (domainname, type,
                                             unicode(domain.get_type_name(otype)))
                    else:
                        onames[i] = type
                pdict[name] = (fn2index[docname], i, prio)
                        onames[typeindex] = (domainname, type, type)
                if anchor == fullname:
                    shortanchor = ''
                elif anchor == type + '-' + fullname:
                    shortanchor = '-'
                else:
                    shortanchor = anchor
                pdict[name] = (fn2index[docname], typeindex, prio, shortanchor)
        return rv

    def get_terms(self, fn2index):

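Note (not part of the diff): the get_objects() change above is the index half of
#727. Each entry now carries a fourth field, a compressed anchor, and objnames
stores (domain, type, localized name) so the JavaScript side (next hunks) can
rebuild the real anchor for custom object types. A schematic round trip with
invented sample values, not actual Sphinx output:

    objnames = {0: ('std', 'cmdoption', 'command line option')}

    def encode_shortanchor(anchor, fullname, type_):
        # the three cases written into pdict[name] above
        if anchor == fullname:
            return ''
        elif anchor == type_ + '-' + fullname:
            return '-'
        return anchor

    def decode_anchor(shortanchor, fullname, typeindex):
        # the inverse, as searchtools.js does before emitting '#' + anchor
        if shortanchor == '':
            return fullname
        elif shortanchor == '-':
            return objnames[typeindex][1] + '-' + fullname
        return shortanchor

    short = encode_shortanchor('cmdoption-build', 'build', 'cmdoption')
    assert short == '-'
    assert decode_anchor(short, 'build', 0) == 'cmdoption-build'
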
@@ -322,12 +322,13 @@ var Search = {
        var fullname = (prefix ? prefix + '.' : '') + name;
        if (fullname.toLowerCase().indexOf(object) > -1) {
          var match = objects[prefix][name];
          var objname = objnames[match[1]];
          var objname = objnames[match[1]][2];
          var title = titles[match[0]];
          // If more than one term searched for, we require other words to be
          // found in the name/title/description
          if (otherterms.length > 0) {
            var haystack = (prefix + ' ' + name + ' ' + objname + ' ' + title).toLowerCase();
            var haystack = (prefix + ' ' + name + ' ' +
                            objname + ' ' + title).toLowerCase();
            var allfound = true;
            for (var i = 0; i < otherterms.length; i++) {
              if (haystack.indexOf(otherterms[i]) == -1) {
@@ -340,9 +341,12 @@
            }
          }
          var descr = objname + _(', in ') + title;
          // XXX the generated anchors are not generally correct
          // XXX there may be custom prefixes
          result = [filenames[match[0]], fullname, '#'+fullname, descr];
          anchor = match[3];
          if (anchor == '')
            anchor = fullname;
          else if (anchor == '-')
            anchor = objnames[match[1]][1] + '-' + fullname;
          result = [filenames[match[0]], fullname, '#'+anchor, descr];
          switch (match[2]) {
            case 1: objectResults.push(result); break;
            case 0: importantResults.push(result); break;

@@ -12,12 +12,25 @@
import re

from docutils import nodes
from docutils.statemachine import StateMachine

from sphinx import addnodes
from sphinx.locale import pairindextypes
from sphinx.util.pycompat import class_types


class WarningStream(object):

    def __init__(self, warnfunc):
        self.warnfunc = warnfunc
        self._re = re.compile(r'\((DEBUG|INFO|WARNING|ERROR|SEVERE)/[0-4]\)')

    def write(self, text):
        text = text.strip()
        if text:
            self.warnfunc(self._re.sub(r'\1:', text), None, '')


# \x00 means the "<" was backslash-escaped
explicit_title_re = re.compile(r'^(.+?)\s*(?<!\x00)<(.*?)>$', re.DOTALL)
caption_ref_re = explicit_title_re    # b/w compat alias
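
Note (not part of the diff): WarningStream moved here from sphinx/environment.py
(removed in an earlier hunk) and now rewrites docutils' "(LEVEL/N)" prefixes to
plain "LEVEL:" text, which is what the updated test expectations at the end of
this diff look for. A standalone illustration with an invented message:

    import re

    # the same pattern WarningStream compiles above
    level_re = re.compile(r'\((DEBUG|INFO|WARNING|ERROR|SEVERE)/[0-4]\)')

    raw = "index.rst:10: (WARNING/2) Explicit markup ends without a blank line."
    print(level_re.sub(r'\1:', raw))
    # -> index.rst:10: WARNING: Explicit markup ends without a blank line.
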
@@ -162,6 +175,21 @@ def make_refnode(builder, fromdocname, todocname, targetid, child, title=None):
    node.append(child)
    return node


if hasattr(StateMachine, 'get_source_and_line'):
    def set_source_info(directive, node):
        node.source, node.line = \
            directive.state_machine.get_source_and_line(directive.lineno)
    def set_role_source_info(inliner, lineno, node):
        node.source, node.line = \
            inliner.reporter.locator(lineno)
else:
    # docutils <= 0.6 compatibility
    def set_source_info(directive, node):
        node.line = directive.lineno
    def set_role_source_info(inliner, lineno, node):
        node.line = lineno

# monkey-patch Node.traverse to get more speed
# traverse() is called so many times during a build that it saves
# on average 20-25% overall build time!

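Note (not part of the diff): set_source_info() and set_role_source_info() above
are what the rest of this commit substitutes for bare ``node.line = self.lineno``
assignments; on docutils >= 0.7 they also record the source file, which is what
makes the #648 line numbers come out right, including for included files. A
minimal sketch of the calling pattern in a directive; the NoteBox directive is
hypothetical, only the imports mirror those used elsewhere in this commit:

    from docutils import nodes
    from sphinx.util.compat import Directive
    from sphinx.util.nodes import set_source_info

    class NoteBox(Directive):
        has_content = True

        def run(self):
            node = nodes.admonition()
            # previously: node.line = self.lineno
            set_source_info(self, node)   # sets node.source and node.line
            self.state.nested_parse(self.content, self.content_offset, node)
            return [node]
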
@@ -33,12 +33,12 @@ html_warnfile = StringIO()

ENV_WARNINGS = """\
%(root)s/autodoc_fodder.py:docstring of autodoc_fodder\\.MarkupError:2: \
\\(WARNING/2\\) Explicit markup ends without a blank line; unexpected \
WARNING: Explicit markup ends without a blank line; unexpected \
unindent\\.\\n?
%(root)s/images.txt:9: WARNING: image file not readable: foo.png
%(root)s/images.txt:23: WARNING: nonlocal image URI found: \
http://www.python.org/logo.png
%(root)s/includes.txt:\\d*: \\(WARNING/2\\) Encoding 'utf-8-sig' used for \
%(root)s/includes.txt:\\d*: WARNING: Encoding 'utf-8-sig' used for \
reading included file u'.*?wrongenc.inc' seems to be wrong, try giving an \
:encoding: option\\n?
%(root)s/includes.txt:4: WARNING: download file not readable: .*?nonexisting.png