merge in trunk

commit d0e0acaaa1
Georg Brandl, 2009-01-10 20:46:13 +01:00
9 changed files with 37 additions and 8 deletions

View File

@@ -16,6 +16,7 @@ recursive-include sphinx/themes *
 recursive-include sphinx/locale *
 recursive-include tests *
 recursive-include utils *
+include sphinx/pycode/Grammar.txt
 recursive-include doc *
 prune doc/_build

View File

@@ -135,12 +135,30 @@ directive.
 .. directive:: autofunction
                autodata
                automethod
                autoattribute

    These work exactly like :dir:`autoclass` etc., but do not offer the options
    used for automatic member documentation.

+   For module data members and class attributes, documentation can either be put
+   into a special-formatted comment *before* the attribute definition, or in a
+   docstring *after* the definition.  This means that in the following class
+   definition, both attributes can be autodocumented::
+
+      class Foo:
+          """Docstring for class Foo."""
+
+          #: Doc comment for attribute Foo.bar.
+          bar = 1
+
+          baz = 2
+          """Docstring for attribute Foo.baz."""
+
+   .. versionchanged:: 0.6
+      :dir:`autodata` and :dir:`autoattribute` can now extract docstrings.
+
 .. note::

    If you document decorated functions or methods, keep in mind that autodoc
View File

@@ -139,7 +139,7 @@ class Config(object):
         if name not in self.values:
             raise AttributeError('No such config value: %s' % name)
         default = self.values[name][0]
-        if callable(default):
+        if hasattr(default, '__call__'):
             return default(self)
         return default
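
The ``callable()`` builtin was removed in Python 3.0 (it only came back in 3.2), so checking for a ``__call__`` attribute is the portable way to decide whether a config default should be called with the config object or used as-is. A minimal illustration, with made-up defaults rather than real Sphinx config values::

    def lazy_default(config):            # a computed default, called later
        return ['generated', 'value']

    static_default = 'index'             # a plain default value

    for default in (lazy_default, static_default):
        if hasattr(default, '__call__'):     # works on Python 2 and 3
            print(default(None))
        else:
            print(default)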

View File

@@ -541,7 +541,7 @@ class BuildEnvironment:
             doctree = pub.document
         except UnicodeError, err:
             from sphinx.application import SphinxError
-            raise SphinxError(err.message)
+            raise SphinxError(str(err))
         self.filter_messages(doctree)
         self.process_dependencies(docname, doctree)
         self.process_images(docname, doctree)
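
``err.message`` is deprecated from Python 2.6 and no longer exists in Python 3, while ``str(err)`` renders the exception text on every version, so the ``SphinxError`` raised here still carries the original description. A small stand-alone illustration (not Sphinx code)::

    try:
        u'\xe4'.encode('ascii')
    except UnicodeError as err:
        # str(err) is portable; err.message would break on Python 3
        print('encoding problem: %s' % str(err))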

View File

@@ -166,7 +166,7 @@ def between(marker, what=None, keepempty=False):
 def isdescriptor(x):
     """Check if the object is some kind of descriptor."""
     for item in '__get__', '__set__', '__delete__':
-        if callable(getattr(x, item, None)):
+        if hasattr(getattr(x, item, None), '__call__'):
             return True
     return False
@@ -380,6 +380,8 @@ class RstGenerator(object):
         # try to also get a source code analyzer for attribute docs
         try:
             analyzer = ModuleAnalyzer.for_module(mod)
+            # parse right now, to get PycodeErrors on parsing
+            analyzer.parse()
         except PycodeError, err:
             # no source file -- e.g. for builtin and C modules
             analyzer = None
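
Calling ``analyzer.parse()`` right away moves any parse failure into the existing ``except PycodeError`` clause, so a module whose source cannot be analyzed simply falls back to ``analyzer = None`` instead of failing later when attribute docs are looked up. The same fail-early, fall-back shape in isolation (the names below are stand-ins, not Sphinx API)::

    class AnalysisError(Exception):
        pass

    def load_analyzer(source):
        """Stand-in for ModuleAnalyzer.for_module() followed by parse()."""
        if 'def ' not in source:
            raise AnalysisError('nothing to analyze')
        return {'functions': source.count('def ')}

    def document(source):
        try:
            analyzer = load_analyzer(source)   # fail here, where we can recover
        except AnalysisError:
            analyzer = None                    # no source info, but keep going
        return analyzer

    print(document('def f(): pass'))   # {'functions': 1}
    print(document('x = 1'))           # None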

View File

@@ -210,7 +210,10 @@ class ModuleAnalyzer(object):
         if self.parsetree is not None:
             return
         self.tokenize()
-        self.parsetree = pydriver.parse_tokens(self.tokens)
+        try:
+            self.parsetree = pydriver.parse_tokens(self.tokens)
+        except parse.ParseError, err:
+            raise PycodeError('parsing failed', err)
         # find the source code encoding
         encoding = sys.getdefaultencoding()
         comments = self.parsetree.get_prefix()
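
Wrapping ``parse.ParseError`` in ``PycodeError`` means callers, such as the autodoc change above, only have to catch one exception type no matter what went wrong inside pycode. The translation pattern on its own, with placeholder classes::

    class PycodeError(Exception):
        pass

    class LowLevelParseError(Exception):       # stands in for parse.ParseError
        pass

    def parse_tokens(tokens):
        raise LowLevelParseError('unexpected token %r' % (tokens[0],))

    def parse(tokens):
        try:
            return parse_tokens(tokens)
        except LowLevelParseError as err:
            # keep the original error as an argument for diagnostics
            raise PycodeError('parsing failed', err)

    try:
        parse(['@@'])
    except PycodeError as err:
        print(err.args[0])                     # 'parsing failed'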

View File

@@ -274,12 +274,17 @@ def generate_tokens(readline):
             line = readline()
         except StopIteration:
             line = ''
+        # if we are not at the end of the file make sure the
+        # line ends with a newline because the parser depends
+        # on that.
+        if line:
+            line = line.rstrip() + '\n'
         lnum = lnum + 1
         pos, max = 0, len(line)

         if contstr:                            # continued string
             if not line:
-                raise TokenError, ("EOF in multi-line string", strstart)
+                raise TokenError("EOF in multi-line string", strstart)
             endmatch = endprog.match(line)
             if endmatch:
                 pos = end = endmatch.end(0)
@@ -335,7 +340,7 @@ def generate_tokens(readline):
         else:                                  # continued statement
             if not line:
-                raise TokenError, ("EOF in multi-line statement", (lnum, 0))
+                raise TokenError("EOF in multi-line statement", (lnum, 0))
             continued = 0

         while pos < max:
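
The new block strips trailing whitespace and re-appends a single newline to every non-empty line, so the parser always sees newline-terminated lines, which (as the added comment says) it depends on; sources without a final newline would otherwise trip it up. The same normalization as a tiny helper, illustrative only::

    def normalized_lines(text):
        """Yield source lines, each guaranteed to end with a newline."""
        for line in text.splitlines():
            yield line.rstrip() + '\n'

    source = "def f():\n    return 1"          # note: no trailing newline
    print(list(normalized_lines(source)))

The ``raise`` changes in the same hunks drop the old two-expression statement form ``raise TokenError, (...)``, which is a syntax error on Python 3; calling the exception class directly behaves identically on Python 2.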

View File

@@ -27,7 +27,7 @@ def make_admonition(node_class, name, arguments, options, content, lineno,
         textnodes, messages = state.inline_text(title_text, lineno)
         admonition_node += nodes.title(title_text, '', *textnodes)
         admonition_node += messages
-        if options.has_key('class'):
+        if 'class' in options:
             classes = options['class']
         else:
             classes = ['admonition-' + nodes.make_id(title_text)]
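
``dict.has_key()`` is gone in Python 3, and the ``in`` operator has long been the preferred spelling on Python 2 as well; it behaves the same on the directive options mapping used here. A two-line check with a made-up options dict::

    options = {'class': ['my-admonition']}
    print('class' in options)                  # True
    print('name' in options)                   # False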

View File

@@ -129,7 +129,7 @@ def test_html(app):
         for path, check in paths.iteritems():
             nodes = list(etree.findall(path))
             assert nodes != []
-            if callable(check):
+            if hasattr(check, '__call__'):
                 check(nodes)
             elif not check:
                 # only check for node presence