Merge remote-tracking branch 'upstream/master'

Ignacio Fdez. Galván 2015-05-13 10:38:28 +02:00
commit c0bc64f9fe
12 changed files with 176 additions and 36 deletions

View File

@@ -10,6 +10,10 @@ Features added
Bugs fixed
----------
* #1789: ``:pyobject:`` option of ``literalinclude`` directive includes following
lines after class definitions
* #1790: ``literalinclude`` strips empty lines at the head and tail
Documentation
-------------

View File

@@ -524,7 +524,7 @@ The C++ domain (name **cpp**) supports documenting C++ projects.
The following directives are available. All declarations can start with
a visibility statement (``public``, ``private`` or ``protected``).
.. rst:directive:: .. cpp:class:: class speicifer
.. rst:directive:: .. cpp:class:: class specifier
Describe a class/struct, possibly with specification of inheritance, e.g.,::

View File

@@ -302,15 +302,6 @@ class LiteralInclude(Directive):
res.append(line)
lines = res
if 'lineno-match' in self.options:
# handle that docutils remove preceding lines which only contains
# line separation.
for line in lines:
# check if line contains anything else than line separation.
if line and line.splitlines()[0]:
break
linenostart += 1
prepend = self.options.get('prepend')
if prepend:
lines.insert(0, prepend + '\n')
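
The removed block compensated for leading blank lines being stripped before highlighting (the old comment attributes this to docutils) by advancing ``linenostart`` past them; with the ``stripnl=False`` lexer change later in this commit those lines are preserved, so the offset correction is presumably no longer wanted. A minimal sketch of what the removed loop did (helper name made up):

    def adjust_linenostart(lines, linenostart):
        # advance past leading lines that contain only a line separator
        for line in lines:
            if line and line.splitlines()[0]:
                break
            linenostart += 1
        return linenostart

    # e.g. two leading blank lines used to shift the first reported line to 3
    assert adjust_linenostart(['\n', '\n', 'x = 1\n'], 1) == 3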

View File

@@ -23,8 +23,8 @@ from sphinx.util.pycompat import UnicodeMixin
_directive_regex = re.compile(r'\.\. \S+::')
_google_untyped_arg_regex = re.compile(r'\s*(.+?)\s*:\s*(.*)')
_google_typed_arg_regex = re.compile(r'\s*(.+?)\s*\(\s*(.+?)\s*\)\s*:\s*(.*)')
_google_untyped_arg_regex = re.compile(r'(.+)\s*(?<!:):(?!:)\s*(.*)')
_google_typed_arg_regex = re.compile(r'(.+)\((.+)\)\s*(?<!:):(?!:)\s*(.*)')
class GoogleDocstring(UnicodeMixin):
@@ -207,15 +207,15 @@ class GoogleDocstring(UnicodeMixin):
if parse_type:
match = _google_typed_arg_regex.match(line)
if match:
_name = match.group(1)
_type = match.group(2)
_desc = match.group(3)
_name = match.group(1).strip()
_type = match.group(2).strip()
_desc = match.group(3).strip()
if not match:
match = _google_untyped_arg_regex.match(line)
if match:
_name = match.group(1)
_desc = match.group(2)
_name = match.group(1).strip()
_desc = match.group(2).strip()
if _name[:2] == '**':
_name = r'\*\*'+_name[2:]
@@ -244,14 +244,14 @@ class GoogleDocstring(UnicodeMixin):
_name, _type, _desc = '', '', lines
match = _google_typed_arg_regex.match(lines[0])
if match:
_name = match.group(1)
_type = match.group(2)
_desc = match.group(3)
_name = match.group(1).strip()
_type = match.group(2).strip()
_desc = match.group(3).strip()
else:
match = _google_untyped_arg_regex.match(lines[0])
if match:
_type = match.group(1)
_desc = match.group(2)
_type = match.group(1).strip()
_desc = match.group(2).strip()
if match:
lines[0] = _desc
_desc = lines
@@ -720,6 +720,8 @@ class NumpyDocstring(GoogleDocstring):
line = next(self._line_iter)
if parse_type:
_name, _, _type = line.partition(':')
if not _name:
_type = line
else:
_name, _type = line, ''
_name, _type = _name.strip(), _type.strip()
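
The new Google-style patterns drop the non-greedy ``\s*(.+?)\s*`` captures in favour of greedy groups plus explicit ``.strip()`` calls, and the separator becomes ``(?<!:):(?!:)``: a colon that is neither preceded nor followed by another colon, so ``::`` markers and ``:role:`` prefixes inside the name or type are not mistaken for the field separator (this is what ``test_colon_in_return_type`` and ``test_kwargs_in_arguments`` below exercise). The NumPy-style ``partition(':')`` tweak likewise keeps the whole line as the type when it starts with a colon, e.g. a ``:py:class:`` cross-reference. A standalone check of the new regexes (sample lines made up):

    import re

    _google_untyped_arg_regex = re.compile(r'(.+)\s*(?<!:):(?!:)\s*(.*)')
    _google_typed_arg_regex = re.compile(r'(.+)\((.+)\)\s*(?<!:):(?!:)\s*(.*)')

    m = _google_typed_arg_regex.match('  arg1 (int): description of arg1')
    print([g.strip() for g in m.groups()])
    # ['arg1', 'int', 'description of arg1'] -- strip() now handles the whitespace

    m = _google_untyped_arg_regex.match(
        ':py:class:`~.module.submodule.SomeClass`: an example instance')
    print(m.group(1).strip())  # :py:class:`~.module.submodule.SomeClass`
    print(m.group(2).strip())  # an example instance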

View File

@@ -35,12 +35,12 @@ from pygments.util import ClassNotFound
from sphinx.pygments_styles import SphinxStyle, NoneStyle
lexers = dict(
none = TextLexer(),
python = PythonLexer(),
pycon = PythonConsoleLexer(),
pycon3 = PythonConsoleLexer(python3=True),
rest = RstLexer(),
c = CLexer(),
none = TextLexer(stripnl=False),
python = PythonLexer(stripnl=False),
pycon = PythonConsoleLexer(stripnl=False),
pycon3 = PythonConsoleLexer(python3=True, stripnl=False),
rest = RstLexer(stripnl=False),
c = CLexer(stripnl=False),
)
for _lexer in lexers.values():
_lexer.add_filter('raiseonerror')
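
Pygments lexers strip leading and trailing newlines from their input by default (``stripnl`` defaults to ``True``); constructing every built-in lexer with ``stripnl=False`` is what keeps the head and tail empty lines of a ``literalinclude`` block (#1790). A quick standalone check, assuming Pygments is installed:

    from pygments.lexers import PythonLexer

    code = '\n\nx = 1\n\n'
    default = ''.join(value for _, value in PythonLexer().get_tokens(code))
    keepnl = ''.join(value for _, value in PythonLexer(stripnl=False).get_tokens(code))

    print(repr(default))  # 'x = 1\n'       -- blank lines dropped before lexing
    print(repr(keepnl))   # '\n\nx = 1\n\n' -- blank lines preserved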

View File

@@ -10,6 +10,7 @@
"""
from __future__ import print_function
import re
import sys
from os import path
@@ -45,6 +46,8 @@ number2name.update(token.tok_name)
_eq = nodes.Leaf(token.EQUAL, '=')
emptyline_re = re.compile('^\s*(#.*)?$')
class AttrDocVisitor(nodes.NodeVisitor):
"""
@@ -289,8 +292,9 @@ class ModuleAnalyzer(object):
indent = 0
defline = False
expect_indent = False
emptylines = 0
def tokeniter(ignore = (token.COMMENT, token.NL)):
def tokeniter(ignore = (token.COMMENT,)):
for tokentup in self.tokens:
if tokentup[0] not in ignore:
yield tokentup
@@ -303,7 +307,7 @@ class ModuleAnalyzer(object):
dtype, fullname, startline, _ = stack.pop()
endline = epos[0]
namespace.pop()
result[fullname] = (dtype, startline, endline)
result[fullname] = (dtype, startline, endline - emptylines)
expect_indent = False
if tok in ('def', 'class'):
name = next(tokeniter)[1]
@@ -322,7 +326,7 @@ class ModuleAnalyzer(object):
dtype, fullname, startline, _ = stack.pop()
endline = spos[0]
namespace.pop()
result[fullname] = (dtype, startline, endline)
result[fullname] = (dtype, startline, endline - emptylines)
elif type == token.NEWLINE:
# if this line contained a definition, expect an INDENT
# to start the suite; if there is no such INDENT
@@ -330,6 +334,13 @@ class ModuleAnalyzer(object):
if defline:
defline = False
expect_indent = True
emptylines = 0
elif type == token.NL:
# count up if line is empty or comment only
if emptyline_re.match(line):
emptylines += 1
else:
emptylines = 0
self.tags = result
return result
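
``emptyline_re`` matches blank, whitespace-only and comment-only lines; by keeping ``token.NL`` in the token stream and counting such lines in ``emptylines``, the analyzer can subtract them from the recorded end line, so a ``def``/``class`` tag no longer covers trailing comments or blank lines (#1789). A small check of the pattern added above:

    import re

    emptyline_re = re.compile(r'^\s*(#.*)?$')

    for line in ['', '    ', '# comment after Bar class definition', 'def bar(): pass']:
        print(repr(line), bool(emptyline_re.match(line)))
    # '' True / '    ' True / '# comment ...' True / 'def bar(): pass' False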

View File

@@ -1542,7 +1542,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
# most probably a parsed-literal block -- don't highlight
self.body.append('\\begin{alltt}\n')
else:
code = node.astext().rstrip('\n')
code = node.astext()
lang = self.hlsettingstack[-1][0]
linenos = code.count('\n') >= self.hlsettingstack[-1][1] - 1
highlight_args = node.get('highlight_args', {})
@@ -1573,7 +1573,6 @@ class LaTeXTranslator(nodes.NodeVisitor):
self.table.has_verbatim = True
# get consistent trailer
hlcode = hlcode.rstrip()[:-14] # strip \end{Verbatim}
hlcode = hlcode.rstrip() + '\n'
self.body.append('\n' + hlcode + '\\end{%sVerbatim}\n' %
(self.table and 'Original' or ''))
raise nodes.SkipNode
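
``node.astext()`` is no longer ``rstrip('\n')``-ed and the extra ``hlcode = hlcode.rstrip() + '\n'`` line is removed, so trailing blank lines of a literal block now reach Pygments and survive into the generated ``Verbatim`` environment (asserted by ``test_literalinclude_file_whole_of_emptyline`` below). A rough string-level sketch of the trailer handling that remains, using a made-up highlighted snippet:

    # hypothetical Pygments LaTeX output whose last two source lines are blank
    hlcode = '\\begin{Verbatim}[numbers=left]\nx = 1\n\n\n\\end{Verbatim}\n'

    # drop the trailing newline and the 14-character \end{Verbatim} from Pygments
    body = hlcode.rstrip()[:-14]
    print(body.endswith('x = 1\n\n\n'))     # True: the blank lines are still there
    print(body.rstrip().endswith('x = 1'))  # True: what the removed line used to do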

View File

@@ -0,0 +1,3 @@

View File

@@ -15,3 +15,6 @@ Literal Includes with Line Numbers Matching
:language: python
:start-after: pass
:lineno-match:
.. literalinclude:: empty.inc
:lineno-match:

View File

@@ -10,4 +10,5 @@ class Bar:
def baz():
pass
# comment after Bar class definition
def bar(): pass
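
The trailing comment and the new ``def bar(): pass`` exercise the pycode fix above: per #1789, ``:pyobject: Bar`` used to include the lines following the class definition. A hedged way to inspect the recorded tags (file and module names here are placeholders, not how the test locates this fixture):

    from sphinx.pycode import ModuleAnalyzer

    analyzer = ModuleAnalyzer.for_file('target.py', 'target')
    tags = analyzer.find_tags()
    # tags['Bar'] is ('class', startline, endline); with the emptylines fix the
    # endline stops right after ``def baz(): pass`` instead of after the comment.
    print(tags['Bar'])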

View File

@@ -116,7 +116,8 @@ def test_literal_include_linenos(app, status, warning):
'10\n'
'11\n'
'12\n'
'13</pre></div></td>')
'13\n'
'14</pre></div></td>')
assert linenos in html
@@ -138,7 +139,8 @@ def test_literal_include_lineno_start(app, status, warning):
'209\n'
'210\n'
'211\n'
'212</pre></div></td>')
'212\n'
'213</pre></div></td>')
assert linenos in html
@@ -156,6 +158,7 @@ def test_literal_include_lineno_match(app, status, warning):
lines = (
'<td class="linenos"><div class="linenodiv"><pre>'
'5\n'
'6\n'
'7\n'
'8\n'
@@ -164,14 +167,29 @@
start_after = (
'<td class="linenos"><div class="linenodiv"><pre>'
' 8\n'
' 9\n'
'10\n'
'11\n'
'12\n'
'13</pre></div></td>')
'13\n'
'14</pre></div></td>')
assert start_after in html
@with_app('latex', testroot='directive-code')
def test_literalinclude_file_whole_of_emptyline(app, status, warning):
app.builder.build_all()
latex = (app.outdir / 'Python.tex').text()
includes = (
'\\begin{Verbatim}[commandchars=\\\\\\{\\},numbers=left,firstnumber=1,stepnumber=1]\n'
'\n'
'\n'
'\n'
'\\end{Verbatim}\n')
assert includes in latex
@with_app('html', testroot='directive-code')
def test_literalinclude_caption_html(app, status, warning):
app.builder.build('index')

View File

@@ -326,6 +326,71 @@ Attributes:
"""
self.assertEqual(expected, actual)
def test_code_block_in_returns_section(self):
docstring = """
Returns:
foobar: foo::
codecode
codecode
"""
expected = """
:returns: foo::
codecode
codecode
:rtype: foobar
"""
actual = str(GoogleDocstring(docstring))
self.assertEqual(expected, actual)
def test_colon_in_return_type(self):
docstring = """Example property.
Returns:
:py:class:`~.module.submodule.SomeClass`: an example instance
if available, None if not available.
"""
expected = """Example property.
:returns: an example instance
if available, None if not available.
:rtype: :py:class:`~.module.submodule.SomeClass`
"""
actual = str(GoogleDocstring(docstring))
self.assertEqual(expected, actual)
def test_kwargs_in_arguments(self):
docstring = """Allows to create attributes binded to this device.
Some other paragraph.
Code sample for usage::
dev.bind(loopback=Loopback)
dev.loopback.configure()
Arguments:
**kwargs: name/class pairs that will create resource-managers
bound as instance attributes to this instance. See code
example above.
"""
expected = """Allows to create attributes binded to this device.
Some other paragraph.
Code sample for usage::
dev.bind(loopback=Loopback)
dev.loopback.configure()
:param \\*\\*kwargs: name/class pairs that will create resource-managers
bound as instance attributes to this instance. See code
example above.
"""
actual = str(GoogleDocstring(docstring))
self.assertEqual(expected, actual)
class NumpyDocstringTest(BaseDocstringTest):
docstrings = [(
@@ -588,3 +653,46 @@ numpy.multivariate_normal(mean, cov, shape=None, spam=None)
relationship
"""
self.assertEqual(expected, actual)
def test_colon_in_return_type(self):
docstring = """
Summary
Returns
-------
:py:class:`~my_mod.my_class`
an instance of :py:class:`~my_mod.my_class`
"""
expected = """
Summary
:returns: an instance of :py:class:`~my_mod.my_class`
:rtype: :py:class:`~my_mod.my_class`
"""
config = Config()
app = mock.Mock()
actual = str(NumpyDocstring(docstring, config, app, "method"))
self.assertEqual(expected, actual)
def test_underscore_in_attribute(self):
docstring = """
Attributes
----------
arg_ : type
some description
"""
expected = """
:ivar arg_: some description
:vartype arg_: type
"""
config = Config(napoleon_use_ivar=True)
app = mock.Mock()
actual = str(NumpyDocstring(docstring, config, app, "class"))
self.assertEqual(expected, actual)