Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

commit adff7e89d4
Merge branch '1.7' into 4914_dataclasses
CHANGES (6 lines changed)
@@ -33,6 +33,12 @@ Bugs fixed
* #4978: latex: shorthandoff is not set up for Brazil locale
* #4928: i18n: Ignore dot-directories like .git/ in LC_MESSAGES/
* #4946: py domain: type field could not handle "None" as a type
* #4979: latex: Incorrect escaping of curly braces in index entries
* #4956: autodoc: Failed to extract document from a subclass of the class on
  mocked module
* #4973: latex: glossary directive adds whitespace to each item
* #4980: latex: Explicit labels on code blocks are duplicated
* #4919: node.asdom() crashes if toctree has :numbered: option
* #4914: autodoc: Parsing error when using dataclasses without default values

Testing

@@ -168,10 +168,10 @@ class TocTreeCollector(EnvironmentCollector):
                        number = tuple(numstack)
                    else:
                        number = None
                    secnums[subnode[0]['anchorname']] = \
                        subnode[0]['secnumber'] = number
                    secnums[subnode[0]['anchorname']] = number
                    subnode[0]['secnumber'] = list(number)
                    if titlenode:
                        titlenode['secnumber'] = number
                        titlenode['secnumber'] = list(number)
                        titlenode = None
                elif isinstance(subnode, addnodes.toctree):
                    _walk_toctree(subnode, depth)

@@ -23,7 +23,7 @@ from sphinx.util.inspect import isenumclass, safe_getattr

if False:
    # For type annotation
    from typing import Any, Callable, Dict, Generator, List, Optional  # NOQA
    from typing import Any, Callable, Dict, Generator, List, Optional, Tuple  # NOQA

logger = logging.getLogger(__name__)

@@ -31,6 +31,14 @@ logger = logging.getLogger(__name__)
class _MockObject(object):
    """Used by autodoc_mock_imports."""

    def __new__(cls, *args, **kwargs):
        # type: (Any, Any) -> Any
        if len(args) == 3 and isinstance(args[1], tuple) and args[1][-1].__class__ is cls:
            # subclassing MockObject
            return type(args[0], (_MockObject,), args[2], **kwargs)  # type: ignore
        else:
            return super(_MockObject, cls).__new__(cls)

    def __init__(self, *args, **kwargs):
        # type: (Any, Any) -> None
        pass

@@ -47,6 +55,10 @@ class _MockObject(object):
        # type: () -> None
        pass

    def __mro_entries__(self, bases):
        # type: (Tuple) -> Tuple
        return bases

    def __getitem__(self, key):
        # type: (str) -> _MockObject
        return self
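The two methods added above are what let user code subclass an attribute of a mocked module: on Python 3.7+ the PEP 560 __mro_entries__ hook is consulted for non-class bases, and the new branch in __new__ then builds a real subclass of _MockObject when the mock is called with a class-creation signature. A minimal sketch of the behaviour this enables, mirroring the test_MockObject test added at the end of this commit:

    from sphinx.ext.autodoc.importer import _MockObject

    mock = _MockObject()

    # mock.SomeClass is itself a _MockObject instance; subclassing it now
    # produces a genuine class derived from _MockObject instead of failing.
    class SubClass(mock.SomeClass):
        """docstring of SubClass"""
        def method(self):
            return "string"

    obj = SubClass()
    assert SubClass.__doc__ == "docstring of SubClass"
    assert obj.method() == "string"
    assert isinstance(obj.other_method(), SubClass)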
@@ -1608,6 +1608,10 @@
% figure legend comes after caption and may contain arbitrary body elements
\newenvironment{sphinxlegend}{\par\small}{\par}

% For curly braces inside \index macro
\def\sphinxleftcurlybrace{\{}
\def\sphinxrightcurlybrace{\}}

% Declare Unicode characters used by linux tree command to pdflatex utf8/utf8x
\def\spx@bd#1#2{%
  \leavevmode

@@ -49,6 +49,12 @@ BEGIN_DOC = r'''

LATEXSECTIONNAMES = ["part", "chapter", "section", "subsection",
                     "subsubsection", "paragraph", "subparagraph"]
HYPERLINK_SUPPORT_NODES = (
    nodes.figure,
    nodes.literal_block,
    nodes.table,
    nodes.section,
)

DEFAULT_SETTINGS = {
    'latex_engine': 'pdflatex',

@@ -728,6 +734,14 @@ class LaTeXTranslator(nodes.NodeVisitor):
        return (anchor and '\\phantomsection' or '') + \
            '\\label{%s}' % self.idescape(id)

    def hypertarget_to(self, node, anchor=False):
        # type: (nodes.Node, bool) -> unicode
        labels = ''.join(self.hypertarget(node_id, anchor=False) for node_id in node['ids'])
        if anchor:
            return r'\phantomsection' + labels
        else:
            return labels

    def hyperlink(self, id):
        # type: (unicode) -> unicode
        return '{\\hyperref[%s]{' % self.idescape(id)
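hypertarget_to() is the new single helper that turns every id attached to a node into a \label, optionally preceded by one \phantomsection. The figure, table, code-block and glossary-term visitors below switch to it so that all labels land inside the environment they refer to. A rough, non-standalone sketch of the changed lines for the first figure of the new test-latex-labels root (the exact output string is what test_latex_labels asserts further down):

    # inside visit_figure(), per the change below:
    labels = self.hypertarget_to(node)
    # for a figure carrying the ids 'figure2' and 'figure1' in document 'index',
    # labels becomes:
    #   \label{\detokenize{index:figure2}}\label{\detokenize{index:figure1}}
    self.context.append(labels + '\\end{figure}\n')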
@@ -937,8 +951,6 @@ class LaTeXTranslator(nodes.NodeVisitor):
        if not self.this_is_the_title:
            self.sectionlevel += 1
        self.body.append('\n\n')
        if node.get('ids'):
            self.next_section_ids.update(node['ids'])

    def depart_section(self, node):
        # type: (nodes.Node) -> None

@@ -1033,9 +1045,9 @@ class LaTeXTranslator(nodes.NodeVisitor):
            except IndexError:
                # just use "subparagraph", it's not numbered anyway
                self.body.append(r'\%s%s{' % (self.sectionnames[-1], short))
            self.context.append('}\n')
            self.context.append('}\n' + self.hypertarget_to(node.parent))
            self.restrict_footnote(node)

            if self.next_section_ids:
                for id in self.next_section_ids:
                    self.context[-1] += self.hypertarget(id, anchor=False)

@@ -1306,12 +1318,7 @@ class LaTeXTranslator(nodes.NodeVisitor):

    def depart_table(self, node):
        # type: (nodes.Node) -> None
        labels = ''  # type: unicode
        for labelid in self.pop_hyperlink_ids('table'):
            labels += self.hypertarget(labelid, anchor=False)
        if node['ids']:
            labels += self.hypertarget(node['ids'][0], anchor=False)
        labels = self.hypertarget_to(node)
        table_type = self.table.get_table_type()
        table = self.render(table_type + '.tex_t',
                            dict(table=self.table, labels=labels))

@@ -1555,9 +1562,12 @@ class LaTeXTranslator(nodes.NodeVisitor):
    def visit_term(self, node):
        # type: (nodes.Node) -> None
        self.in_term += 1
        ctx = '}] \\leavevmode'  # type: unicode
        ctx = ''  # type: unicode
        if node.get('ids'):
            ctx += self.hypertarget(node['ids'][0])
            ctx = '\\phantomsection'
            for node_id in node['ids']:
                ctx += self.hypertarget(node_id, anchor=False)
        ctx += '}] \\leavevmode'
        self.body.append('\\item[{')
        self.restrict_footnote(node)
        self.context.append(ctx)
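For glossary terms the effect is one \phantomsection followed by a \label per term id, closed once with '}] \leavevmode'. A short, non-standalone sketch of the context string built for a term with a single id (the id value 'index:term-boson' matches the new test-glossary root; the resulting line is asserted in test_latex_glossary below):

    ctx = '\\phantomsection'
    for node_id in node['ids']:
        ctx += self.hypertarget(node_id, anchor=False)  # \label{\detokenize{index:term-boson}}
    ctx += '}] \\leavevmode'
    # combined with the '\\item[{' opener this renders as:
    #   \item[{boson\index{boson|textbf}\phantomsection\label{\detokenize{index:term-boson}}}] \leavevmode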
@@ -1755,16 +1765,8 @@ class LaTeXTranslator(nodes.NodeVisitor):

    def visit_figure(self, node):
        # type: (nodes.Node) -> None
        ids = ''  # type: unicode
        for id in self.pop_hyperlink_ids('figure'):
            ids += self.hypertarget(id, anchor=False)
        if node['ids']:
            ids += self.hypertarget(node['ids'][0], anchor=False)
        labels = self.hypertarget_to(node)
        self.restrict_footnote(node)
        if (len(node.children) and
                isinstance(node.children[0], nodes.image) and
                node.children[0]['ids']):
            ids += self.hypertarget(node.children[0]['ids'][0], anchor=False)
        if self.table:
            # TODO: support align option
            if 'width' in node:

@@ -1776,7 +1778,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
                self.body.append('\\begin{sphinxfigure-in-table}\n\\centering\n')
            if any(isinstance(child, nodes.caption) for child in node):
                self.body.append('\\capstart')
            self.context.append(ids + '\\end{sphinxfigure-in-table}\\relax\n')
            self.context.append(labels + '\\end{sphinxfigure-in-table}\\relax\n')
        elif node.get('align', '') in ('left', 'right'):
            length = None
            if 'width' in node:

@@ -1785,7 +1787,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
                length = self.latex_image_length(node[0]['width'])
            self.body.append('\\begin{wrapfigure}{%s}{%s}\n\\centering' %
                             (node['align'] == 'right' and 'r' or 'l', length or '0pt'))
            self.context.append(ids + '\\end{wrapfigure}\n')
            self.context.append(labels + '\\end{wrapfigure}\n')
        elif self.in_minipage:
            self.body.append('\n\\begin{center}')
            self.context.append('\\end{center}\n')

@@ -1794,7 +1796,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
                             self.elements['figure_align'])
            if any(isinstance(child, nodes.caption) for child in node):
                self.body.append('\\capstart\n')
            self.context.append(ids + '\\end{figure}\n')
            self.context.append(labels + '\\end{figure}\n')

    def depart_figure(self, node):
        # type: (nodes.Node) -> None

@@ -1896,6 +1898,11 @@ class LaTeXTranslator(nodes.NodeVisitor):
            anchor = not self.in_title
            self.body.append(self.hypertarget(id, anchor=anchor))

        # skip if visitor for next node supports hyperlink
        next_node = node.next_node(ascend=True)
        if isinstance(next_node, HYPERLINK_SUPPORT_NODES):
            return

        # postpone the labels until after the sectioning command
        parindex = node.parent.index(node)
        try:

@@ -1909,22 +1916,16 @@ class LaTeXTranslator(nodes.NodeVisitor):
                        node.parent.parent.index(node.parent)]
                else:
                    raise
            if isinstance(next, nodes.section):
            domain = self.builder.env.get_domain('std')
            figtype = domain.get_figtype(next)
            if figtype and domain.get_numfig_title(next):
                ids = set()
                # labels for figures go in the figure body, not before
                if node.get('refid'):
                    self.next_section_ids.add(node['refid'])
                self.next_section_ids.update(node['ids'])
                    ids.add(node['refid'])
                ids.update(node['ids'])
                self.push_hyperlink_ids(figtype, ids)
                return
            else:
                domain = self.builder.env.get_domain('std')
                figtype = domain.get_figtype(next)
                if figtype and domain.get_numfig_title(next):
                    ids = set()
                    # labels for figures go in the figure body, not before
                    if node.get('refid'):
                        ids.add(node['refid'])
                    ids.update(node['ids'])
                    self.push_hyperlink_ids(figtype, ids)
                    return
        except IndexError:
            pass
        if 'refuri' in node:

@@ -1949,6 +1950,12 @@ class LaTeXTranslator(nodes.NodeVisitor):

    def visit_index(self, node, scre=re.compile(r';\s*')):
        # type: (nodes.Node, Pattern) -> None
        def escape(value):
            value = self.encode(value)
            value = value.replace(r'\{', r'\sphinxleftcurlybrace')
            value = value.replace(r'\}', r'\sphinxrightcurlybrace')
            return value

        if not node.get('inline', True):
            self.body.append('\n')
        entries = node['entries']

@@ -1958,24 +1965,23 @@ class LaTeXTranslator(nodes.NodeVisitor):
                m = '|textbf'
            try:
                if type == 'single':
                    p = scre.sub('!', self.encode(string))
                    p = scre.sub('!', escape(string))
                    self.body.append(r'\index{%s%s}' % (p, m))
                elif type == 'pair':
                    p1, p2 = [self.encode(x) for x in split_into(2, 'pair', string)]
                    p1, p2 = [escape(x) for x in split_into(2, 'pair', string)]
                    self.body.append(r'\index{%s!%s%s}\index{%s!%s%s}' %
                                     (p1, p2, m, p2, p1, m))
                elif type == 'triple':
                    p1, p2, p3 = [self.encode(x)
                                  for x in split_into(3, 'triple', string)]
                    p1, p2, p3 = [escape(x) for x in split_into(3, 'triple', string)]
                    self.body.append(
                        r'\index{%s!%s %s%s}\index{%s!%s, %s%s}'
                        r'\index{%s!%s %s%s}' %
                        (p1, p2, p3, m, p2, p3, p1, m, p3, p1, p2, m))
                elif type == 'see':
                    p1, p2 = [self.encode(x) for x in split_into(2, 'see', string)]
                    p1, p2 = [escape(x) for x in split_into(2, 'see', string)]
                    self.body.append(r'\index{%s|see{%s}}' % (p1, p2))
                elif type == 'seealso':
                    p1, p2 = [self.encode(x) for x in split_into(2, 'seealso', string)]
                    p1, p2 = [escape(x) for x in split_into(2, 'seealso', string)]
                    self.body.append(r'\index{%s|see{%s}}' % (p1, p2))
                else:
                    logger.warning('unknown index entry type %s found', type)
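The escape() helper defined above is what makes the curly-brace CHANGES entry concrete: encode() first escapes '{' to '\{', and escape() then rewrites that into the \sphinxleftcurlybrace macro added to sphinx.sty earlier in this commit, so an unpaired brace no longer unbalances the \index argument. A self-contained sketch for the entry added to the test document below:

    # What escape() does to the reST entry added below: .. index:: main {
    value = 'main \\{'  # what self.encode('main {') yields
    value = value.replace(r'\{', r'\sphinxleftcurlybrace')
    value = value.replace(r'\}', r'\sphinxrightcurlybrace')
    assert value == 'main ' + r'\sphinxleftcurlybrace'
    # the writer then emits:  \index{main \sphinxleftcurlybrace}   (asserted in test_latex_index)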
@@ -2224,15 +2230,10 @@ class LaTeXTranslator(nodes.NodeVisitor):
            self.in_parsed_literal += 1
            self.body.append('\\begin{sphinxalltt}\n')
        else:
            ids = ''  # type: unicode
            for id in self.pop_hyperlink_ids('code-block'):
                ids += self.hypertarget(id, anchor=False)
            if node['ids']:
                # suppress with anchor=False \phantomsection insertion
                ids += self.hypertarget(node['ids'][0], anchor=False)
            labels = self.hypertarget_to(node)
            # LaTeX code will insert \phantomsection prior to \label
            if ids and not self.in_footnote:
                self.body.append('\n\\def\\sphinxLiteralBlockLabel{' + ids + '}')
            if labels and not self.in_footnote:
                self.body.append('\n\\def\\sphinxLiteralBlockLabel{' + labels + '}')
            code = node.astext()
            lang = self.hlsettingstack[-1][0]
            linenos = code.count('\n') >= self.hlsettingstack[-1][1] - 1

tests/roots/test-glossary/conf.py (new file, 7 lines)
@@ -0,0 +1,7 @@
# -*- coding: utf-8 -*-

master_doc = 'index'

latex_documents = [
    (master_doc, 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report')
]

tests/roots/test-glossary/index.rst (new file, 22 lines)
@@ -0,0 +1,22 @@
test-glossary
=============

.. glossary::
   :sorted:

   boson
      Particle with integer spin.

   *fermion*
      Particle with half-integer spin.

   tauon
   myon
   electron
      Examples for fermions.

   über
      Gewisse

   änhlich
      Dinge

@@ -10,3 +10,7 @@ A :index:`famous` :index:`equation`:
.. index:: Einstein, relativity

and some text.

.. index:: main {

An index entry containing non paired curly brace

tests/roots/test-latex-labels/conf.py (new file, 7 lines)
@@ -0,0 +1,7 @@
# -*- coding: utf-8 -*-

master_doc = 'index'

latex_documents = [
    (master_doc, 'test.tex', 'The basic Sphinx documentation for testing', 'Sphinx', 'report')
]

tests/roots/test-latex-labels/index.rst (new file, 68 lines)
@@ -0,0 +1,68 @@
latex-labels
============

figures
-------

.. _figure1:
.. _figure2:

.. figure:: logo.jpg

   labeled figure

.. figure:: logo.jpg
   :name: figure3

   labeled figure

code-blocks
-----------

.. _codeblock1:
.. _codeblock2:

.. code-block:: none

   blah blah blah

.. code-block:: none
   :name: codeblock3

   blah blah blah

tables
------

.. _table1:
.. _table2:

.. table:: table caption

   ==== ====
   head head
   cell cell
   ==== ====

.. table:: table caption
   :name: table3

   ==== ====
   head head
   cell cell
   ==== ====

.. _section1:
.. _section2:

subsection
----------

.. _section3:

subsubsection
~~~~~~~~~~~~~

.. toctree::

   otherdoc

tests/roots/test-latex-labels/otherdoc.rst (new file, 2 lines)
@@ -0,0 +1,2 @@
otherdoc
========

@@ -1209,6 +1209,7 @@ def test_latex_index(app, status, warning):
    result = (app.outdir / 'Python.tex').text(encoding='utf8')
    assert 'A \\index{famous}famous \\index{equation}equation:\n' in result
    assert '\n\\index{Einstein}\\index{relativity}\\ignorespaces \nand' in result
    assert '\n\\index{main \\sphinxleftcurlybrace}\\ignorespaces ' in result


@pytest.mark.sphinx('latex', testroot='latex-equations')

@@ -1241,3 +1242,69 @@ def test_latex_nested_enumerated_list(app, status, warning):
    assert r'\setcounter{enumiii}{9}' in result
    assert r'\setcounter{enumiv}{23}' in result
    assert r'\setcounter{enumii}{2}' in result


@pytest.mark.sphinx('latex', testroot='glossary')
def test_latex_glossary(app, status, warning):
    app.builder.build_all()

    result = (app.outdir / 'test.tex').text(encoding='utf8')
    assert (u'\\item[{änhlich\\index{änhlich|textbf}\\phantomsection'
            r'\label{\detokenize{index:term-anhlich}}}] \leavevmode' in result)
    assert (r'\item[{boson\index{boson|textbf}\phantomsection'
            r'\label{\detokenize{index:term-boson}}}] \leavevmode' in result)
    assert (r'\item[{\sphinxstyleemphasis{fermion}\index{fermion|textbf}'
            r'\phantomsection'
            r'\label{\detokenize{index:term-fermion}}}] \leavevmode' in result)
    assert (r'\item[{tauon\index{tauon|textbf}\phantomsection'
            r'\label{\detokenize{index:term-tauon}}}] \leavevmode'
            r'\item[{myon\index{myon|textbf}\phantomsection'
            r'\label{\detokenize{index:term-myon}}}] \leavevmode'
            r'\item[{electron\index{electron|textbf}\phantomsection'
            r'\label{\detokenize{index:term-electron}}}] \leavevmode' in result)
    assert (u'\\item[{über\\index{über|textbf}\\phantomsection'
            r'\label{\detokenize{index:term-uber}}}] \leavevmode' in result)


@pytest.mark.sphinx('latex', testroot='latex-labels')
def test_latex_labels(app, status, warning):
    app.builder.build_all()

    result = (app.outdir / 'test.tex').text(encoding='utf8')

    # figures
    assert (r'\caption{labeled figure}'
            r'\label{\detokenize{index:id1}}'
            r'\label{\detokenize{index:figure2}}'
            r'\label{\detokenize{index:figure1}}'
            r'\end{figure}' in result)
    assert (r'\caption{labeled figure}'
            r'\label{\detokenize{index:figure3}}'
            r'\end{figure}' in result)

    # code-blocks
    assert (r'\def\sphinxLiteralBlockLabel{'
            r'\label{\detokenize{index:codeblock2}}'
            r'\label{\detokenize{index:codeblock1}}}' in result)
    assert (r'\def\sphinxLiteralBlockLabel{'
            r'\label{\detokenize{index:codeblock3}}}' in result)

    # tables
    assert (r'\sphinxcaption{table caption}'
            r'\label{\detokenize{index:id2}}'
            r'\label{\detokenize{index:table2}}'
            r'\label{\detokenize{index:table1}}' in result)
    assert (r'\sphinxcaption{table caption}'
            r'\label{\detokenize{index:table3}}' in result)

    # sections
    assert ('\\chapter{subsection}\n'
            r'\label{\detokenize{index:subsection}}'
            r'\label{\detokenize{index:section2}}'
            r'\label{\detokenize{index:section1}}' in result)
    assert ('\\section{subsubsection}\n'
            r'\label{\detokenize{index:subsubsection}}'
            r'\label{\detokenize{index:section3}}' in result)
    assert ('\\subsection{otherdoc}\n'
            r'\label{\detokenize{otherdoc:otherdoc}}'
            r'\label{\detokenize{otherdoc::doc}}' in result)

@@ -227,11 +227,11 @@ def test_get_toctree_for(app):
                [list_item, compact_paragraph, reference, "foo.1"],
                [list_item, compact_paragraph, reference, "foo.2"]))

    assert_node(toctree[1][0][0][0], reference, refuri="foo", secnumber=(1,))
    assert_node(toctree[1][0][1][0][0][0], reference, refuri="quux", secnumber=(1, 1))
    assert_node(toctree[1][0][1][1][0][0], reference, refuri="foo#foo-1", secnumber=(1, 2))
    assert_node(toctree[1][0][1][2][0][0], reference, refuri="foo#foo-2", secnumber=(1, 3))
    assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=(2,))
    assert_node(toctree[1][0][0][0], reference, refuri="foo", secnumber=[1])
    assert_node(toctree[1][0][1][0][0][0], reference, refuri="quux", secnumber=[1, 1])
    assert_node(toctree[1][0][1][1][0][0], reference, refuri="foo#foo-1", secnumber=[1, 2])
    assert_node(toctree[1][0][1][2][0][0], reference, refuri="foo#foo-2", secnumber=[1, 3])
    assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=[2])
    assert_node(toctree[1][2][0][0], reference, refuri="http://sphinx-doc.org/")

    assert_node(toctree[2],

@@ -258,8 +258,8 @@ def test_get_toctree_for_collapse(app):
                ([list_item, compact_paragraph, reference, "foo"],
                 [list_item, compact_paragraph, reference, "bar"],
                 [list_item, compact_paragraph, reference, "http://sphinx-doc.org/"]))
    assert_node(toctree[1][0][0][0], reference, refuri="foo", secnumber=(1,))
    assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=(2,))
    assert_node(toctree[1][0][0][0], reference, refuri="foo", secnumber=[1])
    assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=[2])
    assert_node(toctree[1][2][0][0], reference, refuri="http://sphinx-doc.org/")

    assert_node(toctree[2],

@@ -296,13 +296,13 @@ def test_get_toctree_for_maxdepth(app):
    assert_node(toctree[1][0][1][1][1],
                [bullet_list, list_item, compact_paragraph, reference, "foo.1-1"])

    assert_node(toctree[1][0][0][0], reference, refuri="foo", secnumber=(1,))
    assert_node(toctree[1][0][1][0][0][0], reference, refuri="quux", secnumber=(1, 1))
    assert_node(toctree[1][0][1][1][0][0], reference, refuri="foo#foo-1", secnumber=(1, 2))
    assert_node(toctree[1][0][0][0], reference, refuri="foo", secnumber=[1])
    assert_node(toctree[1][0][1][0][0][0], reference, refuri="quux", secnumber=[1, 1])
    assert_node(toctree[1][0][1][1][0][0], reference, refuri="foo#foo-1", secnumber=[1, 2])
    assert_node(toctree[1][0][1][1][1][0][0][0],
                reference, refuri="foo#foo-1-1", secnumber=(1, 2, 1))
    assert_node(toctree[1][0][1][2][0][0], reference, refuri="foo#foo-2", secnumber=(1, 3))
    assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=(2,))
                reference, refuri="foo#foo-1-1", secnumber=[1, 2, 1])
    assert_node(toctree[1][0][1][2][0][0], reference, refuri="foo#foo-2", secnumber=[1, 3])
    assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=[2])
    assert_node(toctree[1][2][0][0], reference, refuri="http://sphinx-doc.org/")

    assert_node(toctree[2],

@@ -335,11 +335,11 @@ def test_get_toctree_for_includehidden(app):
                [list_item, compact_paragraph, reference, "foo.1"],
                [list_item, compact_paragraph, reference, "foo.2"]))

    assert_node(toctree[1][0][0][0], reference, refuri="foo", secnumber=(1,))
    assert_node(toctree[1][0][1][0][0][0], reference, refuri="quux", secnumber=(1, 1))
    assert_node(toctree[1][0][1][1][0][0], reference, refuri="foo#foo-1", secnumber=(1, 2))
    assert_node(toctree[1][0][1][2][0][0], reference, refuri="foo#foo-2", secnumber=(1, 3))
    assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=(2,))
    assert_node(toctree[1][0][0][0], reference, refuri="foo", secnumber=[1])
    assert_node(toctree[1][0][1][0][0][0], reference, refuri="quux", secnumber=[1, 1])
    assert_node(toctree[1][0][1][1][0][0], reference, refuri="foo#foo-1", secnumber=[1, 2])
    assert_node(toctree[1][0][1][2][0][0], reference, refuri="foo#foo-2", secnumber=[1, 3])
    assert_node(toctree[1][1][0][0], reference, refuri="bar", secnumber=[2])
    assert_node(toctree[1][2][0][0], reference, refuri="http://sphinx-doc.org/")

    assert_node(toctree[2],

tests/test_ext_autodoc_importer.py (new file, 31 lines)
@@ -0,0 +1,31 @@
# -*- coding: utf-8 -*-
"""
    test_ext_autodoc_importer
    ~~~~~~~~~~~~~~~~~~~~~~~~~

    Test the autodoc extension.

    :copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

from sphinx.ext.autodoc.importer import _MockObject


def test_MockObject():
    mock = _MockObject()
    assert isinstance(mock.some_attr, _MockObject)
    assert isinstance(mock.some_method, _MockObject)
    assert isinstance(mock.attr1.attr2, _MockObject)
    assert isinstance(mock.attr1.attr2.meth(), _MockObject)

    class SubClass(mock.SomeClass):
        """docstring of SubClass"""
        def method(self):
            return "string"

    obj = SubClass()
    assert SubClass.__doc__ == "docstring of SubClass"
    assert isinstance(obj, SubClass)
    assert obj.method() == "string"
    assert isinstance(obj.other_method(), SubClass)