Merge branch '2.0' into refactor_py_domain3
commit 19a006f620

CHANGES
@@ -90,6 +90,7 @@ Features added
   - ``:classmethod:``
   - ``:property:``
   - ``:staticmethod:``
+* #6306: html: Add a label to search form for accessibility purposes

 Bugs fixed
 ----------
@@ -100,6 +101,9 @@ Bugs fixed
 * commented term in glossary directive is wrongly recognized
 * #6299: rst domain: rst:directive directive generates waste space
 * #6331: man: invalid output when doctest follows rubric
+* #6351: "Hyperlink target is not referenced" message is shown even if
+  referenced
+* #6165: autodoc: ``tab_width`` setting of docutils has been ignored

 Testing
 --------
@@ -442,7 +442,8 @@ class Documenter:
         docstring = getdoc(self.object, self.get_attr,
                            self.env.config.autodoc_inherit_docstrings)
         if docstring:
-            return [prepare_docstring(docstring, ignore)]
+            tab_width = self.directive.state.document.settings.tab_width
+            return [prepare_docstring(docstring, ignore, tab_width)]
         return []

     def process_doc(self, docstrings):
@@ -936,7 +937,9 @@ class DocstringSignatureMixin:
             if base not in valid_names:
                 continue
             # re-prepare docstring to ignore more leading indentation
-            self._new_docstrings[i] = prepare_docstring('\n'.join(doclines[1:]))
+            tab_width = self.directive.state.document.settings.tab_width  # type: ignore
+            self._new_docstrings[i] = prepare_docstring('\n'.join(doclines[1:]),
+                                                        tabsize=tab_width)
             result = args, retann
             # don't look any further
             break
@@ -1179,7 +1182,9 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # type:
                 docstrings = [initdocstring]
             else:
                 docstrings.append(initdocstring)
-        return [prepare_docstring(docstring, ignore) for docstring in docstrings]
+
+        tab_width = self.directive.state.document.settings.tab_width
+        return [prepare_docstring(docstring, ignore, tab_width) for docstring in docstrings]

     def add_content(self, more_content, no_docstring=False):
         # type: (Any, bool) -> None
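Background for the three autodoc hunks above: ``tab_width`` is a standard docutils setting (default 8), and the parser attaches it to every document as ``document.settings.tab_width``; the directive state the Documenter can now reach through its bridge is simply the shortest path to that value. A minimal sketch of the lookup, assuming ``self`` is a Documenter whose ``directive`` bridge carries a real RST state (names beyond those in the hunks are illustrative):

    settings = self.directive.state.document.settings  # docutils settings of the current document
    lines = prepare_docstring(docstring, tabsize=settings.tab_width)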
@@ -6,10 +6,14 @@
     :license: BSD, see LICENSE for details.
 """

+import warnings
+
 from docutils import nodes
+from docutils.parsers.rst.states import Struct
 from docutils.statemachine import StringList
 from docutils.utils import assemble_option_dict

+from sphinx.deprecation import RemovedInSphinx40Warning
 from sphinx.ext.autodoc import Options, get_documenters
 from sphinx.util import logging
 from sphinx.util.docutils import SphinxDirective, switch_source_input
@@ -17,7 +21,7 @@ from sphinx.util.nodes import nested_parse_with_titles

 if False:
     # For type annotation
-    from typing import Callable, Dict, List, Set, Type  # NOQA
+    from typing import Any, Callable, Dict, List, Set, Type  # NOQA
     from docutils.parsers.rst.state import RSTState  # NOQA
     from docutils.utils import Reporter  # NOQA
     from sphinx.config import Config  # NOQA
@@ -50,8 +54,8 @@ class DummyOptionSpec(dict):
 class DocumenterBridge:
     """A parameters container for Documenters."""

-    def __init__(self, env, reporter, options, lineno):
-        # type: (BuildEnvironment, Reporter, Options, int) -> None
+    def __init__(self, env, reporter, options, lineno, state=None):
+        # type: (BuildEnvironment, Reporter, Options, int, Any) -> None
         self.env = env
         self.reporter = reporter
         self.genopt = options
@@ -59,6 +63,16 @@ class DocumenterBridge:
         self.filename_set = set()  # type: Set[str]
         self.result = StringList()

+        if state:
+            self.state = state
+        else:
+            # create fake object for self.state.document.settings.tab_width
+            warnings.warn('DocumenterBridge requires a state object on instantiation.',
+                          RemovedInSphinx40Warning)
+            settings = Struct(tab_width=8)
+            document = Struct(settings=settings)
+            self.state = Struct(document=document)
+
     def warn(self, msg):
         # type: (str) -> None
         logger.warning(msg, location=(self.env.docname, self.lineno))
@@ -131,7 +145,7 @@ class AutodocDirective(SphinxDirective):
             return []

         # generate the output
-        params = DocumenterBridge(self.env, reporter, documenter_options, lineno)
+        params = DocumenterBridge(self.env, reporter, documenter_options, lineno, self.state)
         documenter = doccls(params, self.arguments[0])
         documenter.generate(more_content=self.content)
         if not params.result:
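When no ``state`` is passed, ``DocumenterBridge`` now builds a stand-in out of docutils' ``Struct`` helper so that callers reading ``state.document.settings.tab_width`` keep working during the deprecation period. A small sketch of what that fallback amounts to (illustrative, not code from the commit):

    from docutils.parsers.rst.states import Struct

    settings = Struct(tab_width=8)
    document = Struct(settings=settings)
    state = Struct(document=document)
    assert state.document.settings.tab_width == 8  # Struct gives plain dotted-attribute access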
@@ -175,7 +175,7 @@ _app = None  # type: Sphinx
 class FakeDirective(DocumenterBridge):
     def __init__(self):
         # type: () -> None
-        super().__init__({}, None, Options(), 0)  # type: ignore
+        super().__init__({}, None, Options(), 0, None)  # type: ignore


 def get_documenter(app, obj, parent):
@@ -236,7 +236,7 @@ class Autosummary(SphinxDirective):
     def run(self):
         # type: () -> List[nodes.Node]
         self.bridge = DocumenterBridge(self.env, self.state.document.reporter,
-                                       Options(), self.lineno)
+                                       Options(), self.lineno, self.state)

         names = [x.strip().split()[0] for x in self.content
                  if x.strip() and re.search(r'^[~a-zA-Z_]', x.strip()[0])]
@@ -32,6 +32,7 @@ if False:
     # For type annotation
     from typing import Any, Dict  # NOQA
     from pygments.formatter import Formatter  # NOQA
+    from pygments.style import Style  # NOQA


 logger = logging.getLogger(__name__)
@@ -69,16 +70,8 @@ class PygmentsBridge:
     def __init__(self, dest='html', stylename='sphinx', trim_doctest_flags=None):
         # type: (str, str, bool) -> None
         self.dest = dest
-        if stylename is None or stylename == 'sphinx':
-            style = SphinxStyle
-        elif stylename == 'none':
-            style = NoneStyle
-        elif '.' in stylename:
-            module, stylename = stylename.rsplit('.', 1)
-            style = getattr(__import__(module, None, None, ['__name__']),
-                            stylename)
-        else:
-            style = get_style_by_name(stylename)
+
+        style = self.get_style(stylename)
         self.formatter_args = {'style': style}  # type: Dict[str, Any]
         if dest == 'html':
             self.formatter = self.html_formatter
@@ -91,6 +84,18 @@ class PygmentsBridge:
             warnings.warn('trim_doctest_flags option for PygmentsBridge is now deprecated.',
                           RemovedInSphinx30Warning, stacklevel=2)

+    def get_style(self, stylename):
+        # type: (str) -> Style
+        if stylename is None or stylename == 'sphinx':
+            return SphinxStyle
+        elif stylename == 'none':
+            return NoneStyle
+        elif '.' in stylename:
+            module, stylename = stylename.rsplit('.', 1)
+            return getattr(__import__(module, None, None, ['__name__']), stylename)
+        else:
+            return get_style_by_name(stylename)
+
     def get_formatter(self, **kwargs):
         # type: (Any) -> Formatter
         kwargs.update(self.formatter_args)
@@ -110,11 +115,8 @@ class PygmentsBridge:
         return '\\begin{Verbatim}[commandchars=\\\\\\{\\}]\n' + \
             source + '\\end{Verbatim}\n'

-    def highlight_block(self, source, lang, opts=None, location=None, force=False, **kwargs):
-        # type: (str, str, Any, Any, bool, Any) -> str
-        if not isinstance(source, str):
-            source = source.decode()
-
+    def get_lexer(self, source, lang, opts=None, location=None):
+        # type: (str, str, Any, Any) -> Lexer
         # find out which lexer to use
         if lang in ('py', 'python'):
             if source.startswith('>>>'):
@@ -145,6 +147,15 @@ class PygmentsBridge:
         else:
             lexer.add_filter('raiseonerror')

+        return lexer
+
+    def highlight_block(self, source, lang, opts=None, location=None, force=False, **kwargs):
+        # type: (str, str, Any, Any, bool, Any) -> str
+        if not isinstance(source, str):
+            source = source.decode()
+
+        lexer = self.get_lexer(source, lang, opts, location)
+
         # trim doctest options if wanted
         if isinstance(lexer, PythonConsoleLexer) and self.trim_doctest_flags:
             source = doctest.blankline_re.sub('', source)
@@ -165,6 +176,7 @@ class PygmentsBridge:
                                type='misc', subtype='highlighting_failure',
                                location=location)
             hlsource = highlight(source, lexers['none'], formatter)
+
         if self.dest == 'html':
             return hlsource
         else:
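Net effect of the highlighting hunks: style resolution and lexer selection each get their own method, so ``highlight_block()`` shrinks to "pick a lexer, then highlight". A rough usage sketch against the merged code (the ``'friendly'`` style name and the sample source are just examples):

    from sphinx.highlighting import PygmentsBridge

    bridge = PygmentsBridge(dest='html')
    # get_style() resolves, in order: None/'sphinx' -> SphinxStyle, 'none' -> NoneStyle,
    # a dotted path -> an imported Style attribute, anything else -> a named Pygments style.
    style = bridge.get_style('friendly')
    # get_lexer() now owns the language sniffing that used to live inside highlight_block().
    lexer = bridge.get_lexer('print("hi")', 'python')
    html = bridge.highlight_block('print("hi")', 'python')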
sphinx/io.py

@@ -16,6 +16,7 @@ from docutils.io import FileInput, NullOutput
 from docutils.parsers.rst import Parser as RSTParser
 from docutils.readers import standalone
 from docutils.statemachine import StringList, string2lines
+from docutils.transforms.references import DanglingReferences
 from docutils.writers import UnfilteredWriter

 from sphinx.deprecation import RemovedInSphinx30Warning
@@ -64,7 +65,15 @@ class SphinxBaseReader(standalone.Reader):

     def get_transforms(self):
         # type: () -> List[Type[Transform]]
-        return super().get_transforms() + self.transforms
+        transforms = super().get_transforms() + self.transforms
+
+        # remove transforms which is not needed for Sphinx
+        unused = [DanglingReferences]
+        for transform in unused:
+            if transform in transforms:
+                transforms.remove(transform)
+
+        return transforms

     def new_document(self):
         # type: () -> nodes.document
@@ -9,10 +9,10 @@
 #}
 {%- if pagename != "search" and builder != "singlehtml" %}
 <div id="searchbox" style="display: none" role="search">
-  <h3>{{ _('Quick search') }}</h3>
+  <h3 id="searchlabel">{{ _('Quick search') }}</h3>
   <div class="searchformwrapper">
     <form class="search" action="{{ pathto('search') }}" method="get">
-      <input type="text" name="q" />
+      <input type="text" name="q" aria-labelledby="searchlabel" />
       <input type="submit" value="{{ _('Go') }}" />
     </form>
   </div>
@@ -9,7 +9,7 @@
 """

 from docutils import nodes
-from docutils.transforms.references import Substitutions
+from docutils.transforms.references import DanglingReferences, Substitutions

 from sphinx.transforms import SphinxTransform

@@ -31,6 +31,22 @@ class SubstitutionDefinitionsRemover(SphinxTransform):
             node.parent.remove(node)


+class SphinxDanglingReferences(DanglingReferences):
+    """DanglingReferences transform which does not output info messages."""
+
+    def apply(self, **kwargs):
+        # type: (Any) -> None
+        try:
+            reporter = self.document.reporter
+            report_level = reporter.report_level
+
+            # suppress INFO level messages for a while
+            reporter.report_level = max(reporter.WARNING_LEVEL, reporter.report_level)
+            super().apply()
+        finally:
+            reporter.report_level = report_level
+
+
 class SphinxDomains(SphinxTransform):
     """Collect objects to Sphinx domains for cross references."""
     default_priority = 850
@@ -44,6 +60,7 @@ class SphinxDomains(SphinxTransform):
 def setup(app):
     # type: (Sphinx) -> Dict[str, Any]
     app.add_transform(SubstitutionDefinitionsRemover)
+    app.add_transform(SphinxDanglingReferences)
     app.add_transform(SphinxDomains)

     return {
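This pairs with the sphinx/io.py hunk above: the stock ``DanglingReferences`` is dropped from the reader's transform list and re-registered here as the quieter subclass. Why ``max(reporter.WARNING_LEVEL, reporter.report_level)`` silences only the chatter: a docutils reporter emits every message whose level is at or above ``report_level``, and the level constants are ordered DEBUG < INFO < WARNING < ERROR < SEVERE, so raising the threshold to at least WARNING drops the per-target INFO notices while warnings and errors still get through. A tiny check (illustrative, not part of the commit):

    from docutils.utils import Reporter

    assert Reporter.INFO_LEVEL < Reporter.WARNING_LEVEL < Reporter.ERROR_LEVEL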
@@ -15,8 +15,8 @@ if False:
     from typing import List  # NOQA


-def prepare_docstring(s, ignore=1):
-    # type: (str, int) -> List[str]
+def prepare_docstring(s, ignore=1, tabsize=8):
+    # type: (str, int, int) -> List[str]
     """Convert a docstring into lines of parseable reST. Remove common leading
     indentation, where the indentation of a given number of lines (usually just
     one) is ignored.
@@ -25,7 +25,7 @@ def prepare_docstring(s, ignore=1):
     ViewList (used as argument of nested_parse().) An empty line is added to
     act as a separator between this docstring and following content.
     """
-    lines = s.expandtabs().splitlines()
+    lines = s.expandtabs(tabsize).splitlines()
     # Find minimum indentation of any non-blank lines after ignored lines.
     margin = sys.maxsize
     for line in lines[ignore:]:
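The new ``tabsize`` argument matters when a docstring mixes tabs and spaces: expanding tabs at the width the source file actually uses keeps the relative indentation, and therefore the reST structure, intact. A short illustration with an invented docstring (not from the commit):

    from sphinx.util.docstrings import prepare_docstring

    doc = "Summary.\n\n\tfirst detail\n        second detail\n"  # hypothetical input

    prepare_docstring(doc)             # tabs expand to 8 columns: both detail lines end up flush
    prepare_docstring(doc, tabsize=4)  # tabs expand to 4 columns: the space-indented line keeps
                                       # 4 extra columns, which reST treats as a block quote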
@@ -11,6 +11,7 @@

 import platform
 import sys
+from unittest.mock import Mock
 from warnings import catch_warnings

 import pytest
@@ -36,7 +37,9 @@ def do_autodoc(app, objtype, name, options=None):
     app.env.temp_data.setdefault('docname', 'index')  # set dummy docname
     doccls = app.registry.documenters[objtype]
     docoptions = process_documenter_options(doccls, app.config, options)
-    bridge = DocumenterBridge(app.env, LoggingReporter(''), docoptions, 1)
+    state = Mock()
+    state.document.settings.tab_width = 8
+    bridge = DocumenterBridge(app.env, LoggingReporter(''), docoptions, 1, state)
     documenter = doccls(bridge, name)
     documenter.generate()

@@ -95,7 +98,9 @@ def setup_test():
         genopt = options,
         result = ViewList(),
         filename_set = set(),
+        state = Mock(),
     )
+    directive.state.document.settings.tab_width = 8

     processed_docstrings = []
     processed_signatures = []
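The test changes lean on the fact that ``unittest.mock.Mock`` auto-creates attribute chains, so a bare ``Mock()`` can stand in for the whole RST state object and only the one attribute the code actually reads has to be pinned (sketch, not from the commit):

    from unittest.mock import Mock

    state = Mock()
    state.document.settings.tab_width = 8  # the only value DocumenterBridge needs from the state
    assert state.document.settings.tab_width == 8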
@@ -1338,7 +1338,7 @@ def test_html_sidebar(app, status, warning):
     assert '<h1 class="logo"><a href="#">Python</a></h1>' in result
     assert '<h3>Navigation</h3>' in result
     assert '<h3>Related Topics</h3>' in result
-    assert '<h3>Quick search</h3>' in result
+    assert '<h3 id="searchlabel">Quick search</h3>' in result

     app.builder.add_sidebars('index', ctx)
     assert ctx['sidebars'] == ['about.html', 'navigation.html', 'relations.html',
@@ -1353,7 +1353,7 @@ def test_html_sidebar(app, status, warning):
     assert '<h1 class="logo"><a href="#">Python</a></h1>' not in result
     assert '<h3>Navigation</h3>' not in result
     assert '<h3>Related Topics</h3>' in result
-    assert '<h3>Quick search</h3>' not in result
+    assert '<h3 id="searchlabel">Quick search</h3>' not in result

     app.builder.add_sidebars('index', ctx)
     assert ctx['sidebars'] == ['relations.html']
@@ -1367,7 +1367,7 @@ def test_html_sidebar(app, status, warning):
     assert '<h1 class="logo"><a href="#">Python</a></h1>' not in result
     assert '<h3>Navigation</h3>' not in result
     assert '<h3>Related Topics</h3>' not in result
-    assert '<h3>Quick search</h3>' not in result
+    assert '<h3 id="searchlabel">Quick search</h3>' not in result

     app.builder.add_sidebars('index', ctx)
     assert ctx['sidebars'] == []
@@ -126,4 +126,4 @@ def test_theme_sidebars(app, status, warning):
     assert '<h3><a href="#">Table of Contents</a></h3>' in result
     assert '<h3>Related Topics</h3>' not in result
     assert '<h3>This Page</h3>' not in result
-    assert '<h3>Quick search</h3>' in result
+    assert '<h3 id="searchlabel">Quick search</h3>' in result