Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)

Merge tag 'v3.1.0'
Commit 44f4b2ad97
CHANGES (45 lines changed)
@@ -36,8 +36,8 @@ Bugs fixed
 Testing
 --------

-Release 3.1.0 (in development)
-==============================
+Release 3.1.0 (released Jun 08, 2020)
+=====================================

 Dependencies
 ------------
@@ -89,6 +89,8 @@ Features added
   builtin base classes
 * #2106: autodoc: Support multiple signatures on docstring
 * #4422: autodoc: Support GenericAlias in Python 3.7 or above
+* #3610: autodoc: Support overloaded functions
+* #7722: autodoc: Support TypeVar
 * #7466: autosummary: headings in generated documents are not translated
 * #7490: autosummary: Add ``:caption:`` option to autosummary directive to set a
   caption to the toctree
@@ -99,7 +101,8 @@ Features added
   variables for custom templates
 * #7530: html: Support nested <kbd> elements
 * #7481: html theme: Add right margin to footnote/citation labels
-* #7482: html theme: CSS spacing for code blocks with captions and line numbers
+* #7482, #7717: html theme: CSS spacing for code blocks with captions and line
+  numbers
 * #7443: html theme: Add new options :confval:`globaltoc_collapse` and
   :confval:`globaltoc_includehidden` to control the behavior of globaltoc in
   sidebar
@@ -111,6 +114,8 @@ Features added
 * #7542: html theme: Make admonition/topic/sidebar scrollable
 * #7543: html theme: Add top and bottom margins to tables
 * #7695: html theme: Add viewport meta tag for basic theme
+* #7721: html theme: classic: default codetextcolor/codebgcolor doesn't override
+  Pygments
 * C and C++: allow semicolon in the end of declarations.
 * C++, parse parameterized noexcept specifiers.
 * #7294: C++, parse expressions with user-defined literals.
@@ -118,8 +123,13 @@ Features added
 * #7143: py domain: Add ``:final:`` option to :rst:dir:`py:class:`,
   :rst:dir:`py:exception:` and :rst:dir:`py:method:` directives
 * #7596: py domain: Change a type annotation for variables to a hyperlink
+* #7770: std domain: :rst:dir:`option` directive support arguments in the form
+  of ``foo[=bar]``
 * #7582: napoleon: a type for attribute are represented like type annotation
 * #7734: napoleon: overescaped trailing underscore on attribute
+* #7247: linkcheck: Add :confval:`linkcheck_request_headers` to send custom HTTP
+  headers for specific host
+* #7792: setuptools: Support ``--verbosity`` option
 * #7683: Add ``allowed_exceptions`` parameter to ``Sphinx.emit()`` to allow
   handlers to raise specified exceptions
 * #7295: C++, parse (trailing) requires clauses.
@@ -151,6 +161,7 @@ Bugs fixed
 * #7668: autodoc: wrong retann value is passed to a handler of
   autodoc-proccess-signature
 * #7711: autodoc: fails with ValueError when processing numpy objects
+* #7791: autodoc: TypeError is raised on documenting singledispatch function
 * #7551: autosummary: a nested class is indexed as non-nested class
 * #7661: autosummary: autosummary directive emits warnings twices if failed to
   import the target module
@@ -159,8 +170,12 @@ Bugs fixed
 * #7671: autosummary: The location of import failure warning is missing
 * #7535: sphinx-autogen: crashes when custom template uses inheritance
 * #7536: sphinx-autogen: crashes when template uses i18n feature
+* #7781: sphinx-build: Wrong error message when outdir is not directory
 * #7653: sphinx-quickstart: Fix multiple directory creation for nested relpath
 * #2785: html: Bad alignment of equation links
+* #7718: html theme: some themes does not respect background color of Pygments
+  style (agogo, haiku, nature, pyramid, scrolls, sphinxdoc and traditional)
+* #7544: html theme: inconsistent padding in admonitions
 * #7581: napoleon: bad parsing of inline code in attribute docstrings
 * #7628: imgconverter: runs imagemagick once unnecessary for builders not
   supporting images
@@ -168,7 +183,10 @@ Bugs fixed
 * #7646: handle errors on event handlers
 * #4187: LaTeX: EN DASH disappears from PDF bookmarks in Japanese documents
 * #7701: LaTeX: Anonymous indirect hyperlink target causes duplicated labels
+* #7723: LaTeX: pdflatex crashed when URL contains a single quote
 * #7756: py domain: The default value for positional only argument is not shown
+* #7760: coverage: Add :confval:`coverage_show_missing_items` to show coverage
+  result to console
 * C++, fix rendering and xrefs in nested names explicitly starting
   in global scope, e.g., ``::A::B``.
 * C, fix rendering and xrefs in nested names explicitly starting
@@ -176,30 +194,9 @@ Bugs fixed
 * #7763: C and C++, don't crash during display stringification of unary
   expressions and fold expressions.

-Testing
---------
-
 Release 3.0.5 (in development)
 ==============================

-Dependencies
-------------
-
-Incompatible changes
---------------------
-
-Deprecated
-----------
-
-Features added
---------------
-
-Bugs fixed
-----------
-
-Testing
---------
-
 Release 3.0.4 (released May 27, 2020)
 =====================================

@@ -2388,6 +2388,32 @@ Options for the linkcheck builder

    .. versionadded:: 1.1

+.. confval:: linkcheck_request_headers
+
+   A dictionary that maps baseurls to HTTP request headers.
+
+   The key is a URL base string like ``"https://sphinx-doc.org/"``. To specify
+   headers for other hosts, ``"*"`` can be used. It matches all hosts only when
+   the URL does not match other settings.
+
+   The value is a dictionary that maps header name to its value.
+
+   Example:
+
+   .. code-block:: python
+
+      linkcheck_request_headers = {
+          "https://sphinx-doc.org/": {
+              "Accept": "text/html",
+              "Accept-Encoding": "utf-8",
+          },
+          "*": {
+              "Accept": "text/html,application/xhtml+xml",
+          }
+      }
+
+   .. versionadded:: 3.1
+
 .. confval:: linkcheck_retries

    The number of times the linkcheck builder will attempt to check a URL before
@@ -51,4 +51,11 @@ should check:

   .. versionadded:: 1.1

-.. _Python regular expressions: https://docs.python.org/library/re
+.. confval:: coverage_show_missing_items
+
+   Print objects that are missing to standard output also.
+   ``False`` by default.
+
+   .. versionadded:: 3.1
+
+.. _Python regular expressions: https://docs.python.org/library/re
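As a rough illustration of the new option (not part of this commit; the extension list below is an assumption), a project's conf.py could enable the console report like this:

    # conf.py -- minimal sketch for coverage_show_missing_items
    extensions = ['sphinx.ext.coverage']

    # New in 3.1: also echo undocumented objects to the console while the
    # coverage builder runs; the default remains False.
    coverage_show_missing_items = True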
@@ -161,7 +161,7 @@ class Sphinx:

         if path.exists(self.outdir) and not path.isdir(self.outdir):
             raise ApplicationError(__('Output directory (%s) is not a directory') %
-                                   self.srcdir)
+                                   self.outdir)

         if self.srcdir == self.outdir:
             raise ApplicationError(__('Source directory and destination '
@@ -16,7 +16,7 @@ import threading
 from html.parser import HTMLParser
 from os import path
 from typing import Any, Dict, List, Set, Tuple
-from urllib.parse import unquote
+from urllib.parse import unquote, urlparse

 from docutils import nodes
 from docutils.nodes import Node
@@ -36,6 +36,11 @@ from sphinx.util.requests import is_ssl_error
 logger = logging.getLogger(__name__)


+DEFAULT_REQUEST_HEADERS = {
+    'Accept': 'text/html,application/xhtml+xml;q=0.9,*/*;q=0.8',
+}
+
+
 class AnchorCheckParser(HTMLParser):
     """Specialized HTML parser that looks for a specific anchor."""

@@ -107,13 +112,25 @@ class CheckExternalLinksBuilder(Builder):
     def check_thread(self) -> None:
         kwargs = {
             'allow_redirects': True,
-            'headers': {
-                'Accept': 'text/html,application/xhtml+xml;q=0.9,*/*;q=0.8',
-            },
-        }
+        }  # type: Dict
         if self.app.config.linkcheck_timeout:
             kwargs['timeout'] = self.app.config.linkcheck_timeout

+        def get_request_headers() -> Dict:
+            url = urlparse(uri)
+            candidates = ["%s://%s" % (url.scheme, url.netloc),
+                          "%s://%s/" % (url.scheme, url.netloc),
+                          uri,
+                          "*"]
+
+            for u in candidates:
+                if u in self.config.linkcheck_request_headers:
+                    headers = dict(DEFAULT_REQUEST_HEADERS)
+                    headers.update(self.config.linkcheck_request_headers[u])
+                    return headers
+
+            return {}
+
         def check_uri() -> Tuple[str, str, int]:
             # split off anchor
             if '#' in uri:
@@ -139,6 +156,9 @@ class CheckExternalLinksBuilder(Builder):
             else:
                 auth_info = None

+            # update request headers for the URL
+            kwargs['headers'] = get_request_headers()
+
             try:
                 if anchor and self.app.config.linkcheck_anchors:
                     # Read the whole document and see if #anchor exists
@@ -337,6 +357,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:

     app.add_config_value('linkcheck_ignore', [], None)
     app.add_config_value('linkcheck_auth', [], None)
+    app.add_config_value('linkcheck_request_headers', {}, None)
     app.add_config_value('linkcheck_retries', 1, None)
     app.add_config_value('linkcheck_timeout', None, None, [int])
     app.add_config_value('linkcheck_workers', 5, None)
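For illustration only (not part of the commit), the header lookup added above can be sketched standalone; the config dict mirrors the documentation example, and DEFAULT_REQUEST_HEADERS matches the constant introduced in this file:

    from urllib.parse import urlparse

    # Hypothetical user configuration, as in the linkcheck_request_headers docs.
    linkcheck_request_headers = {
        "https://sphinx-doc.org/": {"Accept": "text/html"},
        "*": {"Accept": "text/html,application/xhtml+xml"},
    }

    DEFAULT_REQUEST_HEADERS = {
        'Accept': 'text/html,application/xhtml+xml;q=0.9,*/*;q=0.8',
    }

    def resolve_headers(uri: str) -> dict:
        # Same candidate order as get_request_headers() above:
        # scheme://netloc, scheme://netloc/, the full URI, then the "*" fallback.
        url = urlparse(uri)
        candidates = ["%s://%s" % (url.scheme, url.netloc),
                      "%s://%s/" % (url.scheme, url.netloc),
                      uri,
                      "*"]
        for candidate in candidates:
            if candidate in linkcheck_request_headers:
                headers = dict(DEFAULT_REQUEST_HEADERS)
                headers.update(linkcheck_request_headers[candidate])
                return headers
        return {}

    print(resolve_headers("https://sphinx-doc.org/en/master/")["Accept"])  # "text/html"
    print(resolve_headers("https://example.org/")["Accept"])               # "*" entry applies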
@@ -41,7 +41,7 @@ logger = logging.getLogger(__name__)


 # RE for option descriptions
-option_desc_re = re.compile(r'((?:/|--|-|\+)?[^\s=]+)(=?\s*.*)')
+option_desc_re = re.compile(r'((?:/|--|-|\+)?[^\s=[]+)(=?\s*.*)')
 # RE for grammar tokens
 token_re = re.compile(r'`(\w+)`', re.U)

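A quick standalone check of the regex change (illustrative, not part of the commit): the old pattern let ``[`` leak into the option name, which broke the new ``foo[=bar]`` syntax, while the new pattern stops the name at ``[``:

    import re

    old_re = re.compile(r'((?:/|--|-|\+)?[^\s=]+)(=?\s*.*)')
    new_re = re.compile(r'((?:/|--|-|\+)?[^\s=[]+)(=?\s*.*)')

    print(old_re.match('-j[=N]').groups())  # ('-j[', '=N]')  -- '[' swallowed into the name
    print(new_re.match('-j[=N]').groups())  # ('-j', '[=N]')  -- name stops at '[', value kept intact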
@@ -16,7 +16,7 @@ import warnings
 from inspect import Parameter, Signature
 from types import ModuleType
 from typing import (
-    Any, Callable, Dict, Iterator, List, Optional, Sequence, Set, Tuple, Type, Union
+    Any, Callable, Dict, Iterator, List, Optional, Sequence, Set, Tuple, Type, TypeVar, Union
 )
 from typing import TYPE_CHECKING

@@ -1178,8 +1178,14 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # typ
             self.add_line('   :async:', sourcename)

     def format_signature(self, **kwargs: Any) -> str:
-        sig = super().format_signature(**kwargs)
-        sigs = [sig]
+        sigs = []
+        if self.analyzer and '.'.join(self.objpath) in self.analyzer.overloads:
+            # Use signatures for overloaded functions instead of the implementation function.
+            overloaded = True
+        else:
+            overloaded = False
+            sig = super().format_signature(**kwargs)
+            sigs.append(sig)

         if inspect.is_singledispatch_function(self.object):
             # append signature of singledispatch'ed functions
@@ -1193,12 +1199,24 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # typ
                     documenter.object = func
                     documenter.objpath = [None]
                     sigs.append(documenter.format_signature())
+        if overloaded:
+            for overload in self.analyzer.overloads.get('.'.join(self.objpath)):
+                sig = stringify_signature(overload, **kwargs)
+                sigs.append(sig)

         return "\n".join(sigs)

     def annotate_to_first_argument(self, func: Callable, typ: Type) -> None:
         """Annotate type hint to the first argument of function if needed."""
-        sig = inspect.signature(func)
+        try:
+            sig = inspect.signature(func)
+        except TypeError as exc:
+            logger.warning(__("Failed to get a function signature for %s: %s"),
+                           self.fullname, exc)
+            return
+        except ValueError:
+            return
+
         if len(sig.parameters) == 0:
             return

@@ -1255,6 +1273,9 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # type:
         'private-members': bool_option, 'special-members': members_option,
     }  # type: Dict[str, Callable]

+    _signature_class = None  # type: Any
+    _signature_method_name = None  # type: str
+
     def __init__(self, *args: Any) -> None:
         super().__init__(*args)
         merge_special_members_option(self.options)
@@ -1275,7 +1296,7 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # type:
             self.doc_as_attr = True
         return ret

-    def _get_signature(self) -> Optional[Signature]:
+    def _get_signature(self) -> Tuple[Optional[Any], Optional[str], Optional[Signature]]:
         def get_user_defined_function_or_method(obj: Any, attr: str) -> Any:
             """ Get the `attr` function or method from `obj`, if it is user-defined. """
             if inspect.is_builtin_class_method(obj, attr):
@@ -1299,7 +1320,8 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # type:
         if call is not None:
             self.env.app.emit('autodoc-before-process-signature', call, True)
             try:
-                return inspect.signature(call, bound_method=True)
+                sig = inspect.signature(call, bound_method=True)
+                return type(self.object), '__call__', sig
             except ValueError:
                 pass

@@ -1308,7 +1330,8 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # type:
         if new is not None:
             self.env.app.emit('autodoc-before-process-signature', new, True)
             try:
-                return inspect.signature(new, bound_method=True)
+                sig = inspect.signature(new, bound_method=True)
+                return self.object, '__new__', sig
             except ValueError:
                 pass

@@ -1317,7 +1340,8 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # type:
         if init is not None:
             self.env.app.emit('autodoc-before-process-signature', init, True)
             try:
-                return inspect.signature(init, bound_method=True)
+                sig = inspect.signature(init, bound_method=True)
+                return self.object, '__init__', sig
             except ValueError:
                 pass

@@ -1327,20 +1351,21 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # type:
         # the signature from, so just pass the object itself to our hook.
         self.env.app.emit('autodoc-before-process-signature', self.object, False)
         try:
-            return inspect.signature(self.object, bound_method=False)
+            sig = inspect.signature(self.object, bound_method=False)
+            return None, None, sig
         except ValueError:
             pass

         # Still no signature: happens e.g. for old-style classes
         # with __init__ in C and no `__text_signature__`.
-        return None
+        return None, None, None

     def format_args(self, **kwargs: Any) -> str:
         if self.env.config.autodoc_typehints in ('none', 'description'):
             kwargs.setdefault('show_annotation', False)

         try:
-            sig = self._get_signature()
+            self._signature_class, self._signature_method_name, sig = self._get_signature()
         except TypeError as exc:
             # __signature__ attribute contained junk
             logger.warning(__("Failed to get a constructor signature for %s: %s"),
@@ -1356,7 +1381,30 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):  # type:
         if self.doc_as_attr:
             return ''

-        return super().format_signature(**kwargs)
+        sig = super().format_signature()
+
+        overloaded = False
+        qualname = None
+        # TODO: recreate analyzer for the module of class (To be clear, owner of the method)
+        if self._signature_class and self._signature_method_name and self.analyzer:
+            qualname = '.'.join([self._signature_class.__qualname__,
+                                 self._signature_method_name])
+            if qualname in self.analyzer.overloads:
+                overloaded = True
+
+        sigs = []
+        if overloaded:
+            # Use signatures for overloaded methods instead of the implementation method.
+            for overload in self.analyzer.overloads.get(qualname):
+                parameters = list(overload.parameters.values())
+                overload = overload.replace(parameters=parameters[1:],
+                                            return_annotation=Parameter.empty)
+                sig = stringify_signature(overload, **kwargs)
+                sigs.append(sig)
+        else:
+            sigs.append(sig)
+
+        return "\n".join(sigs)

     def add_directive_header(self, sig: str) -> None:
         sourcename = self.get_sourcename()
@@ -1586,6 +1634,48 @@ class GenericAliasDocumenter(DataDocumenter):
         super().add_content(content)


+class TypeVarDocumenter(DataDocumenter):
+    """
+    Specialized Documenter subclass for TypeVars.
+    """
+
+    objtype = 'typevar'
+    directivetype = 'data'
+    priority = DataDocumenter.priority + 1
+
+    @classmethod
+    def can_document_member(cls, member: Any, membername: str, isattr: bool, parent: Any
+                            ) -> bool:
+        return isinstance(member, TypeVar) and isattr  # type: ignore
+
+    def add_directive_header(self, sig: str) -> None:
+        self.options.annotation = SUPPRESS  # type: ignore
+        super().add_directive_header(sig)
+
+    def get_doc(self, encoding: str = None, ignore: int = None) -> List[List[str]]:
+        if ignore is not None:
+            warnings.warn("The 'ignore' argument to autodoc.%s.get_doc() is deprecated."
+                          % self.__class__.__name__,
+                          RemovedInSphinx50Warning, stacklevel=2)
+
+        if self.object.__doc__ != TypeVar.__doc__:
+            return super().get_doc()
+        else:
+            return []
+
+    def add_content(self, more_content: Any, no_docstring: bool = False) -> None:
+        attrs = [repr(self.object.__name__)]
+        for constraint in self.object.__constraints__:
+            attrs.append(stringify_typehint(constraint))
+        if self.object.__covariant__:
+            attrs.append("covariant=True")
+        if self.object.__contravariant__:
+            attrs.append("contravariant=True")
+
+        content = StringList([_('alias of TypeVar(%s)') % ", ".join(attrs)], source='')
+        super().add_content(content)
+
+
 class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter):  # type: ignore
     """
     Specialized Documenter subclass for methods (normal, static and class).
@@ -1675,8 +1765,14 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter):  # type:
             pass

     def format_signature(self, **kwargs: Any) -> str:
-        sig = super().format_signature(**kwargs)
-        sigs = [sig]
+        sigs = []
+        if self.analyzer and '.'.join(self.objpath) in self.analyzer.overloads:
+            # Use signatures for overloaded methods instead of the implementation method.
+            overloaded = True
+        else:
+            overloaded = False
+            sig = super().format_signature(**kwargs)
+            sigs.append(sig)

         meth = self.parent.__dict__.get(self.objpath[-1])
         if inspect.is_singledispatch_method(meth):
@@ -1692,12 +1788,27 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter):  # type:
                     documenter.object = func
                     documenter.objpath = [None]
                     sigs.append(documenter.format_signature())
+        if overloaded:
+            for overload in self.analyzer.overloads.get('.'.join(self.objpath)):
+                if not inspect.isstaticmethod(self.object, cls=self.parent,
+                                              name=self.object_name):
+                    parameters = list(overload.parameters.values())
+                    overload = overload.replace(parameters=parameters[1:])
+                sig = stringify_signature(overload, **kwargs)
+                sigs.append(sig)

         return "\n".join(sigs)

     def annotate_to_first_argument(self, func: Callable, typ: Type) -> None:
         """Annotate type hint to the first argument of function if needed."""
-        sig = inspect.signature(func)
+        try:
+            sig = inspect.signature(func)
+        except TypeError as exc:
+            logger.warning(__("Failed to get a method signature for %s: %s"),
+                           self.fullname, exc)
+            return
+        except ValueError:
+            return
         if len(sig.parameters) == 1:
             return

@@ -1945,6 +2056,7 @@ def setup(app: Sphinx) -> Dict[str, Any]:
     app.add_autodocumenter(DataDocumenter)
     app.add_autodocumenter(DataDeclarationDocumenter)
     app.add_autodocumenter(GenericAliasDocumenter)
+    app.add_autodocumenter(TypeVarDocumenter)
     app.add_autodocumenter(FunctionDocumenter)
     app.add_autodocumenter(DecoratorDocumenter)
     app.add_autodocumenter(MethodDocumenter)
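As a rough illustration of what the new TypeVarDocumenter above renders (not Sphinx's own rendering path; stringify_typehint is replaced by __name__ for brevity, and the TypeVar is hypothetical):

    from typing import TypeVar

    T = TypeVar("T", int, str, covariant=True)

    # Mirrors TypeVarDocumenter.add_content(): the generated body would read
    # "alias of TypeVar('T', int, str, covariant=True)".
    attrs = [repr(T.__name__)]
    attrs += [c.__name__ for c in T.__constraints__]  # simplified stand-in for stringify_typehint()
    if T.__covariant__:
        attrs.append("covariant=True")
    if T.__contravariant__:
        attrs.append("contravariant=True")
    print("alias of TypeVar(%s)" % ", ".join(attrs))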
@@ -22,6 +22,7 @@ from sphinx.application import Sphinx
 from sphinx.builders import Builder
 from sphinx.locale import __
 from sphinx.util import logging
+from sphinx.util.console import red  # type: ignore
 from sphinx.util.inspect import safe_getattr

 logger = logging.getLogger(__name__)
@@ -121,6 +122,14 @@ class CoverageBuilder(Builder):
                 write_header(op, filename)
                 for typ, name in sorted(undoc):
                     op.write(' * %-50s [%9s]\n' % (name, typ))
+                    if self.config.coverage_show_missing_items:
+                        if self.app.quiet or self.app.warningiserror:
+                            logger.warning(__('undocumented c api: %s [%s] in file %s'),
+                                           name, typ, filename)
+                        else:
+                            logger.info(red('undocumented ') + 'c ' + 'api ' +
+                                        '%-30s' % (name + " [%9s]" % typ) +
+                                        red(' - in file ') + filename)
                 op.write('\n')

     def ignore_pyobj(self, full_name: str) -> bool:
@@ -239,16 +248,48 @@ class CoverageBuilder(Builder):
                     if undoc['funcs']:
                         op.write('Functions:\n')
                         op.writelines(' * %s\n' % x for x in undoc['funcs'])
+                        if self.config.coverage_show_missing_items:
+                            if self.app.quiet or self.app.warningiserror:
+                                for func in undoc['funcs']:
+                                    logger.warning(
+                                        __('undocumented python function: %s :: %s'),
+                                        name, func)
+                            else:
+                                for func in undoc['funcs']:
+                                    logger.info(red('undocumented ') + 'py ' + 'function ' +
+                                                '%-30s' % func + red(' - in module ') + name)
                         op.write('\n')
                     if undoc['classes']:
                         op.write('Classes:\n')
-                        for name, methods in sorted(
+                        for class_name, methods in sorted(
                                 undoc['classes'].items()):
                             if not methods:
-                                op.write(' * %s\n' % name)
+                                op.write(' * %s\n' % class_name)
+                                if self.config.coverage_show_missing_items:
+                                    if self.app.quiet or self.app.warningiserror:
+                                        logger.warning(
+                                            __('undocumented python class: %s :: %s'),
+                                            name, class_name)
+                                    else:
+                                        logger.info(red('undocumented ') + 'py ' +
+                                                    'class ' + '%-30s' % class_name +
+                                                    red(' - in module ') + name)
                             else:
-                                op.write(' * %s -- missing methods:\n\n' % name)
+                                op.write(' * %s -- missing methods:\n\n' % class_name)
                                 op.writelines(' - %s\n' % x for x in methods)
+                                if self.config.coverage_show_missing_items:
+                                    if self.app.quiet or self.app.warningiserror:
+                                        for meth in methods:
+                                            logger.warning(
+                                                __('undocumented python method:' +
+                                                   ' %s :: %s :: %s'),
+                                                name, class_name, meth)
+                                    else:
+                                        for meth in methods:
+                                            logger.info(red('undocumented ') + 'py ' +
+                                                        'method ' + '%-30s' %
+                                                        (class_name + '.' + meth) +
+                                                        red(' - in module ') + name)
                             op.write('\n')

         if failed:
@@ -273,4 +314,5 @@ def setup(app: Sphinx) -> Dict[str, Any]:
     app.add_config_value('coverage_ignore_c_items', {}, False)
     app.add_config_value('coverage_write_headline', True, False)
     app.add_config_value('coverage_skip_undoc_in_source', False, False)
+    app.add_config_value('coverage_show_missing_items', False, False)
     return {'version': sphinx.__display_version__, 'parallel_read_safe': True}
@@ -11,6 +11,7 @@
 import re
 import tokenize
 from importlib import import_module
+from inspect import Signature
 from io import StringIO
 from os import path
 from typing import Any, Dict, IO, List, Tuple, Optional
@@ -134,6 +135,7 @@ class ModuleAnalyzer:
         self.annotations = None  # type: Dict[Tuple[str, str], str]
         self.attr_docs = None  # type: Dict[Tuple[str, str], List[str]]
         self.finals = None  # type: List[str]
+        self.overloads = None  # type: Dict[str, List[Signature]]
         self.tagorder = None  # type: Dict[str, int]
         self.tags = None  # type: Dict[str, Tuple[str, int, int]]

@@ -152,6 +154,7 @@ class ModuleAnalyzer:

             self.annotations = parser.annotations
             self.finals = parser.finals
+            self.overloads = parser.overloads
             self.tags = parser.definitions
             self.tagorder = parser.deforders
         except Exception as exc:
@@ -12,12 +12,14 @@ import itertools
 import re
 import sys
 import tokenize
+from inspect import Signature
 from token import NAME, NEWLINE, INDENT, DEDENT, NUMBER, OP, STRING
 from tokenize import COMMENT, NL
 from typing import Any, Dict, List, Optional, Tuple

 from sphinx.pycode.ast import ast  # for py37 or older
 from sphinx.pycode.ast import parse, unparse
+from sphinx.util.inspect import signature_from_ast


 comment_re = re.compile('^\\s*#: ?(.*)\r?\n?$')
@@ -235,8 +237,10 @@ class VariableCommentPicker(ast.NodeVisitor):
         self.previous = None  # type: ast.AST
         self.deforders = {}  # type: Dict[str, int]
         self.finals = []  # type: List[str]
+        self.overloads = {}  # type: Dict[str, List[Signature]]
         self.typing = None  # type: str
         self.typing_final = None  # type: str
+        self.typing_overload = None  # type: str
         super().__init__()

     def get_qualname_for(self, name: str) -> Optional[List[str]]:
@@ -260,6 +264,12 @@ class VariableCommentPicker(ast.NodeVisitor):
         if qualname:
             self.finals.append(".".join(qualname))

+    def add_overload_entry(self, func: ast.FunctionDef) -> None:
+        qualname = self.get_qualname_for(func.name)
+        if qualname:
+            overloads = self.overloads.setdefault(".".join(qualname), [])
+            overloads.append(signature_from_ast(func))
+
     def add_variable_comment(self, name: str, comment: str) -> None:
         qualname = self.get_qualname_for(name)
         if qualname:
@@ -288,6 +298,22 @@ class VariableCommentPicker(ast.NodeVisitor):

         return False

+    def is_overload(self, decorators: List[ast.expr]) -> bool:
+        overload = []
+        if self.typing:
+            overload.append('%s.overload' % self.typing)
+        if self.typing_overload:
+            overload.append(self.typing_overload)
+
+        for decorator in decorators:
+            try:
+                if unparse(decorator) in overload:
+                    return True
+            except NotImplementedError:
+                pass
+
+        return False
+
     def get_self(self) -> ast.arg:
         """Returns the name of first argument if in function."""
         if self.current_function and self.current_function.args.args:
@@ -313,6 +339,8 @@ class VariableCommentPicker(ast.NodeVisitor):
                 self.typing = name.asname or name.name
             elif name.name == 'typing.final':
                 self.typing_final = name.asname or name.name
+            elif name.name == 'typing.overload':
+                self.typing_overload = name.asname or name.name

     def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
         """Handles Import node and record it to definition orders."""
@@ -321,6 +349,8 @@ class VariableCommentPicker(ast.NodeVisitor):

             if node.module == 'typing' and name.name == 'final':
                 self.typing_final = name.asname or name.name
+            elif node.module == 'typing' and name.name == 'overload':
+                self.typing_overload = name.asname or name.name

     def visit_Assign(self, node: ast.Assign) -> None:
         """Handles Assign node and pick up a variable comment."""
@@ -420,6 +450,8 @@ class VariableCommentPicker(ast.NodeVisitor):
         self.add_entry(node.name)  # should be called before setting self.current_function
         if self.is_final(node.decorator_list):
             self.add_final_entry(node.name)
+        if self.is_overload(node.decorator_list):
+            self.add_overload_entry(node)
         self.context.append(node.name)
         self.current_function = node
         for child in node.body:
@@ -521,6 +553,7 @@ class Parser:
         self.deforders = {}  # type: Dict[str, int]
         self.definitions = {}  # type: Dict[str, Tuple[str, int, int]]
         self.finals = []  # type: List[str]
+        self.overloads = {}  # type: Dict[str, List[Signature]]

     def parse(self) -> None:
         """Parse the source code."""
@@ -536,6 +569,7 @@ class Parser:
         self.comments = picker.comments
         self.deforders = picker.deforders
         self.finals = picker.finals
+        self.overloads = picker.overloads

     def parse_definition(self) -> None:
         """Parse the location of definitions from the code."""
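A small sketch (illustrative, not part of the commit; the Parser constructor call is assumed to accept a source string) of the new overload tracking in the parser; the source is modeled on the overload.py test fixture added below:

    from sphinx.pycode.parser import Parser

    source = '''\
    from typing import overload

    @overload
    def sum(x: int, y: int) -> int: ...
    @overload
    def sum(x: str, y: str) -> str: ...
    def sum(x, y):
        """docstring"""
        return x + y
    '''

    parser = Parser(source)
    parser.parse()
    # parser.overloads maps each qualified name to the inspect.Signature objects
    # built from its @overload definitions (via signature_from_ast).
    print(list(parser.overloads))        # ['sum']
    print(len(parser.overloads['sum']))  # 2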
@@ -84,6 +84,7 @@ class BuildDoc(Command):
         ('link-index', 'i', 'Link index.html to the master doc'),
         ('copyright', None, 'The copyright string'),
         ('pdb', None, 'Start pdb on exception'),
+        ('verbosity', 'v', 'increase verbosity (can be repeated)'),
         ('nitpicky', 'n', 'nit-picky mode, warn about all missing references'),
         ('keep-going', None, 'With -W, keep going when getting warnings'),
     ]
@@ -189,7 +190,7 @@ class BuildDoc(Command):
                              builder, confoverrides, status_stream,
                              freshenv=self.fresh_env,
                              warningiserror=self.warning_is_error,
-                             keep_going=self.keep_going)
+                             verbosity=self.verbosity, keep_going=self.keep_going)
                 app.build(force_all=self.all_files)
                 if app.statuscode:
                     raise DistutilsExecError(
@@ -207,7 +207,6 @@ div.document .section:first-child {

 div.document div.highlight {
     padding: 3px;
-    background-color: #eeeeec;
     border-top: 2px solid #dddddd;
     border-bottom: 2px solid #dddddd;
     margin-top: .8em;
@@ -316,7 +316,7 @@ img.align-default, .figure.align-default {
 div.sidebar {
     margin: 0 0 0.5em 1em;
     border: 1px solid #ddb;
-    padding: 7px 7px 0 7px;
+    padding: 7px;
     background-color: #ffe;
     width: 40%;
     float: right;
@@ -336,7 +336,7 @@ div.admonition, div.topic, pre, div[class|="highlight"] {

 div.topic {
     border: 1px solid #ccc;
-    padding: 7px 7px 0 7px;
+    padding: 7px;
     margin: 10px 0 10px 0;
     overflow-x: auto;
 }
@@ -360,10 +360,6 @@ div.admonition dt {
     font-weight: bold;
 }

-div.admonition dl {
-    margin-bottom: 0;
-}
-
 p.admonition-title {
     margin: 0px 10px 5px 0px;
     font-weight: bold;
@@ -374,6 +370,14 @@ div.body p.centered {
     margin-top: 25px;
 }

+/* -- content of sidebars/topics/admonitions -------------------------------- */
+
+div.sidebar > :last-child,
+div.topic > :last-child,
+div.admonition > :last-child {
+    margin-bottom: 0;
+}
+
 /* -- tables ---------------------------------------------------------------- */

 table.docutils {
@@ -426,13 +430,13 @@ table.citation td {
     border-bottom: none;
 }

-th > p:first-child,
-td > p:first-child {
+th > :first-child,
+td > :first-child {
     margin-top: 0px;
 }

-th > p:last-child,
-td > p:last-child {
+th > :last-child,
+td > :last-child {
     margin-bottom: 0px;
 }

@@ -478,6 +482,10 @@ table.field-list td, table.field-list th {

 /* -- hlist styles ---------------------------------------------------------- */

+table.hlist {
+    margin: 1em 0;
+}
+
 table.hlist td {
     vertical-align: top;
 }
@@ -505,14 +513,30 @@ ol.upperroman {
     list-style: upper-roman;
 }

-li > p:first-child {
+ol > li:first-child > :first-child,
+ul > li:first-child > :first-child {
     margin-top: 0px;
 }

-li > p:last-child {
+ol ol > li:first-child > :first-child,
+ol ul > li:first-child > :first-child,
+ul ol > li:first-child > :first-child,
+ul ul > li:first-child > :first-child {
+    margin-top: revert;
+}
+
+ol > li:last-child > :last-child,
+ul > li:last-child > :last-child {
     margin-bottom: 0px;
 }

+ol ol > li:last-child > :last-child,
+ol ul > li:last-child > :last-child,
+ul ol > li:last-child > :last-child,
+ul ul > li:last-child > :last-child {
+    margin-bottom: revert;
+}
+
 dl.footnote > dt,
 dl.citation > dt {
     float: left;
@@ -557,7 +581,7 @@ dl {
     margin-bottom: 15px;
 }

-dd > p:first-child {
+dd > :first-child {
     margin-top: 0px;
 }

@@ -571,6 +595,11 @@ dd {
     margin-left: 30px;
 }

+dl > dd:last-child,
+dl > dd:last-child > :last-child {
+    margin-bottom: 0;
+}
+
 dt:target, span.highlighted {
     background-color: #fbe54e;
 }
@@ -655,6 +684,10 @@ span.pre {
     hyphens: none;
 }

+div[class^="highlight-"] {
+    margin: 1em 0;
+}
+
 td.linenos pre {
     border: 0;
     background-color: transparent;
@@ -663,7 +696,6 @@ td.linenos pre {

 table.highlighttable {
     display: block;
-    margin: 1em 0;
 }

 table.highlighttable tbody {
@@ -680,7 +712,7 @@ table.highlighttable td {
 }

 table.highlighttable td.linenos {
-    padding: 0 0.5em;
+    padding-right: 0.5em;
 }

 table.highlighttable td.code {
@@ -692,11 +724,12 @@ table.highlighttable td.code {
     display: block;
 }

+div.highlight pre,
 table.highlighttable pre {
     margin: 0;
 }

-div.code-block-caption + div > table.highlighttable {
+div.code-block-caption + div {
     margin-top: 0;
 }

@@ -710,10 +743,6 @@ div.code-block-caption code {
     background-color: transparent;
 }

-div.code-block-caption + div > div.highlight > pre {
-    margin-top: 0;
-}
-
 table.highlighttable td.linenos,
 div.doctest > div.highlight span.gp {  /* gp: Generic.Prompt */
     user-select: none;
@@ -25,8 +25,8 @@ headtextcolor = #20435c
 headlinkcolor = #c60f0f
 linkcolor = #355f7c
 visitedlinkcolor = #355f7c
-codebgcolor = #eeffcc
-codetextcolor = #333333
+codebgcolor = unset
+codetextcolor = unset

 bodyfont = sans-serif
 headfont = 'Trebuchet MS', sans-serif
@@ -319,7 +319,6 @@ pre {
     border-width: thin;
     margin: 0 0 12px 0;
     padding: 0.8em;
-    background-color: #f0f0f0;
 }

 hr {
@@ -184,10 +184,6 @@ div.admonition p.admonition-title + p {
     display: inline;
 }

-div.highlight{
-    background-color: white;
-}
-
 div.note {
     background-color: #eee;
     border: 1px solid #ccc;
@@ -217,8 +213,6 @@ p.admonition-title:after {

 pre {
     padding: 10px;
-    background-color: White;
-    color: #222;
     line-height: 1.2em;
     border: 1px solid #C6C9CB;
     font-size: 1.1em;
@@ -229,10 +229,6 @@ div.admonition {
     padding: 10px 20px 10px 60px;
 }

-div.highlight{
-    background-color: white;
-}
-
 div.note {
     border: 2px solid #7a9eec;
     border-right-style: none;
@@ -286,8 +282,6 @@ p.admonition-title:after {

 pre {
     padding: 10px;
-    background-color: #fafafa;
-    color: #222;
     line-height: 1.2em;
     border: 2px solid #C6C9CB;
     font-size: 1.1em;
@@ -188,7 +188,7 @@ a:hover {
 }

 pre {
-    background: #ededed url(metal.png);
+    background-image: url(metal.png);
     border-top: 1px solid #ccc;
     border-bottom: 1px solid #ccc;
     padding: 5px;
@@ -247,7 +247,6 @@ pre {
     line-height: 120%;
     padding: 0.5em;
     border: 1px solid #ccc;
-    background-color: #f8f8f8;
 }

 pre a {
@@ -632,7 +632,6 @@ th {
 pre {
     font-family: monospace;
     padding: 5px;
-    color: #00008b;
     border-left: none;
     border-right: none;
 }
@@ -510,10 +510,14 @@ def stringify_signature(sig: inspect.Signature, show_annotation: bool = True,
 def signature_from_str(signature: str) -> inspect.Signature:
     """Create a Signature object from string."""
     module = ast.parse('def func' + signature + ': pass')
-    definition = cast(ast.FunctionDef, module.body[0])  # type: ignore
+    function = cast(ast.FunctionDef, module.body[0])  # type: ignore

-    # parameters
-    args = definition.args
+    return signature_from_ast(function)
+
+
+def signature_from_ast(node: ast.FunctionDef) -> inspect.Signature:
+    """Create a Signature object from AST *node*."""
+    args = node.args
     defaults = list(args.defaults)
     params = []
     if hasattr(args, "posonlyargs"):
@@ -563,7 +567,7 @@ def signature_from_str(signature: str) -> inspect.Signature:
         params.append(Parameter(args.kwarg.arg, Parameter.VAR_KEYWORD,
                                 annotation=annotation))

-    return_annotation = ast_unparse(definition.returns) or Parameter.empty
+    return_annotation = ast_unparse(node.returns) or Parameter.empty

     return inspect.Signature(params, return_annotation=return_annotation)

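For illustration (not part of the commit), signature_from_str() keeps its public behaviour while delegating to the new signature_from_ast():

    from sphinx.util.inspect import signature_from_str

    sig = signature_from_str('(x: int, y: int = 1) -> int')
    print(list(sig.parameters))    # ['x', 'y']
    print(sig.return_annotation)   # 'int' -- annotations come back as unparsed source strings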
@@ -1974,7 +1974,8 @@ class LaTeXTranslator(SphinxTranslator):
         # mainly, %, #, {, } and \ need escaping via a \ escape
         # in \href, the tilde is allowed and must be represented literally
         return self.encode(text).replace('\\textasciitilde{}', '~').\
-            replace('\\sphinxhyphen{}', '-')
+            replace('\\sphinxhyphen{}', '-').\
+            replace('\\textquotesingle{}', "'")

     def visit_Text(self, node: Text) -> None:
         text = self.encode(node.astext())
88
tests/roots/test-ext-autodoc/target/overload.py
Normal file
88
tests/roots/test-ext-autodoc/target/overload.py
Normal file
@ -0,0 +1,88 @@
|
|||||||
|
from typing import Any, overload
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def sum(x: int, y: int) -> int:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def sum(x: float, y: float) -> float:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def sum(x: str, y: str) -> str:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
def sum(x, y):
|
||||||
|
"""docstring"""
|
||||||
|
return x + y
|
||||||
|
|
||||||
|
|
||||||
|
class Math:
|
||||||
|
"""docstring"""
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def sum(self, x: int, y: int) -> int:
|
||||||
|
...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def sum(self, x: float, y: float) -> float:
|
||||||
|
...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def sum(self, x: str, y: str) -> str:
|
||||||
|
...
|
||||||
|
|
||||||
|
def sum(self, x, y):
|
||||||
|
"""docstring"""
|
||||||
|
return x + y
|
||||||
|
|
||||||
|
|
||||||
|
class Foo:
|
||||||
|
"""docstring"""
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def __new__(cls, x: int, y: int) -> "Foo":
|
||||||
|
...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def __new__(cls, x: str, y: str) -> "Foo":
|
||||||
|
...
|
||||||
|
|
||||||
|
def __new__(cls, x, y):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class Bar:
|
||||||
|
"""docstring"""
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def __init__(cls, x: int, y: int) -> None:
|
||||||
|
...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def __init__(cls, x: str, y: str) -> None:
|
||||||
|
...
|
||||||
|
|
||||||
|
def __init__(cls, x, y):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class Meta(type):
|
||||||
|
@overload
|
||||||
|
def __call__(cls, x: int, y: int) -> Any:
|
||||||
|
...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def __call__(cls, x: str, y: str) -> Any:
|
||||||
|
...
|
||||||
|
|
||||||
|
def __call__(cls, x, y):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class Baz(metaclass=Meta):
|
||||||
|
"""docstring"""
|
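
The fixture follows the standard typing.overload pattern: the decorated stubs exist only for type checkers, and the single undecorated definition is what actually runs, so its docstring is the one autodoc can pair with the stub signatures. A tiny self-contained illustration of that runtime behaviour (not part of the fixture):

    from typing import overload

    @overload
    def double(x: int) -> int: ...
    @overload
    def double(x: str) -> str: ...
    def double(x):
        """docstring"""  # only this implementation exists at runtime
        return x * 2

    print(double(2), double("ab"))   # 4 abab
    print(double.__doc__)            # docstring
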
tests/roots/test-ext-autodoc/target/typevar.py (new file, 15 lines)
@@ -0,0 +1,15 @@
+from typing import TypeVar
+
+#: T1
+T1 = TypeVar("T1")
+
+T2 = TypeVar("T2")  # A TypeVar not having doc comment
+
+#: T3
+T3 = TypeVar("T3", int, str)
+
+#: T4
+T4 = TypeVar("T4", covariant=True)
+
+#: T5
+T5 = TypeVar("T5", contravariant=True)
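
The ``#:`` markers are autodoc's doc-comment convention for module-level data, which is why T1/T3/T4/T5 come out documented while T2 does not. A small standalone sketch of the same pattern (the name and comment here are made up):

    from typing import TypeVar

    #: element type accepted by the container -- autodoc uses this as the description
    ElemT = TypeVar("ElemT", int, str)

    print(ElemT.__constraints__)   # (<class 'int'>, <class 'str'>)
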
@@ -180,7 +180,9 @@ Others

 .. option:: arg

-Link to :option:`perl +p`, :option:`--ObjC++`, :option:`--plugin.option`, :option:`create-auth-token` and :option:`arg`
+.. option:: -j[=N]
+
+Link to :option:`perl +p`, :option:`--ObjC++`, :option:`--plugin.option`, :option:`create-auth-token`, :option:`arg` and :option:`-j`

 .. program:: hg

@@ -331,6 +331,8 @@ def test_html4_output(app, status, warning):
          'create-auth-token'),
         (".//a[@class='reference internal'][@href='#cmdoption-perl-arg-arg']/code/span",
          'arg'),
+        (".//a[@class='reference internal'][@href='#cmdoption-perl-j']/code/span",
+         '-j'),
         (".//a[@class='reference internal'][@href='#cmdoption-hg-arg-commit']/code/span",
          'hg'),
         (".//a[@class='reference internal'][@href='#cmdoption-hg-arg-commit']/code/span",
@@ -124,3 +124,36 @@ def test_auth(app, status, warning):
                 assert c_kwargs['auth'] == 'authinfo2'
             else:
                 assert not c_kwargs['auth']
+
+
+@pytest.mark.sphinx(
+    'linkcheck', testroot='linkcheck', freshenv=True,
+    confoverrides={'linkcheck_request_headers': {
+        "https://localhost:7777/": {
+            "Accept": "text/html",
+        },
+        "http://www.sphinx-doc.org": {  # no slash at the end
+            "Accept": "application/json",
+        },
+        "*": {
+            "X-Secret": "open sesami",
+        }
+    }})
+def test_linkcheck_request_headers(app, status, warning):
+    mock_req = mock.MagicMock()
+    mock_req.return_value = 'fake-response'
+
+    with mock.patch.multiple('requests', get=mock_req, head=mock_req):
+        app.builder.build_all()
+        for args, kwargs in mock_req.call_args_list:
+            url = args[0]
+            headers = kwargs.get('headers', {})
+            if "https://localhost:7777" in url:
+                assert headers["Accept"] == "text/html"
+            elif 'http://www.sphinx-doc.org' in url:
+                assert headers["Accept"] == "application/json"
+            elif 'https://www.google.com' in url:
+                assert headers["Accept"] == "text/html,application/xhtml+xml;q=0.9,*/*;q=0.8"
+                assert headers["X-Secret"] == "open sesami"
+            else:
+                assert headers["Accept"] == "text/html,application/xhtml+xml;q=0.9,*/*;q=0.8"
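
The new test drives the linkcheck_request_headers option end to end: URLs matching a configured prefix get the extra headers, "*" covers everything else, and unmatched requests keep the builder's default Accept header (as the final else branch asserts). A conf.py sketch of the option's shape (prefixes and values here are examples, not defaults):

    # conf.py
    linkcheck_request_headers = {
        "https://api.example.com/": {
            "Accept": "application/json",
        },
        "*": {
            "X-Secret": "open sesami",
        },
    }
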
@@ -1618,6 +1618,46 @@ def test_autodoc_GenericAlias(app):
     ]


+@pytest.mark.sphinx('html', testroot='ext-autodoc')
+def test_autodoc_TypeVar(app):
+    options = {"members": None,
+               "undoc-members": None}
+    actual = do_autodoc(app, 'module', 'target.typevar', options)
+    assert list(actual) == [
+        '',
+        '.. py:module:: target.typevar',
+        '',
+        '',
+        '.. py:data:: T1',
+        '   :module: target.typevar',
+        '',
+        '   T1',
+        '',
+        "   alias of TypeVar('T1')",
+        '',
+        '.. py:data:: T3',
+        '   :module: target.typevar',
+        '',
+        '   T3',
+        '',
+        "   alias of TypeVar('T3', int, str)",
+        '',
+        '.. py:data:: T4',
+        '   :module: target.typevar',
+        '',
+        '   T4',
+        '',
+        "   alias of TypeVar('T4', covariant=True)",
+        '',
+        '.. py:data:: T5',
+        '   :module: target.typevar',
+        '',
+        '   T5',
+        '',
+        "   alias of TypeVar('T5', contravariant=True)",
+    ]
+
+
 @pytest.mark.skipif(sys.version_info < (3, 9), reason='py39+ is required.')
 @pytest.mark.sphinx('html', testroot='ext-autodoc')
 def test_autodoc_Annotated(app):
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.sphinx('html', testroot='ext-autodoc')
|
||||||
|
def test_overload(app):
|
||||||
|
options = {"members": None}
|
||||||
|
actual = do_autodoc(app, 'module', 'target.overload', options)
|
||||||
|
assert list(actual) == [
|
||||||
|
'',
|
||||||
|
'.. py:module:: target.overload',
|
||||||
|
'',
|
||||||
|
'',
|
||||||
|
'.. py:class:: Bar(x: int, y: int)',
|
||||||
|
' Bar(x: str, y: str)',
|
||||||
|
' :module: target.overload',
|
||||||
|
'',
|
||||||
|
' docstring',
|
||||||
|
'',
|
||||||
|
'',
|
||||||
|
'.. py:class:: Baz(x: int, y: int)',
|
||||||
|
' Baz(x: str, y: str)',
|
||||||
|
' :module: target.overload',
|
||||||
|
'',
|
||||||
|
' docstring',
|
||||||
|
'',
|
||||||
|
'',
|
||||||
|
'.. py:class:: Foo(x: int, y: int)',
|
||||||
|
' Foo(x: str, y: str)',
|
||||||
|
' :module: target.overload',
|
||||||
|
'',
|
||||||
|
' docstring',
|
||||||
|
'',
|
||||||
|
'',
|
||||||
|
'.. py:class:: Math()',
|
||||||
|
' :module: target.overload',
|
||||||
|
'',
|
||||||
|
' docstring',
|
||||||
|
'',
|
||||||
|
'',
|
||||||
|
' .. py:method:: Math.sum(x: int, y: int) -> int',
|
||||||
|
' Math.sum(x: float, y: float) -> float',
|
||||||
|
' Math.sum(x: str, y: str) -> str',
|
||||||
|
' :module: target.overload',
|
||||||
|
'',
|
||||||
|
' docstring',
|
||||||
|
'',
|
||||||
|
'',
|
||||||
|
'.. py:function:: sum(x: int, y: int) -> int',
|
||||||
|
' sum(x: float, y: float) -> float',
|
||||||
|
' sum(x: str, y: str) -> str',
|
||||||
|
' :module: target.overload',
|
||||||
|
'',
|
||||||
|
' docstring',
|
||||||
|
'',
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.sphinx('dummy', testroot='ext-autodoc')
|
@pytest.mark.sphinx('dummy', testroot='ext-autodoc')
|
||||||
def test_autodoc(app, status, warning):
|
def test_autodoc(app, status, warning):
|
||||||
app.builder.build_all()
|
app.builder.build_all()
|
||||||
|
@@ -28,6 +28,8 @@ def test_build(app, status, warning):

     assert ' * mod -- No module named mod'  # in the "failed import" section

+    assert "undocumented py" not in status.getvalue()
+
     c_undoc = (app.outdir / 'c.txt').read_text()
     assert c_undoc.startswith('Undocumented C API elements\n'
                               '===========================\n')
@@ -46,6 +48,8 @@ def test_build(app, status, warning):
     assert 'Class' in undoc_py['autodoc_target']['classes']
     assert 'undocmeth' in undoc_py['autodoc_target']['classes']['Class']

+    assert "undocumented c" not in status.getvalue()
+

 @pytest.mark.sphinx('coverage', testroot='ext-coverage')
 def test_coverage_ignore_pyobjects(app, status, warning):
@@ -64,3 +68,28 @@ Classes:

 '''
     assert actual == expected
+
+
+@pytest.mark.sphinx('coverage', confoverrides={'coverage_show_missing_items': True})
+def test_show_missing_items(app, status, warning):
+    app.builder.build_all()
+
+    assert "undocumented" in status.getvalue()
+
+    assert "py function raises" in status.getvalue()
+    assert "py class Base" in status.getvalue()
+    assert "py method Class.roger" in status.getvalue()
+
+    assert "c api Py_SphinxTest [ function]" in status.getvalue()
+
+
+@pytest.mark.sphinx('coverage', confoverrides={'coverage_show_missing_items': True})
+def test_show_missing_items_quiet(app, status, warning):
+    app.quiet = True
+    app.builder.build_all()
+
+    assert "undocumented python function: autodoc_target :: raises" in warning.getvalue()
+    assert "undocumented python class: autodoc_target :: Base" in warning.getvalue()
+    assert "undocumented python method: autodoc_target :: Class :: roger" in warning.getvalue()
+
+    assert "undocumented c api: Py_SphinxTest [function]" in warning.getvalue()
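
Both tests flip the new coverage_show_missing_items option, which makes the coverage builder list each undocumented object in the build output (and as warnings when the app is quiet) in addition to writing the usual report files. A conf.py sketch of how it would be enabled (assuming a coverage build, e.g. sphinx-build -b coverage):

    # conf.py
    extensions = ["sphinx.ext.coverage"]
    coverage_show_missing_items = True
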
@@ -13,6 +13,7 @@ import sys
 import pytest

 from sphinx.pycode.parser import Parser
+from sphinx.util.inspect import signature_from_str


 def test_comment_picker_basic():
@@ -452,3 +453,80 @@ def test_typing_final_not_imported():
     parser = Parser(source)
     parser.parse()
     assert parser.finals == []
+
+
+def test_typing_overload():
+    source = ('import typing\n'
+              '\n'
+              '@typing.overload\n'
+              'def func(x: int, y: int) -> int: pass\n'
+              '\n'
+              '@typing.overload\n'
+              'def func(x: str, y: str) -> str: pass\n'
+              '\n'
+              'def func(x, y): pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.overloads == {'func': [signature_from_str('(x: int, y: int) -> int'),
+                                         signature_from_str('(x: str, y: str) -> str')]}
+
+
+def test_typing_overload_from_import():
+    source = ('from typing import overload\n'
+              '\n'
+              '@overload\n'
+              'def func(x: int, y: int) -> int: pass\n'
+              '\n'
+              '@overload\n'
+              'def func(x: str, y: str) -> str: pass\n'
+              '\n'
+              'def func(x, y): pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.overloads == {'func': [signature_from_str('(x: int, y: int) -> int'),
+                                         signature_from_str('(x: str, y: str) -> str')]}
+
+
+def test_typing_overload_import_as():
+    source = ('import typing as foo\n'
+              '\n'
+              '@foo.overload\n'
+              'def func(x: int, y: int) -> int: pass\n'
+              '\n'
+              '@foo.overload\n'
+              'def func(x: str, y: str) -> str: pass\n'
+              '\n'
+              'def func(x, y): pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.overloads == {'func': [signature_from_str('(x: int, y: int) -> int'),
+                                         signature_from_str('(x: str, y: str) -> str')]}
+
+
+def test_typing_overload_from_import_as():
+    source = ('from typing import overload as bar\n'
+              '\n'
+              '@bar\n'
+              'def func(x: int, y: int) -> int: pass\n'
+              '\n'
+              '@bar\n'
+              'def func(x: str, y: str) -> str: pass\n'
+              '\n'
+              'def func(x, y): pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.overloads == {'func': [signature_from_str('(x: int, y: int) -> int'),
+                                         signature_from_str('(x: str, y: str) -> str')]}
+
+
+def test_typing_overload_not_imported():
+    source = ('@typing.final\n'
+              'def func(x: int, y: int) -> int: pass\n'
+              '\n'
+              '@typing.final\n'
+              'def func(x: str, y: str) -> str: pass\n'
+              '\n'
+              'def func(x, y): pass\n')
+    parser = Parser(source)
+    parser.parse()
+    assert parser.overloads == {}
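
These tests pin down the parser's new overloads mapping: after parse(), each function name maps to one inspect.Signature per @overload stub, regardless of how overload was imported. A condensed sketch of the same API (the sample source string is made up):

    from sphinx.pycode.parser import Parser

    src = ('from typing import overload\n'
           '\n'
           '@overload\n'
           'def func(x: int) -> int: pass\n'
           '\n'
           '@overload\n'
           'def func(x: str) -> str: pass\n'
           '\n'
           'def func(x): pass\n')
    parser = Parser(src)
    parser.parse()
    print(parser.overloads['func'])   # two inspect.Signature objects, one per stub
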
@@ -9,6 +9,7 @@
 """

 import _testcapi
+import ast
 import datetime
 import functools
 import sys
@@ -350,6 +351,38 @@ def test_signature_from_str_invalid():
         inspect.signature_from_str('')


+def test_signature_from_ast():
+    signature = 'def func(a, b, *args, c=0, d="blah", **kwargs): pass'
+    tree = ast.parse(signature)
+    sig = inspect.signature_from_ast(tree.body[0])
+    assert list(sig.parameters.keys()) == ['a', 'b', 'args', 'c', 'd', 'kwargs']
+    assert sig.parameters['a'].name == 'a'
+    assert sig.parameters['a'].kind == Parameter.POSITIONAL_OR_KEYWORD
+    assert sig.parameters['a'].default == Parameter.empty
+    assert sig.parameters['a'].annotation == Parameter.empty
+    assert sig.parameters['b'].name == 'b'
+    assert sig.parameters['b'].kind == Parameter.POSITIONAL_OR_KEYWORD
+    assert sig.parameters['b'].default == Parameter.empty
+    assert sig.parameters['b'].annotation == Parameter.empty
+    assert sig.parameters['args'].name == 'args'
+    assert sig.parameters['args'].kind == Parameter.VAR_POSITIONAL
+    assert sig.parameters['args'].default == Parameter.empty
+    assert sig.parameters['args'].annotation == Parameter.empty
+    assert sig.parameters['c'].name == 'c'
+    assert sig.parameters['c'].kind == Parameter.KEYWORD_ONLY
+    assert sig.parameters['c'].default == '0'
+    assert sig.parameters['c'].annotation == Parameter.empty
+    assert sig.parameters['d'].name == 'd'
+    assert sig.parameters['d'].kind == Parameter.KEYWORD_ONLY
+    assert sig.parameters['d'].default == "'blah'"
+    assert sig.parameters['d'].annotation == Parameter.empty
+    assert sig.parameters['kwargs'].name == 'kwargs'
+    assert sig.parameters['kwargs'].kind == Parameter.VAR_KEYWORD
+    assert sig.parameters['kwargs'].default == Parameter.empty
+    assert sig.parameters['kwargs'].annotation == Parameter.empty
+    assert sig.return_annotation == Parameter.empty
+
+
 def test_safe_getattr_with_default():
     class Foo:
         def __getattr__(self, item):