Merge branch '3.x'

Takeshi KOMIYA 2020-04-13 23:36:49 +09:00
commit 1aca70cf80
37 changed files with 719 additions and 351 deletions

CHANGES
View File

@ -41,16 +41,26 @@ Incompatible changes
Deprecated
----------
* The ``module`` argument of
  ``sphinx.ext.autosummary.generate.find_autosummary_in_docstring()``
Features added
--------------
* LaTeX: Make the ``toplevel_sectioning`` setting optional in LaTeX theme
* #7410: Allow suppressing "circular toctree references detected" warnings using
:confval:`suppress_warnings`
* C, added scope control directives, :rst:dir:`c:namespace`,
:rst:dir:`c:namespace-push`, and :rst:dir:`c:namespace-pop`.
* #7466: autosummary: headings in generated documents are not translated
Bugs fixed
----------
Testing
--------
Release 3.0.1 (in development)
Release 3.0.2 (in development)
==============================
Dependencies
@ -65,12 +75,43 @@ Deprecated
Features added
--------------
* C, parse attributes and add :confval:`c_id_attributes`
and :confval:`c_paren_attributes` to support user-defined attributes.
Bugs fixed
----------
* #7461: py domain: fails with IndexError for empty tuple in type annotation
* #7461: autodoc: empty tuple in type annotation is not shown correctly
Testing
--------
Release 3.0.1 (released Apr 11, 2020)
=====================================
Incompatible changes
--------------------
* #7418: std domain: :rst:dir:`term` role becomes case sensitive
Bugs fixed
----------
* #7428: py domain: a reference to class ``None`` emits a nitpicky warning
* #7445: py domain: a return annotation ``None`` in the function signature is
not converted to a hyperlink when using intersphinx
* #7418: std domain: duplication warning for glossary terms is case insensitive
* #7438: C++, fix merging overloaded functions in parallel builds.
* #7422: autodoc: fails with ValueError when using autodoc_mock_imports
* #7435: autodoc: ``autodoc_typehints='description'`` doesn't suppress typehints
in signature for classes/methods
* #7451: autodoc: fails with AttributeError when an object returns non-string
object as a ``__doc__`` member
* #7423: crashed when giving a non-string object to logger
* #7479: html theme: Do not include xmlns attribute with HTML 5 doctype
* #7426: html theme: Escape some links in HTML templates
Release 3.0.0 (released Apr 06, 2020)
=====================================

View File

@ -50,7 +50,7 @@ clean-buildfiles:
.PHONY: clean-mypyfiles
clean-mypyfiles:
rm -rf **/.mypy_cache/
find . -name '.mypy_cache' -exec rm -rf {} +
.PHONY: style-check
style-check:

View File

@ -56,6 +56,12 @@ The following is a list of deprecated interfaces.
- 6.0
- ``docutils.utils.smartyquotes``
* - The ``module`` argument of
``sphinx.ext.autosummary.generate.find_autosummary_in_docstring()``
- 3.0
- 5.0
- N/A
* - ``desc_signature['first']``
-
- 3.0

View File

@ -35,25 +35,25 @@ In practice, you have to:
:func:`sphinx.locale.get_translation` function, usually renamed ``_()``,
e.g.:
   .. code-block:: python
      :caption: src/__init__.py

      from sphinx.locale import get_translation

      MESSAGE_CATALOG_NAME = 'myextension'
      _ = get_translation(MESSAGE_CATALOG_NAME)

      translated_text = _('Hello Sphinx!')

#. Set up your extension to be aware of its dedicated translations:

   .. code-block:: python
      :caption: src/__init__.py

      def setup(app):
          package_dir = path.abspath(path.dirname(__file__))
          locale_dir = os.path.join(package_dir, 'locales')
          app.add_message_catalog(MESSAGE_CATALOG_NAME, locale_dir)
#. Generate message catalog template ``*.pot`` file, usually in ``locale/``
source directory, for example via `Babel`_:

View File

@ -27,7 +27,7 @@ Discovery of builders by entry point
.. versionadded:: 1.6
:term:`Builder` extensions can be discovered by means of `entry points`_ so
:term:`builder` extensions can be discovered by means of `entry points`_ so
that they do not have to be listed in the :confval:`extensions` configuration
value.

View File

@ -313,6 +313,7 @@ General configuration
* ``ref.doc``
* ``ref.python``
* ``misc.highlighting_failure``
* ``toc.circular``
* ``toc.secnum``
* ``epub.unknown_project_files``
* ``autosectionlabel.*``
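The new ``toc.circular`` entry combines with the existing values; a minimal,
purely illustrative ``conf.py`` sketch (the second entry is just another value
from the list above)::

   # conf.py -- illustrative sketch, not part of this commit
   suppress_warnings = [
       'toc.circular',                # silence "circular toctree references detected"
       'epub.unknown_project_files',
   ]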
@ -672,6 +673,7 @@ documentation on :ref:`intl` for details.
Currently supported languages by Sphinx are:
* ``ar`` -- Arabic
* ``bg`` -- Bulgarian
* ``bn`` -- Bengali
* ``ca`` -- Catalan
* ``cak`` -- Kaqchikel
@ -690,6 +692,7 @@ documentation on :ref:`intl` for details.
* ``fr`` -- French
* ``he`` -- Hebrew
* ``hi`` -- Hindi
* ``hi_IN`` -- Hindi (India)
* ``hr`` -- Croatian
* ``hu`` -- Hungarian
* ``id`` -- Indonesian
@ -711,9 +714,13 @@ documentation on :ref:`intl` for details.
* ``si`` -- Sinhala
* ``sk`` -- Slovak
* ``sl`` -- Slovenian
* ``sq`` -- Albanian
* ``sr`` -- Serbian
* ``sr@latin`` -- Serbian (Latin)
* ``sr_RS`` -- Serbian (Cyrillic)
* ``sv`` -- Swedish
* ``ta`` -- Tamil
* ``te`` -- Telugu
* ``tr`` -- Turkish
* ``uk_UA`` -- Ukrainian
* ``ur`` -- Urdu
@ -2470,6 +2477,30 @@ Options for the XML builder
match any sequence of characters *including* slashes.
.. _c-config:
Options for the C domain
------------------------
.. confval:: c_id_attributes
A list of strings that the parser should additionally accept as attributes.
This can be used, for example, when attributes have been defined via
``#define`` for portability.
.. versionadded:: 3.0
.. confval:: c_paren_attributes
A list of strings that the parser should additionally accept as attributes
with one argument. That is, if ``my_align_as`` is in the list, then
``my_align_as(X)`` is parsed as an attribute for all strings ``X`` that have
balanced braces (``()``, ``[]``, and ``{}``). This can be used, for example,
when attributes have been defined via ``#define`` for portability.
.. versionadded:: 3.0
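A hypothetical ``conf.py`` sketch combining both options (``MY_EXPORT`` and
``MY_ALIGN_AS`` are made-up portability macros, not names from this commit)::

   # conf.py -- illustrative only
   c_id_attributes = ['MY_EXPORT']        # the parser accepts "MY_EXPORT int f();"
   c_paren_attributes = ['MY_ALIGN_AS']   # the parser accepts "MY_ALIGN_AS(8) int x;"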
.. _cpp-config:
Options for the C++ domain

View File

@ -706,6 +706,77 @@ Inline Expressions and Types
.. versionadded:: 3.0
Namespacing
~~~~~~~~~~~
.. versionadded:: 3.1
The C language itself does not support namespacing, but it can sometimes be
useful to emulate it in documentation, e.g., to show alternate declarations.
The feature may also be used to document members of structs/unions/enums
separately from their parent declaration.
The current scope can be changed using three namespace directives. They manage
a stack of declarations, where ``c:namespace`` resets the stack and changes to
a given scope.
The ``c:namespace-push`` directive changes the scope to a given inner scope
of the current one.
The ``c:namespace-pop`` directive undoes the most recent
``c:namespace-push`` directive.
.. rst:directive:: .. c:namespace:: scope specification
Changes the current scope for the subsequent objects to the given scope, and
resets the namespace directive stack. Note that nested scopes can be
specified by separating with a dot, e.g.::
.. c:namespace:: Namespace1.Namespace2.SomeStruct.AnInnerStruct
All subsequent objects will be defined as if their names were declared with
the scope prepended. Subsequent cross-references will be searched for
starting in the current scope.
Using ``NULL`` or ``0`` as the scope will change to global scope.
.. rst:directive:: .. c:namespace-push:: scope specification
Change the scope relative to the current scope. For example, after::
.. c:namespace:: A.B
.. c:namespace-push:: C.D
the current scope will be ``A.B.C.D``.
.. rst:directive:: .. c:namespace-pop::
Undo the previous ``c:namespace-push`` directive (*not* just pop a scope).
For example, after::
.. c:namespace:: A.B
.. c:namespace-push:: C.D
.. c:namespace-pop::
the current scope will be ``A.B`` (*not* ``A.B.C``).
If no previous ``c:namespace-push`` directive has been used, but only a
``c:namespace`` directive, then the current scope will be reset to global
scope. That is, ``.. c:namespace:: A.B`` is equivalent to::
.. c:namespace:: NULL
.. c:namespace-push:: A.B
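A hypothetical combined example with made-up names, showing how declarations
pick up the current scope::

   .. c:namespace:: my_lib

   .. c:var:: int version

   .. c:namespace-push:: internal

   .. c:var:: int counter

   .. c:namespace-pop::

Here ``version`` is documented as ``my_lib.version`` and ``counter`` as
``my_lib.internal.counter``; after the pop, the current scope is ``my_lib`` again.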
Configuration Variables
~~~~~~~~~~~~~~~~~~~~~~~
See :ref:`c-config`.
.. _cpp-domain:
The C++ Domain

View File

@ -66,19 +66,29 @@ class BuiltInTheme(Theme):
class UserTheme(Theme):
"""A user defined LaTeX theme."""
REQUIRED_CONFIG_KEYS = ['docclass', 'wrapperclass']
OPTIONAL_CONFIG_KEYS = ['toplevel_sectioning']
def __init__(self, name: str, filename: str) -> None:
self.name = name
super().__init__(name)
self.config = configparser.RawConfigParser()
self.config.read(path.join(filename))
try:
self.docclass = self.config.get('theme', 'docclass')
self.wrapperclass = self.config.get('theme', 'wrapperclass')
self.toplevel_sectioning = self.config.get('theme', 'toplevel_sectioning')
except configparser.NoSectionError:
raise ThemeError(__('%r doesn\'t have "theme" setting') % filename)
except configparser.NoOptionError as exc:
raise ThemeError(__('%r doesn\'t have "%s" setting') % (filename, exc.args[0]))
for key in self.REQUIRED_CONFIG_KEYS:
try:
value = self.config.get('theme', key)
setattr(self, key, value)
except configparser.NoSectionError:
raise ThemeError(__('%r doesn\'t have "theme" setting') % filename)
except configparser.NoOptionError as exc:
raise ThemeError(__('%r doesn\'t have "%s" setting') % (filename, exc.args[0]))
for key in self.OPTIONAL_CONFIG_KEYS:
try:
value = self.config.get('theme', key)
setattr(self, key, value)
except configparser.NoOptionError:
pass
class ThemeFactory:

View File

@ -35,6 +35,7 @@ from sphinx.util.cfamily import (
char_literal_re
)
from sphinx.util.docfields import Field, TypedField
from sphinx.util.docutils import SphinxDirective
from sphinx.util.nodes import make_refnode
logger = logging.getLogger(__name__)
@ -1990,6 +1991,14 @@ class DefinitionParser(BaseParser):
def language(self) -> str:
return 'C'
@property
def id_attributes(self):
return self.config.c_id_attributes
@property
def paren_attributes(self):
return self.config.c_paren_attributes
def _parse_string(self) -> str:
if self.current_char != '"':
return None
@ -2009,66 +2018,6 @@ class DefinitionParser(BaseParser):
self.pos += 1
return self.definition[startPos:self.pos]
def _parse_attribute(self) -> Any:
return None
# self.skip_ws()
# # try C++11 style
# startPos = self.pos
# if self.skip_string_and_ws('['):
# if not self.skip_string('['):
# self.pos = startPos
# else:
# # TODO: actually implement the correct grammar
# arg = self._parse_balanced_token_seq(end=[']'])
# if not self.skip_string_and_ws(']'):
# self.fail("Expected ']' in end of attribute.")
# if not self.skip_string_and_ws(']'):
# self.fail("Expected ']' in end of attribute after [[...]")
# return ASTCPPAttribute(arg)
#
# # try GNU style
# if self.skip_word_and_ws('__attribute__'):
# if not self.skip_string_and_ws('('):
# self.fail("Expected '(' after '__attribute__'.")
# if not self.skip_string_and_ws('('):
# self.fail("Expected '(' after '__attribute__('.")
# attrs = []
# while 1:
# if self.match(identifier_re):
# name = self.matched_text
# self.skip_ws()
# if self.skip_string_and_ws('('):
# self.fail('Parameterized GNU style attribute not yet supported.')
# attrs.append(ASTGnuAttribute(name, None))
# # TODO: parse arguments for the attribute
# if self.skip_string_and_ws(','):
# continue
# elif self.skip_string_and_ws(')'):
# break
# else:
# self.fail("Expected identifier, ')', or ',' in __attribute__.")
# if not self.skip_string_and_ws(')'):
# self.fail("Expected ')' after '__attribute__((...)'")
# return ASTGnuAttributeList(attrs)
#
# # try the simple id attributes defined by the user
# for id in self.config.cpp_id_attributes:
# if self.skip_word_and_ws(id):
# return ASTIdAttribute(id)
#
# # try the paren attributes defined by the user
# for id in self.config.cpp_paren_attributes:
# if not self.skip_string_and_ws(id):
# continue
# if not self.skip_string('('):
# self.fail("Expected '(' after user-defined paren-attribute.")
# arg = self._parse_balanced_token_seq(end=[')'])
# if not self.skip_string(')'):
# self.fail("Expected ')' to end user-defined paren-attribute.")
# return ASTParenAttribute(id, arg)
return None
def _parse_literal(self) -> ASTLiteral:
# -> integer-literal
# | character-literal
@ -2928,6 +2877,9 @@ class DefinitionParser(BaseParser):
assert False
return ASTDeclaration(objectType, directiveType, declaration)
def parse_namespace_object(self) -> ASTNestedName:
return self._parse_nested_name()
def parse_xref_object(self) -> ASTNestedName:
name = self._parse_nested_name()
# if there are '()' left, just skip them
@ -3081,7 +3033,7 @@ class CObject(ObjectDescription):
def handle_signature(self, sig: str, signode: TextElement) -> ASTDeclaration:
parentSymbol = self.env.temp_data['c:parent_symbol'] # type: Symbol
parser = DefinitionParser(sig, location=signode)
parser = DefinitionParser(sig, location=signode, config=self.env.config)
try:
ast = self.parse_definition(parser)
parser.assert_end()
@ -3178,6 +3130,95 @@ class CTypeObject(CObject):
object_type = 'type'
class CNamespaceObject(SphinxDirective):
"""
This directive is just to tell Sphinx that we're documenting stuff in
namespace foo.
"""
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {} # type: Dict
def run(self) -> List[Node]:
rootSymbol = self.env.domaindata['c']['root_symbol']
if self.arguments[0].strip() in ('NULL', '0', 'nullptr'):
symbol = rootSymbol
stack = [] # type: List[Symbol]
else:
parser = DefinitionParser(self.arguments[0],
location=self.get_source_info())
try:
name = parser.parse_namespace_object()
parser.assert_end()
except DefinitionError as e:
logger.warning(e, location=self.get_source_info())
name = _make_phony_error_name()
symbol = rootSymbol.add_name(name)
stack = [symbol]
self.env.temp_data['c:parent_symbol'] = symbol
self.env.temp_data['c:namespace_stack'] = stack
self.env.ref_context['c:parent_key'] = symbol.get_lookup_key()
return []
class CNamespacePushObject(SphinxDirective):
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {} # type: Dict
def run(self) -> List[Node]:
if self.arguments[0].strip() in ('NULL', '0', 'nullptr'):
return []
parser = DefinitionParser(self.arguments[0],
location=self.get_source_info())
try:
name = parser.parse_namespace_object()
parser.assert_end()
except DefinitionError as e:
logger.warning(e, location=self.get_source_info())
name = _make_phony_error_name()
oldParent = self.env.temp_data.get('c:parent_symbol', None)
if not oldParent:
oldParent = self.env.domaindata['c']['root_symbol']
symbol = oldParent.add_name(name)
stack = self.env.temp_data.get('c:namespace_stack', [])
stack.append(symbol)
self.env.temp_data['c:parent_symbol'] = symbol
self.env.temp_data['c:namespace_stack'] = stack
self.env.ref_context['c:parent_key'] = symbol.get_lookup_key()
return []
class CNamespacePopObject(SphinxDirective):
has_content = False
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = True
option_spec = {} # type: Dict
def run(self) -> List[Node]:
stack = self.env.temp_data.get('c:namespace_stack', None)
if not stack or len(stack) == 0:
logger.warning("C namespace pop on empty stack. Defaulting to gobal scope.",
location=self.get_source_info())
stack = []
else:
stack.pop()
if len(stack) > 0:
symbol = stack[-1]
else:
symbol = self.env.domaindata['c']['root_symbol']
self.env.temp_data['c:parent_symbol'] = symbol
self.env.temp_data['c:namespace_stack'] = stack
self.env.ref_context['c:parent_key'] = symbol.get_lookup_key()
return []
class CXRefRole(XRefRole):
def process_link(self, env: BuildEnvironment, refnode: Element,
has_explicit_title: bool, title: str, target: str) -> Tuple[str, str]:
@ -3214,7 +3255,8 @@ class CExprRole(SphinxRole):
def run(self) -> Tuple[List[Node], List[system_message]]:
text = self.text.replace('\n', ' ')
parser = DefinitionParser(text, location=self.get_source_info())
parser = DefinitionParser(text, location=self.get_source_info(),
config=self.env.config)
# attempt to mimic XRefRole classes, except that...
classes = ['xref', 'c', self.class_type]
try:
@ -3256,6 +3298,10 @@ class CDomain(Domain):
'enum': CEnumObject,
'enumerator': CEnumeratorObject,
'type': CTypeObject,
# scope control
'namespace': CNamespaceObject,
'namespace-push': CNamespacePushObject,
'namespace-pop': CNamespacePopObject,
}
roles = {
'member': CXRefRole(),
@ -3344,7 +3390,7 @@ class CDomain(Domain):
def _resolve_xref_inner(self, env: BuildEnvironment, fromdocname: str, builder: Builder,
typ: str, target: str, node: pending_xref,
contnode: Element) -> Tuple[Element, str]:
parser = DefinitionParser(target, location=node)
parser = DefinitionParser(target, location=node, config=env.config)
try:
name = parser.parse_xref_object()
except DefinitionError as e:
@ -3401,6 +3447,8 @@ class CDomain(Domain):
def setup(app: Sphinx) -> Dict[str, Any]:
app.add_domain(CDomain)
app.add_config_value("c_id_attributes", [], 'env')
app.add_config_value("c_paren_attributes", [], 'env')
return {
'version': 'builtin',

View File

@ -21,7 +21,6 @@ from sphinx import addnodes
from sphinx.addnodes import desc_signature, pending_xref
from sphinx.application import Sphinx
from sphinx.builders import Builder
from sphinx.config import Config
from sphinx.directives import ObjectDescription
from sphinx.domains import Domain, ObjType
from sphinx.environment import BuildEnvironment
@ -32,7 +31,7 @@ from sphinx.transforms import SphinxTransform
from sphinx.transforms.post_transforms import ReferencesResolver
from sphinx.util import logging
from sphinx.util.cfamily import (
NoOldIdError, ASTBaseBase, verify_description_mode, StringifyTransform,
NoOldIdError, ASTBaseBase, ASTAttribute, verify_description_mode, StringifyTransform,
BaseParser, DefinitionError, UnsupportedMultiCharacterCharLiteral,
identifier_re, anon_identifier_re, integer_literal_re, octal_literal_re,
hex_literal_re, binary_literal_re, float_literal_re,
@ -769,89 +768,6 @@ class ASTNestedName(ASTBase):
raise Exception('Unknown description mode: %s' % mode)
################################################################################
# Attributes
################################################################################
class ASTAttribute(ASTBase):
def describe_signature(self, signode: TextElement) -> None:
raise NotImplementedError(repr(self))
class ASTCPPAttribute(ASTAttribute):
def __init__(self, arg: str) -> None:
self.arg = arg
def _stringify(self, transform: StringifyTransform) -> str:
return "[[" + self.arg + "]]"
def describe_signature(self, signode: TextElement) -> None:
txt = str(self)
signode.append(nodes.Text(txt, txt))
class ASTGnuAttribute(ASTBase):
def __init__(self, name: str, args: Any) -> None:
self.name = name
self.args = args
def _stringify(self, transform: StringifyTransform) -> str:
res = [self.name]
if self.args:
res.append('(')
res.append(transform(self.args))
res.append(')')
return ''.join(res)
class ASTGnuAttributeList(ASTAttribute):
def __init__(self, attrs: List[ASTGnuAttribute]) -> None:
self.attrs = attrs
def _stringify(self, transform: StringifyTransform) -> str:
res = ['__attribute__((']
first = True
for attr in self.attrs:
if not first:
res.append(', ')
first = False
res.append(transform(attr))
res.append('))')
return ''.join(res)
def describe_signature(self, signode: TextElement) -> None:
txt = str(self)
signode.append(nodes.Text(txt, txt))
class ASTIdAttribute(ASTAttribute):
"""For simple attributes defined by the user."""
def __init__(self, id: str) -> None:
self.id = id
def _stringify(self, transform: StringifyTransform) -> str:
return self.id
def describe_signature(self, signode: TextElement) -> None:
signode.append(nodes.Text(self.id, self.id))
class ASTParenAttribute(ASTAttribute):
"""For paren attributes defined by the user."""
def __init__(self, id: str, arg: str) -> None:
self.id = id
self.arg = arg
def _stringify(self, transform: StringifyTransform) -> str:
return self.id + '(' + self.arg + ')'
def describe_signature(self, signode: TextElement) -> None:
txt = str(self)
signode.append(nodes.Text(txt, txt))
################################################################################
# Expressions
################################################################################
@ -4300,18 +4216,73 @@ class Symbol:
Symbol.debug_indent += 1
Symbol.debug_print("merge_with:")
assert other is not None
def unconditionalAdd(self, otherChild):
# TODO: hmm, should we prune by docnames?
self._children.append(otherChild)
otherChild.parent = self
otherChild._assert_invariants()
if Symbol.debug_lookup:
Symbol.debug_indent += 1
for otherChild in other._children:
ourChild = self._find_first_named_symbol(
if Symbol.debug_lookup:
Symbol.debug_print("otherChild:\n", otherChild.to_string(Symbol.debug_indent))
Symbol.debug_indent += 1
if otherChild.isRedeclaration:
unconditionalAdd(self, otherChild)
if Symbol.debug_lookup:
Symbol.debug_print("isRedeclaration")
Symbol.debug_indent -= 1
continue
candiateIter = self._find_named_symbols(
identOrOp=otherChild.identOrOp,
templateParams=otherChild.templateParams,
templateArgs=otherChild.templateArgs,
templateShorthand=False, matchSelf=False,
recurseInAnon=False, correctPrimaryTemplateArgs=False)
recurseInAnon=False, correctPrimaryTemplateArgs=False,
searchInSiblings=False)
candidates = list(candiateIter)
if Symbol.debug_lookup:
Symbol.debug_print("raw candidate symbols:", len(candidates))
symbols = [s for s in candidates if not s.isRedeclaration]
if Symbol.debug_lookup:
Symbol.debug_print("non-duplicate candidate symbols:", len(symbols))
if len(symbols) == 0:
unconditionalAdd(self, otherChild)
if Symbol.debug_lookup:
Symbol.debug_indent -= 1
continue
ourChild = None
if otherChild.declaration is None:
if Symbol.debug_lookup:
Symbol.debug_print("no declaration in other child")
ourChild = symbols[0]
else:
queryId = otherChild.declaration.get_newest_id()
if Symbol.debug_lookup:
Symbol.debug_print("queryId: ", queryId)
for symbol in symbols:
if symbol.declaration is None:
if Symbol.debug_lookup:
Symbol.debug_print("empty candidate")
# if in the end we have non matching, but have an empty one,
# then just continue with that
ourChild = symbol
continue
candId = symbol.declaration.get_newest_id()
if Symbol.debug_lookup:
Symbol.debug_print("candidate:", candId)
if candId == queryId:
ourChild = symbol
break
if Symbol.debug_lookup:
Symbol.debug_indent -= 1
if ourChild is None:
# TODO: hmm, should we prune by docnames?
self._children.append(otherChild)
otherChild.parent = self
otherChild._assert_invariants()
unconditionalAdd(self, otherChild)
continue
if otherChild.declaration and otherChild.docname in docnames:
if not ourChild.declaration:
@ -4326,10 +4297,14 @@ class Symbol:
# Both have declarations, and in the same docname.
# This can apparently happen, it should be safe to
# just ignore it, right?
pass
# Hmm, only on duplicate declarations, right?
msg = "Internal C++ domain error during symbol merging.\n"
msg += "ourChild:\n" + ourChild.to_string(1)
msg += "\notherChild:\n" + otherChild.to_string(1)
logger.warning(msg, location=otherChild.docname)
ourChild.merge_with(otherChild, docnames, env)
if Symbol.debug_lookup:
Symbol.debug_indent -= 1
Symbol.debug_indent -= 2
def add_name(self, nestedName: ASTNestedName,
templatePrefix: ASTTemplateDeclarationPrefix = None) -> "Symbol":
@ -4608,16 +4583,18 @@ class DefinitionParser(BaseParser):
_prefix_keys = ('class', 'struct', 'enum', 'union', 'typename')
def __init__(self, definition: str, *,
location: Union[nodes.Node, Tuple[str, int]],
config: "Config") -> None:
super().__init__(definition, location=location)
self.config = config
@property
def language(self) -> str:
return 'C++'
@property
def id_attributes(self):
return self.config.cpp_id_attributes
@property
def paren_attributes(self):
return self.config.cpp_paren_attributes
def _parse_string(self) -> str:
if self.current_char != '"':
return None
@ -4637,85 +4614,6 @@ class DefinitionParser(BaseParser):
self.pos += 1
return self.definition[startPos:self.pos]
def _parse_balanced_token_seq(self, end: List[str]) -> str:
# TODO: add handling of string literals and similar
brackets = {'(': ')', '[': ']', '{': '}'}
startPos = self.pos
symbols = [] # type: List[str]
while not self.eof:
if len(symbols) == 0 and self.current_char in end:
break
if self.current_char in brackets.keys():
symbols.append(brackets[self.current_char])
elif len(symbols) > 0 and self.current_char == symbols[-1]:
symbols.pop()
elif self.current_char in ")]}":
self.fail("Unexpected '%s' in balanced-token-seq." % self.current_char)
self.pos += 1
if self.eof:
self.fail("Could not find end of balanced-token-seq starting at %d."
% startPos)
return self.definition[startPos:self.pos]
def _parse_attribute(self) -> ASTAttribute:
self.skip_ws()
# try C++11 style
startPos = self.pos
if self.skip_string_and_ws('['):
if not self.skip_string('['):
self.pos = startPos
else:
# TODO: actually implement the correct grammar
arg = self._parse_balanced_token_seq(end=[']'])
if not self.skip_string_and_ws(']'):
self.fail("Expected ']' in end of attribute.")
if not self.skip_string_and_ws(']'):
self.fail("Expected ']' in end of attribute after [[...]")
return ASTCPPAttribute(arg)
# try GNU style
if self.skip_word_and_ws('__attribute__'):
if not self.skip_string_and_ws('('):
self.fail("Expected '(' after '__attribute__'.")
if not self.skip_string_and_ws('('):
self.fail("Expected '(' after '__attribute__('.")
attrs = []
while 1:
if self.match(identifier_re):
name = self.matched_text
self.skip_ws()
if self.skip_string_and_ws('('):
self.fail('Parameterized GNU style attribute not yet supported.')
attrs.append(ASTGnuAttribute(name, None))
# TODO: parse arguments for the attribute
if self.skip_string_and_ws(','):
continue
elif self.skip_string_and_ws(')'):
break
else:
self.fail("Expected identifier, ')', or ',' in __attribute__.")
if not self.skip_string_and_ws(')'):
self.fail("Expected ')' after '__attribute__((...)'")
return ASTGnuAttributeList(attrs)
# try the simple id attributes defined by the user
for id in self.config.cpp_id_attributes:
if self.skip_word_and_ws(id):
return ASTIdAttribute(id)
# try the paren attributes defined by the user
for id in self.config.cpp_paren_attributes:
if not self.skip_string_and_ws(id):
continue
if not self.skip_string('('):
self.fail("Expected '(' after user-defined paren-attribute.")
arg = self._parse_balanced_token_seq(end=[')'])
if not self.skip_string(')'):
self.fail("Expected ')' to end user-defined paren-attribute.")
return ASTParenAttribute(id, arg)
return None
def _parse_literal(self) -> ASTLiteral:
# -> integer-literal
# | character-literal
@ -7116,7 +7014,6 @@ class CPPDomain(Domain):
print("\tother:")
print(otherdata['root_symbol'].dump(1))
print("\tother end")
print("merge_domaindata end")
self.data['root_symbol'].merge_with(otherdata['root_symbol'],
docnames, self.env)
@ -7130,6 +7027,11 @@ class CPPDomain(Domain):
logger.warning(msg, location=docname)
else:
ourNames[name] = docname
if Symbol.debug_show_tree:
print("\tresult:")
print(self.data['root_symbol'].dump(1))
print("\tresult end")
print("merge_domaindata end")
def _resolve_xref_inner(self, env: BuildEnvironment, fromdocname: str, builder: Builder,
typ: str, target: str, node: pending_xref,
@ -7137,8 +7039,7 @@ class CPPDomain(Domain):
# add parens again for those that could be functions
if typ == 'any' or typ == 'func':
target += '()'
parser = DefinitionParser(target, location=node,
config=env.config)
parser = DefinitionParser(target, location=node, config=env.config)
try:
ast, isShorthand = parser.parse_xref_object()
except DefinitionError as e:

View File

@ -76,8 +76,13 @@ ModuleEntry = NamedTuple('ModuleEntry', [('docname', str),
def _parse_annotation(annotation: str) -> List[Node]:
"""Parse type annotation."""
def make_xref(text: str) -> addnodes.pending_xref:
if text == 'None':
reftype = 'obj'
else:
reftype = 'class'
return pending_xref('', nodes.Text(text),
refdomain='py', reftype='class', reftarget=text)
refdomain='py', reftype=reftype, reftarget=text)
def unparse(node: ast.AST) -> List[Node]:
if isinstance(node, ast.Attribute):
@ -105,11 +110,16 @@ def _parse_annotation(annotation: str) -> List[Node]:
result.append(addnodes.desc_sig_punctuation('', ']'))
return result
elif isinstance(node, ast.Tuple):
result = []
for elem in node.elts:
result.extend(unparse(elem))
result.append(addnodes.desc_sig_punctuation('', ', '))
result.pop()
if node.elts:
result = []
for elem in node.elts:
result.extend(unparse(elem))
result.append(addnodes.desc_sig_punctuation('', ', '))
result.pop()
else:
result = [addnodes.desc_sig_punctuation('', '('),
addnodes.desc_sig_punctuation('', ')')]
return result
else:
raise SyntaxError # unsupported syntax
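The ``ast.Tuple`` branch above now emits ``()`` for an empty tuple instead of
hitting ``result.pop()`` on an empty list (#7461); a minimal sketch of the kind
of annotation involved (the function name is made up)::

   from typing import Tuple

   def reset_state() -> Tuple[()]:   # empty tuple in the return annotation
       """Return an empty tuple."""
       return ()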
@ -1318,7 +1328,7 @@ def builtin_resolver(app: Sphinx, env: BuildEnvironment,
if node.get('refdomain') != 'py':
return None
elif node.get('reftype') == 'obj' and node.get('reftarget') == 'None':
elif node.get('reftype') in ('class', 'obj') and node.get('reftarget') == 'None':
return contnode
elif node.get('reftype') in ('class', 'exc'):
reftarget = node.get('reftarget')

View File

@ -303,7 +303,7 @@ def make_glossary_term(env: "BuildEnvironment", textnodes: Iterable[Node], index
term['ids'].append(node_id)
std = cast(StandardDomain, env.get_domain('std'))
std.note_object('term', termtext.lower(), node_id, location=term)
std.note_object('term', termtext, node_id, location=term)
# add an index entry too
indexnode = addnodes.index()
@ -563,7 +563,7 @@ class StandardDomain(Domain):
# links to tokens in grammar productions
'token': TokenXRefRole(),
# links to terms in glossary
'term': XRefRole(lowercase=True, innernodeclass=nodes.inline,
'term': XRefRole(innernodeclass=nodes.inline,
warn_dangling=True),
# links to headings or arbitrary labels
'ref': XRefRole(lowercase=True, innernodeclass=nodes.inline,

View File

@ -152,7 +152,7 @@ class TocTree:
logger.warning(__('circular toctree references '
'detected, ignoring: %s <- %s'),
ref, ' <- '.join(parents),
location=ref)
location=ref, type='toc', subtype='circular')
continue
refdoc = ref
toc = self.env.tocs[ref].deepcopy()

View File

@ -556,6 +556,9 @@ class Documenter:
isattr = False
doc = getdoc(member, self.get_attr, self.env.config.autodoc_inherit_docstrings)
if not isinstance(doc, str):
# Ignore non-string __doc__
doc = None
# if the member __doc__ is the same as self's __doc__, it's just
# inherited and therefore not the member's doc
@ -1173,7 +1176,7 @@ class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type:
return ret
def format_args(self, **kwargs: Any) -> str:
if self.env.config.autodoc_typehints == 'none':
if self.env.config.autodoc_typehints in ('none', 'description'):
kwargs.setdefault('show_annotation', False)
# for classes, the relevant signature is the __init__ method's
@ -1429,7 +1432,7 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type:
return ret
def format_args(self, **kwargs: Any) -> str:
if self.env.config.autodoc_typehints == 'none':
if self.env.config.autodoc_typehints in ('none', 'description'):
kwargs.setdefault('show_annotation', False)
unwrapped = inspect.unwrap(self.object)
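With these two ``format_args`` changes, ``autodoc_typehints = 'description'``
also strips hints from class and method signatures; an illustrative ``conf.py``
sketch::

   # conf.py -- illustrative only
   extensions = ['sphinx.ext.autodoc']
   autodoc_typehints = 'description'   # drop hints from signatures; show them in the description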

View File

@ -33,7 +33,7 @@ import sphinx.locale
from sphinx import __display_version__
from sphinx import package_dir
from sphinx.builders import Builder
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.deprecation import RemovedInSphinx40Warning, RemovedInSphinx50Warning
from sphinx.ext.autodoc import Documenter
from sphinx.ext.autosummary import import_by_name, get_documenter
from sphinx.jinja2glue import BuiltinTemplateLoader
@ -120,6 +120,11 @@ class AutosummaryRenderer:
self.env.filters['e'] = rst.escape
self.env.filters['underline'] = _underline
if builder:
if builder.app.translator:
self.env.add_extension("jinja2.ext.i18n")
self.env.install_gettext_translations(builder.app.translator) # type: ignore
def exists(self, template_name: str) -> bool:
"""Check if template file exists."""
try:
@ -328,6 +333,10 @@ def find_autosummary_in_docstring(name: str, module: str = None, filename: str =
See `find_autosummary_in_lines`.
"""
if module:
warnings.warn('module argument for find_autosummary_in_docstring() is deprecated.',
RemovedInSphinx50Warning)
try:
real_name, obj, parent, modname = import_by_name(name)
lines = pydoc.getdoc(obj).splitlines()

View File

@ -8,7 +8,7 @@
.. automethod:: __init__
{% if methods %}
.. rubric:: Methods
.. rubric:: {{ _('Methods') }}
.. autosummary::
{% for item in methods %}
@ -19,7 +19,7 @@
{% block attributes %}
{% if attributes %}
.. rubric:: Attributes
.. rubric:: {{ _('Attributes') }}
.. autosummary::
{% for item in attributes %}

View File

@ -4,7 +4,7 @@
{% block functions %}
{% if functions %}
.. rubric:: Functions
.. rubric:: {{ _('Functions') }}
.. autosummary::
{% for item in functions %}
@ -15,7 +15,7 @@
{% block classes %}
{% if classes %}
.. rubric:: Classes
.. rubric:: {{ _('Classes') }}
.. autosummary::
{% for item in classes %}
@ -26,7 +26,7 @@
{% block exceptions %}
{% if exceptions %}
.. rubric:: Exceptions
.. rubric:: {{ _('Exceptions') }}
.. autosummary::
{% for item in exceptions %}

View File

@ -114,7 +114,10 @@ def unparse(node: ast.AST) -> str:
elif isinstance(node, ast.UnaryOp):
return "%s %s" % (unparse(node.op), unparse(node.operand))
elif isinstance(node, ast.Tuple):
return ", ".join(unparse(e) for e in node.elts)
if node.elts:
return ", ".join(unparse(e) for e in node.elts)
else:
return "()"
elif sys.version_info > (3, 6) and isinstance(node, ast.Constant):
# this branch should be placed at last
return repr(node.value)

View File

@ -14,17 +14,17 @@
<div class="header-wrapper" role="banner">
<div class="header">
{%- if logo %}
<p class="logo"><a href="{{ pathto(master_doc) }}">
<img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/>
<p class="logo"><a href="{{ pathto(master_doc)|e }}">
<img class="logo" src="{{ pathto('_static/' + logo, 1)|e }}" alt="Logo"/>
</a></p>
{%- endif %}
{%- block headertitle %}
<div class="headertitle"><a
href="{{ pathto(master_doc) }}">{{ shorttitle|e }}</a></div>
href="{{ pathto(master_doc)|e }}">{{ shorttitle|e }}</a></div>
{%- endblock %}
<div class="rel" role="navigation" aria-label="related navigation">
{%- for rellink in rellinks|reverse %}
<a href="{{ pathto(rellink[0]) }}" title="{{ rellink[1]|striptags|e }}"
<a href="{{ pathto(rellink[0])|e }}" title="{{ rellink[1]|striptags|e }}"
{{ accesskey(rellink[2]) }}>{{ rellink[3] }}</a>
{%- if not loop.last %}{{ reldelim2 }}{% endif %}
{%- endfor %}
@ -78,7 +78,7 @@
<div class="left">
<div role="navigation" aria-label="related navigaton">
{%- for rellink in rellinks|reverse %}
<a href="{{ pathto(rellink[0]) }}" title="{{ rellink[1]|striptags|e }}"
<a href="{{ pathto(rellink[0])|e }}" title="{{ rellink[1]|striptags|e }}"
{{ accesskey(rellink[2]) }}>{{ rellink[3] }}</a>
{%- if not loop.last %}{{ reldelim2 }}{% endif %}
{%- endfor %}

View File

@ -43,7 +43,7 @@
id="toggle-{{ groupid.next() }}" style="display: none" alt="-" />
{%- endif %}</td>
<td>{% if grouptype == 2 %}&#160;&#160;&#160;{% endif %}
{% if page %}<a href="{{ pathto(page) }}#{{ anchor }}">{% endif -%}
{% if page %}<a href="{{ pathto(page)|e }}#{{ anchor }}">{% endif -%}
<code class="xref">{{ name|e }}</code>
{%- if page %}</a>{% endif %}
{%- if extra %} <em>({{ extra|e }})</em>{% endif -%}

View File

@ -7,5 +7,5 @@
:copyright: Copyright 2007-2020 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
#}
<h3><a href="{{ pathto(master_doc) }}">{{ _('Table of Contents') }}</a></h3>
<h3><a href="{{ pathto(master_doc)|e }}">{{ _('Table of Contents') }}</a></h3>
{{ toctree() }}

View File

@ -32,12 +32,12 @@
<ul>
{%- for rellink in rellinks %}
<li class="right" {% if loop.first %}style="margin-right: 10px"{% endif %}>
<a href="{{ pathto(rellink[0]) }}" title="{{ rellink[1]|striptags|e }}"
<a href="{{ pathto(rellink[0])|e }}" title="{{ rellink[1]|striptags|e }}"
{{ accesskey(rellink[2]) }}>{{ rellink[3] }}</a>
{%- if not loop.first %}{{ reldelim2 }}{% endif %}</li>
{%- endfor %}
{%- block rootrellink %}
<li class="nav-item nav-item-0"><a href="{{ pathto(master_doc) }}">{{ shorttitle|e }}</a>{{ reldelim1 }}</li>
<li class="nav-item nav-item-0"><a href="{{ pathto(master_doc)|e }}">{{ shorttitle|e }}</a>{{ reldelim1 }}</li>
{%- endblock %}
{%- for parent in parents %}
<li class="nav-item nav-item-{{ loop.index }}"><a href="{{ parent.link|e }}" {% if loop.last %}{{ accesskey("U") }}{% endif %}>{{ parent.title }}</a>{{ reldelim1 }}</li>
@ -53,8 +53,8 @@
<div class="sphinxsidebarwrapper">
{%- block sidebarlogo %}
{%- if logo %}
<p class="logo"><a href="{{ pathto(master_doc) }}">
<img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/>
<p class="logo"><a href="{{ pathto(master_doc)|e }}">
<img class="logo" src="{{ pathto('_static/' + logo, 1)|e }}" alt="Logo"/>
</a></p>
{%- endif %}
{%- endblock %}
@ -94,13 +94,13 @@
{%- endmacro %}
{%- macro css() %}
<link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" />
<link rel="stylesheet" href="{{ pathto('_static/' + style, 1)|e }}" type="text/css" />
<link rel="stylesheet" href="{{ pathto('_static/pygments.css', 1) }}" type="text/css" />
{%- for css in css_files %}
{%- if css|attr("filename") %}
{{ css_tag(css) }}
{%- else %}
<link rel="stylesheet" href="{{ pathto(css, 1) }}" type="text/css" />
<link rel="stylesheet" href="{{ pathto(css, 1)|e }}" type="text/css" />
{%- endif %}
{%- endfor %}
{%- endmacro %}
@ -108,7 +108,7 @@
{%- if html_tag %}
{{ html_tag }}
{%- else %}
<html xmlns="http://www.w3.org/1999/xhtml"{% if language is not none %} lang="{{ language }}"{% endif %}>
<html{% if not html5_doctype %} xmlns="http://www.w3.org/1999/xhtml"{% endif %}{% if language is not none %} lang="{{ language }}"{% endif %}>
{%- endif %}
<head>
{%- if not html5_doctype and not skip_ua_compatible %}
@ -139,7 +139,7 @@
href="{{ pathto('_static/opensearch.xml', 1) }}"/>
{%- endif %}
{%- if favicon %}
<link rel="shortcut icon" href="{{ pathto('_static/' + favicon, 1) }}"/>
<link rel="shortcut icon" href="{{ pathto('_static/' + favicon, 1)|e }}"/>
{%- endif %}
{%- endif %}
{%- block linktags %}

View File

@ -8,6 +8,6 @@
:license: BSD, see LICENSE for details.
#}
{%- if display_toc %}
<h3><a href="{{ pathto(master_doc) }}">{{ _('Table of Contents') }}</a></h3>
<h3><a href="{{ pathto(master_doc)|e }}">{{ _('Table of Contents') }}</a></h3>
{{ toc }}
{%- endif %}

View File

@ -7,7 +7,7 @@
template="{{ use_opensearch }}/{{ pathto('search') }}?q={searchTerms}"/>
<LongName>{{ docstitle|e }}</LongName>
{%- if favicon %}
<Image height="16" width="16" type="image/x-icon">{{ use_opensearch }}/{{ pathto('_static/' + favicon, 1) }}</Image>
<Image height="16" width="16" type="image/x-icon">{{ use_opensearch }}/{{ pathto('_static/' + favicon, 1)|e }}</Image>
{%- endif %}
{% block extra %} {# Put e.g. an <Image> element here. #} {% endblock %}
</OpenSearchDescription>

View File

@ -21,7 +21,7 @@
«&#160;&#160;<a href="{{ prev.link|e }}">{{ prev.title }}</a>
&#160;&#160;::&#160;&#160;
{%- endif %}
<a class="uplink" href="{{ pathto(master_doc) }}">{{ _('Contents') }}</a>
<a class="uplink" href="{{ pathto(master_doc)|e }}">{{ _('Contents') }}</a>
{%- if next %}
&#160;&#160;::&#160;&#160;
<a href="{{ next.link|e }}">{{ next.title }}</a>&#160;&#160;»
@ -36,11 +36,11 @@
{%- block haikuheader %}
{%- if theme_full_logo != "false" %}
<a href="{{ pathto('index') }}">
<img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/>
<img class="logo" src="{{ pathto('_static/' + logo, 1)|e }}" alt="Logo"/>
</a>
{%- else %}
{%- if logo -%}
<img class="rightlogo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/>
<img class="rightlogo" src="{{ pathto('_static/' + logo, 1)|e }}" alt="Logo"/>
{%- endif -%}
<h1 class="heading"><a href="{{ pathto('index') }}">
<span>{{ shorttitle|e }}</span></a></h1>

View File

@ -12,8 +12,8 @@
{%- if logo %}
<div class="header" role="banner">
<div class="logo">
<a href="{{ pathto(master_doc) }}">
<img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/>
<a href="{{ pathto(master_doc)|e }}">
<img class="logo" src="{{ pathto('_static/' + logo, 1)|e }}" alt="Logo"/>
</a>
</div>
</div>

View File

@ -30,7 +30,7 @@
{%- if prev %}
<a href="{{ prev.link|e }}">&laquo; {{ prev.title }}</a> |
{%- endif %}
<a href="{{ pathto(current_page_name) if current_page_name else '#' }}">{{ title }}</a>
<a href="{{ pathto(current_page_name)|e if current_page_name else '#' }}">{{ title }}</a>
{%- if next %}
| <a href="{{ next.link|e }}">{{ next.title }} &raquo;</a>
{%- endif %}

View File

@ -16,7 +16,9 @@ from typing import (
)
from docutils import nodes
from docutils.nodes import TextElement
from sphinx.config import Config
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.util import logging
@ -112,6 +114,92 @@ class ASTBaseBase:
return '<%s>' % self.__class__.__name__
################################################################################
# Attributes
################################################################################
class ASTAttribute(ASTBaseBase):
def describe_signature(self, signode: TextElement) -> None:
raise NotImplementedError(repr(self))
class ASTCPPAttribute(ASTAttribute):
def __init__(self, arg: str) -> None:
self.arg = arg
def _stringify(self, transform: StringifyTransform) -> str:
return "[[" + self.arg + "]]"
def describe_signature(self, signode: TextElement) -> None:
txt = str(self)
signode.append(nodes.Text(txt, txt))
class ASTGnuAttribute(ASTBaseBase):
def __init__(self, name: str, args: Any) -> None:
self.name = name
self.args = args
def _stringify(self, transform: StringifyTransform) -> str:
res = [self.name]
if self.args:
res.append('(')
res.append(transform(self.args))
res.append(')')
return ''.join(res)
class ASTGnuAttributeList(ASTAttribute):
def __init__(self, attrs: List[ASTGnuAttribute]) -> None:
self.attrs = attrs
def _stringify(self, transform: StringifyTransform) -> str:
res = ['__attribute__((']
first = True
for attr in self.attrs:
if not first:
res.append(', ')
first = False
res.append(transform(attr))
res.append('))')
return ''.join(res)
def describe_signature(self, signode: TextElement) -> None:
txt = str(self)
signode.append(nodes.Text(txt, txt))
class ASTIdAttribute(ASTAttribute):
"""For simple attributes defined by the user."""
def __init__(self, id: str) -> None:
self.id = id
def _stringify(self, transform: StringifyTransform) -> str:
return self.id
def describe_signature(self, signode: TextElement) -> None:
signode.append(nodes.Text(self.id, self.id))
class ASTParenAttribute(ASTAttribute):
"""For paren attributes defined by the user."""
def __init__(self, id: str, arg: str) -> None:
self.id = id
self.arg = arg
def _stringify(self, transform: StringifyTransform) -> str:
return self.id + '(' + self.arg + ')'
def describe_signature(self, signode: TextElement) -> None:
txt = str(self)
signode.append(nodes.Text(txt, txt))
################################################################################
class UnsupportedMultiCharacterCharLiteral(Exception):
@property
def decoded(self) -> str:
@ -132,9 +220,11 @@ class DefinitionError(Exception):
class BaseParser:
def __init__(self, definition: str, *,
location: Union[nodes.Node, Tuple[str, int]]) -> None:
location: Union[nodes.Node, Tuple[str, int]],
config: "Config") -> None:
self.definition = definition.strip()
self.location = location # for warnings
self.config = config
self.pos = 0
self.end = len(self.definition)
@ -252,3 +342,92 @@ class BaseParser:
self.skip_ws()
if not self.eof:
self.fail('Expected end of definition.')
################################################################################
@property
def id_attributes(self):
raise NotImplementedError
@property
def paren_attributes(self):
raise NotImplementedError
def _parse_balanced_token_seq(self, end: List[str]) -> str:
# TODO: add handling of string literals and similar
brackets = {'(': ')', '[': ']', '{': '}'}
startPos = self.pos
symbols = [] # type: List[str]
while not self.eof:
if len(symbols) == 0 and self.current_char in end:
break
if self.current_char in brackets.keys():
symbols.append(brackets[self.current_char])
elif len(symbols) > 0 and self.current_char == symbols[-1]:
symbols.pop()
elif self.current_char in ")]}":
self.fail("Unexpected '%s' in balanced-token-seq." % self.current_char)
self.pos += 1
if self.eof:
self.fail("Could not find end of balanced-token-seq starting at %d."
% startPos)
return self.definition[startPos:self.pos]
def _parse_attribute(self) -> ASTAttribute:
self.skip_ws()
# try C++11 style
startPos = self.pos
if self.skip_string_and_ws('['):
if not self.skip_string('['):
self.pos = startPos
else:
# TODO: actually implement the correct grammar
arg = self._parse_balanced_token_seq(end=[']'])
if not self.skip_string_and_ws(']'):
self.fail("Expected ']' in end of attribute.")
if not self.skip_string_and_ws(']'):
self.fail("Expected ']' in end of attribute after [[...]")
return ASTCPPAttribute(arg)
# try GNU style
if self.skip_word_and_ws('__attribute__'):
if not self.skip_string_and_ws('('):
self.fail("Expected '(' after '__attribute__'.")
if not self.skip_string_and_ws('('):
self.fail("Expected '(' after '__attribute__('.")
attrs = []
while 1:
if self.match(identifier_re):
name = self.matched_text
self.skip_ws()
if self.skip_string_and_ws('('):
self.fail('Parameterized GNU style attribute not yet supported.')
attrs.append(ASTGnuAttribute(name, None))
# TODO: parse arguments for the attribute
if self.skip_string_and_ws(','):
continue
elif self.skip_string_and_ws(')'):
break
else:
self.fail("Expected identifier, ')', or ',' in __attribute__.")
if not self.skip_string_and_ws(')'):
self.fail("Expected ')' after '__attribute__((...)'")
return ASTGnuAttributeList(attrs)
# try the simple id attributes defined by the user
for id in self.id_attributes:
if self.skip_word_and_ws(id):
return ASTIdAttribute(id)
# try the paren attributes defined by the user
for id in self.paren_attributes:
if not self.skip_string_and_ws(id):
continue
if not self.skip_string('('):
self.fail("Expected '(' after user-defined paren-attribute.")
arg = self._parse_balanced_token_seq(end=[')'])
if not self.skip_string(')'):
self.fail("Expected ')' to end user-defined paren-attribute.")
return ASTParenAttribute(id, arg)
return None

View File

@ -17,7 +17,7 @@ import typing
import warnings
from functools import partial, partialmethod
from inspect import ( # NOQA
Parameter, isclass, ismethod, ismethoddescriptor, unwrap
Parameter, isclass, ismethod, ismethoddescriptor
)
from io import StringIO
from typing import Any, Callable, Mapping, List, Tuple
@ -116,6 +116,15 @@ def getargspec(func: Callable) -> Any:
kwonlyargs, kwdefaults, annotations)
def unwrap(obj: Any) -> Any:
"""Get an original object from wrapped object (wrapped functions)."""
try:
return inspect.unwrap(obj)
except ValueError:
# might be a mock object
return obj
def unwrap_all(obj: Any) -> Any:
"""
Get an original object from wrapped object (unwrapping partials, wrapped
@ -217,7 +226,7 @@ def isattributedescriptor(obj: Any) -> bool:
return True
elif isdescriptor(obj):
# non data descriptor
unwrapped = inspect.unwrap(obj)
unwrapped = unwrap(obj)
if isfunction(unwrapped) or isbuiltin(unwrapped) or inspect.ismethod(unwrapped):
# attribute must not be either function, builtin and method
return False

View File

@ -411,7 +411,7 @@ class WarningIsErrorFilter(logging.Filter):
message = record.msg # use record.msg itself
if location:
raise SphinxWarning(location + ":" + message)
raise SphinxWarning(location + ":" + str(message))
else:
raise SphinxWarning(message)
else:

View File

@ -0,0 +1,21 @@
.. c:namespace:: NS
.. c:var:: int NSVar
.. c:namespace:: NULL
.. c:var:: int NULLVar
.. c:namespace:: NSDummy
.. c:namespace:: 0
.. c:var:: int ZeroVar
.. c:namespace-push:: NS2.NS3
.. c:var:: int NS2NS3Var
.. c:namespace-pop::
.. c:var:: int PopVar

View File

@ -7,28 +7,20 @@
:copyright: Copyright 2007-2020 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import pytest
from docutils import nodes
import sphinx.domains.c as cDomain
from sphinx import addnodes
from sphinx.addnodes import (
desc, desc_addname, desc_annotation, desc_content, desc_name, desc_optional,
desc_parameter, desc_parameterlist, desc_returns, desc_signature, desc_type,
pending_xref
)
from sphinx.domains.c import DefinitionParser, DefinitionError
from sphinx.domains.c import _max_id, _id_prefix, Symbol
from sphinx.testing import restructuredtext
from sphinx.testing.util import assert_node
from sphinx.util import docutils
def parse(name, string):
parser = DefinitionParser(string, location=None)
class Config:
c_id_attributes = ["id_attr", 'LIGHTGBM_C_EXPORT']
c_paren_attributes = ["paren_attr"]
parser = DefinitionParser(string, location=None, config=Config())
parser.allowFallbackExpressionParsing = False
ast = parser.parse_declaration(name, name)
parser.assert_end()
@ -87,7 +79,10 @@ def check(name, input, idDict, output=None):
def test_expressions():
def exprCheck(expr, output=None):
parser = DefinitionParser(expr, location=None)
class Config:
c_id_attributes = ["id_attr"]
c_paren_attributes = ["paren_attr"]
parser = DefinitionParser(expr, location=None, config=Config())
parser.allowFallbackExpressionParsing = False
ast = parser.parse_expression()
parser.assert_end()
@ -404,24 +399,23 @@ def test_initializers():
def test_attributes():
return # TODO
# style: C++
check('member', '[[]] int f', {1: 'f__i', 2: '1f'})
check('member', '[ [ ] ] int f', {1: 'f__i', 2: '1f'},
check('member', '[[]] int f', {1: 'f'})
check('member', '[ [ ] ] int f', {1: 'f'},
# this will fail when the proper grammar is implemented
output='[[ ]] int f')
check('member', '[[a]] int f', {1: 'f__i', 2: '1f'})
check('member', '[[a]] int f', {1: 'f'})
# style: GNU
check('member', '__attribute__(()) int f', {1: 'f__i', 2: '1f'})
check('member', '__attribute__((a)) int f', {1: 'f__i', 2: '1f'})
check('member', '__attribute__((a, b)) int f', {1: 'f__i', 2: '1f'})
check('member', '__attribute__(()) int f', {1: 'f'})
check('member', '__attribute__((a)) int f', {1: 'f'})
check('member', '__attribute__((a, b)) int f', {1: 'f'})
# style: user-defined id
check('member', 'id_attr int f', {1: 'f__i', 2: '1f'})
check('member', 'id_attr int f', {1: 'f'})
# style: user-defined paren
check('member', 'paren_attr() int f', {1: 'f__i', 2: '1f'})
check('member', 'paren_attr(a) int f', {1: 'f__i', 2: '1f'})
check('member', 'paren_attr("") int f', {1: 'f__i', 2: '1f'})
check('member', 'paren_attr(()[{}][]{}) int f', {1: 'f__i', 2: '1f'})
check('member', 'paren_attr() int f', {1: 'f'})
check('member', 'paren_attr(a) int f', {1: 'f'})
check('member', 'paren_attr("") int f',{1: 'f'})
check('member', 'paren_attr(()[{}][]{}) int f', {1: 'f'})
with pytest.raises(DefinitionError):
parse('member', 'paren_attr(() int f')
with pytest.raises(DefinitionError):
@ -437,18 +431,20 @@ def test_attributes():
# position: decl specs
check('function', 'static inline __attribute__(()) void f()',
{1: 'f', 2: '1fv'},
{1: 'f'},
output='__attribute__(()) static inline void f()')
check('function', '[[attr1]] [[attr2]] void f()',
{1: 'f', 2: '1fv'},
{1: 'f'},
output='[[attr1]] [[attr2]] void f()')
# position: declarator
check('member', 'int *[[attr]] i', {1: 'i__iP', 2: '1i'})
check('member', 'int *const [[attr]] volatile i', {1: 'i__iPVC', 2: '1i'},
check('member', 'int *[[attr]] i', {1: 'i'})
check('member', 'int *const [[attr]] volatile i', {1: 'i'},
output='int *[[attr]] volatile const i')
check('member', 'int &[[attr]] i', {1: 'i__iR', 2: '1i'})
check('member', 'int *[[attr]] *i', {1: 'i__iPP', 2: '1i'})
check('member', 'int *[[attr]] *i', {1: 'i'})
# issue michaeljones/breathe#500
check('function', 'LIGHTGBM_C_EXPORT int LGBM_BoosterFree(int handle)',
{1: 'LGBM_BoosterFree'})
# def test_print():
# # used for getting all the ids out for checking
@ -473,6 +469,14 @@ def test_build_domain_c(app, status, warning):
ws = filter_warnings(warning, "index")
assert len(ws) == 0
@pytest.mark.sphinx(testroot='domain-c', confoverrides={'nitpicky': True})
def test_build_domain_c_namespace(app, status, warning):
app.builder.build_all()
ws = filter_warnings(warning, "namespace")
assert len(ws) == 0
t = (app.outdir / "namespace.html").read_text()
for id_ in ('NS.NSVar', 'NULLVar', 'ZeroVar', 'NS2.NS3.NS2NS3Var', 'PopVar'):
assert 'id="c.{}"'.format(id_) in t
@pytest.mark.sphinx(testroot='domain-c', confoverrides={'nitpicky': True})
def test_build_domain_c_anon_dup_decl(app, status, warning):

View File

@ -22,8 +22,7 @@ def parse(name, string):
class Config:
cpp_id_attributes = ["id_attr"]
cpp_paren_attributes = ["paren_attr"]
parser = DefinitionParser(string, location=None,
config=Config())
parser = DefinitionParser(string, location=None, config=Config())
parser.allowFallbackExpressionParsing = False
ast = parser.parse_declaration(name, name)
parser.assert_end()

View File

@ -239,6 +239,7 @@ def test_get_full_qualified_name():
def test_parse_annotation():
doctree = _parse_annotation("int")
assert_node(doctree, ([pending_xref, "int"],))
assert_node(doctree[0], pending_xref, refdomain="py", reftype="class", reftarget="int")
doctree = _parse_annotation("List[int]")
assert_node(doctree, ([pending_xref, "List"],
@ -254,6 +255,13 @@ def test_parse_annotation():
[pending_xref, "int"],
[desc_sig_punctuation, "]"]))
doctree = _parse_annotation("Tuple[()]")
assert_node(doctree, ([pending_xref, "Tuple"],
[desc_sig_punctuation, "["],
[desc_sig_punctuation, "("],
[desc_sig_punctuation, ")"],
[desc_sig_punctuation, "]"]))
doctree = _parse_annotation("Callable[[int, int], int]")
assert_node(doctree, ([pending_xref, "Callable"],
[desc_sig_punctuation, "["],
@ -266,6 +274,12 @@ def test_parse_annotation():
[pending_xref, "int"],
[desc_sig_punctuation, "]"]))
# None type makes an object-reference (not a class reference)
doctree = _parse_annotation("None")
assert_node(doctree, ([pending_xref, "None"],))
assert_node(doctree[0], pending_xref, refdomain="py", reftype="obj", reftarget="None")
def test_pyfunction_signature(app):
text = ".. py:function:: hello(name: str) -> str"

View File

@ -98,7 +98,7 @@ def test_glossary(app):
text = (".. glossary::\n"
"\n"
" term1\n"
" term2\n"
" TERM2\n"
" description\n"
"\n"
" term3 : classifier\n"
@ -113,7 +113,7 @@ def test_glossary(app):
assert_node(doctree, (
[glossary, definition_list, ([definition_list_item, ([term, ("term1",
index)],
[term, ("term2",
[term, ("TERM2",
index)],
definition)],
[definition_list_item, ([term, ("term3",
@ -126,7 +126,7 @@ def test_glossary(app):
assert_node(doctree[0][0][0][0][1],
entries=[("single", "term1", "term-term1", "main", None)])
assert_node(doctree[0][0][0][1][1],
entries=[("single", "term2", "term-term2", "main", None)])
entries=[("single", "TERM2", "term-TERM2", "main", None)])
assert_node(doctree[0][0][0][2],
[definition, nodes.paragraph, "description"])
assert_node(doctree[0][0][1][0][1],
@ -142,7 +142,7 @@ def test_glossary(app):
# index
objects = list(app.env.get_domain("std").get_objects())
assert ("term1", "term1", "term", "index", "term-term1", -1) in objects
assert ("term2", "term2", "term", "index", "term-term2", -1) in objects
assert ("TERM2", "TERM2", "term", "index", "term-TERM2", -1) in objects
assert ("term3", "term3", "term", "index", "term-term3", -1) in objects
assert ("term4", "term4", "term", "index", "term-term4", -1) in objects

View File

@ -54,6 +54,7 @@ from sphinx.pycode import ast
("- 1", "- 1"), # UnaryOp
("- a", "- a"), # USub
("(1, 2, 3)", "1, 2, 3"), # Tuple
("()", "()"), # Tuple (empty)
])
def test_unparse(source, expected):
module = ast.parse(source)

View File

@ -48,6 +48,14 @@ def test_info_and_warning(app, status, warning):
assert 'message5' in warning.getvalue()
def test_Exception(app, status, warning):
logging.setup(app, status, warning)
logger = logging.getLogger(__name__)
logger.info(Exception)
assert "<class 'Exception'>" in status.getvalue()
def test_verbosity_filter(app, status, warning):
# verbosity = 0: INFO
app.verbosity = 0