move the warnings to token_type

Keewis 2020-06-04 14:33:35 +02:00
parent 37e02512fc
commit 866c822e11
2 changed files with 86 additions and 38 deletions

sphinx/ext/napoleon/docstring.py

@@ -799,14 +799,6 @@ def _recombine_set_tokens(tokens):
             try:
                 token = next(iterable)
             except StopIteration:
-                if open_braces != 0:
-                    location = ("", "")
-                    logger.warning(
-                        __("invalid value set: %r"),
-                        "".join(tokens),
-                        location=location,
-                    )
-
                 break
 
             if token == "{":
@@ -841,35 +833,62 @@ def _tokenize_type_spec(spec):
     return _recombine_set_tokens(tokens)
 
 
+def _token_type(token):
+    if token.startswith(" ") or token.endswith(" "):
+        type_ = "delimiter"
+    elif (
+        token.isnumeric()
+        or (token.startswith("{") and token.endswith("}"))
+        or (token.startswith('"') and token.endswith('"'))
+        or (token.startswith("'") and token.endswith("'"))
+    ):
+        type_ = "literal"
+    elif token.startswith("{"):
+        logger.warning(
+            __("invalid value set (missing closing brace): %s"),
+            token,
+            location=None,
+        )
+        type_ = "literal"
+    elif token.endswith("}"):
+        logger.warning(
+            __("invalid value set (missing opening brace): %s"),
+            token,
+            location=None,
+        )
+        type_ = "literal"
+    elif token.startswith("'") or token.startswith('"'):
+        logger.warning(
+            __("malformed string literal (missing closing quote): %s"),
+            token,
+            location=None,
+        )
+        type_ = "literal"
+    elif token.endswith("'") or token.endswith('"'):
+        logger.warning(
+            __("malformed string literal (missing opening quote): %s"),
+            token,
+            location=None,
+        )
+        type_ = "literal"
+    elif token in ("optional", "default"):
+        type_ = "control"
+    elif _xref_regex.match(token):
+        type_ = "reference"
+    else:
+        type_ = "obj"
+
+    return type_
+
+
 def _convert_numpy_type_spec(_type, translations={}):
-    def token_type(token):
-        if token.startswith(" ") or token.endswith(" "):
-            type_ = "delimiter"
-        elif (
-            token.isnumeric()
-            or (token.startswith("{") and token.endswith("}"))
-            or (token.startswith('"') and token.endswith('"'))
-            or (token.startswith("'") and token.endswith("'"))
-        ):
-            type_ = "literal"
-        elif token.startswith("{"):
-            # invalid value set, make it a literal to avoid further warnings
-            type_ = "literal"
-        elif token in ("optional", "default"):
-            type_ = "control"
-        elif _xref_regex.match(token):
-            type_ = "reference"
-        else:
-            type_ = "obj"
-
-        return type_
-
     def convert_obj(obj, translations, default_translation):
         return translations.get(obj, default_translation.format(obj))
 
     tokens = _tokenize_type_spec(_type)
     types = [
-        (token, token_type(token))
+        (token, _token_type(token))
         for token in tokens
     ]
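
As a rough illustration of the relocated warnings (a sketch, assuming a Sphinx
checkout that contains this commit; _token_type is a private helper, so the
import path and signature are not a stable API):

    from sphinx.ext.napoleon.docstring import _token_type

    # Well-formed tokens are classified silently:
    _token_type("{1, 2}")    # -> "literal"
    _token_type("optional")  # -> "control"
    _token_type("int")       # -> "obj"

    # Malformed tokens are still classified as "literal", but the warning
    # is now emitted here instead of in _recombine_set_tokens:
    _token_type("{1, 2")  # warns: invalid value set (missing closing brace): {1, 2
    _token_type("abc'")   # warns: malformed string literal (missing opening quote): abc'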

tests/test_ext_napoleon_docstring.py

@@ -16,7 +16,12 @@ from unittest import TestCase, mock
 
 from sphinx.ext.napoleon import Config
 from sphinx.ext.napoleon.docstring import GoogleDocstring, NumpyDocstring
-from sphinx.ext.napoleon.docstring import _tokenize_type_spec, _recombine_set_tokens, _convert_numpy_type_spec
+from sphinx.ext.napoleon.docstring import (
+    _tokenize_type_spec,
+    _recombine_set_tokens,
+    _convert_numpy_type_spec,
+    _token_type
+)
 
 
 class NamedtupleSubclass(namedtuple('NamedtupleSubclass', ('attr1', 'attr2'))):
@@ -1993,15 +1998,39 @@ definition_after_normal_text : int
         actual = _recombine_set_tokens(input_tokens)
         self.assertEqual(expected, actual)
 
-    def test_recombine_set_tokens_invalid(self):
-        invalid_tokens = (
-            ["{", "1", ", ", "2"],
-        )
-
-        for input_tokens in invalid_tokens:
-            with self.assertWarnsRegex(UserWarning, "invalid value set:"):
-                _recombine_set_tokens(input_tokens)
+    def test_token_type(self):
+        tokens = (
+            ("1", "literal"),
+            ("'string'", "literal"),
+            ('"another_string"', "literal"),
+            ("{1, 2}", "literal"),
+            ("{'va{ue', 'set'}", "literal"),
+            ("optional", "control"),
+            ("default", "control"),
+            (", ", "delimiter"),
+            (" of ", "delimiter"),
+            (" or ", "delimiter"),
+            (": ", "delimiter"),
+            ("True", "obj"),
+            ("None", "obj"),
+            ("name", "obj"),
+            (":py:class:`Enum`", "reference"),
+        )
+
+        for token, expected in tokens:
+            actual = _token_type(token)
+            self.assertEqual(expected, actual)
+
+    def test_token_type_invalid(self):
+        tokens = (
+            "{1, 2",
+            "1, 2}",
+            "'abc",
+            "def'",
+        )
+        for token in tokens:
+            # TODO: check for the warning
+            _token_type(token)
 
     def test_tokenize_type_spec(self):
         types = (
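
One way the TODO in test_token_type_invalid could later be addressed is to
patch the module-level logger that the new warnings go through (a sketch; the
test name is hypothetical and assumes sphinx.ext.napoleon.docstring keeps its
logger at module scope, as the hunks above suggest):

    from unittest import mock

    from sphinx.ext.napoleon.docstring import _token_type

    def test_token_type_invalid_warns(self):
        with mock.patch("sphinx.ext.napoleon.docstring.logger") as fake_logger:
            _token_type("{1, 2")
        # the hunk above emits "invalid value set (missing closing brace): %s"
        self.assertTrue(fake_logger.warning.called)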