mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
move tokenize_type_spec to its own function and add tests for it

commit 25937f745a, parent 8ab210f1b0
sphinx/ext/napoleon/docstring.py
@@ -792,40 +792,61 @@ class GoogleDocstring:
         return lines


+def _recombine_set_tokens(tokens):
+    def takewhile_set(iterable):
+        yield "{"
+
+        open_braces = 1
+        while True:
+            try:
+                token = next(iterable)
+            except StopIteration:
+                if open_braces != 0:
+                    raise ValueError("invalid value set: {}".format("".join(tokens)))
+
+                break
+
+            if token == "{":
+                open_braces += 1
+            elif token == "}":
+                open_braces -= 1
+
+            yield token
+
+            if open_braces == 0:
+                break
+
+    def combine_set(tokens):
+        iterable = iter(tokens)
+        while True:
+            try:
+                token = next(iterable)
+            except StopIteration:
+                break
+
+            yield "".join(takewhile_set(iterable)) if token == "{" else token
+
+    return list(combine_set(tokens))
+
+
+def _tokenize_type_spec(spec):
+    delimiters = r"(\sor\s|\sof\s|:\s|,\s|[{]|[}])"
+
+    tokens = tuple(
+        item
+        for item in re.split(delimiters, spec)
+        if item
+    )
+    return _recombine_set_tokens(tokens)
+
+
 def _parse_numpy_type_spec(_type):
-    def recombine_set(tokens):
-        def combine_set(tokens):
-            in_set = False
-            set_items = []
+    raw_tokens = _tokenize_type_spec(_type)
+    tokens = list(_recombine_set_tokens(raw_tokens))
+    return tokens

-            for token in tokens:
-                if token.startswith("{"):
-                    in_set = True
-                elif token.endswith("}"):
-                    in_set = False
-                    set_items.append(token)
-
-                if in_set:
-                    set_items.append(token)
-                else:
-                    if set_items:
-                        token = "".join(set_items)
-                        set_items = []
-                    yield token
-
-        return list(combine_set(tokens))
-
-    def tokenize_type_spec(spec):
-        delimiters = r"(\sor\s|\sof\s|:\s|,\s|[{]|[}])"
-
-        split = [
-            item
-            for item in re.split(delimiters, _type)
-            if item
-        ]
-        tokens = recombine_set(split)
-        return tokens

 def _parse_numpy_type_spec2(_type):
     def token_type(token):
         if token.startswith(" ") or token.endswith(" "):
             type_ = "delimiter"
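For context, an illustrative sketch (not part of the diff) of what the new module-level helper returns, assuming it is imported from sphinx.ext.napoleon.docstring the same way the new tests below import it. The regex keeps " or ", " of ", ": " and ", " as delimiter tokens, and brace-delimited value sets are merged back into single tokens by _recombine_set_tokens:

    from sphinx.ext.napoleon.docstring import _tokenize_type_spec

    # delimiters survive the split as their own tokens:
    _tokenize_type_spec("int or float or None, default: None")
    # -> ['int', ' or ', 'float', ' or ', 'None', ', ', 'default', ': ', 'None']

    # the set literal is split apart by the regex, then recombined into one token:
    _tokenize_type_spec("{'F', 'C', 'N'}, optional")
    # -> ["{'F', 'C', 'N'}", ', ', 'optional']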
tests/test_ext_napoleon_docstring.py
@@ -16,6 +16,7 @@ from unittest import TestCase, mock

 from sphinx.ext.napoleon import Config
 from sphinx.ext.napoleon.docstring import GoogleDocstring, NumpyDocstring
+from sphinx.ext.napoleon.docstring import _tokenize_type_spec, _recombine_set_tokens, _parse_numpy_type_spec


 class NamedtupleSubclass(namedtuple('NamedtupleSubclass', ('attr1', 'attr2'))):
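The tests added below exercise both paths through _recombine_set_tokens. As a quick sketch (again not part of the diff): a balanced brace sequence is merged back into a single set token, while an unclosed set raises the ValueError that test_recombine_set_tokens_invalid checks for:

    from sphinx.ext.napoleon.docstring import _recombine_set_tokens

    _recombine_set_tokens(["{", "1", ", ", "2", "}"])
    # -> ['{1, 2}']

    _recombine_set_tokens(["{", "1", ", ", "2"])
    # ValueError: invalid value set: {1, 2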
@@ -1976,6 +1977,108 @@ definition_after_normal_text : int
         actual = str(NumpyDocstring(docstring, config))
         self.assertEqual(expected, actual)

+    def test_recombine_set_tokens(self):
+        tokens = (
+            ["{", "'F'", ", ", "'C'", ", ", "'N'", "}"],
+            ["{", '"F"', ", ", '"C"', ", ", '"N"', "}"],
+            ["{", "1", ", ", "2", "}"],
+        )
+        recombined_tokens = (
+            ["{'F', 'C', 'N'}"],
+            ['{"F", "C", "N"}'],
+            ["{1, 2}"],
+        )
+
+        for input_tokens, expected in zip(tokens, recombined_tokens):
+            actual = _recombine_set_tokens(input_tokens)
+            self.assertEqual(expected, actual)
+
+    def test_recombine_set_tokens_invalid(self):
+        invalid_tokens = (
+            ["{", "1", ", ", "2"],
+        )
+
+        for input_tokens in invalid_tokens:
+            with self.assertRaisesRegex(ValueError, "invalid value set:"):
+                _recombine_set_tokens(input_tokens)
+
+    def test_tokenize_type_spec(self):
+        types = (
+            "str",
+            "int or float or None",
+            '{"F", "C", "N"}',
+            "{'F', 'C', 'N'}",
+        )
+        modifiers = (
+            "optional",
+            "default: None",
+        )
+
+        type_tokens = (
+            ["str"],
+            ["int", " or ", "float", " or ", "None"],
+            ['{"F", "C", "N"}'],
+            ["{'F', 'C', 'N'}"],
+        )
+        modifier_tokens = (
+            ["optional"],
+            ["default", ": ", "None"],
+        )
+
+        type_specs = tuple(
+            ", ".join([type_, modifier])
+            for type_ in types
+            for modifier in modifiers
+        )
+        tokens = tuple(
+            tokens_ + [", "] + modifier_tokens_
+            for tokens_ in type_tokens
+            for modifier_tokens_ in modifier_tokens
+        )
+
+        for type_spec, expected in zip(type_specs, tokens):
+            actual = _tokenize_type_spec(type_spec)
+            self.assertEqual(expected, actual)
+
+    def test_parse_numpy_type_spec(self):
+        types = (
+            "str",
+            "int or float or None",
+            '{"F", "C", "N"}',
+            "{'F', 'C', 'N'}",
+        )
+        modifiers = (
+            "optional",
+            "default: None",
+        )
+
+        type_tokens = (
+            ["str"],
+            ["int", " or ", "float", " or ", "None"],
+            ['{"F", "C", "N"}'],
+            ["{'F', 'C', 'N'}"],
+        )
+        modifier_tokens = (
+            ["optional"],
+            ["default", ": ", "None"],
+        )
+
+        type_specs = tuple(
+            ", ".join([type_, modifier])
+            for type_ in types
+            for modifier in modifiers
+        )
+        tokens = tuple(
+            tokens_ + [", "] + modifier_tokens_
+            for tokens_ in type_tokens
+            for modifier_tokens_ in modifier_tokens
+        )
+
+        for type_spec, expected in zip(type_specs, tokens):
+            actual = _parse_numpy_type_spec(type_spec)
+            self.assertEqual(expected, actual)
+
+    def test_parameter_types(self):
+        import textwrap
+        docstring = textwrap.dedent("""\