mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
reimplement the value set combination function using collections.deque
parent 866c822e11
commit d177e58999
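In short: napoleon splits a NumPy-style type specification into tokens, and the recombination pass glues a value set such as {"F", "C", "N"} back into a single token. A minimal before/after sketch, with token values taken from the tests further down:

    tokens = ["{", '"F"', ", ", '"C"', ", ", '"N"', "}", ", ", "optional"]
    # after recombination: ['{"F", "C", "N"}', ", ", "optional"]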
sphinx/ext/napoleon/docstring.py

@@ -10,6 +10,7 @@
     :license: BSD, see LICENSE for details.
 """

+import collections
 import inspect
 import re
 from functools import partial
@@ -790,17 +791,34 @@ class GoogleDocstring:
         return lines


-def _recombine_set_tokens(tokens):
-    def takewhile_set(iterable):
-        yield "{"
+def _recombine_sets(tokens):
+    tokens = collections.deque(tokens)
+    keywords = ("optional", "default")

-        open_braces = 1
+    def takewhile_set(tokens):
+        open_braces = 0
+        previous_token = None
+        print("combining set:", tokens)
         while True:
             try:
-                token = next(iterable)
-            except StopIteration:
+                token = tokens.popleft()
+            except IndexError:
                 break

+            if token == ", ":
+                previous_token = token
+                continue
+
+            if token in keywords:
+                tokens.appendleft(token)
+                if previous_token is not None:
+                    tokens.appendleft(previous_token)
+                break
+
+            if previous_token is not None:
+                yield previous_token
+                previous_token = None
+
             if token == "{":
                 open_braces += 1
             elif token == "}":
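The core trick above is treating collections.deque as a consumable token stream: popleft() consumes a token, and appendleft() pushes one back when the consumer has read a token too far. A simplified, self-contained sketch of that pattern (recombine_sets_sketch is a hypothetical name, and it omits the "optional"/"default" keyword handling that the real helper performs):

    import collections

    def recombine_sets_sketch(tokens):
        tokens = collections.deque(tokens)
        out = []
        while tokens:
            token = tokens.popleft()
            if token == "{":
                # consume until the braces balance, then emit one combined token
                parts = [token]
                depth = 1
                while tokens and depth:
                    part = tokens.popleft()
                    parts.append(part)
                    depth += part.count("{") - part.count("}")
                out.append("".join(parts))
            else:
                out.append(token)
        return out

    recombine_sets_sketch(["{", "1", ", ", "2", "}", ", ", "optional"])
    # -> ['{1, 2}', ', ', 'optional']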
@@ -812,26 +830,28 @@ def _recombine_set_tokens(tokens):
                 break

     def combine_set(tokens):
-        iterable = iter(tokens)
         while True:
             try:
-                token = next(iterable)
-            except StopIteration:
+                token = tokens.popleft()
+            except IndexError:
                 break

-            yield "".join(takewhile_set(iterable)) if token == "{" else token
+            if token == "{":
+                tokens.appendleft("{")
+                yield "".join(takewhile_set(tokens))
+            else:
+                yield token

     return list(combine_set(tokens))


 def _tokenize_type_spec(spec):
-    tokens = tuple(
+    tokens = list(
         item
         for item in _token_regex.split(spec)
         if item is not None and item.strip()
     )
-    return _recombine_set_tokens(tokens)
+
+    return tokens


 def _token_type(token):
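With this change _tokenize_type_spec only splits and filters; recombining set tokens becomes a separate pass that the caller applies. Assuming the behaviour encoded in the tests below, the two-step flow looks roughly like:

    tokens = _tokenize_type_spec('{"F", "C", "N"}, optional')
    # ["{", '"F"', ", ", '"C"', ", ", '"N"', "}", ", ", "optional"]
    combined = _recombine_sets(tokens)
    # ['{"F", "C", "N"}', ", ", "optional"]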
@@ -842,7 +862,7 @@ def _token_type(token):
         or (token.startswith("{") and token.endswith("}"))
         or (token.startswith('"') and token.endswith('"'))
         or (token.startswith("'") and token.endswith("'"))
     ):
         type_ = "literal"
     elif token.startswith("{"):
         logger.warning(
@@ -887,9 +907,10 @@ def _convert_numpy_type_spec(_type, translations={}):
         return translations.get(obj, default_translation.format(obj))

     tokens = _tokenize_type_spec(_type)
+    combined_tokens = _recombine_sets(tokens)
     types = [
         (token, _token_type(token))
-        for token in tokens
+        for token in combined_tokens
     ]

     # don't use the object role if it's not necessary
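Recombination has to run before classification: a complete set literal starts with "{" and ends with "}", so _token_type marks it as a literal, while a lone "{" token would fall through to the warning branch above. A rough illustration:

    _token_type('{"F", "C", "N"}')  # -> "literal", via the brace check above
    _token_type("{")                # would hit the elif branch above and log a warning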
tests/test_ext_napoleon_docstring.py

@@ -18,7 +18,7 @@ from sphinx.ext.napoleon import Config
 from sphinx.ext.napoleon.docstring import GoogleDocstring, NumpyDocstring
 from sphinx.ext.napoleon.docstring import (
     _tokenize_type_spec,
-    _recombine_set_tokens,
+    _recombine_sets,
     _convert_numpy_type_spec,
     _token_type
 )
@@ -1068,7 +1068,7 @@ Methods:
 .. method:: func(i, j)
    :noindex:

-   
+
    description
 """
 config = Config()
@@ -1982,22 +1982,6 @@ definition_after_normal_text : int
         actual = str(NumpyDocstring(docstring, config))
         self.assertEqual(expected, actual)

-    def test_recombine_set_tokens(self):
-        tokens = (
-            ["{", "'F'", ", ", "'C'", ", ", "'N'", "}"],
-            ["{", '"F"', ", ", '"C"', ", ", '"N"', "}"],
-            ["{", "1", ", ", "2", "}"],
-        )
-        recombined_tokens = (
-            ["{'F', 'C', 'N'}"],
-            ['{"F", "C", "N"}'],
-            ["{1, 2}"],
-        )
-
-        for input_tokens, expected in zip(tokens, recombined_tokens):
-            actual = _recombine_set_tokens(input_tokens)
-            self.assertEqual(expected, actual)
-
     def test_token_type(self):
         tokens = (
             ("1", "literal"),
@@ -2042,6 +2026,7 @@ definition_after_normal_text : int
             r"'with \'quotes\''",
         )
         modifiers = (
+            "",
             "optional",
             "default: None",
         )
@@ -2049,23 +2034,24 @@ definition_after_normal_text : int
         type_tokens = (
             ["str"],
             ["int", " or ", "float", " or ", "None"],
-            ['{"F", "C", "N"}'],
-            ["{'F', 'C', 'N'}"],
+            ["{", '"F"', ", ", '"C"', ", ", '"N"', "}"],
+            ["{", "'F'", ", ", "'C'", ", ", "'N'", "}"],
             ['"ma{icious"'],
             [r"'with \'quotes\''"],
         )
         modifier_tokens = (
+            [],
             ["optional"],
             ["default", ": ", "None"],
         )

         type_specs = tuple(
-            ", ".join([type_, modifier])
+            ", ".join([type_, modifier]) if modifier else type_
             for type_ in types
             for modifier in modifiers
         )
         tokens = tuple(
-            tokens_ + [", "] + modifier_tokens_
+            tokens_ + ([", "] + modifier_tokens_ if modifier_tokens_ else [])
             for tokens_ in type_tokens
             for modifier_tokens_ in modifier_tokens
        )
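Written out, one pair that these comprehensions generate (the '{"F", "C", "N"}' type combined with the "optional" modifier):

    type_spec = '{"F", "C", "N"}, optional'
    expected_tokens = ["{", '"F"', ", ", '"C"', ", ", '"N"', "}", ", ", "optional"]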
@@ -2074,8 +2060,63 @@ definition_after_normal_text : int
             actual = _tokenize_type_spec(type_spec)
             self.assertEqual(expected, actual)

+    def test_recombine_sets(self):
+        type_tokens = (
+            ["{", "1", ", ", "2", "}"],
+            ["{", '"F"', ", ", '"C"', ", ", '"N"', "}"],
+            ["{", "'F'", ", ", "'C'", ", ", "'N'", "}"],
+        )
+        modifier_tokens = (
+            [],
+            ["optional"],
+            ["default", ": ", "None"],
+        )
+        tokens = tuple(
+            type_tokens_ + ([", "] + modifier_tokens_ if modifier_tokens_ else [])
+            for type_tokens_ in type_tokens
+            for modifier_tokens_ in modifier_tokens
+        )
+
+        combined_tokens = tuple(
+            ["".join(type_tokens_)] + ([", "] + modifier_tokens_ if modifier_tokens_ else [])
+            for type_tokens_ in type_tokens
+            for modifier_tokens_ in modifier_tokens
+        )
+
+        for tokens_, expected in zip(tokens, combined_tokens):
+            actual = _recombine_sets(tokens_)
+            self.assertEqual(expected, actual)
+
+    def test_recombine_sets_invalid(self):
+        type_tokens = (
+            ["{", "1", ", ", "2"],
+            ['"F"', ", ", '"C"', ", ", '"N"', "}"],
+        )
+        modifier_tokens = (
+            [],
+            ["optional"],
+            ["default", ": ", "None"],
+        )
+        tokens = tuple(
+            type_tokens_ + ([", "] + modifier_tokens_ if modifier_tokens_ else [])
+            for type_tokens_ in type_tokens
+            for modifier_tokens_ in modifier_tokens
+        )
+
+        combined_tokens = tuple(
+            (["".join(type_tokens_)] if "{" in type_tokens_ else type_tokens_)
+            + ([", "] + modifier_tokens_ if modifier_tokens_ else [])
+            for type_tokens_ in type_tokens
+            for modifier_tokens_ in modifier_tokens
+        )
+
+        for tokens_, expected in zip(tokens, combined_tokens):
+            actual = _recombine_sets(tokens_)
+            self.assertEqual(expected, actual)
+
     def test_convert_numpy_type_spec(self):
         types = (
             "",
             "str",
             "int or float or None",
             '{"F", "C", "N"}',
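For concreteness, a few of the (input, expected) pairs these parametrized tests generate, expanded by hand from the comprehensions above:

    # valid: the set literal collapses into one token, modifiers pass through
    (["{", "1", ", ", "2", "}"], ["{1, 2}"])
    (["{", "1", ", ", "2", "}", ", ", "optional"], ["{1, 2}", ", ", "optional"])

    # invalid, unterminated set: everything after the "{" is still glued together
    (["{", "1", ", ", "2"], ["{1, 2"])

    # invalid, stray closing brace with no opener: tokens pass through unchanged
    (['"F"', ", ", '"C"', ", ", '"N"', "}"], ['"F"', ", ", '"C"', ", ", '"N"', "}"])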