Replace use of six.text_type with str
This removes the last uses of the six package, allowing Sphinx to remove it as a dependency.
Parent: 4405366e22
Commit: 6978918ffc
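On Python 3, six.text_type is simply an alias for the built-in str, so every replacement in this commit is mechanical. A minimal sketch of that equivalence (the six import here is for illustration only and is exactly the dependency this commit drops):

```python
# Illustration only: on Python 3, six.text_type is an alias for str.
from six import text_type

assert text_type is str                  # the alias is the built-in type itself
assert text_type('café') == str('café')  # so every call site behaves identically
assert isinstance('café', text_type)     # and isinstance checks are unchanged
```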
CHANGES

@@ -17,6 +17,7 @@ Dependencies
 which in Ubuntu xenial are provided by package ``fonts-freefont-otf``, and
 e.g. in Fedora 29 via package ``texlive-gnu-freefont``.
 * requests 2.5.0 or above
+* The six package is no longer a dependency.
 
 Incompatible changes
 --------------------
setup.py

@@ -15,7 +15,6 @@ if sys.version_info < (3, 5):
 sys.exit(1)
 
 install_requires = [
-'six>=1.5',
 'Jinja2>=2.3',
 'Pygments>=2.0',
 'docutils>=0.12',

@@ -25,7 +25,6 @@ from docutils.frontend import OptionParser
 from docutils.io import DocTreeInput, StringOutput
 from docutils.readers.doctree import Reader as DoctreeReader
 from docutils.utils import relative_path
-from six import text_type
 
 from sphinx import package_dir, __display_version__
 from sphinx.application import ENV_PICKLE_FILENAME
@@ -86,10 +85,10 @@ def get_stable_hash(obj):
 return get_stable_hash(list(obj.items()))
 elif isinstance(obj, (list, tuple)):
 obj = sorted(get_stable_hash(o) for o in obj)
-return md5(text_type(obj).encode()).hexdigest()
+return md5(str(obj).encode()).hexdigest()
 
 
-class Stylesheet(text_type):
+class Stylesheet(str):
 """A metadata of stylesheet.
 
 To keep compatibility with old themes, an instance of stylesheet behaves as
@@ -101,7 +100,7 @@ class Stylesheet(text_type):
 
 def __new__(cls, filename, *args, **attributes):
 # type: (str, str, str) -> None
-self = text_type.__new__(cls, filename) # type: ignore
+self = str.__new__(cls, filename) # type: ignore
 self.filename = filename
 self.attributes = attributes
 self.attributes.setdefault('rel', 'stylesheet')
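Since Stylesheet above (and JavaScript below) now subclass the built-in str directly, they keep behaving like plain strings in templates while carrying extra attributes. A hedged sketch of the same pattern, re-created here purely to illustrate why the value must be set in __new__ when subclassing an immutable type:

```python
class Stylesheet(str):
    """Illustrative re-creation of the pattern above: a str that carries metadata."""

    def __new__(cls, filename, *args, **attributes):
        # str is immutable, so the value has to be fixed in __new__, not __init__.
        self = str.__new__(cls, filename)
        self.filename = filename
        self.attributes = attributes
        self.attributes.setdefault('rel', 'stylesheet')
        return self


css = Stylesheet('default.css', rel='stylesheet')
assert css == 'default.css'                   # behaves as a plain string
assert css.attributes['rel'] == 'stylesheet'  # but still carries its metadata
```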
@@ -146,7 +145,7 @@ class JSContainer(list):
 return ret
 
 
-class JavaScript(text_type):
+class JavaScript(str):
 """A metadata of javascript file.
 
 To keep compatibility with old themes, an instance of javascript behaves as
@@ -158,7 +157,7 @@ class JavaScript(text_type):
 
 def __new__(cls, filename, **attributes):
 # type: (str, **str) -> None
-self = text_type.__new__(cls, filename) # type: ignore
+self = str.__new__(cls, filename) # type: ignore
 self.filename = filename
 self.attributes = attributes
 self.attributes.setdefault('type', 'text/javascript')

@@ -12,7 +12,6 @@ import os
 from os import path
 
 from docutils.frontend import OptionParser
-from six import text_type
 
 from sphinx import package_dir, addnodes, highlighting
 from sphinx.builders import Builder
@@ -24,7 +23,7 @@ from sphinx.builders.latex.transforms import (
 from sphinx.config import ENUM
 from sphinx.environment import NoUri
 from sphinx.environment.adapters.asset import ImageAdapter
-from sphinx.errors import SphinxError, ConfigError
+from sphinx.errors import SphinxError
 from sphinx.locale import _, __
 from sphinx.transforms import SphinxTransformer
 from sphinx.util import texescape, logging, status_iterator
@@ -400,23 +399,6 @@ class LaTeXBuilder(Builder):
 
 def validate_config_values(app, config):
 # type: (Sphinx, Config) -> None
-for document in config.latex_documents:
-try:
-text_type(document[2])
-except UnicodeDecodeError:
-raise ConfigError(
-__('Invalid latex_documents.title found (might contain non-ASCII chars. '
-'Please use u"..." notation instead): %r') % (document,)
-)
-
-try:
-text_type(document[3])
-except UnicodeDecodeError:
-raise ConfigError(
-__('Invalid latex_documents.author found (might contain non-ASCII chars. '
-'Please use u"..." notation instead): %r') % (document,)
-)
-
 for key in list(config.latex_elements):
 if key not in DEFAULT_SETTINGS:
 msg = __("Unknown configure key: latex_elements[%r]. ignored.")
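The deleted block above only made sense on Python 2, where latex_documents entries could be byte strings whose implicit decoding raised UnicodeDecodeError. On Python 3 those title and author fields are already str, and calling str() on a str cannot raise that error, so the whole check (and the ConfigError import) goes away. A small illustration of the reasoning, using a hypothetical config tuple:

```python
# Hypothetical latex_documents entry; on Python 3 the title and author
# fields arrive as str, never as undecoded bytes.
document = ('index', 'Project.tex', 'Prøject Title', 'Åuthor Name', 'manual')

# str() of an existing str is a no-op and cannot raise UnicodeDecodeError,
# which is why the Python 2 era validation was dropped.
assert str(document[2]) == document[2]
assert str(document[3]) == document[3]
```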

@@ -16,7 +16,6 @@ import sys
 import traceback
 
 from docutils.utils import SystemMessage
-from six import text_type
 
 import sphinx.locale
 from sphinx import __display_version__, package_dir
@@ -53,17 +52,17 @@ def handle_exception(app, args, exception, stderr=sys.stderr):
 print(terminal_safe(exception.args[0]), file=stderr)
 elif isinstance(exception, SphinxError):
 print(red('%s:' % exception.category), file=stderr)
-print(terminal_safe(text_type(exception)), file=stderr)
+print(terminal_safe(str(exception)), file=stderr)
 elif isinstance(exception, UnicodeError):
 print(red(__('Encoding error:')), file=stderr)
-print(terminal_safe(text_type(exception)), file=stderr)
+print(terminal_safe(str(exception)), file=stderr)
 tbpath = save_traceback(app)
 print(red(__('The full traceback has been saved in %s, if you want '
 'to report the issue to the developers.') % tbpath),
 file=stderr)
 elif isinstance(exception, RuntimeError) and 'recursion depth' in str(exception):
 print(red(__('Recursion error:')), file=stderr)
-print(terminal_safe(text_type(exception)), file=stderr)
+print(terminal_safe(str(exception)), file=stderr)
 print(file=stderr)
 print(__('This can happen with very large or deeply nested source '
 'files. You can carefully increase the default Python '

@@ -32,7 +32,6 @@ except ImportError:
 USE_LIBEDIT = False
 
 from docutils.utils import column_width
-from six import text_type
 
 import sphinx.locale
 from sphinx import __display_version__, package_dir
@@ -158,7 +157,7 @@ def term_decode(text):
 warnings.warn('term_decode() is deprecated.',
 RemovedInSphinx40Warning, stacklevel=2)
 
-if isinstance(text, text_type):
+if isinstance(text, str):
 return text
 
 # Use the known encoding, if possible
@@ -391,10 +390,9 @@ def generate(d, overwrite=True, silent=False, templatedir=None):
 d['project_underline'] = column_width(d['project']) * '='
 d.setdefault('extensions', [])
 d['copyright'] = time.strftime('%Y') + ', ' + d['author']
-d['author_texescaped'] = text_type(d['author']).\
-translate(texescape.tex_escape_map)
+d['author_texescaped'] = d['author'].translate(texescape.tex_escape_map)
 d['project_doc'] = d['project'] + ' Documentation'
-d['project_doc_texescaped'] = text_type(d['project'] + ' Documentation').\
+d['project_doc_texescaped'] = (d['project'] + ' Documentation').\
 translate(texescape.tex_escape_map)
 
 # escape backslashes and single quotes in strings that are put into

@@ -16,8 +16,6 @@ from collections import OrderedDict
 from os import path, getenv
 from typing import Any, NamedTuple, Union
 
-from six import text_type
-
 from sphinx.deprecation import RemovedInSphinx30Warning, RemovedInSphinx40Warning
 from sphinx.errors import ConfigError, ExtensionError
 from sphinx.locale import _, __
@@ -41,7 +39,7 @@ copyright_year_re = re.compile(r'^((\d{4}-)?)(\d{4})(?=[ ,])')
 
 ConfigValue = NamedTuple('ConfigValue', [('name', str),
 ('value', Any),
-('rebuild', Union[bool, text_type])])
+('rebuild', Union[bool, str])])
 
 
 def is_serializable(obj):
@@ -78,7 +76,7 @@ class ENUM:
 
 
 # RemovedInSphinx40Warning
-string_classes = [text_type] # type: List
+string_classes = [str] # type: List
 
 
 class Config:

@@ -13,7 +13,6 @@ from copy import deepcopy
 
 from docutils import nodes, utils
 from docutils.parsers.rst import directives
-from six import text_type
 
 from sphinx import addnodes
 from sphinx.directives import ObjectDescription
@@ -633,7 +632,7 @@ class ASTBase:
 
 def __str__(self):
 # type: () -> str
-return self._stringify(lambda ast: text_type(ast))
+return self._stringify(lambda ast: str(ast))
 
 def get_display_string(self):
 # type: () -> str
@@ -664,7 +663,7 @@ class ASTCPPAttribute(ASTBase):
 
 def describe_signature(self, signode):
 # type: (addnodes.desc_signature) -> None
-txt = text_type(self)
+txt = str(self)
 signode.append(nodes.Text(txt, txt))
 
 
@@ -703,7 +702,7 @@ class ASTGnuAttributeList(ASTBase):
 
 def describe_signature(self, signode):
 # type: (addnodes.desc_signature) -> None
-txt = text_type(self)
+txt = str(self)
 signode.append(nodes.Text(txt, txt))
 
 
@@ -737,7 +736,7 @@ class ASTParenAttribute(ASTBase):
 
 def describe_signature(self, signode):
 # type: (addnodes.desc_signature) -> None
-txt = text_type(self)
+txt = str(self)
 signode.append(nodes.Text(txt, txt))
 
 
@@ -777,7 +776,7 @@ class ASTBooleanLiteral(ASTBase):
 return 'L0E'
 
 def describe_signature(self, signode, mode, env, symbol):
-signode.append(nodes.Text(text_type(self)))
+signode.append(nodes.Text(str(self)))
 
 
 class ASTNumberLiteral(ASTBase):
@@ -794,7 +793,7 @@ class ASTNumberLiteral(ASTBase):
 return "L%sE" % self.data
 
 def describe_signature(self, signode, mode, env, symbol):
-txt = text_type(self)
+txt = str(self)
 signode.append(nodes.Text(txt, txt))
 
 
@@ -828,7 +827,7 @@ class ASTCharLiteral(ASTBase):
 return self.type + str(self.value)
 
 def describe_signature(self, signode, mode, env, symbol):
-txt = text_type(self)
+txt = str(self)
 signode.append(nodes.Text(txt, txt))
 
 
@@ -847,7 +846,7 @@ class ASTStringLiteral(ASTBase):
 return "LA%d_KcE" % (len(self.data) - 2)
 
 def describe_signature(self, signode, mode, env, symbol):
-txt = text_type(self)
+txt = str(self)
 signode.append(nodes.Text(txt, txt))
 
 
@@ -910,7 +909,7 @@ class ASTFoldExpr(ASTBase):
 # type: (int) -> str
 assert version >= 3
 if version == 3:
-return text_type(self)
+return str(self)
 # TODO: find the right mangling scheme
 assert False
 
@@ -1466,7 +1465,7 @@ class ASTFallbackExpr(ASTBase):
 
 def get_id(self, version):
 # type: (int) -> str
-return text_type(self.expr)
+return str(self.expr)
 
 def describe_signature(self, signode, mode, env, symbol):
 signode += nodes.Text(self.expr)
@@ -1504,7 +1503,7 @@ class ASTIdentifier(ASTBase):
 if self.is_anon():
 return 'Ut%d_%s' % (len(self.identifier) - 1, self.identifier[1:])
 else:
-return text_type(len(self.identifier)) + self.identifier
+return str(len(self.identifier)) + self.identifier
 
 # and this is where we finally make a difference between __str__ and the display string
 
@@ -1987,7 +1986,7 @@ class ASTOperator(ASTBase):
 def describe_signature(self, signode, mode, env, prefix, templateArgs, symbol):
 # type: (addnodes.desc_signature, str, Any, str, str, Symbol) -> None
 _verify_description_mode(mode)
-identifier = text_type(self)
+identifier = str(self)
 if mode == 'lastIsName':
 signode += addnodes.desc_name(identifier, identifier)
 else:
@@ -2036,7 +2035,7 @@ class ASTOperatorType(ASTOperator):
 
 def get_name_no_template(self):
 # type: () -> str
-return text_type(self)
+return str(self)
 
 
 class ASTOperatorLiteral(ASTOperator):
@@ -2071,9 +2070,9 @@ class ASTTemplateArgConstant(ASTBase):
 def get_id(self, version):
 # type: (int) -> str
 if version == 1:
-return text_type(self).replace(' ', '-')
+return str(self).replace(' ', '-')
 if version == 2:
-return 'X' + text_type(self) + 'E'
+return 'X' + str(self) + 'E'
 return 'X' + self.value.get_id(version) + 'E'
 
 def describe_signature(self, signode, mode, env, symbol):
@@ -2148,7 +2147,7 @@ class ASTNestedNameElement(ASTBase):
 
 def describe_signature(self, signode, mode, env, prefix, symbol):
 # type: (addnodes.desc_signature, str, BuildEnvironment, str, Symbol) -> None
-tArgs = text_type(self.templateArgs) if self.templateArgs is not None else ''
+tArgs = str(self.templateArgs) if self.templateArgs is not None else ''
 self.identOrOp.describe_signature(signode, mode, env, prefix, tArgs, symbol)
 if self.templateArgs is not None:
 self.templateArgs.describe_signature(signode, mode, env, symbol)
@@ -2181,7 +2180,7 @@ class ASTNestedName(ASTBase):
 def get_id(self, version, modifiers=''):
 # type: (int, str) -> str
 if version == 1:
-tt = text_type(self)
+tt = str(self)
 if tt in _id_shorthands_v1:
 return _id_shorthands_v1[tt]
 else:
@@ -2216,9 +2215,9 @@ class ASTNestedName(ASTBase):
 _verify_description_mode(mode)
 # just print the name part, with template args, not template params
 if mode == 'noneIsName':
-signode += nodes.Text(text_type(self))
+signode += nodes.Text(str(self))
 elif mode == 'param':
-name = text_type(self)
+name = str(self)
 signode += nodes.emphasis(name, name)
 elif mode == 'markType' or mode == 'lastIsName':
 # Each element should be a pending xref targeting the complete
@@ -2251,10 +2250,10 @@ class ASTNestedName(ASTBase):
 if template:
 dest += nodes.Text("template ")
 first = False
-txt_nne = text_type(nne)
+txt_nne = str(nne)
 if txt_nne != '':
 if nne.templateArgs and iTemplateParams < len(templateParams):
-templateParamsPrefix += text_type(templateParams[iTemplateParams])
+templateParamsPrefix += str(templateParams[iTemplateParams])
 iTemplateParams += 1
 nne.describe_signature(dest, 'markType',
 env, templateParamsPrefix + prefix, symbol)
@@ -2299,7 +2298,7 @@ class ASTTrailingTypeSpecFundamental(ASTBase):
 
 def describe_signature(self, signode, mode, env, symbol):
 # type: (addnodes.desc_signature, str, BuildEnvironment, Symbol) -> None
-signode += nodes.Text(text_type(self.name))
+signode += nodes.Text(str(self.name))
 
 
 class ASTTrailingTypeSpecName(ASTBase):
@@ -2347,7 +2346,7 @@ class ASTTrailingTypeSpecDecltypeAuto(ASTBase):
 
 def describe_signature(self, signode, mode, env, symbol):
 # type: (addnodes.desc_signature, str, BuildEnvironment, Symbol) -> None
-signode.append(nodes.Text(text_type(self)))
+signode.append(nodes.Text(str(self)))
 
 
 class ASTTrailingTypeSpecDecltype(ASTBase):
@@ -2460,7 +2459,7 @@ class ASTParametersQualifiers(ASTBase):
 if not first:
 res.append(', ')
 first = False
-res.append(text_type(a))
+res.append(str(a))
 res.append(')')
 if self.volatile:
 res.append(' volatile')
@@ -2471,7 +2470,7 @@ class ASTParametersQualifiers(ASTBase):
 res.append(self.refQual)
 if self.exceptionSpec:
 res.append(' ')
-res.append(text_type(self.exceptionSpec))
+res.append(str(self.exceptionSpec))
 if self.final:
 res.append(' final')
 if self.override:
@@ -2508,13 +2507,13 @@ class ASTParametersQualifiers(ASTBase):
 if self.refQual:
 _add_text(signode, self.refQual)
 if self.exceptionSpec:
-_add_anno(signode, text_type(self.exceptionSpec))
+_add_anno(signode, str(self.exceptionSpec))
 if self.final:
 _add_anno(signode, 'final')
 if self.override:
 _add_anno(signode, 'override')
 if self.initializer:
-_add_text(signode, '= ' + text_type(self.initializer))
+_add_text(signode, '= ' + str(self.initializer))
 
 
 class ASTDeclSpecsSimple(ASTBase):
@@ -2646,7 +2645,7 @@ class ASTDeclSpecs(ASTBase):
 if len(res) > 0:
 res.append(" ")
 res.append(transform(self.trailingTypeSpec))
-r = text_type(self.rightSpecs)
+r = str(self.rightSpecs)
 if len(r) > 0:
 if len(res) > 0:
 res.append(" ")
@@ -2697,7 +2696,7 @@ class ASTArray(ASTBase):
 return 'A'
 if version == 2:
 if self.size:
-return 'A' + text_type(self.size) + '_'
+return 'A' + str(self.size) + '_'
 else:
 return 'A_'
 if self.size:
@@ -3314,7 +3313,7 @@ class ASTType(ASTBase):
 _verify_description_mode(mode)
 self.declSpecs.describe_signature(signode, 'markType', env, symbol)
 if (self.decl.require_space_after_declSpecs() and
-len(text_type(self.declSpecs)) > 0):
+len(str(self.declSpecs)) > 0):
 signode += nodes.Text(' ')
 # for parameters that don't really declare new names we get 'markType',
 # this should not be propagated, but be 'noneIsName'.
@@ -3922,8 +3921,8 @@ class Symbol:
 param = templateParams.params[i]
 arg = templateArgs.args[i]
 # TODO: doing this by string manipulation is probably not the most efficient
-paramName = text_type(param.name)
-argTxt = text_type(arg)
+paramName = str(param.name)
+argTxt = str(arg)
 isArgPackExpansion = argTxt.endswith('...')
 if param.isPack != isArgPackExpansion:
 return True
@@ -3951,13 +3950,13 @@ class Symbol:
 return False
 if templateParams:
 # TODO: do better comparison
-if text_type(s.templateParams) != text_type(templateParams):
+if str(s.templateParams) != str(templateParams):
 return False
 if (s.templateArgs is None) != (templateArgs is None):
 return False
 if s.templateArgs:
 # TODO: do better comparison
-if text_type(s.templateArgs) != text_type(templateArgs):
+if str(s.templateArgs) != str(templateArgs):
 return False
 return True
 if matchSelf and matches(self):
@@ -4246,7 +4245,7 @@ class Symbol:
 if not ourChild.declaration:
 ourChild._fill_empty(otherChild.declaration, otherChild.docname)
 elif ourChild.docname != otherChild.docname:
-name = text_type(ourChild.declaration)
+name = str(ourChild.declaration)
 msg = __("Duplicate declaration, also defined in '%s'.\n"
 "Declaration is '%s'.")
 msg = msg % (ourChild.docname, name)
@@ -4394,20 +4393,20 @@ class Symbol:
 res.append('::')
 else:
 if self.templateParams:
-res.append(text_type(self.templateParams))
+res.append(str(self.templateParams))
 res.append('\n')
 res.append('\t' * indent)
 if self.identOrOp:
-res.append(text_type(self.identOrOp))
+res.append(str(self.identOrOp))
 else:
-res.append(text_type(self.declaration))
+res.append(str(self.declaration))
 if self.templateArgs:
-res.append(text_type(self.templateArgs))
+res.append(str(self.templateArgs))
 if self.declaration:
 res.append(": ")
 if self.isRedeclaration:
 res.append('!!duplicate!! ')
-res.append(text_type(self.declaration))
+res.append(str(self.declaration))
 if self.docname:
 res.append('\t(')
 res.append(self.docname)
@@ -6144,7 +6143,7 @@ class DefinitionParser:
 msg += " Declaration:\n\t"
 if templatePrefix:
 msg += "%s\n\t" % templatePrefix
-msg += text_type(nestedName)
+msg += str(nestedName)
 self.warn(msg)
 
 newTemplates = []
@@ -6438,7 +6437,7 @@ class CPPObject(ObjectDescription):
 parentDecl = parentSymbol.declaration
 if parentDecl is not None and parentDecl.objectType == 'function':
 self.warn("C++ declarations inside functions are not supported." +
-" Parent function is " + text_type(parentSymbol.get_full_nested_name()))
+" Parent function is " + str(parentSymbol.get_full_nested_name()))
 name = _make_phony_error_name()
 symbol = parentSymbol.add_name(name)
 env.temp_data['cpp:last_symbol'] = symbol
@@ -6742,8 +6741,7 @@ class CPPExprRole:
 try:
 ast = parser.parse_expression()
 except DefinitionError as ex:
-Warner().warn('Unparseable C++ expression: %r\n%s'
-% (text, text_type(ex.description)))
+Warner().warn('Unparseable C++ expression: %r\n%s' % (text, ex))
 # see below
 return [self.node_type(text, text, classes=classes)], []
 parentSymbol = env.temp_data.get('cpp:parent_symbol', None)
@@ -6891,8 +6889,7 @@ class CPPDomain(Domain):
 # strange, that we don't get the error now, use the original
 return target, e
 t, ex = findWarning(e)
-warner.warn('Unparseable C++ cross-reference: %r\n%s'
-% (t, text_type(ex.description)))
+warner.warn('Unparseable C++ cross-reference: %r\n%s' % (t, ex))
 return None, None
 parentKey = node.get("cpp:parent_key", None)
 rootSymbol = self.data['root_symbol']
@@ -6923,7 +6920,7 @@ class CPPDomain(Domain):
 templateShorthand=True,
 matchSelf=True, recurseInAnon=True)
 if s is None or s.declaration is None:
-txtName = text_type(name)
+txtName = str(name)
 if txtName.startswith('std::') or txtName == 'std':
 raise NoUri()
 return None, None
@@ -7026,7 +7023,7 @@ class CPPDomain(Domain):
 continue
 assert symbol.docname
 fullNestedName = symbol.get_full_nested_name()
-name = text_type(fullNestedName).lstrip(':')
+name = str(fullNestedName).lstrip(':')
 dispname = fullNestedName.get_display_string().lstrip(':')
 objectType = symbol.declaration.objectType
 docname = symbol.docname
@@ -7045,7 +7042,7 @@ class CPPDomain(Domain):
 rootSymbol = self.data['root_symbol']
 parentSymbol = rootSymbol.direct_lookup(parentKey)
 parentName = parentSymbol.get_full_nested_name()
-return '::'.join([text_type(parentName), target])
+return '::'.join([str(parentName), target])
 
 
 def setup(app):

@@ -12,8 +12,6 @@ import re
 import unicodedata
 from itertools import groupby
 
-from six import text_type
-
 from sphinx.locale import _, __
 from sphinx.util import split_into, logging
 
@@ -44,7 +42,7 @@ class IndexEntries:
 # Force the word to be unicode if it's a ASCII bytestring.
 # This will solve problems with unicode normalization later.
 # For instance the RFC role will add bytestrings at the moment
-word = text_type(word)
+word = str(word)
 entry = dic.get(word)
 if not entry:
 dic[word] = entry = [[], {}, key]

@@ -8,8 +8,6 @@
 :license: BSD, see LICENSE for details.
 """
 
-from six import text_type
-
 from sphinx import addnodes
 from sphinx.environment.collectors import EnvironmentCollector
 from sphinx.util import split_index_msg, logging
@@ -45,7 +43,7 @@ class IndexEntriesCollector(EnvironmentCollector):
 for entry in node['entries']:
 split_index_msg(entry[0], entry[1])
 except ValueError as exc:
-logger.warning(text_type(exc), location=node)
+logger.warning(str(exc), location=node)
 node.parent.remove(node)
 else:
 for entry in node['entries']:

@@ -12,12 +12,10 @@
 
 import inspect
 import re
-import sys
 import warnings
 from typing import Any
 
 from docutils.statemachine import StringList
-from six import text_type
 
 import sphinx
 from sphinx.deprecation import RemovedInSphinx30Warning, RemovedInSphinx40Warning
@@ -461,13 +459,7 @@ class Documenter:
 def get_sourcename(self):
 # type: () -> str
 if self.analyzer:
-# prevent encoding errors when the file name is non-ASCII
-if not isinstance(self.analyzer.srcname, text_type):
-filename = text_type(self.analyzer.srcname,
-sys.getfilesystemencoding(), 'replace')
-else:
-filename = self.analyzer.srcname
-return '%s:docstring of %s' % (filename, self.fullname)
+return '%s:docstring of %s' % (self.analyzer.srcname, self.fullname)
 return 'docstring of %s' % self.fullname
 
 def add_content(self, more_content, no_docstring=False):

@@ -65,7 +65,6 @@ from docutils import nodes
 from docutils.parsers.rst import directives
 from docutils.parsers.rst.states import RSTStateMachine, state_classes
 from docutils.statemachine import StringList
-from six import text_type
 
 import sphinx
 from sphinx import addnodes
@@ -160,8 +159,7 @@ def autosummary_table_visit_html(self, node):
 par = cast(nodes.paragraph, col1_entry[0])
 for j, subnode in enumerate(list(par)):
 if isinstance(subnode, nodes.Text):
-new_text = text_type(subnode.astext())
-new_text = new_text.replace(" ", "\u00a0")
+new_text = subnode.astext().replace(" ", "\u00a0")
 par[j] = nodes.Text(new_text)
 except IndexError:
 pass

@@ -17,7 +17,6 @@ from os import path
 from subprocess import Popen, PIPE
 
 from docutils import nodes
-from six import text_type
 
 import sphinx
 from sphinx.errors import SphinxError
@@ -290,7 +289,7 @@ def html_visit_math(self, node):
 try:
 fname, depth = render_math(self, '$' + node.astext() + '$')
 except MathExtError as exc:
-msg = text_type(exc)
+msg = str(exc)
 sm = nodes.system_message(msg, type='WARNING', level=2,
 backrefs=[], source=node.astext())
 sm.walkabout(self)
@@ -317,7 +316,7 @@ def html_visit_displaymath(self, node):
 try:
 fname, depth = render_math(self, latex)
 except MathExtError as exc:
-msg = text_type(exc)
+msg = str(exc)
 sm = nodes.system_message(msg, type='WARNING', level=2,
 backrefs=[], source=node.astext())
 sm.walkabout(self)

@@ -20,7 +20,6 @@ from pygments.lexers import PythonLexer, Python3Lexer, PythonConsoleLexer, \
 CLexer, TextLexer, RstLexer
 from pygments.styles import get_style_by_name
 from pygments.util import ClassNotFound
-from six import text_type
 
 from sphinx.deprecation import RemovedInSphinx30Warning
 from sphinx.ext import doctest
@@ -113,7 +112,7 @@ class PygmentsBridge:
 
 def highlight_block(self, source, lang, opts=None, location=None, force=False, **kwargs):
 # type: (str, str, Any, Any, bool, Any) -> str
-if not isinstance(source, text_type):
+if not isinstance(source, str):
 source = source.decode()
 
 # find out which lexer to use

@@ -14,8 +14,6 @@ import warnings
 from collections import UserString, defaultdict
 from gettext import NullTranslations
 
-from six import text_type
-
 from sphinx.deprecation import RemovedInSphinx30Warning
 
 if False:
@@ -41,7 +39,7 @@ class _TranslationProxy(UserString):
 # type: (Callable, str) -> object
 if not args:
 # not called with "function" and "arguments", but a plain string
-return text_type(func)
+return str(func)
 return object.__new__(cls)
 
 def __getnewargs__(self):
@@ -73,7 +71,7 @@ class _TranslationProxy(UserString):
 
 def __dir__(self):
 # type: () -> List[str]
-return dir(text_type)
+return dir(str)
 
 def __str__(self):
 # type: () -> str
@@ -124,7 +122,7 @@ class _TranslationProxy(UserString):
 def __repr__(self):
 # type: () -> str
 try:
-return 'i' + repr(text_type(self.data))
+return 'i' + repr(str(self.data))
 except Exception:
 return '<%s broken>' % self.__class__.__name__
 

@@ -16,8 +16,6 @@ import tokenize
 from token import NAME, NEWLINE, INDENT, DEDENT, NUMBER, OP, STRING
 from tokenize import COMMENT, NL
 
-from six import text_type
-
 if False:
 # For type annotation
 from typing import Any, Dict, IO, List, Tuple # NOQA
@@ -349,7 +347,7 @@ class VariableCommentPicker(ast.NodeVisitor):
 targets = get_assign_targets(self.previous)
 varnames = get_lvar_names(targets[0], self.get_self())
 for varname in varnames:
-if isinstance(node.value.s, text_type):
+if isinstance(node.value.s, str):
 docstring = node.value.s
 else:
 docstring = node.value.s.decode(self.encoding or 'utf-8')

@@ -13,8 +13,6 @@ import re
 import warnings
 from os import path
 
-from six import text_type
-
 from docutils import nodes
 
 from sphinx import addnodes
@@ -351,9 +349,9 @@ class IndexBuilder:
 otypes[domainname, type] = typeindex
 otype = domain.object_types.get(type)
 if otype:
-# use unicode() to fire translation proxies
+# use str() to fire translation proxies
 onames[typeindex] = (domainname, type,
-text_type(domain.get_type_name(otype)))
+str(domain.get_type_name(otype)))
 else:
 onames[typeindex] = (domainname, type, type)
 if anchor == fullname:

@@ -9,8 +9,6 @@ import os
 import shutil
 import sys
 
-from six import text_type
-
 if False:
 # For type annotation
 import builtins # NOQA
@@ -20,7 +18,7 @@ if False:
 FILESYSTEMENCODING = sys.getfilesystemencoding() or sys.getdefaultencoding()
 
 
-class path(text_type):
+class path(str):
 """
 Represents a path which behaves like a string.
 """
@@ -222,4 +220,4 @@ class path(text_type):
 
 def __repr__(self):
 # type: () -> str
-return '%s(%s)' % (self.__class__.__name__, text_type.__repr__(self))
+return '%s(%s)' % (self.__class__.__name__, super().__repr__())
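For the path helper above, the only behavioural question is whether super().__repr__() matches the old explicit text_type.__repr__(self) call; for a direct str subclass the two resolve to the same method. A tiny sketch, with the class name reused from the hunk above purely for illustration:

```python
class path(str):
    """Illustrative str subclass mirroring the repr change above."""

    def __repr__(self):
        # super().__repr__() resolves to str.__repr__(self) for a direct
        # str subclass, so the output is unchanged by the refactor.
        return '%s(%s)' % (self.__class__.__name__, super().__repr__())


p = path('/tmp/example')
assert repr(p) == "path('/tmp/example')"
assert super(path, p).__repr__() == str.__repr__(p)
```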

@@ -13,7 +13,6 @@ from typing import NamedTuple
 
 from docutils import nodes
 from pygments.lexers import PythonConsoleLexer, guess_lexer
-from six import text_type
 
 from sphinx import addnodes
 from sphinx.ext import doctest
@@ -25,7 +24,7 @@ if False:
 from sphinx.application import Sphinx # NOQA
 
 
-HighlightSetting = NamedTuple('HighlightSetting', [('language', text_type),
+HighlightSetting = NamedTuple('HighlightSetting', [('language', str),
 ('lineno_threshold', int)])
 
 

@@ -13,7 +13,6 @@ from hashlib import sha1
 from math import ceil
 
 from docutils import nodes
-from six import text_type
 
 from sphinx.locale import __
 from sphinx.transforms import SphinxTransform
@@ -118,8 +117,7 @@ class ImageDownloader(BaseImageConverter):
 node['uri'] = path
 self.app.env.images.add_file(self.env.docname, path)
 except Exception as exc:
-logger.warning(__('Could not fetch remote image: %s [%s]') %
-(node['uri'], text_type(exc)))
+logger.warning(__('Could not fetch remote image: %s [%s]') % (node['uri'], exc))
 
 
 class DataURIExtractor(BaseImageConverter):

@@ -26,7 +26,6 @@ from time import mktime, strptime
 from urllib.parse import urlsplit, urlunsplit, quote_plus, parse_qsl, urlencode
 
 from docutils.utils import relative_path
-from six import text_type
 
 from sphinx.deprecation import RemovedInSphinx30Warning, RemovedInSphinx40Warning
 from sphinx.errors import PycodeError, SphinxParallelError, ExtensionError
@@ -70,9 +69,7 @@ def path_stabilize(filepath):
 # type: (str) -> str
 "normalize path separater and unicode string"
 newpath = filepath.replace(os.path.sep, SEP)
-if isinstance(newpath, text_type):
-newpath = unicodedata.normalize('NFC', newpath)
-return newpath
+return unicodedata.normalize('NFC', newpath)
 
 
 def get_matching_files(dirname, exclude_matchers=()):
@@ -637,9 +634,9 @@ def display_chunk(chunk):
 # type: (Any) -> str
 if isinstance(chunk, (list, tuple)):
 if len(chunk) == 1:
-return text_type(chunk[0])
+return str(chunk[0])
 return '%s .. %s' % (chunk[0], chunk[-1])
-return text_type(chunk)
+return str(chunk)
 
 
 def old_status_iterator(iterable, summary, color="darkgreen", stringify_func=display_chunk):

@@ -17,7 +17,6 @@ from os import path
 from typing import NamedTuple
 
 import imagesize
-from six import text_type
 
 from sphinx.deprecation import RemovedInSphinx30Warning
 
@@ -42,8 +41,8 @@ mime_suffixes = OrderedDict([
 ('.svgz', 'image/svg+xml'),
 ])
 
-DataURI = NamedTuple('DataURI', [('mimetype', text_type),
-('charset', text_type),
+DataURI = NamedTuple('DataURI', [('mimetype', str),
+('charset', str),
 ('data', bytes)])
 
 

@@ -11,8 +11,6 @@
 import json
 from collections import UserString
 
-from six import text_type
-
 if False:
 # For type annotation
 from typing import Any, IO # NOQA
@@ -23,7 +21,7 @@ class SphinxJSONEncoder(json.JSONEncoder):
 def default(self, obj):
 # type: (Any) -> str
 if isinstance(obj, UserString):
-return text_type(obj)
+return str(obj)
 return super().default(obj)
 
 

@@ -12,7 +12,6 @@ import re
 from typing import Any, cast
 
 from docutils import nodes
-from six import text_type
 
 from sphinx import addnodes
 from sphinx.locale import __
@@ -115,7 +114,7 @@ def repr_domxml(node, length=80):
 try:
 text = node.asdom().toxml()
 except Exception:
-text = text_type(node)
+text = str(node)
 if length and len(text) > length:
 text = text[:length] + '...'
 return text
@@ -398,7 +397,7 @@ def inline_all_toctrees(builder, docnameset, docname, tree, colorfunc, traversed
 tree = cast(nodes.document, tree.deepcopy())
 for toctreenode in tree.traverse(addnodes.toctree):
 newnodes = []
-includefiles = map(text_type, toctreenode['includefiles'])
+includefiles = map(str, toctreenode['includefiles'])
 for includefile in includefiles:
 if includefile not in traversed:
 try:

@@ -14,8 +14,6 @@ from html import escape as htmlescape  # NOQA
 from io import TextIOWrapper # NOQA
 from textwrap import indent # NOQA
 
-from six import text_type
-
 from sphinx.deprecation import RemovedInSphinx40Warning
 from sphinx.locale import __
 from sphinx.util import logging
@@ -64,7 +62,7 @@ def convert_with_2to3(filepath):
 lineno, offset = err.context[1]
 # try to match ParseError details with SyntaxError details
 raise SyntaxError(err.msg, (filepath, lineno, offset, err.value))
-return text_type(tree)
+return str(tree)
 
 
 class UnicodeMixin:

@@ -12,7 +12,6 @@ from typing import Any, Callable, Dict, List, Tuple, Union
 
 from docutils import nodes
 from docutils.parsers.rst.states import Inliner
-from six import text_type
 
 
 # An entry of Directive.option_spec
@@ -22,11 +21,11 @@ DirectiveOption = Callable[[str], Any]
 TextlikeNode = Union[nodes.Text, nodes.TextElement]
 
 # common role functions
-RoleFunction = Callable[[text_type, text_type, text_type, int, Inliner, Dict, List[text_type]],
+RoleFunction = Callable[[str, str, str, int, Inliner, Dict, List[str]],
 Tuple[List[nodes.Node], List[nodes.system_message]]]
 
 # title getter functions for enumerable nodes (see sphinx.domains.std)
-TitleGetter = Callable[[nodes.Node], text_type]
+TitleGetter = Callable[[nodes.Node], str]
 
 # inventory data on memory
 Inventory = Dict[str, Dict[str, Tuple[str, str, str, str]]]
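RoleFunction above now spells out the standard docutils role signature with plain str. A hedged sketch of a function matching that alias (the role itself is made up for illustration; only the signature shape comes from the type above):

```python
from typing import Dict, List, Tuple

from docutils import nodes
from docutils.parsers.rst.states import Inliner


def example_role(name: str, rawtext: str, text: str, lineno: int,
                 inliner: Inliner, options: Dict = {}, content: List[str] = []
                 ) -> Tuple[List[nodes.Node], List[nodes.system_message]]:
    """Hypothetical role matching the RoleFunction alias: wrap the text in literal markup."""
    # Mutable defaults follow the usual docutils role convention.
    node = nodes.literal(rawtext, text)
    return [node], []
```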
|
@@ -20,7 +20,6 @@ from typing import Iterable, cast
 
 from docutils import nodes, writers
 from docutils.writers.latex2e import Babel
-from six import text_type
 
 from sphinx import addnodes
 from sphinx import highlighting
@@ -784,7 +783,7 @@ class LaTeXTranslator(SphinxTranslator):
 
     def idescape(self, id):
         # type: (str) -> str
-        return '\\detokenize{%s}' % text_type(id).translate(tex_replace_map).\
+        return '\\detokenize{%s}' % str(id).translate(tex_replace_map).\
             encode('ascii', 'backslashreplace').decode('ascii').\
             replace('\\', '_')
 
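The escaping idiom is unchanged apart from the str spelling: translate() maps characters through an {ordinal: replacement} table, backslashreplace rewrites any remaining non-ASCII characters as backslash escapes, and the final replace() strips the backslashes back out. A standalone sketch with a stand-in table (the real tex_replace_map lives in sphinx.util.texescape):

    # Stand-in for Sphinx's tex_replace_map -- just enough entries to
    # show the shape of the mapping.
    replace_map = {ord(' '): '_', ord('{'): '(', ord('}'): ')'}

    def idescape(id):
        return '\\detokenize{%s}' % str(id).translate(replace_map).\
            encode('ascii', 'backslashreplace').decode('ascii').\
            replace('\\', '_')

    print(idescape('müller index'))   # \detokenize{m_xfcller_index}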
@@ -805,34 +804,34 @@ class LaTeXTranslator(SphinxTranslator):
         figure = self.builder.config.numfig_format['figure'].split('%s', 1)
         if len(figure) == 1:
             ret.append('\\def\\fnum@figure{%s}\n' %
-                       text_type(figure[0]).strip().translate(tex_escape_map))
+                       str(figure[0]).strip().translate(tex_escape_map))
         else:
-            definition = text_type(figure[0]).strip().translate(tex_escape_map)
+            definition = str(figure[0]).strip().translate(tex_escape_map)
             ret.append(self.babel_renewcommand('\\figurename', definition))
             if figure[1]:
                 ret.append('\\makeatletter\n')
                 ret.append('\\def\\fnum@figure{\\figurename\\thefigure%s}\n' %
-                           text_type(figure[1]).strip().translate(tex_escape_map))
+                           str(figure[1]).strip().translate(tex_escape_map))
                 ret.append('\\makeatother\n')
 
         table = self.builder.config.numfig_format['table'].split('%s', 1)
         if len(table) == 1:
             ret.append('\\def\\fnum@table{%s}\n' %
-                       text_type(table[0]).strip().translate(tex_escape_map))
+                       str(table[0]).strip().translate(tex_escape_map))
         else:
-            definition = text_type(table[0]).strip().translate(tex_escape_map)
+            definition = str(table[0]).strip().translate(tex_escape_map)
             ret.append(self.babel_renewcommand('\\tablename', definition))
             if table[1]:
                 ret.append('\\makeatletter\n')
                 ret.append('\\def\\fnum@table{\\tablename\\thetable%s}\n' %
-                           text_type(table[1]).strip().translate(tex_escape_map))
+                           str(table[1]).strip().translate(tex_escape_map))
                 ret.append('\\makeatother\n')
 
         codeblock = self.builder.config.numfig_format['code-block'].split('%s', 1)
         if len(codeblock) == 1:
             pass  # FIXME
         else:
-            definition = text_type(codeblock[0]).strip().translate(tex_escape_map)
+            definition = str(codeblock[0]).strip().translate(tex_escape_map)
             ret.append(self.babel_renewcommand('\\literalblockname', definition))
             if codeblock[1]:
                 pass  # FIXME
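Each numfig prefix is split on the first '%s' and both halves are escaped for LaTeX via str.translate(). A small sketch of that step, using a stand-in for tex_escape_map (the real table is in sphinx.util.texescape):

    # Stand-in escape table: ordinals mapped to TeX-safe replacements.
    tex_escape_map = {ord('%'): '\\%', ord('&'): '\\&', ord('#'): '\\#'}

    figure = 'Fig. %s (artwork & notes)'.split('%s', 1)
    # -> ['Fig. ', ' (artwork & notes)']
    definition = str(figure[0]).strip().translate(tex_escape_map)
    suffix = str(figure[1]).strip().translate(tex_escape_map)
    print(definition)   # Fig.
    print(suffix)       # (artwork \& notes)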
@@ -849,7 +848,7 @@ class LaTeXTranslator(SphinxTranslator):
             if i > 0:
                 ret.append('\\indexspace\n')
             ret.append('\\bigletter{%s}\n' %
-                       text_type(letter).translate(tex_escape_map))
+                       str(letter).translate(tex_escape_map))
             for entry in entries:
                 if not entry[3]:
                     continue
@@ -2028,7 +2027,7 @@ class LaTeXTranslator(SphinxTranslator):
             id = node.get('refuri', '')[1:].replace('#', ':')
 
             title = node.get('title', '%s')
-            title = text_type(title).translate(tex_escape_map).replace('\\%s', '%s')
+            title = str(title).translate(tex_escape_map).replace('\\%s', '%s')
             if '\\{name\\}' in title or '\\{number\\}' in title:
                 # new style format (cf. "Fig.%{number}")
                 title = title.replace('\\{name\\}', '{name}').replace('\\{number\\}', '{number}')
@@ -2476,7 +2475,7 @@ class LaTeXTranslator(SphinxTranslator):
 
     def encode(self, text):
         # type: (str) -> str
-        text = text_type(text).translate(tex_escape_map)
+        text = str(text).translate(tex_escape_map)
        if self.literal_whitespace:
             # Insert a blank before the newline, to avoid
             # ! LaTeX Error: There's no line here to end.
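Here str(text) is a cheap normalisation: docutils' nodes.Text is itself a str subclass, so the call simply drops down to the built-in type before translate() is applied, just as text_type() did on Python 3. A quick sketch:

    from docutils import nodes

    # nodes.Text subclasses str, so str() only normalises the type.
    t = nodes.Text('50% done')
    assert isinstance(t, str)
    print(str(t).translate({ord('%'): '\\%'}))   # 50\% done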
@@ -12,7 +12,6 @@ import re
 import sys
 
 import pytest
-from six import text_type
 
 import sphinx.domains.cpp as cppDomain
 from sphinx import addnodes
@@ -39,7 +38,7 @@ def check(name, input, idDict, output=None):
     if output is None:
         output = input
     ast = parse(name, input)
-    res = text_type(ast)
+    res = str(ast)
     if res != output:
         print("")
         print("Input: ", input)
@@ -11,7 +11,6 @@
 import pytest
 from docutils import nodes
 from mock import Mock
-from six import text_type
 
 from sphinx import addnodes
 from sphinx.domains.python import py_sig_re, _pseudo_parse_arglist, PythonDomain
@@ -30,22 +29,22 @@ def parse(sig):
 
 def test_function_signatures():
     rv = parse('func(a=1) -> int object')
-    assert text_type(rv) == 'a=1'
+    assert rv == 'a=1'
 
     rv = parse('func(a=1, [b=None])')
-    assert text_type(rv) == 'a=1, [b=None]'
+    assert rv == 'a=1, [b=None]'
 
     rv = parse('func(a=1[, b=None])')
-    assert text_type(rv) == 'a=1, [b=None]'
+    assert rv == 'a=1, [b=None]'
 
     rv = parse("compile(source : string, filename, symbol='file')")
-    assert text_type(rv) == "source : string, filename, symbol='file'"
+    assert rv == "source : string, filename, symbol='file'"
 
     rv = parse('func(a=[], [b=None])')
-    assert text_type(rv) == 'a=[], [b=None]'
+    assert rv == 'a=[], [b=None]'
 
     rv = parse('func(a=[][, b=None])')
-    assert text_type(rv) == 'a=[], [b=None]'
+    assert rv == 'a=[], [b=None]'
 
 
 @pytest.mark.sphinx('dummy', testroot='domain-py')
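The text_type() wrappers could go because the test's parse() helper appears to hand back a plain string already (astext() on a docutils node returns a built-in str), so the assertions compare directly. A brief illustration with a hand-built node, not the test helper itself:

    from docutils import nodes

    node = nodes.paragraph(text='a=1, [b=None]')
    assert isinstance(node.astext(), str)
    assert node.astext() == 'a=1, [b=None]'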
@@ -12,7 +12,6 @@ import time
 from io import StringIO
 
 import pytest
-from six import text_type
 
 from sphinx import application
 from sphinx.cmd import quickstart as qs
@@ -35,7 +34,6 @@ def mock_input(answers, needanswer=False):
             raise AssertionError('answer for %r missing and no default '
                                  'present' % prompt)
         called.add(prompt)
-        prompt = text_type(prompt)
         for question in answers:
             if prompt.startswith(qs.PROMPT_PREFIX + question):
                 return answers[question]
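With prompts always being str on Python 3, the text_type(prompt) line was a no-op rather than a conversion, so it simply goes away. A simplified sketch of the helper's shape (the prompt prefix and answers are made up, and the real helper also tracks repeated prompts):

    PROMPT_PREFIX = '> '   # stand-in for qs.PROMPT_PREFIX

    def mock_input(answers, needanswer=False):
        def input_(prompt):
            # prompt is already a str on Python 3 -- no conversion needed.
            for question in answers:
                if prompt.startswith(PROMPT_PREFIX + question):
                    return answers[question]
            if needanswer:
                raise AssertionError('answer for %r missing' % prompt)
            return ''
        return input_

    ask = mock_input({'Project name': 'demo'})
    print(ask('> Project name: '))   # demo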