mirror of https://github.com/sphinx-doc/sphinx.git
synced 2025-02-25 18:55:22 -06:00

refactoring: Drop PY2 and PY3 flags

This commit is contained in:
parent 5ffce30d75
commit 3a2418a827

CHANGES
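The whole commit applies one pattern: branches guarded by six's PY2/PY3 flags collapse to their Python 3 side, and helpers that only did work on Python 2 survive as deprecation shims. A minimal sketch of that pattern, not the actual Sphinx module layout: terminal_safe() and doctest_encode() are taken from the hunks below, while the standalone warning class is an assumption so the snippet runs outside Sphinx.

    import warnings

    class RemovedInSphinx40Warning(PendingDeprecationWarning):
        """Stand-in for sphinx.deprecation.RemovedInSphinx40Warning (assumed)."""

    # Before: from six import PY3; define terminal_safe() differently per interpreter.
    # After: keep only the Python 3 branch.
    def terminal_safe(s):
        # safely encode a string for printing to the terminal
        return s.encode('ascii', 'backslashreplace').decode('ascii')

    # Python-2-only helpers stay importable but now just warn and return their input.
    def doctest_encode(text, encoding):
        warnings.warn('doctest_encode() is deprecated.', RemovedInSphinx40Warning)
        return text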

@@ -15,6 +15,12 @@ Incompatible changes

Deprecated
----------

* ``sphinx.ext.doctest.doctest_encode()``
* ``sphinx.testing.util.remove_unicode_literal()``

For more details, see `deprecation APIs list
<http://www.sphinx-doc.org/en/master/extdev/index.html#deprecated-apis>`_

Features added
--------------
* #1618: The search results preview of generated HTML documentation is

@@ -116,6 +116,16 @@ The following is a list of deprecated interface.

- (will be) Removed
- Alternatives

* - ``sphinx.ext.doctest.doctest_encode()``
  - 2.0
  - 4.0
  - N/A

* - ``sphinx.testing.util.remove_unicode_literal()``
  - 2.0
  - 4.0
  - N/A

* - :rst:dir:`highlightlang`
  - 1.8
  - 4.0
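Both entries in the table keep working until Sphinx 4.0 but now emit RemovedInSphinx40Warning when called. A hedged sketch of how a downstream project might surface those warnings while migrating; the warning class and module path are real, the filter choice is just one option:

    import warnings

    from sphinx.deprecation import RemovedInSphinx40Warning

    # Fail fast in a test suite so remaining callers of the deprecated helpers show up...
    warnings.filterwarnings('error', category=RemovedInSphinx40Warning)

    # ...or keep builds quiet while third-party code still uses them:
    # warnings.filterwarnings('ignore', category=RemovedInSphinx40Warning)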

@@ -17,6 +17,7 @@ import os
import re
import sys
import time
import warnings
from collections import OrderedDict
from io import open
from os import path

@@ -34,12 +35,13 @@ except ImportError:
USE_LIBEDIT = False

from docutils.utils import column_width
from six import PY2, PY3, text_type, binary_type
from six import text_type, binary_type
from six.moves import input
from six.moves.urllib.parse import quote as urlquote

import sphinx.locale
from sphinx import __display_version__, package_dir
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.locale import __
from sphinx.util import texescape
from sphinx.util.console import (  # type: ignore

@@ -185,20 +187,6 @@ def do_prompt(text, default=None, validator=nonempty):
prompt = PROMPT_PREFIX + '%s [%s]: ' % (text, default)  # type: unicode
else:
prompt = PROMPT_PREFIX + text + ': '
if PY2:
# for Python 2.x, try to get a Unicode string out of it
if prompt.encode('ascii', 'replace').decode('ascii', 'replace') \
!= prompt:
if TERM_ENCODING:
prompt = prompt.encode(TERM_ENCODING)
else:
print(turquoise(__('* Note: non-ASCII default value provided '
'and terminal encoding unknown -- assuming '
'UTF-8 or Latin-1.')))
try:
prompt = prompt.encode('utf-8')
except UnicodeEncodeError:
prompt = prompt.encode('latin1')
if USE_LIBEDIT:
# Note: libedit has a problem for combination of ``input()`` and escape
# sequence (see #5335). To avoid the problem, all prompts are not colored

@@ -222,10 +210,9 @@ def do_prompt(text, default=None, validator=nonempty):
def convert_python_source(source, rex=re.compile(r"[uU]('.*?')")):
# type: (unicode, Pattern) -> unicode
# remove Unicode literal prefixes
if PY3:
return rex.sub('\\1', source)
else:
return source
warnings.warn('convert_python_source() is deprecated.',
RemovedInSphinx40Warning)
return rex.sub('\\1', source)


class QuickstartRenderer(SphinxRenderer):

@@ -399,7 +386,7 @@ def generate(d, overwrite=True, silent=False, templatedir=None):
if 'mastertocmaxdepth' not in d:
d['mastertocmaxdepth'] = 2

d['PY3'] = PY3
d['PY3'] = True
d['project_fn'] = make_filename(d['project'])
d['project_url'] = urlquote(d['project'].encode('idna'))
d['project_manpage'] = d['project_fn'].lower()

@@ -455,7 +442,7 @@ def generate(d, overwrite=True, silent=False, templatedir=None):
if not conf_path or not path.isfile(conf_path):
conf_path = os.path.join(package_dir, 'templates', 'quickstart', 'conf.py_t')
with open(conf_path) as f:
conf_text = convert_python_source(f.read())
conf_text = f.read()

write_file(path.join(srcdir, 'conf.py'), template.render_string(conf_text, d))

@@ -17,9 +17,7 @@ from collections import OrderedDict
from os import path, getenv
from typing import Any, NamedTuple, Union

from six import (
PY2, PY3, iteritems, string_types, binary_type, text_type, integer_types
)
from six import iteritems, string_types, binary_type, text_type, integer_types

from sphinx.deprecation import RemovedInSphinx30Warning
from sphinx.errors import ConfigError, ExtensionError

@@ -41,12 +39,9 @@ CONFIG_FILENAME = 'conf.py'
UNSERIALIZABLE_TYPES = (type, types.ModuleType, types.FunctionType)
copyright_year_re = re.compile(r'^((\d{4}-)?)(\d{4})(?=[ ,])')

if PY3:
unicode = str  # special alias for static typing...

ConfigValue = NamedTuple('ConfigValue', [('name', str),
('value', Any),
('rebuild', Union[bool, unicode])])
('rebuild', Union[bool, text_type])])


def is_serializable(obj):

@@ -83,8 +78,6 @@ class ENUM(object):


string_classes = [text_type]  # type: List
if PY2:
string_classes.append(binary_type)  # => [str, unicode]


class Config(object):

@@ -367,9 +360,8 @@ def eval_config_file(filename, tags):
try:
execfile_(filename, namespace)
except SyntaxError as err:
msg = __("There is a syntax error in your configuration file: %s")
if PY3:
msg += __("\nDid you change the syntax from 2.x to 3.x?")
msg = __("There is a syntax error in your configuration file: %s\n"
"Did you change the syntax from 2.x to 3.x?")
raise ConfigError(msg % err)
except SystemExit:
msg = __("The configuration file (or one of the modules it imports) "

@@ -16,7 +16,7 @@ import warnings
from collections import namedtuple
from types import FunctionType, MethodType, ModuleType

from six import PY2, iteritems
from six import iteritems

from sphinx.util import logging
from sphinx.util.inspect import isenumclass, safe_getattr

@@ -219,8 +219,6 @@ def import_object(modname, objpath, objtype='', attrgetter=safe_getattr, warning
else:
errmsg += '; the following exception was raised:\n%s' % traceback.format_exc()

if PY2:
errmsg = errmsg.decode('utf-8')  # type: ignore
logger.debug(errmsg)
raise ImportError(errmsg)

@@ -16,22 +16,24 @@ import doctest
import re
import sys
import time
import warnings
from os import path

from docutils import nodes
from docutils.parsers.rst import directives
from packaging.specifiers import SpecifierSet, InvalidSpecifier
from packaging.version import Version
from six import itervalues, StringIO, binary_type, text_type, PY2
from six import itervalues, StringIO, binary_type

import sphinx
from sphinx.builders import Builder
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.locale import __
from sphinx.util import force_decode, logging
from sphinx.util.console import bold  # type: ignore
from sphinx.util.docutils import SphinxDirective
from sphinx.util.nodes import set_source_info
from sphinx.util.osutil import fs_encoding, relpath
from sphinx.util.osutil import relpath

if False:
# For type annotation

@@ -43,18 +45,12 @@ logger = logging.getLogger(__name__)
blankline_re = re.compile(r'^\s*<BLANKLINE>', re.MULTILINE)
doctestopt_re = re.compile(r'#\s*doctest:.+$', re.MULTILINE)

if PY2:
def doctest_encode(text, encoding):
# type: (str, unicode) -> unicode
if isinstance(text, text_type):
text = text.encode(encoding)
if text.startswith(codecs.BOM_UTF8):
text = text[len(codecs.BOM_UTF8):]
return text
else:
def doctest_encode(text, encoding):
# type: (unicode, unicode) -> unicode
return text

def doctest_encode(text, encoding):
# type: (unicode, unicode) -> unicode
warnings.warn('doctest_encode() is deprecated.',
RemovedInSphinx40Warning)
return text


def is_allowed_version(spec, version):
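Since the shim above no longer changes its input, the call sites later in this file drop doctest_encode() and pass the source text straight to the doctest module. A rough sketch of the same migration as it might look in third-party code; make_example() is a hypothetical helper, while doctest.Example is the stdlib class used below:

    import doctest

    def make_example(code, lineno):
        # Before: doctest.Example(doctest_encode(code, source_encoding), '', lineno=lineno)
        # After: the source is already text on Python 3, so use it as-is.
        return doctest.Example(code, '', lineno=lineno)

    example = make_example('print(1 + 1)\n', lineno=0)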

@@ -382,7 +378,7 @@ Doctest summary
self.test_doc(docname, doctree)

def get_filename_for_node(self, node, docname):
# type: (nodes.Node, unicode) -> str
# type: (nodes.Node, unicode) -> unicode
"""Try to get the file which actually contains the doctest, not the
filename of the document it's included in."""
try:

@@ -390,8 +386,6 @@ Doctest summary
.rsplit(':docstring of ', maxsplit=1)[0]
except Exception:
filename = self.env.doc2path(docname, base=None)
if PY2:
return filename.encode(fs_encoding)
return filename

@staticmethod

@@ -444,7 +438,7 @@ Doctest summary
logger.warning(__('no code/output in %s block at %s:%s'),
node.get('testnodetype', 'doctest'),
filename, line_number)
code = TestCode(source, type=node.get('testnodetype', 'doctest'),
code = TestCode(source, type=node.get('testnodetype', 'doctest'),  # type: ignore
filename=filename, lineno=line_number,
options=node.get('options'))
node_groups = node.get('groups', ['default'])

@@ -501,9 +495,9 @@ Doctest summary
# type: (Any, List[TestCode], Any) -> bool
examples = []
for testcode in testcodes:
examples.append(doctest.Example(  # type: ignore
doctest_encode(testcode.code, self.env.config.source_encoding), '',  # type: ignore # NOQA
lineno=testcode.lineno))
example = doctest.Example(testcode.code, '',  # type: ignore
lineno=testcode.lineno)
examples.append(example)
if not examples:
return True
# simulate a doctest with the code

@@ -528,9 +522,8 @@ Doctest summary
if len(code) == 1:
# ordinary doctests (code/output interleaved)
try:
test = parser.get_doctest(  # type: ignore
doctest_encode(code[0].code, self.env.config.source_encoding), {},  # type: ignore # NOQA
group.name, code[0].filename, code[0].lineno)
test = parser.get_doctest(code[0].code, {}, group.name,  # type: ignore
code[0].filename, code[0].lineno)
except Exception:
logger.warning(__('ignoring invalid doctest code: %r'), code[0].code,
location=(code[0].filename, code[0].lineno))

@@ -555,11 +548,10 @@ Doctest summary
exc_msg = m.group('msg')
else:
exc_msg = None
example = doctest.Example(  # type: ignore
doctest_encode(code[0].code, self.env.config.source_encoding), output,  # type: ignore # NOQA
exc_msg=exc_msg,
lineno=code[0].lineno,
options=options)
example = doctest.Example(code[0].code, output,  # type: ignore
exc_msg=exc_msg,
lineno=code[0].lineno,
options=options)
test = doctest.DocTest([example], {}, group.name,  # type: ignore
code[0].filename, code[0].lineno, None)
self.type = 'exec'  # multiple statements again

@@ -34,7 +34,7 @@ from os import path

from docutils import nodes
from docutils.utils import relative_path
from six import PY3, iteritems, string_types
from six import iteritems, string_types, text_type
from six.moves.urllib.parse import urlsplit, urlunsplit

import sphinx

@@ -50,10 +50,7 @@ if False:
from sphinx.config import Config  # NOQA
from sphinx.environment import BuildEnvironment  # NOQA

if PY3:
unicode = str

Inventory = Dict[unicode, Dict[unicode, Tuple[unicode, unicode, unicode, unicode]]]
Inventory = Dict[text_type, Dict[text_type, Tuple[text_type, text_type, text_type, text_type]]]  # NOQA

logger = logging.getLogger(__name__)

@@ -17,7 +17,7 @@ import tokenize
from token import NAME, NEWLINE, INDENT, DEDENT, NUMBER, OP, STRING
from tokenize import COMMENT, NL

from six import PY2, text_type
from six import text_type

if False:
# For type annotation

@@ -59,10 +59,7 @@ def get_lvar_names(node, self=None):
# => TypeError
"""
if self:
if PY2:
self_id = self.id  # type: ignore
else:
self_id = self.arg
self_id = self.arg  # type: ignore

node_name = node.__class__.__name__
if node_name in ('Index', 'Num', 'Slice', 'Str', 'Subscript'):

@@ -22,7 +22,7 @@ import re
import sys
import warnings

from six import iteritems, PY3
from six import iteritems

try:
import MeCab

@@ -77,16 +77,12 @@ class MecabSplitter(BaseSplitter):

def split(self, input):
# type: (unicode) -> List[unicode]
input2 = input if PY3 else input.encode(self.dict_encode)
if native_module:
result = self.native.parse(input2)
result = self.native.parse(input)
else:
result = self.ctypes_libmecab.mecab_sparse_tostr(
self.ctypes_mecab, input.encode(self.dict_encode))
if PY3:
return result.split(' ')
else:
return result.decode(self.dict_encode).split(' ')
return result.split(' ')

def init_native(self, options):
# type: (Dict) -> None

@@ -11,7 +11,7 @@ import shutil
import sys
from io import open

from six import PY2, text_type
from six import text_type

if False:
# For type annotation

@@ -25,13 +25,6 @@ class path(text_type):
"""
Represents a path which behaves like a string.
"""
if PY2:
def __new__(cls, s, encoding=FILESYSTEMENCODING, errors='strict'):
# type: (unicode, unicode, unicode) -> path
if isinstance(s, str):
s = s.decode(encoding, errors)
return text_type.__new__(cls, s)  # type: ignore
return text_type.__new__(cls, s)  # type: ignore

@property
def parent(self):

@@ -169,8 +162,7 @@ class path(text_type):
"""
Returns the text in the file.
"""
mode = 'rU' if PY2 else 'r'
with open(self, mode=mode, encoding=encoding, **kwargs) as f:
with open(self, mode='r', encoding=encoding, **kwargs) as f:
return f.read()

def bytes(self):

@@ -20,6 +20,7 @@ from six import string_types

from sphinx import application, locale
from sphinx.builders.latex import LaTeXBuilder
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.pycode import ModuleAnalyzer
from sphinx.testing.path import path
from sphinx.util.osutil import relpath

@@ -191,6 +192,8 @@ _unicode_literals_re = re.compile(r'u(".*?")|u(\'.*?\')')

def remove_unicode_literals(s):
# type: (unicode) -> unicode
warnings.warn('remove_unicode_literals() is deprecated.',
RemovedInSphinx40Warning)
return _unicode_literals_re.sub(lambda x: x.group(1) or x.group(2), s)  # type: ignore

@@ -18,7 +18,7 @@ from os import path
from typing import NamedTuple

import imagesize
from six import PY3, BytesIO, iteritems
from six import BytesIO, iteritems, text_type

from sphinx.deprecation import RemovedInSphinx30Warning

@@ -34,9 +34,6 @@ if False:
# For type annotation
from typing import Dict, IO, List, Tuple  # NOQA

if PY3:
unicode = str  # special alias for static typing...

mime_suffixes = OrderedDict([
('.gif', 'image/gif'),
('.jpg', 'image/jpeg'),

@@ -46,8 +43,8 @@ mime_suffixes = OrderedDict([
('.svgz', 'image/svg+xml'),
])  # type: Dict[unicode, unicode]

DataURI = NamedTuple('DataURI', [('mimetype', unicode),
('charset', unicode),
DataURI = NamedTuple('DataURI', [('mimetype', text_type),
('charset', text_type),
('data', bytes)])

@@ -10,14 +10,14 @@
"""
from __future__ import absolute_import

import enum
import inspect
import re
import sys
import typing
from collections import OrderedDict
from functools import partial

from six import PY2, PY3, StringIO, binary_type, string_types, itervalues
from six import StringIO, binary_type, string_types, itervalues
from six.moves import builtins

from sphinx.util import force_decode

@@ -33,112 +33,73 @@ logger = logging.getLogger(__name__)
memory_address_re = re.compile(r' at 0x[0-9a-f]{8,16}(?=>)', re.IGNORECASE)


if PY3:
# Copied from the definition of inspect.getfullargspec from Python master,
# and modified to remove the use of special flags that break decorated
# callables and bound methods in the name of backwards compatibility. Used
# under the terms of PSF license v2, which requires the above statement
# and the following:
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
# 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software
# Foundation; All Rights Reserved
def getargspec(func):
"""Like inspect.getfullargspec but supports bound methods, and wrapped
methods."""
# On 3.5+, signature(int) or similar raises ValueError. On 3.4, it
# succeeds with a bogus signature. We want a TypeError uniformly, to
# match historical behavior.
if (isinstance(func, type) and
is_builtin_class_method(func, "__new__") and
is_builtin_class_method(func, "__init__")):
raise TypeError(
"can't compute signature for built-in type {}".format(func))
# Copied from the definition of inspect.getfullargspec from Python master,
# and modified to remove the use of special flags that break decorated
# callables and bound methods in the name of backwards compatibility. Used
# under the terms of PSF license v2, which requires the above statement
# and the following:
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
# 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software
# Foundation; All Rights Reserved
def getargspec(func):
"""Like inspect.getfullargspec but supports bound methods, and wrapped
methods."""
# On 3.5+, signature(int) or similar raises ValueError. On 3.4, it
# succeeds with a bogus signature. We want a TypeError uniformly, to
# match historical behavior.
if (isinstance(func, type) and
is_builtin_class_method(func, "__new__") and
is_builtin_class_method(func, "__init__")):
raise TypeError(
"can't compute signature for built-in type {}".format(func))

sig = inspect.signature(func)
sig = inspect.signature(func)  # type: ignore

args = []
varargs = None
varkw = None
kwonlyargs = []
defaults = ()
annotations = {}
defaults = ()
kwdefaults = {}
args = []
varargs = None
varkw = None
kwonlyargs = []
defaults = ()
annotations = {}
defaults = ()
kwdefaults = {}

if sig.return_annotation is not sig.empty:
annotations['return'] = sig.return_annotation
if sig.return_annotation is not sig.empty:
annotations['return'] = sig.return_annotation

for param in sig.parameters.values():
kind = param.kind
name = param.name
for param in sig.parameters.values():
kind = param.kind
name = param.name

if kind is inspect.Parameter.POSITIONAL_ONLY:
args.append(name)
elif kind is inspect.Parameter.POSITIONAL_OR_KEYWORD:
args.append(name)
if param.default is not param.empty:
defaults += (param.default,)
elif kind is inspect.Parameter.VAR_POSITIONAL:
varargs = name
elif kind is inspect.Parameter.KEYWORD_ONLY:
kwonlyargs.append(name)
if param.default is not param.empty:
kwdefaults[name] = param.default
elif kind is inspect.Parameter.VAR_KEYWORD:
varkw = name
if kind is inspect.Parameter.POSITIONAL_ONLY:  # type: ignore
args.append(name)
elif kind is inspect.Parameter.POSITIONAL_OR_KEYWORD:  # type: ignore
args.append(name)
if param.default is not param.empty:
defaults += (param.default,)  # type: ignore
elif kind is inspect.Parameter.VAR_POSITIONAL:  # type: ignore
varargs = name
elif kind is inspect.Parameter.KEYWORD_ONLY:  # type: ignore
kwonlyargs.append(name)
if param.default is not param.empty:
kwdefaults[name] = param.default
elif kind is inspect.Parameter.VAR_KEYWORD:  # type: ignore
varkw = name

if param.annotation is not param.empty:
annotations[name] = param.annotation
if param.annotation is not param.empty:
annotations[name] = param.annotation

if not kwdefaults:
# compatibility with 'func.__kwdefaults__'
kwdefaults = None
if not kwdefaults:
# compatibility with 'func.__kwdefaults__'
kwdefaults = None

if not defaults:
# compatibility with 'func.__defaults__'
defaults = None
if not defaults:
# compatibility with 'func.__defaults__'
defaults = None

return inspect.FullArgSpec(args, varargs, varkw, defaults,
kwonlyargs, kwdefaults, annotations)

else:  # 2.7
def getargspec(func):
# type: (Any) -> Any
"""Like inspect.getargspec but supports functools.partial as well."""
if inspect.ismethod(func):
func = func.__func__
parts = 0, ()  # type: Tuple[int, Tuple[unicode, ...]]
if type(func) is partial:
keywords = func.keywords
if keywords is None:
keywords = {}
parts = len(func.args), keywords.keys()
func = func.func
if not inspect.isfunction(func):
raise TypeError('%r is not a Python function' % func)
args, varargs, varkw = inspect.getargs(func.__code__)
func_defaults = func.__defaults__
if func_defaults is None:
func_defaults = []
else:
func_defaults = list(func_defaults)
if parts[0]:
args = args[parts[0]:]
if parts[1]:
for arg in parts[1]:
i = args.index(arg) - len(args)  # type: ignore
del args[i]
try:
del func_defaults[i]
except IndexError:
pass
return inspect.ArgSpec(args, varargs, varkw, func_defaults)  # type: ignore

try:
import enum
except ImportError:
enum = None
return inspect.FullArgSpec(args, varargs, varkw, defaults,  # type: ignore
kwonlyargs, kwdefaults, annotations)


def isenumclass(x):

@@ -275,18 +236,15 @@ def object_description(object):
except TypeError:
pass  # Cannot sort set values, fall back to generic repr
else:
template = "{%s}" if PY3 else "set([%s])"
return template % ", ".join(object_description(x)
for x in sorted_values)
return "{%s}" % ", ".join(object_description(x) for x in sorted_values)
if isinstance(object, frozenset):
try:
sorted_values = sorted(object)
except TypeError:
pass  # Cannot sort frozenset values, fall back to generic repr
else:
template = "frozenset({%s})" if PY3 else "frozenset([%s])"
return template % ", ".join(object_description(x)
for x in sorted_values)
return "frozenset({%s})" % ", ".join(object_description(x)
for x in sorted_values)
try:
s = repr(object)
except Exception:

@@ -351,20 +309,17 @@ class Signature(object):
self.has_retval = has_retval
self.partialmethod_with_noargs = False

if PY3:
try:
self.signature = inspect.signature(subject)
except IndexError:
# Until python 3.6.4, cpython has been crashed on inspection for
# partialmethods not having any arguments.
# https://bugs.python.org/issue33009
if hasattr(subject, '_partialmethod'):
self.signature = None
self.partialmethod_with_noargs = True
else:
raise
else:
self.argspec = getargspec(subject)
try:
self.signature = inspect.signature(subject)  # type: ignore
except IndexError:
# Until python 3.6.4, cpython has been crashed on inspection for
# partialmethods not having any arguments.
# https://bugs.python.org/issue33009
if hasattr(subject, '_partialmethod'):
self.signature = None
self.partialmethod_with_noargs = True
else:
raise

try:
self.annotations = typing.get_type_hints(subject)  # type: ignore

@@ -380,53 +335,32 @@ class Signature(object):
if bound_method:
# client gives a hint that the subject is a bound method

if PY3 and inspect.ismethod(subject):
if inspect.ismethod(subject):
# inspect.signature already considers the subject is bound method.
# So it is not need to skip first argument.
self.skip_first_argument = False
else:
self.skip_first_argument = True
else:
if PY3:
# inspect.signature recognizes type of method properly without any hints
self.skip_first_argument = False
else:
# check the subject is bound method or not
self.skip_first_argument = inspect.ismethod(subject) and subject.__self__  # type: ignore # NOQA
# inspect.signature recognizes type of method properly without any hints
self.skip_first_argument = False

@property
def parameters(self):
# type: () -> Dict
if PY3:
if self.partialmethod_with_noargs:
return {}
else:
return self.signature.parameters
if self.partialmethod_with_noargs:
return {}
else:
params = OrderedDict()  # type: Dict
positionals = len(self.argspec.args) - len(self.argspec.defaults)
for i, arg in enumerate(self.argspec.args):
if i < positionals:
params[arg] = Parameter(arg)
else:
default = self.argspec.defaults[i - positionals]
params[arg] = Parameter(arg, default=default)
if self.argspec.varargs:
params[self.argspec.varargs] = Parameter(self.argspec.varargs,
Parameter.VAR_POSITIONAL)
if self.argspec.keywords:
params[self.argspec.keywords] = Parameter(self.argspec.keywords,
Parameter.VAR_KEYWORD)
return params
return self.signature.parameters

@property
def return_annotation(self):
# type: () -> Any
if PY3 and self.signature:
if self.signature:
if self.has_retval:
return self.signature.return_annotation
else:
return inspect.Parameter.empty
return inspect.Parameter.empty  # type: ignore
else:
return None

@@ -477,7 +411,7 @@ class Signature(object):
args.append(arg.getvalue())
last_kind = param.kind

if PY2 or self.return_annotation is inspect.Parameter.empty:
if self.return_annotation is inspect.Parameter.empty:  # type: ignore
return '(%s)' % ', '.join(args)
else:
if 'return' in self.annotations:

@@ -12,7 +12,7 @@ import os
import re
import zlib

from six import PY3
from six import text_type

from sphinx.util import logging

@@ -22,10 +22,7 @@ if False:
from sphinx.builders import Builder  # NOQA
from sphinx.environment import BuildEnvironment  # NOQA

if PY3:
unicode = str

Inventory = Dict[unicode, Dict[unicode, Tuple[unicode, unicode, unicode, unicode]]]
Inventory = Dict[text_type, Dict[text_type, Tuple[text_type, text_type, text_type, text_type]]]  # NOQA


BUFSIZE = 16 * 1024

@@ -17,7 +17,6 @@ from contextlib import contextmanager

from docutils import nodes
from docutils.utils import get_source_line
from six import PY2, StringIO

from sphinx.errors import SphinxWarning
from sphinx.util.console import colorize

@@ -162,28 +161,7 @@ class WarningStreamHandler(logging.StreamHandler):
pass


class NewLineStreamHandlerPY2(logging.StreamHandler):
"""StreamHandler which switches line terminator by record.nonl flag."""

def emit(self, record):
# type: (logging.LogRecord) -> None
try:
self.acquire()
stream = self.stream
if getattr(record, 'nonl', False):
# remove return code forcely when nonl=True
self.stream = StringIO()
super(NewLineStreamHandlerPY2, self).emit(record)
stream.write(self.stream.getvalue()[:-1])
stream.flush()
else:
super(NewLineStreamHandlerPY2, self).emit(record)
finally:
self.stream = stream
self.release()


class NewLineStreamHandlerPY3(logging.StreamHandler):
class NewLineStreamHandler(logging.StreamHandler):
"""StreamHandler which switches line terminator by record.nonl flag."""

def emit(self, record):

@@ -193,18 +171,12 @@ class NewLineStreamHandlerPY3(logging.StreamHandler):
if getattr(record, 'nonl', False):
# skip appending terminator when nonl=True
self.terminator = ''
super(NewLineStreamHandlerPY3, self).emit(record)
super(NewLineStreamHandler, self).emit(record)
finally:
self.terminator = '\n'
self.release()


if PY2:
NewLineStreamHandler = NewLineStreamHandlerPY2
else:
NewLineStreamHandler = NewLineStreamHandlerPY3


class MemoryHandler(logging.handlers.BufferingHandler):
"""Handler buffering all logs."""

@@ -13,7 +13,6 @@ from __future__ import print_function
import contextlib
import errno
import filecmp
import locale
import os
import re
import shutil

@@ -23,7 +22,7 @@ import warnings
from io import BytesIO, StringIO
from os import path

from six import PY2, PY3, text_type
from six import text_type

from sphinx.deprecation import RemovedInSphinx30Warning, RemovedInSphinx40Warning

@@ -37,9 +36,6 @@ ENOENT = getattr(errno, 'ENOENT', 0)
EPIPE = getattr(errno, 'EPIPE', 0)
EINVAL = getattr(errno, 'EINVAL', 0)

if PY3:
unicode = str  # special alias for static typing...

# SEP separates path elements in the canonical file names
#
# Define SEP as a manifest constant, not so much because we expect it to change

@@ -195,19 +191,13 @@ def ustrftime(format, *args):
if source_date_epoch is not None:
time_struct = time.gmtime(float(source_date_epoch))
args = [time_struct]  # type: ignore
if PY2:
# if a locale is set, the time strings are encoded in the encoding
# given by LC_TIME; if that is available, use it
enc = locale.getlocale(locale.LC_TIME)[1] or 'utf-8'
return time.strftime(text_type(format).encode(enc), *args).decode(enc)
else:  # Py3
# On Windows, time.strftime() and Unicode characters will raise UnicodeEncodeError.
# https://bugs.python.org/issue8304
try:
return time.strftime(format, *args)
except UnicodeEncodeError:
r = time.strftime(format.encode('unicode-escape').decode(), *args)
return r.encode().decode('unicode-escape')
# On Windows, time.strftime() and Unicode characters will raise UnicodeEncodeError.
# https://bugs.python.org/issue8304
try:
return time.strftime(format, *args)  # type: ignore
except UnicodeEncodeError:
r = time.strftime(format.encode('unicode-escape').decode(), *args)  # type: ignore
return r.encode().decode('unicode-escape')


def relpath(path, start=os.curdir):

@@ -9,10 +9,12 @@
:license: BSD, see LICENSE for details.
"""

import codecs
import sys
from html import escape as htmlescape  # NOQA
from io import TextIOWrapper  # NOQA
from textwrap import indent  # type: ignore # NOQA

from six import PY3, text_type, exec_
from six import text_type, exec_

if False:
# For type annotation

@@ -25,114 +27,52 @@ NoneType = type(None)
# Python 2/3 compatibility

# prefix for Unicode strings
if PY3:
u = ''
else:
u = 'u'


# TextIOWrapper
if PY3:
from io import TextIOWrapper
else:
def TextIOWrapper(stream, encoding):
# type: (file, str) -> Any
return codecs.lookup(encoding or 'ascii')[2](stream)
u = ''


# sys_encoding: some kind of default system encoding; should be used with
# a lenient error handler
if PY3:
sys_encoding = sys.getdefaultencoding()
else:
sys_encoding = __import__('locale').getpreferredencoding()
sys_encoding = sys.getdefaultencoding()


# terminal_safe(): safely encode a string for printing to the terminal
if PY3:
def terminal_safe(s):
# type: (unicode) -> unicode
return s.encode('ascii', 'backslashreplace').decode('ascii')
else:
def terminal_safe(s):
# type: (unicode) -> unicode
return s.encode('ascii', 'backslashreplace')
def terminal_safe(s):
# type: (unicode) -> unicode
return s.encode('ascii', 'backslashreplace').decode('ascii')


# convert_with_2to3():
if PY3:
# support for running 2to3 over config files
def convert_with_2to3(filepath):
# type: (unicode) -> unicode
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
from lib2to3.pgen2.parse import ParseError
fixers = get_fixers_from_package('lib2to3.fixes')
refactoring_tool = RefactoringTool(fixers)
source = refactoring_tool._read_python_source(filepath)[0]
try:
tree = refactoring_tool.refactor_string(source, 'conf.py')
except ParseError as err:
# do not propagate lib2to3 exceptions
lineno, offset = err.context[1]
# try to match ParseError details with SyntaxError details
raise SyntaxError(err.msg, (filepath, lineno, offset, err.value))
return text_type(tree)
else:
# no need to refactor on 2.x versions
convert_with_2to3 = None


# htmlescape()
if PY3:
from html import escape as htmlescape
else:
from cgi import escape as htmlescape  # NOQA
# support for running 2to3 over config files
def convert_with_2to3(filepath):
# type: (unicode) -> unicode
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
from lib2to3.pgen2.parse import ParseError
fixers = get_fixers_from_package('lib2to3.fixes')
refactoring_tool = RefactoringTool(fixers)
source = refactoring_tool._read_python_source(filepath)[0]
try:
tree = refactoring_tool.refactor_string(source, 'conf.py')
except ParseError as err:
# do not propagate lib2to3 exceptions
lineno, offset = err.context[1]
# try to match ParseError details with SyntaxError details
raise SyntaxError(err.msg, (filepath, lineno, offset, err.value))
return text_type(tree)


# UnicodeMixin
if PY3:
class UnicodeMixin(object):
"""Mixin class to handle defining the proper __str__/__unicode__
methods in Python 2 or 3."""
class UnicodeMixin(object):
"""Mixin class to handle defining the proper __str__/__unicode__
methods in Python 2 or 3."""

def __str__(self):
return self.__unicode__()
else:
class UnicodeMixin(object):
"""Mixin class to handle defining the proper __str__/__unicode__
methods in Python 2 or 3."""

def __str__(self):
# type: () -> str
return self.__unicode__().encode('utf8')  # type: ignore


# indent()
if PY3:
from textwrap import indent
else:
# backport from python3
def indent(text, prefix, predicate=None):
# type: (unicode, unicode, Callable) -> unicode
if predicate is None:
def predicate(line):
# type: (unicode) -> unicode
return line.strip()

def prefixed_lines():
# type: () -> Generator
for line in text.splitlines(True):
yield (prefix + line if predicate(line) else line)
return ''.join(prefixed_lines())
def __str__(self):
return self.__unicode__()


def execfile_(filepath, _globals, open=open):
# type: (unicode, Any, Callable) -> None
from sphinx.util.osutil import fs_encoding
# get config source -- 'b' is a no-op under 2.x, while 'U' is
# ignored under 3.x (but 3.x compile() accepts \r\n newlines)
mode = 'rb' if PY3 else 'rbU'
with open(filepath, mode) as f:
with open(filepath, 'rb') as f:
source = f.read()

# compile to a code object, handle syntax errors

@@ -13,15 +13,12 @@ from typing import Callable, Dict, List, Tuple

from docutils import nodes
from docutils.parsers.rst.states import Inliner
from six import PY3
from six import text_type


if PY3:
unicode = str

# common role functions
RoleFunction = Callable[[unicode, unicode, unicode, int, Inliner, Dict, List[unicode]],
RoleFunction = Callable[[text_type, text_type, text_type, int, Inliner, Dict, List[text_type]],
Tuple[List[nodes.Node], List[nodes.Node]]]

# title getter functions for enumerable nodes (see sphinx.domains.std)
TitleGetter = Callable[[nodes.Node], unicode]
TitleGetter = Callable[[nodes.Node], text_type]

@@ -17,7 +17,6 @@ from warnings import catch_warnings

import pytest
from docutils.statemachine import ViewList
from six import PY3

from sphinx.ext.autodoc import (
ModuleLevelDocumenter, cut_lines, between, ALL,

@@ -30,11 +29,6 @@ from sphinx.util.docutils import LoggingReporter

app = None

if PY3:
ROGER_METHOD = ' .. py:classmethod:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)'
else:
ROGER_METHOD = ' .. py:classmethod:: Class.roger(a, e=5, f=6)'

IS_PYPY = platform.python_implementation() == 'PyPy'

@@ -722,7 +716,7 @@ def test_autodoc_undoc_members(app):
' .. py:method:: Class.meth()',
' .. py:classmethod:: Class.moore(a, e, f) -> happiness',
' .. py:attribute:: Class.prop',
ROGER_METHOD,
' .. py:classmethod:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
' .. py:attribute:: Class.skipattr',
' .. py:method:: Class.skipmeth()',
' .. py:attribute:: Class.udocattr',

@@ -802,7 +796,7 @@ def test_autodoc_special_members(app):
' .. py:method:: Class.meth()',
' .. py:classmethod:: Class.moore(a, e, f) -> happiness',
' .. py:attribute:: Class.prop',
ROGER_METHOD,
' .. py:classmethod:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
' .. py:attribute:: Class.skipattr',
' .. py:method:: Class.skipmeth()',
' .. py:attribute:: Class.udocattr',

@@ -875,11 +869,6 @@ def test_autodoc_subclass_of_builtin_class(app):

@pytest.mark.sphinx('html', testroot='ext-autodoc')
def test_autodoc_inner_class(app):
if PY3:
builtins = ' alias of :class:`builtins.dict`'
else:
builtins = ' alias of :class:`__builtin__.dict`'

options = {"members": None}
actual = do_autodoc(app, 'class', 'target.Outer', options)
assert list(actual) == [

@@ -905,7 +894,7 @@ def test_autodoc_inner_class(app):
' .. py:attribute:: Outer.factory',
' :module: target',
' ',
builtins
' alias of :class:`builtins.dict`'
]

actual = do_autodoc(app, 'class', 'target.Outer.Inner', options)

@@ -974,7 +963,7 @@ def test_autodoc_member_order(app):
' .. py:attribute:: Class.docattr',
' .. py:attribute:: Class.udocattr',
' .. py:attribute:: Class.mdocattr',
ROGER_METHOD,
' .. py:classmethod:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
' .. py:classmethod:: Class.moore(a, e, f) -> happiness',
' .. py:attribute:: Class.inst_attr_inline',
' .. py:attribute:: Class.inst_attr_comment',

@@ -993,7 +982,7 @@ def test_autodoc_member_order(app):
' .. py:method:: Class.excludemeth()',
' .. py:method:: Class.meth()',
' .. py:classmethod:: Class.moore(a, e, f) -> happiness',
ROGER_METHOD,
' .. py:classmethod:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
' .. py:method:: Class.skipmeth()',
' .. py:method:: Class.undocmeth()',
' .. py:attribute:: Class._private_inst_attr',

@@ -1028,7 +1017,7 @@ def test_autodoc_member_order(app):
' .. py:method:: Class.meth()',
' .. py:classmethod:: Class.moore(a, e, f) -> happiness',
' .. py:attribute:: Class.prop',
ROGER_METHOD,
' .. py:classmethod:: Class.roger(a, *, b=2, c=3, d=4, e=5, f=6)',
' .. py:attribute:: Class.skipattr',
' .. py:method:: Class.skipmeth()',
' .. py:attribute:: Class.udocattr',

@@ -16,10 +16,9 @@ from itertools import cycle, chain

import pytest
from html5lib import getTreeBuilder, HTMLParser
from six import PY3

from sphinx.errors import ConfigError
from sphinx.testing.util import remove_unicode_literals, strip_escseq
from sphinx.testing.util import strip_escseq
from sphinx.util.inventory import InventoryFile


@@ -30,10 +29,10 @@ ENV_WARNINGS = """\
%(root)s/autodoc_fodder.py:docstring of autodoc_fodder.MarkupError:\\d+: \
WARNING: Explicit markup ends without a blank line; unexpected unindent.
%(root)s/index.rst:\\d+: WARNING: Encoding 'utf-8-sig' used for reading included \
file u'%(root)s/wrongenc.inc' seems to be wrong, try giving an :encoding: option
file '%(root)s/wrongenc.inc' seems to be wrong, try giving an :encoding: option
%(root)s/index.rst:\\d+: WARNING: image file not readable: foo.png
%(root)s/index.rst:\\d+: WARNING: download file not readable: %(root)s/nonexisting.png
%(root)s/index.rst:\\d+: WARNING: invalid single index entry u''
%(root)s/index.rst:\\d+: WARNING: invalid single index entry ''
%(root)s/undecodable.rst:\\d+: WARNING: undecodable source characters, replacing \
with "\\?": b?'here: >>>(\\\\|/)xbb<<<((\\\\|/)r)?'
"""

@@ -45,10 +44,6 @@ HTML_WARNINGS = ENV_WARNINGS + """\
%(root)s/index.rst:\\d+: WARNING: Could not lex literal_block as "c". Highlighting skipped.
"""

if PY3:
ENV_WARNINGS = remove_unicode_literals(ENV_WARNINGS)
HTML_WARNINGS = remove_unicode_literals(HTML_WARNINGS)


etree_cache = {}

@@ -17,11 +17,10 @@ from shutil import copyfile
from subprocess import Popen, PIPE

import pytest
from six import PY3
from test_build_html import ENV_WARNINGS

from sphinx.errors import SphinxError
from sphinx.testing.util import remove_unicode_literals, strip_escseq
from sphinx.testing.util import strip_escseq
from sphinx.util import docutils
from sphinx.util.osutil import cd, ensuredir
from sphinx.writers.latex import LaTeXTranslator

@@ -40,9 +39,6 @@ LATEX_WARNINGS = ENV_WARNINGS + """\
%(root)s/index.rst:\\d+: WARNING: Could not lex literal_block as "c". Highlighting skipped.
"""

if PY3:
LATEX_WARNINGS = remove_unicode_literals(LATEX_WARNINGS)


# only run latex if all needed packages are there
def kpsetest(*filenames):

@@ -15,10 +15,9 @@ import re
from subprocess import Popen, PIPE

import pytest
from six import PY3
from test_build_html import ENV_WARNINGS

from sphinx.testing.util import remove_unicode_literals, strip_escseq
from sphinx.testing.util import strip_escseq
from sphinx.writers.texinfo import TexinfoTranslator


@@ -30,9 +29,6 @@ TEXINFO_WARNINGS = ENV_WARNINGS + """\
\\['application/pdf', 'image/svg\\+xml'\\] \\(svgimg.\\*\\)
"""

if PY3:
TEXINFO_WARNINGS = remove_unicode_literals(TEXINFO_WARNINGS)


@pytest.mark.sphinx('texinfo', testroot='warnings', freshenv=True)
def test_texinfo_warnings(app, status, warning):

@@ -11,7 +11,6 @@
"""
import mock
import pytest
from six import PY3

import sphinx
from sphinx.config import Config, ENUM, string_classes, check_confval_types

@@ -136,19 +135,6 @@ def test_errors_warnings(logger, tempdir):
assert cfg.project == u'Jägermeister'
assert logger.called is False

# test the warning for bytestrings with non-ascii content
# bytestrings with non-ascii content are a syntax error in python3 so we
# skip the test there
if PY3:
return
(tempdir / 'conf.py').write_text(
u'# -*- coding: latin-1\nproject = "fooä"\n', encoding='latin-1')
cfg = Config.read(tempdir, {}, None)

assert logger.warning.called is False
cfg.check_unicode()
assert logger.warning.called is True


def test_errors_if_setup_is_not_callable(tempdir, make_app):
# test the error to call setup() in the config file

@@ -242,7 +228,7 @@ TYPECHECK_WARNINGS = [
('value8', B(), None, C(), False),  # sibling type
('value9', None, None, 'foo', False),  # no default or no annotations
('value10', None, None, 123, False),  # no default or no annotations
('value11', None, [str], u'bar', False if PY3 else True),  # str vs unicode
('value11', None, [str], u'bar', False),  # str vs unicode
('value12', 'string', None, u'bar', False),  # str vs unicode
('value13', None, string_classes, 'bar', False),  # string_classes
('value14', None, string_classes, u'bar', False),  # string_classes

@@ -14,7 +14,6 @@ from collections import Counter
import pytest
from packaging.specifiers import InvalidSpecifier
from packaging.version import InvalidVersion
from six import PY2

from sphinx.ext.doctest import is_allowed_version

@@ -112,9 +111,6 @@ def record(directive, part, should_skip):
return 'Recorded {} {} {}'.format(directive, part, should_skip)


@pytest.mark.xfail(
PY2, reason='node.source points to document instead of filename',
)
@pytest.mark.sphinx('doctest', testroot='ext-doctest-with-autodoc')
def test_reporting_with_autodoc(app, status, warning, capfd):
# Patch builder to get a copy of the output

@@ -12,8 +12,6 @@
import os
import sys

from six import PY2

import sphinx
from sphinx.pycode import ModuleAnalyzer

@@ -24,20 +22,14 @@ def test_ModuleAnalyzer_for_string():
analyzer = ModuleAnalyzer.for_string('print("Hello world")', 'module_name')
assert analyzer.modname == 'module_name'
assert analyzer.srcname == '<string>'
if PY2:
assert analyzer.encoding == 'ascii'
else:
assert analyzer.encoding is None
assert analyzer.encoding is None


def test_ModuleAnalyzer_for_file():
analyzer = ModuleAnalyzer.for_string(SPHINX_MODULE_PATH, 'sphinx')
assert analyzer.modname == 'sphinx'
assert analyzer.srcname == '<string>'
if PY2:
assert analyzer.encoding == 'ascii'
else:
assert analyzer.encoding is None
assert analyzer.encoding is None


def test_ModuleAnalyzer_for_module():

@@ -12,7 +12,6 @@
import sys

import pytest
from six import PY2

from sphinx.pycode.parser import Parser

@@ -135,7 +134,6 @@ def test_complex_assignment():
assert parser.definitions == {}


@pytest.mark.skipif(PY2, reason='tests for py3 syntax')
def test_complex_assignment_py3():
source = ('a, *b, c = (1, 2, 3, 4)  #: unpack assignment\n'
'd, *self.attr = (5, 6, 7)  #: unpack assignment2\n'

@@ -13,7 +13,7 @@ import sys
import time

import pytest
from six import PY2, text_type, StringIO
from six import text_type, StringIO
from six.moves import input

from sphinx import application

@@ -37,12 +37,7 @@ def mock_input(answers, needanswer=False):
raise AssertionError('answer for %r missing and no default '
'present' % prompt)
called.add(prompt)
if PY2:
prompt = str(prompt)  # Python2.x raw_input emulation
# `raw_input` encode `prompt` by default encoding to print.
else:
prompt = text_type(prompt)  # Python3.x input emulation
# `input` decode prompt by default encoding before print.
prompt = text_type(prompt)
for question in answers:
if prompt.startswith(qs.PROMPT_PREFIX + question):
return answers[question]

@@ -13,7 +13,6 @@ import sys
from textwrap import dedent

import pytest
from six import PY3

from sphinx.util import inspect

@@ -25,15 +24,11 @@ def test_getargspec():
spec = inspect.getargspec(func)
assert spec.args == ['a', 'b', 'c', 'd']
assert spec.varargs == 'e'
if PY3:
assert spec.varkw == 'f'
assert spec.defaults == (1, 2)
assert spec.kwonlyargs == []
assert spec.kwonlydefaults is None
assert spec.annotations == {}
else:
assert spec.keywords == 'f'
assert spec.defaults == [1, 2]
assert spec.varkw == 'f'
assert spec.defaults == (1, 2)
assert spec.kwonlyargs == []
assert spec.kwonlydefaults is None
assert spec.annotations == {}


def test_getargspec_partial():

@@ -42,19 +37,13 @@ def test_getargspec_partial():

partial = functools.partial(func1, 10, c=11)
spec = inspect.getargspec(partial)
if PY3:
assert spec.args == ['b']
assert spec.varargs is None
assert spec.varkw == 'f'
assert spec.defaults is None
assert spec.kwonlyargs == ['c', 'd']
assert spec.kwonlydefaults == {'c': 11, 'd': 2}
assert spec.annotations == {}
else:
assert spec.args == ['b', 'd']
assert spec.varargs == 'e'
assert spec.keywords == 'f'
assert spec.defaults == [2]
assert spec.args == ['b']
assert spec.varargs is None
assert spec.varkw == 'f'
assert spec.defaults is None
assert spec.kwonlyargs == ['c', 'd']
assert spec.kwonlydefaults == {'c': 11, 'd': 2}
assert spec.annotations == {}


def test_getargspec_partial2():

@@ -62,19 +51,8 @@ def test_getargspec_partial2():
pass
p = functools.partial(fun, 10, c=11)

if PY3:
# Python 3's partial is rather cleverer than Python 2's, and we
# have to jump through some hoops to define an equivalent function
# in a way that won't confuse Python 2's parser:
ns = {}
exec(dedent("""
def f_expected(b, *, c=11, d=2):
pass
"""), ns)
f_expected = ns["f_expected"]
else:
def f_expected(b, d=2):
pass
def f_expected(b, *, c=11, d=2):
pass
expected = inspect.getargspec(f_expected)

assert expected == inspect.getargspec(p)

@@ -367,37 +345,25 @@ def test_dictionary_sorting():
def test_set_sorting():
set_ = set("gfedcba")
description = inspect.object_description(set_)
if PY3:
assert description == "{'a', 'b', 'c', 'd', 'e', 'f', 'g'}"
else:
assert description == "set(['a', 'b', 'c', 'd', 'e', 'f', 'g'])"
assert description == "{'a', 'b', 'c', 'd', 'e', 'f', 'g'}"


def test_set_sorting_fallback():
set_ = set((None, 1))
description = inspect.object_description(set_)
if PY3:
assert description in ("{1, None}", "{None, 1}")
else:
assert description in ("set([1, None])", "set([None, 1])")
assert description in ("{1, None}", "{None, 1}")


def test_frozenset_sorting():
frozenset_ = frozenset("gfedcba")
description = inspect.object_description(frozenset_)
if PY3:
assert description == "frozenset({'a', 'b', 'c', 'd', 'e', 'f', 'g'})"
else:
assert description == "frozenset(['a', 'b', 'c', 'd', 'e', 'f', 'g'])"
assert description == "frozenset({'a', 'b', 'c', 'd', 'e', 'f', 'g'})"


def test_frozenset_sorting_fallback():
frozenset_ = frozenset((None, 1))
description = inspect.object_description(frozenset_)
if PY3:
assert description in ("frozenset({1, None})", "frozenset({None, 1})")
else:
assert description in ("frozenset([1, None])", "frozenset([None, 1])")
assert description in ("frozenset({1, None})", "frozenset({None, 1})")


def test_dict_customtype():