Merge pull request #5760 from tk0miya/migrate_to_mypy-py3

Migrate to mypy py3 mode
Takeshi KOMIYA 2018-12-15 13:07:58 +09:00 committed by GitHub
commit aaf0046f44
60 changed files with 165 additions and 177 deletions

View File

@ -43,7 +43,6 @@ paths =
.
[mypy]
python_version = 2.7
show_column_numbers = True
show_error_context = True
ignore_missing_imports = True
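
Dropping python_version = 2.7 switches mypy to its default py3 mode. Judging only from the options visible in this hunk, the checked [mypy] section presumably ends up as:

    [mypy]
    show_column_numbers = True
    show_error_context = True
    ignore_missing_imports = True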

View File

@ -46,6 +46,7 @@ extras_require = {
'flake8>=3.5.0',
'flake8-import-order',
'mypy>=0.470',
'docutils-stubs',
],
}

View File

@ -13,4 +13,4 @@ import sys
from sphinx.cmd.build import main
sys.exit(main(sys.argv[1:])) # type: ignore
sys.exit(main(sys.argv[1:]))

View File

@ -276,10 +276,10 @@ class Sphinx:
user_locale_dirs, self.config.language, domains=['sphinx'],
charset=self.config.source_encoding):
catinfo.write_mo(self.config.language)
locale_dirs = [None, path.join(package_dir, 'locale')] + user_locale_dirs # type: ignore # NOQA
locale_dirs = [None, path.join(package_dir, 'locale')] + user_locale_dirs
else:
locale_dirs = []
self.translator, has_translation = locale.init(locale_dirs, self.config.language) # type: ignore # NOQA
self.translator, has_translation = locale.init(locale_dirs, self.config.language)
if self.config.language is not None:
if has_translation or self.config.language == 'en':
# "en" never needs to be translated

View File

@ -167,7 +167,7 @@ class AppleHelpBuilder(StandaloneHTMLBuilder):
logger.info(bold(__('writing Info.plist... ')), nonl=True)
with open(path.join(contents_dir, 'Info.plist'), 'wb') as fb:
plistlib.dump(info_plist, fb) # type: ignore
plistlib.dump(info_plist, fb)
logger.info(__('done'))
# Copy the icon, if one is supplied

View File

@ -113,11 +113,9 @@ class ChangesBuilder(Builder):
'show_copyright': self.config.html_show_copyright,
'show_sphinx': self.config.html_show_sphinx,
}
with open(path.join(self.outdir, 'index.html'), 'w', # type: ignore
encoding='utf8') as f:
with open(path.join(self.outdir, 'index.html'), 'w', encoding='utf8') as f:
f.write(self.templates.render('changes/frameset.html', ctx))
with open(path.join(self.outdir, 'changes.html'), 'w', # type: ignore
encoding='utf8') as f:
with open(path.join(self.outdir, 'changes.html'), 'w', encoding='utf8') as f:
f.write(self.templates.render('changes/versionchanges.html', ctx))
hltext = ['.. versionadded:: %s' % version,
@ -135,7 +133,7 @@ class ChangesBuilder(Builder):
logger.info(bold(__('copying source files...')))
for docname in self.env.all_docs:
with open(self.env.doc2path(docname), # type: ignore
with open(self.env.doc2path(docname),
encoding=self.env.config.source_encoding) as f:
try:
lines = f.readlines()
@ -144,7 +142,7 @@ class ChangesBuilder(Builder):
continue
targetfn = path.join(self.outdir, 'rst', os_path(docname)) + '.html'
ensuredir(path.dirname(targetfn))
with open(targetfn, 'w', encoding='utf-8') as f: # type: ignore
with open(targetfn, 'w', encoding='utf-8') as f:
text = ''.join(hl(i + 1, line) for (i, line) in enumerate(lines))
ctx = {
'filename': self.env.doc2path(docname, None),
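
Most of the hunks that follow repeat one pattern: under the old py2 checking mode the builtin open() has no encoding parameter, so calls passing it carried a # type: ignore (and were often wrapped across two lines to fit the comment); in py3 mode the builtin signature accepts encoding, so both the suppression and the wrapping can go. A minimal sketch of the pattern, with a hypothetical filename and content:

    content = 'example text'

    # before: py2 mode -- encoding= is not in the py2 builtin open() signature
    with open('output.txt', 'w',  # type: ignore
              encoding='utf-8') as f:
        f.write(content)

    # after: py3 mode -- encoding is part of the builtin signature, no suppression
    with open('output.txt', 'w', encoding='utf-8') as f:
        f.write(content)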

View File

@ -136,7 +136,7 @@ class DevhelpBuilder(StandaloneHTMLBuilder):
# Dump the XML file
xmlfile = path.join(outdir, outname + '.devhelp.gz')
with gzip.open(xmlfile, 'w') as f: # type: ignore
with gzip.open(xmlfile, 'w') as f:
tree.write(f, 'utf-8')

View File

@ -83,7 +83,7 @@ class Catalog:
if msg not in self.metadata: # faster lookup in hash
self.messages.append(msg)
self.metadata[msg] = []
self.metadata[msg].append((origin.source, origin.line, origin.uid))
self.metadata[msg].append((origin.source, origin.line, origin.uid)) # type: ignore
class MsgOrigin:
@ -198,7 +198,7 @@ def should_write(filepath, new_content):
if not path.exists(filepath):
return True
try:
with open(filepath, encoding='utf-8') as oldpot: # type: ignore
with open(filepath, encoding='utf-8') as oldpot:
old_content = oldpot.read()
old_header_index = old_content.index('"POT-Creation-Date:')
new_header_index = new_content.index('"POT-Creation-Date:')
@ -246,10 +246,10 @@ class MessageCatalogBuilder(I18nBuilder):
extract_translations = self.templates.environment.extract_translations
for template in status_iterator(files, __('reading templates... '), "purple", # type: ignore # NOQA
for template in status_iterator(files, __('reading templates... '), "purple",
len(files), self.app.verbosity):
try:
with open(template, encoding='utf-8') as f: # type: ignore
with open(template, encoding='utf-8') as f:
context = f.read()
for line, meth, msg in extract_translations(context):
origin = MsgOrigin(template, line)
@ -272,7 +272,7 @@ class MessageCatalogBuilder(I18nBuilder):
'ctime': datetime.fromtimestamp(
timestamp, ltz).strftime('%Y-%m-%d %H:%M%z'),
}
for textdomain, catalog in status_iterator(self.catalogs.items(), # type: ignore
for textdomain, catalog in status_iterator(self.catalogs.items(),
__("writing message catalogs... "),
"darkgreen", len(self.catalogs),
self.app.verbosity,
@ -282,31 +282,30 @@ class MessageCatalogBuilder(I18nBuilder):
pofn = path.join(self.outdir, textdomain + '.pot')
output = StringIO()
output.write(POHEADER % data) # type: ignore
output.write(POHEADER % data)
for message in catalog.messages:
positions = catalog.metadata[message]
if self.config.gettext_location:
# generate "#: file1:line1\n#: file2:line2 ..."
output.write("#: %s\n" % "\n#: ".join( # type: ignore
output.write("#: %s\n" % "\n#: ".join(
"%s:%s" % (canon_path(relpath(source, self.outdir)), line)
for source, line, _ in positions))
if self.config.gettext_uuid:
# generate "# uuid1\n# uuid2\n ..."
output.write("# %s\n" % "\n# ".join( # type: ignore
uid for _, _, uid in positions))
output.write("# %s\n" % "\n# ".join(uid for _, _, uid in positions))
# message contains *one* line of text ready for translation
message = message.replace('\\', r'\\'). \
replace('"', r'\"'). \
replace('\n', '\\n"\n"')
output.write('msgid "%s"\nmsgstr ""\n\n' % message) # type: ignore
output.write('msgid "%s"\nmsgstr ""\n\n' % message)
content = output.getvalue()
if should_write(pofn, content):
with open(pofn, 'w', encoding='utf-8') as pofile: # type: ignore
with open(pofn, 'w', encoding='utf-8') as pofile:
pofile.write(content)

View File

@ -833,7 +833,7 @@ class StandaloneHTMLBuilder(Builder):
ensuredir(path.join(self.outdir, '_static'))
# first, create pygments style file
with open(path.join(self.outdir, '_static', 'pygments.css'), 'w') as f:
f.write(self.highlighter.get_stylesheet()) # type: ignore
f.write(self.highlighter.get_stylesheet())
# then, copy translations JavaScript file
if self.config.language is not None:
jsfile = self._get_translations_js()
@ -956,7 +956,7 @@ class StandaloneHTMLBuilder(Builder):
try:
searchindexfn = path.join(self.outdir, self.searchindex_filename)
if self.indexer_dumps_unicode:
with open(searchindexfn, encoding='utf-8') as ft: # type: ignore
with open(searchindexfn, encoding='utf-8') as ft:
self.indexer.load(ft, self.indexer_format)
else:
with open(searchindexfn, 'rb') as fb:
@ -1137,8 +1137,8 @@ class StandaloneHTMLBuilder(Builder):
# outfilename's path is in general different from self.outdir
ensuredir(path.dirname(outfilename))
try:
with open(outfilename, 'w', # type: ignore
encoding=ctx['encoding'], errors='xmlcharrefreplace') as f:
with open(outfilename, 'w', encoding=ctx['encoding'],
errors='xmlcharrefreplace') as f:
f.write(output)
except (IOError, OSError) as err:
logger.warning(__("error writing file %s: %s"), outfilename, err)
@ -1175,7 +1175,7 @@ class StandaloneHTMLBuilder(Builder):
# first write to a temporary file, so that if dumping fails,
# the existing index won't be overwritten
if self.indexer_dumps_unicode:
with open(searchindexfn + '.tmp', 'w', encoding='utf-8') as ft: # type: ignore
with open(searchindexfn + '.tmp', 'w', encoding='utf-8') as ft:
self.indexer.dump(ft, self.indexer_format)
else:
with open(searchindexfn + '.tmp', 'wb') as fb:
@ -1433,7 +1433,7 @@ class SerializingHTMLBuilder(StandaloneHTMLBuilder):
def dump_context(self, context, filename):
# type: (Dict, unicode) -> None
if self.implementation_dumps_unicode:
with open(filename, 'w', encoding='utf-8') as ft: # type: ignore
with open(filename, 'w', encoding='utf-8') as ft:
self.implementation.dump(context, ft, *self.additional_dump_args)
else:
with open(filename, 'wb') as fb:

View File

@ -211,8 +211,8 @@ class HTMLHelpBuilder(StandaloneHTMLBuilder):
def open_file(self, outdir, basename, mode='w'):
# type: (unicode, unicode, unicode) -> IO
# open a file with the correct encoding for the selected language
return open(path.join(outdir, basename), mode, # type: ignore
encoding=self.encoding, errors='xmlcharrefreplace')
return open(path.join(outdir, basename), mode, encoding=self.encoding,
errors='xmlcharrefreplace')
def update_page_context(self, pagename, templatename, ctx, event_arg):
# type: (unicode, unicode, Dict, unicode) -> None

View File

@ -207,7 +207,7 @@ class LaTeXBuilder(Builder):
f.write('\\NeedsTeXFormat{LaTeX2e}[1995/12/01]\n')
f.write('\\ProvidesPackage{sphinxhighlight}'
'[2016/05/29 stylesheet for highlighting with pygments]\n\n')
f.write(highlighter.get_stylesheet()) # type: ignore
f.write(highlighter.get_stylesheet())
def write(self, *ignored):
# type: (Any) -> None

View File

@ -294,8 +294,7 @@ class CheckExternalLinksBuilder(Builder):
def write_entry(self, what, docname, line, uri):
# type: (unicode, unicode, int, unicode) -> None
with open(path.join(self.outdir, 'output.txt'), 'a', # type: ignore
encoding='utf-8') as output:
with open(path.join(self.outdir, 'output.txt'), 'a', encoding='utf-8') as output:
output.write("%s:%s: [%s] %s\n" % (self.env.doc2path(docname, None),
line, what, uri))

View File

@ -135,8 +135,7 @@ class QtHelpBuilder(StandaloneHTMLBuilder):
nspace = nspace.lower()
# write the project file
with open(path.join(outdir, outname + '.qhp'), 'w', # type: ignore
encoding='utf-8') as f:
with open(path.join(outdir, outname + '.qhp'), 'w', encoding='utf-8') as f:
body = render_file('project.qhp', outname=outname,
title=self.config.html_title, version=self.config.version,
project=self.config.project, namespace=nspace,
@ -150,8 +149,7 @@ class QtHelpBuilder(StandaloneHTMLBuilder):
startpage = 'qthelp://' + posixpath.join(nspace, 'doc', 'index.html')
logger.info(__('writing collection project file...'))
with open(path.join(outdir, outname + '.qhcp'), 'w', # type: ignore
encoding='utf-8') as f:
with open(path.join(outdir, outname + '.qhcp'), 'w', encoding='utf-8') as f:
body = render_file('project.qhcp', outname=outname,
title=self.config.html_short_title,
homepage=homepage, startpage=startpage)

View File

@ -81,7 +81,7 @@ class TextBuilder(Builder):
outfilename = path.join(self.outdir, os_path(docname) + self.out_suffix)
ensuredir(path.dirname(outfilename))
try:
with open(outfilename, 'w', encoding='utf-8') as f: # type: ignore
with open(outfilename, 'w', encoding='utf-8') as f:
f.write(self.writer.output)
except (IOError, OSError) as err:
logger.warning(__("error writing file %s: %s"), outfilename, err)

View File

@ -95,7 +95,7 @@ class XMLBuilder(Builder):
outfilename = path.join(self.outdir, os_path(docname) + self.out_suffix)
ensuredir(path.dirname(outfilename))
try:
with open(outfilename, 'w', encoding='utf-8') as f: # type: ignore
with open(outfilename, 'w', encoding='utf-8') as f:
f.write(self.writer.output)
except (IOError, OSError) as err:
logger.warning(__("error writing file %s: %s"), outfilename, err)

View File

@ -304,4 +304,4 @@ def main(argv=sys.argv[1:]): # type: ignore
if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) # type: ignore
sys.exit(main(sys.argv[1:]))

View File

@ -430,7 +430,7 @@ def generate(d, overwrite=True, silent=False, templatedir=None):
if overwrite or not path.isfile(fpath):
if 'quiet' not in d:
print(__('Creating file %s.') % fpath)
with open(fpath, 'wt', encoding='utf-8', newline=newline) as f: # type: ignore
with open(fpath, 'wt', encoding='utf-8', newline=newline) as f:
f.write(content)
else:
if 'quiet' not in d:

View File

@ -274,7 +274,7 @@ class Config:
logger.warning("%s", exc)
for name in config:
if name in self.values:
self.__dict__[name] = config[name] # type: ignore
self.__dict__[name] = config[name]
def __getattr__(self, name):
# type: (unicode) -> Any
@ -306,7 +306,7 @@ class Config:
def __iter__(self):
# type: () -> Generator[ConfigValue, None, None]
for name, value in self.values.items():
yield ConfigValue(name, getattr(self, name), value[1]) # type: ignore
yield ConfigValue(name, getattr(self, name), value[1])
def add(self, name, default, rebuild, types):
# type: (unicode, Any, Union[bool, unicode], Any) -> None
@ -334,7 +334,7 @@ class Config:
# create a picklable copy of values list
__dict__['values'] = {}
for key, value in self.values.items(): # type: ignore
for key, value in self.values.items():
real_value = getattr(self, key)
if not is_serializable(real_value):
# omit unserializable value

View File

@ -216,8 +216,7 @@ class LiteralIncludeReader:
def read_file(self, filename, location=None):
# type: (unicode, Any) -> List[unicode]
try:
with open(filename, # type: ignore
encoding=self.encoding, errors='strict') as f:
with open(filename, encoding=self.encoding, errors='strict') as f:
text = f.read() # type: unicode
if 'tab-width' in self.options:
text = text.expandtabs(self.options['tab-width'])

View File

@ -62,10 +62,10 @@ class Meta(html.Meta, SphinxDirective):
meta = node.details['nodes'][0]
meta.source = self.env.doc2path(self.env.docname)
meta.line = self.lineno
meta.rawcontent = meta['content']
meta.rawcontent = meta['content'] # type: ignore
# docutils' meta nodes aren't picklable because the class is nested
meta.__class__ = addnodes.meta
meta.__class__ = addnodes.meta # type: ignore
return result
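
Here the suppressions move in the opposite direction: with docutils-stubs available (added in setup.py above), assigning attributes that the stubbed node classes do not declare is now reported, so new # type: ignore comments are added rather than removed. A minimal sketch of that kind of error, using a generic docutils node instead of the actual meta node:

    from docutils import nodes

    node = nodes.Element()
    # with docutils-stubs, mypy flags attributes the stub does not declare
    # (an attr-defined error), hence the added suppression in the hunk above
    node.rawcontent = 'example'  # type: ignore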

View File

@ -140,7 +140,7 @@ class ChangeSetDomain(Domain):
version = node['version']
module = self.env.ref_context.get('py:module')
objname = self.env.temp_data.get('object')
changeset = ChangeSet(node['type'], self.env.docname, node.line, # type: ignore
changeset = ChangeSet(node['type'], self.env.docname, node.line,
module, objname, node.astext())
self.data['changes'].setdefault(version, []).append(changeset)

View File

@ -2232,7 +2232,7 @@ class ASTNestedName(ASTBase):
# so it can remove it in inner declarations.
dest = signode
if mode == 'lastIsName':
dest = addnodes.desc_addname()
dest = addnodes.desc_addname() # type: ignore
for i in range(len(names)):
nne = names[i]
template = self.templates[i]
@ -6936,7 +6936,7 @@ class CPPDomain(Domain):
# the non-identifier refs are cross-references, which should be processed:
# - fix parenthesis due to operator() and add_function_parentheses
if typ != "identifier":
title = contnode.pop(0).astext()
title = contnode.pop(0).astext() # type: ignore
# If it's operator(), we need to add '()' if explicit function parens
# are requested. Then the Sphinx machinery will add another pair.
# Also, if it's an 'any' ref that resolves to a function, we need to add

View File

@ -152,7 +152,7 @@ class PyXrefMixin:
delims_re = re.compile(delims)
sub_targets = re.split(delims, target)
split_contnode = bool(contnode and contnode.astext() == target)
split_contnode = bool(contnode and contnode.astext() == target) # type: ignore
results = []
for sub_target in filter(None, sub_targets):

View File

@ -374,7 +374,7 @@ class BuildEnvironment:
# the source directory is a bytestring with non-ASCII characters;
# let's try to encode the rel_fn in the file system encoding
enc_rel_fn = rel_fn.encode(sys.getfilesystemencoding())
return rel_fn, path.abspath(path.join(self.srcdir, enc_rel_fn))
return rel_fn, path.abspath(path.join(self.srcdir, enc_rel_fn)) # type: ignore
@property
def found_docs(self):

View File

@ -199,7 +199,7 @@ class TocTree:
if len(toplevel) > 1:
subtrees = toplevel.traverse(addnodes.toctree)
if subtrees:
toplevel[1][:] = subtrees
toplevel[1][:] = subtrees # type: ignore
else:
toplevel.pop(1)
# resolve all sub-toctrees
@ -246,8 +246,8 @@ class TocTree:
caption_node.rawsource = toctree['rawcaption']
if hasattr(toctree, 'uid'):
# move uid to caption_node to translate it
caption_node.uid = toctree.uid
del toctree.uid
caption_node.uid = toctree.uid # type: ignore
del toctree.uid # type: ignore
newnode += caption_node
newnode.extend(tocentries)
newnode['toctree'] = True

View File

@ -315,7 +315,7 @@ class Documenter:
modname = None
parents = []
self.modname, self.objpath = self.resolve_name(modname, parents, path, base) # type: ignore # NOQA
self.modname, self.objpath = self.resolve_name(modname, parents, path, base)
if not self.modname:
return False
@ -905,7 +905,7 @@ class ClassLevelDocumenter(Documenter):
# ... if still None, there's no way to know
if mod_cls is None:
return None, []
modname, cls = rpartition(mod_cls, '.') # type: ignore
modname, cls = rpartition(mod_cls, '.')
parents = [cls]
# if the module name is still missing, get it like above
if not modname:
@ -953,7 +953,7 @@ class DocstringSignatureMixin:
result = args, retann
# don't look any further
break
return result # type: ignore
return result
def get_doc(self, encoding=None, ignore=1):
# type: (unicode, int) -> List[List[unicode]]

View File

@ -114,7 +114,7 @@ class AutodocDirective(SphinxDirective):
reporter = self.state.document.reporter
try:
source, lineno = reporter.get_source_and_line(self.lineno)
source, lineno = reporter.get_source_and_line(self.lineno) # type: ignore
except AttributeError:
source, lineno = (None, None)
logger.debug('[autodoc] %s:%s: input:\n%s', source, lineno, self.block_text)

View File

@ -228,7 +228,7 @@ def generate_autosummary_docs(sources, output_dir=None, suffix='.rst',
ns['underline'] = len(name) * '='
rendered = template.render(**ns)
f.write(rendered) # type: ignore
f.write(rendered)
# descend recursively to new files
if new_files:
@ -248,8 +248,7 @@ def find_autosummary_in_files(filenames):
"""
documented = [] # type: List[Tuple[unicode, unicode, unicode]]
for filename in filenames:
with open(filename, encoding='utf-8', # type: ignore
errors='ignore') as f:
with open(filename, encoding='utf-8', errors='ignore') as f:
lines = f.read().splitlines()
documented.extend(find_autosummary_in_lines(lines, filename=filename))
return documented
@ -264,7 +263,7 @@ def find_autosummary_in_docstring(name, module=None, filename=None):
try:
real_name, obj, parent, modname = import_by_name(name)
lines = pydoc.getdoc(obj).splitlines()
return find_autosummary_in_lines(lines, module=name, filename=filename) # type: ignore
return find_autosummary_in_lines(lines, module=name, filename=filename)
except AttributeError:
pass
except ImportError as e:

View File

@ -226,7 +226,7 @@ class TestGroup:
else:
raise RuntimeError(__('invalid TestCode type'))
def __repr__(self): # type: ignore
def __repr__(self):
# type: () -> unicode
return 'TestGroup(name=%r, setup=%r, cleanup=%r, tests=%r)' % (
self.name, self.setup, self.cleanup, self.tests)
@ -241,7 +241,7 @@ class TestCode:
self.lineno = lineno
self.options = options or {}
def __repr__(self): # type: ignore
def __repr__(self):
# type: () -> unicode
return 'TestCode(%r, %r, filename=%r, lineno=%r, options=%r)' % (
self.code, self.type, self.filename, self.lineno, self.options)
@ -313,9 +313,7 @@ class DocTestBuilder(Builder):
date = time.strftime('%Y-%m-%d %H:%M:%S')
self.outfile = None # type: IO
self.outfile = open(path.join(self.outdir, 'output.txt'), # type: ignore
'w', encoding='utf-8')
self.outfile = open(path.join(self.outdir, 'output.txt'), 'w', encoding='utf-8')
self.outfile.write(('Results of doctest builder run on %s\n'
'==================================%s\n') %
(date, '=' * len(date)))
@ -437,7 +435,7 @@ Doctest summary
logger.warning(__('no code/output in %s block at %s:%s'),
node.get('testnodetype', 'doctest'),
filename, line_number)
code = TestCode(source, type=node.get('testnodetype', 'doctest'), # type: ignore
code = TestCode(source, type=node.get('testnodetype', 'doctest'),
filename=filename, lineno=line_number,
options=node.get('options'))
node_groups = node.get('groups', ['default'])
@ -494,8 +492,7 @@ Doctest summary
# type: (Any, List[TestCode], Any) -> bool
examples = []
for testcode in testcodes:
example = doctest.Example(testcode.code, '', # type: ignore
lineno=testcode.lineno)
example = doctest.Example(testcode.code, '', lineno=testcode.lineno)
examples.append(example)
if not examples:
return True
@ -547,11 +544,9 @@ Doctest summary
exc_msg = m.group('msg')
else:
exc_msg = None
example = doctest.Example(code[0].code, output, # type: ignore
exc_msg=exc_msg,
lineno=code[0].lineno,
options=options)
test = doctest.DocTest([example], {}, group.name, # type: ignore
example = doctest.Example(code[0].code, output, exc_msg=exc_msg,
lineno=code[0].lineno, options=options)
test = doctest.DocTest([example], {}, group.name,
code[0].filename, code[0].lineno, None)
self.type = 'exec' # multiple statements again
# DocTest.__init__ copies the globs namespace, which we don't want

View File

@ -145,7 +145,7 @@ class Graphviz(SphinxDirective):
rel_filename, filename = self.env.relfn2path(argument)
self.env.note_dependency(rel_filename)
try:
with open(filename, encoding='utf-8') as fp: # type: ignore
with open(filename, encoding='utf-8') as fp:
dotcode = fp.read()
except (IOError, OSError):
return [document.reporter.warning(
@ -310,7 +310,7 @@ def render_dot_html(self, node, code, options, prefix='graphviz',
self.body.append('<p class="warning">%s</p>' % alt)
self.body.append('</object></div>\n')
else:
with open(outfn + '.map', encoding='utf-8') as mapfile: # type: ignore
with open(outfn + '.map', encoding='utf-8') as mapfile:
imgmap = ClickableMapDefinition(outfn + '.map', mapfile.read(), dot=code)
if imgmap.clickable:
# has a map

View File

@ -124,7 +124,7 @@ def compile_math(latex, builder):
"""Compile LaTeX macros for math to DVI."""
tempdir = ensure_tempdir(builder)
filename = path.join(tempdir, 'math.tex')
with open(filename, 'w', encoding='utf-8') as f: # type: ignore
with open(filename, 'w', encoding='utf-8') as f:
f.write(latex)
# build latex command; old versions of latex don't have the

View File

@ -80,7 +80,7 @@ def try_import(objname):
"""
try:
__import__(objname)
return sys.modules.get(objname) # type: ignore
return sys.modules.get(objname)
except ImportError:
matched = module_sig_re.match(objname)
@ -93,7 +93,7 @@ def try_import(objname):
return None
try:
__import__(modname)
return getattr(sys.modules.get(modname), attrname, None) # type: ignore
return getattr(sys.modules.get(modname), attrname, None)
except ImportError:
return None

View File

@ -240,7 +240,7 @@ def load_mappings(app):
# files; remote ones only if the cache time is expired
if '://' not in inv or uri not in inventories.cache \
or inventories.cache[uri][1] < cache_time:
safe_inv_url = _get_safe_url(inv) # type: ignore
safe_inv_url = _get_safe_url(inv)
logger.info('loading intersphinx inventory from %s...', safe_inv_url)
try:
invdata = fetch_inventory(app, uri, inv)
@ -418,4 +418,4 @@ if __name__ == '__main__':
import logging # type: ignore
logging.basicConfig() # type: ignore
inspect_main(argv=sys.argv[1:]) # type: ignore
inspect_main(argv=sys.argv[1:])

View File

@ -133,7 +133,7 @@ class GoogleDocstring(UnicodeMixin):
if isinstance(docstring, str):
lines = docstring.splitlines()
else:
lines = docstring # type: ignore
lines = docstring
self._line_iter = modify_iter(lines, modifier=lambda s: s.rstrip())
self._parsed_lines = [] # type: List[unicode]
self._is_in_section = False

View File

@ -96,7 +96,7 @@ class PygmentsBridge:
def get_formatter(self, **kwargs):
# type: (Any) -> Formatter
kwargs.update(self.formatter_args) # type: ignore
kwargs.update(self.formatter_args)
return self.formatter(**kwargs)
def unhighlighted(self, source):

View File

@ -233,7 +233,7 @@ class SphinxRSTFileInput(SphinxBaseFileInput):
for lineno, line in enumerate(epilog.splitlines()):
text.append(line, '<rst_epilog>', lineno)
def read(self):
def read(self): # type: ignore
# type: () -> StringList
warnings.warn('SphinxRSTFileInput is deprecated.',
RemovedInSphinx30Warning, stacklevel=2)
@ -284,7 +284,7 @@ def read_doc(app, env, filename):
filetype = get_filetype(app.config.source_suffix, filename)
input_class = app.registry.get_source_input(filetype)
reader = SphinxStandaloneReader(app)
source = input_class(app, env, source=None, source_path=filename,
source = input_class(app, env, source=None, source_path=filename, # type: ignore
encoding=env.config.source_encoding)
parser = app.registry.create_source_parser(app, filetype)
if parser.__class__.__name__ == 'CommonMarkParser' and parser.settings_spec == ():
@ -295,7 +295,7 @@ def read_doc(app, env, filename):
# CommonMarkParser.
parser.settings_spec = RSTParser.settings_spec
pub = Publisher(reader=reader,
pub = Publisher(reader=reader, # type: ignore
parser=parser,
writer=SphinxDummyWriter(),
source_class=SphinxDummySourceClass,

View File

@ -90,7 +90,7 @@ class _TranslationProxy(UserString):
# type: (unicode) -> unicode
return self.data + other
def __radd__(self, other): # type: ignore
def __radd__(self, other):
# type: (unicode) -> unicode
return other + self.data
@ -106,7 +106,7 @@ class _TranslationProxy(UserString):
# type: (Any) -> unicode
return self.data * other
def __rmul__(self, other): # type: ignore
def __rmul__(self, other):
# type: (Any) -> unicode
return other * self.data
@ -183,8 +183,7 @@ def init(locale_dirs, language, catalog='sphinx', namespace='general'):
# loading
for dir_ in locale_dirs:
try:
trans = gettext.translation(catalog, localedir=dir_, # type: ignore
languages=languages)
trans = gettext.translation(catalog, localedir=dir_, languages=languages)
if translator is None:
translator = trans
else:
@ -234,7 +233,7 @@ def _lazy_translate(catalog, namespace, message):
not bound yet at that time.
"""
translator = get_translator(catalog, namespace)
return translator.gettext(message) # type: ignore
return translator.gettext(message)
def get_translation(catalog, namespace='general'):
@ -269,9 +268,9 @@ def get_translation(catalog, namespace='general'):
else:
translator = get_translator(catalog, namespace)
if len(args) <= 1:
return translator.gettext(message) # type: ignore
return translator.gettext(message)
else: # support pluralization
return translator.ngettext(message, args[0], args[1]) # type: ignore
return translator.ngettext(message, args[0], args[1])
return gettext

View File

@ -76,7 +76,7 @@ class RSTParser(docutils.parsers.rst.Parser, Parser):
def parse(self, inputstring, document):
# type: (Union[str, StringList], nodes.document) -> None
"""Parse text and generate a document tree."""
self.setup_parse(inputstring, document)
self.setup_parse(inputstring, document) # type: ignore
self.statemachine = states.RSTStateMachine(
state_classes=self.state_classes,
initial_state=self.initial_state,

View File

@ -50,7 +50,7 @@ def get_assign_targets(node):
def get_lvar_names(node, self=None):
# type: (ast.AST, ast.expr) -> List[unicode]
# type: (ast.AST, ast.arg) -> List[unicode]
"""Convert assignment-AST to variable names.
This raises `TypeError` if the assignment does not create new variable::
@ -60,7 +60,7 @@ def get_lvar_names(node, self=None):
# => TypeError
"""
if self:
self_id = self.arg # type: ignore
self_id = self.arg
node_name = node.__class__.__name__
if node_name in ('Index', 'Num', 'Slice', 'Str', 'Subscript'):
@ -100,7 +100,7 @@ def dedent_docstring(s):
# dummy function to mock `inspect.getdoc`.
pass
dummy.__doc__ = s # type: ignore
dummy.__doc__ = s
docstring = inspect.getdoc(dummy)
return docstring.lstrip("\r\n").rstrip("\r\n")
@ -144,7 +144,7 @@ class TokenProcessor:
# type: (List[unicode]) -> None
lines = iter(buffers)
self.buffers = buffers
self.tokens = tokenize.generate_tokens(lambda: next(lines)) # type: ignore
self.tokens = tokenize.generate_tokens(lambda: next(lines))
self.current = None # type: Token
self.previous = None # type: Token
@ -279,7 +279,7 @@ class VariableCommentPicker(ast.NodeVisitor):
self.comments[(context, name)] = comment
def get_self(self):
# type: () -> ast.expr
# type: () -> ast.arg
"""Returns the name of first argument if in function."""
if self.current_function and self.current_function.args.args:
return self.current_function.args.args[0]

View File

@ -238,7 +238,7 @@ class SphinxComponentRegistry:
ref_nodeclass, objname, doc_field_types))
# create a subclass of GenericObject as the new directive
directive = type(directivename, # type: ignore
directive = type(directivename,
(GenericObject, object),
{'indextemplate': indextemplate,
'parse_node': staticmethod(parse_node),
@ -260,7 +260,7 @@ class SphinxComponentRegistry:
(directivename, rolename, indextemplate, ref_nodeclass, objname))
# create a subclass of Target as the new directive
directive = type(directivename, # type: ignore
directive = type(directivename,
(Target, object),
{'indextemplate': indextemplate})
@ -431,7 +431,7 @@ class SphinxComponentRegistry:
def add_js_file(self, filename, **attributes):
# type: (unicode, **unicode) -> None
logger.debug('[app] adding js_file: %r, %r', filename, attributes)
self.js_files.append((filename, attributes)) # type: ignore
self.js_files.append((filename, attributes))
def add_latex_package(self, name, options):
# type: (unicode, unicode) -> None

View File

@ -155,7 +155,7 @@ class path(text_type):
"""
if isinstance(text, bytes):
text = text.decode(encoding)
with open(self, 'w', encoding=encoding, **kwargs) as f: # type: ignore
with open(self, 'w', encoding=encoding, **kwargs) as f:
f.write(text)
def text(self, encoding='utf-8', **kwargs):
@ -163,7 +163,7 @@ class path(text_type):
"""
Returns the text in the file.
"""
with open(self, encoding=encoding, **kwargs) as f: # type: ignore
with open(self, encoding=encoding, **kwargs) as f:
return f.read()
def bytes(self):
@ -209,7 +209,7 @@ class path(text_type):
"""
Recursively create directories.
"""
os.makedirs(self, mode, exist_ok=exist_ok) # type: ignore
os.makedirs(self, mode, exist_ok=exist_ok)
def joinpath(self, *args):
# type: (Any) -> path

View File

@ -99,7 +99,7 @@ def etree_parse(path):
# type: (unicode) -> Any
with warnings.catch_warnings(record=False):
warnings.filterwarnings("ignore", category=DeprecationWarning)
return ElementTree.parse(path) # type: ignore
return ElementTree.parse(path)
class Struct:
@ -133,8 +133,8 @@ class SphinxTestApp(application.Sphinx):
warningiserror = False
self._saved_path = sys.path[:]
self._saved_directives = directives._directives.copy()
self._saved_roles = roles._roles.copy()
self._saved_directives = directives._directives.copy() # type: ignore
self._saved_roles = roles._roles.copy() # type: ignore
self._saved_nodeclasses = set(v for v in dir(nodes.GenericNodeVisitor)
if v.startswith('visit_'))
@ -154,8 +154,8 @@ class SphinxTestApp(application.Sphinx):
locale.translators.clear()
sys.path[:] = self._saved_path
sys.modules.pop('autodoc_fodder', None)
directives._directives = self._saved_directives
roles._roles = self._saved_roles
directives._directives = self._saved_directives # type: ignore
roles._roles = self._saved_roles # type: ignore
for method in dir(nodes.GenericNodeVisitor):
if method.startswith('visit_') and \
method not in self._saved_nodeclasses:

View File

@ -63,7 +63,7 @@ def publish_msgstr(app, source, source_path, source_line, config, settings):
settings=settings,
)
try:
doc = doc[0]
doc = doc[0] # type: ignore
except IndexError: # empty node
pass
return doc
@ -107,7 +107,7 @@ class Locale(SphinxTransform):
# phase1: replace reference ids with translated names
for node, msg in extract_messages(self.document):
msgstr = catalog.gettext(msg) # type: ignore
msgstr = catalog.gettext(msg)
# XXX add marker to untranslated parts
if not msgstr or msgstr == msg or not msgstr.strip():
# as-of-yet untranslated
@ -221,7 +221,7 @@ class Locale(SphinxTransform):
if node.get('translated', False): # to avoid double translation
continue # skip if the node is already translated by phase1
msgstr = catalog.gettext(msg) # type: ignore
msgstr = catalog.gettext(msg)
# XXX add marker to untranslated parts
if not msgstr or msgstr == msg: # as-of-yet untranslated
continue
@ -454,7 +454,7 @@ class Locale(SphinxTransform):
msg_parts = split_index_msg(type, msg)
msgstr_parts = []
for part in msg_parts:
msgstr = catalog.gettext(part) # type: ignore
msgstr = catalog.gettext(part)
if not msgstr:
msgstr = part
msgstr_parts.append(msgstr)

View File

@ -18,6 +18,7 @@ from contextlib import contextmanager
from copy import copy
from distutils.version import LooseVersion
from os import path
from typing import IO, cast
import docutils
from docutils import nodes
@ -55,13 +56,13 @@ def docutils_namespace():
# type: () -> Generator[None, None, None]
"""Create namespace for reST parsers."""
try:
_directives = copy(directives._directives)
_roles = copy(roles._roles)
_directives = copy(directives._directives) # type: ignore
_roles = copy(roles._roles) # type: ignore
yield
finally:
directives._directives = _directives
roles._roles = _roles
directives._directives = _directives # type: ignore
roles._roles = _roles # type: ignore
for node in list(additional_nodes):
unregister_node(node)
@ -71,7 +72,7 @@ def docutils_namespace():
def is_directive_registered(name):
# type: (unicode) -> bool
"""Check the *name* directive is already registered."""
return name in directives._directives
return name in directives._directives # type: ignore
def register_directive(name, directive):
@ -87,7 +88,7 @@ def register_directive(name, directive):
def is_role_registered(name):
# type: (unicode) -> bool
"""Check the *name* role is already registered."""
return name in roles._roles
return name in roles._roles # type: ignore
def register_role(name, role):
@ -103,7 +104,7 @@ def register_role(name, role):
def unregister_role(name):
# type: (unicode) -> None
"""Unregister a role from docutils."""
roles._roles.pop(name, None)
roles._roles.pop(name, None) # type: ignore
def is_node_registered(node):
@ -120,7 +121,7 @@ def register_node(node):
inside ``docutils_namespace()`` to prevent side-effects.
"""
if not hasattr(nodes.GenericNodeVisitor, 'visit_' + node.__name__):
nodes._add_node_class_names([node.__name__])
nodes._add_node_class_names([node.__name__]) # type: ignore
additional_nodes.add(node)
@ -166,7 +167,7 @@ def using_user_docutils_conf(confdir):
try:
docutilsconfig = os.environ.get('DOCUTILSCONFIG', None)
if confdir:
os.environ['DOCUTILSCONFIG'] = path.join(path.abspath(confdir), 'docutils.conf') # type: ignore # NOQA
os.environ['DOCUTILSCONFIG'] = path.join(path.abspath(confdir), 'docutils.conf')
yield
finally:
@ -211,8 +212,8 @@ class sphinx_domains:
self.directive_func = directives.directive
self.role_func = roles.role
directives.directive = self.lookup_directive
roles.role = self.lookup_role
directives.directive = self.lookup_directive # type: ignore
roles.role = self.lookup_role # type: ignore
def disable(self):
# type: () -> None
@ -272,7 +273,7 @@ class WarningStream:
else:
location, type, level = matched.groups()
message = report_re.sub('', text).rstrip()
logger.log(type, message, location=location) # type: ignore
logger.log(type, message, location=location)
class LoggingReporter(Reporter):
@ -287,7 +288,7 @@ class LoggingReporter(Reporter):
halt_level=Reporter.SEVERE_LEVEL, debug=False,
error_handler='backslashreplace'):
# type: (unicode, int, int, bool, unicode) -> None
stream = WarningStream()
stream = cast(IO, WarningStream())
super(LoggingReporter, self).__init__(source, report_level, halt_level,
stream, debug, error_handler=error_handler)
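
WarningStream only duck-types the stream interface that docutils' Reporter takes, so rather than keeping a suppression the new code imports cast from typing (see the first hunk in this file) and casts the instance to IO. cast() has no runtime effect; it only tells the checker to treat the object as the expected type. A minimal sketch, assuming a write()-only stream:

    from typing import IO, cast

    class WarningStream:
        def write(self, text: str) -> None:
            pass  # a stand-in; the real class routes messages into Sphinx logging

    # no conversion happens at runtime -- mypy simply treats the instance as IO,
    # which is what the stubbed Reporter signature appears to expect
    stream = cast(IO, WarningStream())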
@ -329,17 +330,17 @@ def switch_source_input(state, content):
"""Switch current source input of state temporarily."""
try:
# remember the original ``get_source_and_line()`` method
get_source_and_line = state.memo.reporter.get_source_and_line
get_source_and_line = state.memo.reporter.get_source_and_line # type: ignore
# replace it by new one
state_machine = StateMachine([], None)
state_machine.input_lines = content
state.memo.reporter.get_source_and_line = state_machine.get_source_and_line
state.memo.reporter.get_source_and_line = state_machine.get_source_and_line # type: ignore # NOQA
yield
finally:
# restore the method
state.memo.reporter.get_source_and_line = get_source_and_line
state.memo.reporter.get_source_and_line = get_source_and_line # type: ignore
class SphinxFileOutput(FileOutput):
@ -354,7 +355,7 @@ class SphinxFileOutput(FileOutput):
# type: (unicode) -> unicode
if (self.destination_path and self.autoclose and 'b' not in self.mode and
self.overwrite_if_changed and os.path.exists(self.destination_path)):
with open(self.destination_path, encoding=self.encoding) as f: # type: ignore
with open(self.destination_path, encoding=self.encoding) as f:
# skip writing: content not changed
if f.read() == data:
return data

View File

@ -49,10 +49,10 @@ def copy_asset_file(source, destination, context=None, renderer=None):
from sphinx.util.template import SphinxRenderer
renderer = SphinxRenderer()
with open(source, encoding='utf-8') as fsrc: # type: ignore
with open(source, encoding='utf-8') as fsrc:
if destination.lower().endswith('_t'):
destination = destination[:-2]
with open(destination, 'w', encoding='utf-8') as fdst: # type: ignore
with open(destination, 'w', encoding='utf-8') as fdst:
fdst.write(renderer.render_string(fsrc.read(), context))
else:
copyfile(source, destination)

View File

@ -69,7 +69,7 @@ class CatalogInfo(LocaleFileInfoBase):
def write_mo(self, locale):
# type: (unicode) -> None
with open(self.po_path, encoding=self.charset) as file_po: # type: ignore
with open(self.po_path, encoding=self.charset) as file_po:
try:
po = read_po(file_po, locale)
except Exception as exc:
@ -99,10 +99,10 @@ def find_catalog_files(docname, srcdir, locale_dirs, lang, compaction):
return []
domain = find_catalog(docname, compaction)
files = [gettext.find(domain, path.join(srcdir, dir_), [lang]) # type: ignore
files = [gettext.find(domain, path.join(srcdir, dir_), [lang])
for dir_ in locale_dirs]
files = [relpath(f, srcdir) for f in files if f] # type: ignore
return files # type: ignore
files = [relpath(f, srcdir) for f in files if f]
return files
def find_catalog_source_files(locale_dirs, locale, domains=None, gettext_compact=None,

View File

@ -140,4 +140,4 @@ def test_svg(h, f):
# install test_svg() to imghdr
# refs: https://docs.python.org/3.6/library/imghdr.html#imghdr.tests
imghdr.tests.append(test_svg) # type: ignore
imghdr.tests.append(test_svg)

View File

@ -54,7 +54,7 @@ def getargspec(func):
raise TypeError(
"can't compute signature for built-in type {}".format(func))
sig = inspect.signature(func) # type: ignore
sig = inspect.signature(func)
args = []
varargs = None
@ -72,19 +72,19 @@ def getargspec(func):
kind = param.kind
name = param.name
if kind is inspect.Parameter.POSITIONAL_ONLY: # type: ignore
if kind is inspect.Parameter.POSITIONAL_ONLY:
args.append(name)
elif kind is inspect.Parameter.POSITIONAL_OR_KEYWORD: # type: ignore
elif kind is inspect.Parameter.POSITIONAL_OR_KEYWORD:
args.append(name)
if param.default is not param.empty:
defaults += (param.default,) # type: ignore
elif kind is inspect.Parameter.VAR_POSITIONAL: # type: ignore
elif kind is inspect.Parameter.VAR_POSITIONAL:
varargs = name
elif kind is inspect.Parameter.KEYWORD_ONLY: # type: ignore
elif kind is inspect.Parameter.KEYWORD_ONLY:
kwonlyargs.append(name)
if param.default is not param.empty:
kwdefaults[name] = param.default
elif kind is inspect.Parameter.VAR_KEYWORD: # type: ignore
elif kind is inspect.Parameter.VAR_KEYWORD:
varkw = name
if param.annotation is not param.empty:
@ -98,7 +98,7 @@ def getargspec(func):
# compatibility with 'func.__defaults__'
defaults = None
return inspect.FullArgSpec(args, varargs, varkw, defaults, # type: ignore
return inspect.FullArgSpec(args, varargs, varkw, defaults,
kwonlyargs, kwdefaults, annotations)
@ -308,7 +308,7 @@ class Signature:
self.partialmethod_with_noargs = False
try:
self.signature = inspect.signature(subject) # type: ignore
self.signature = inspect.signature(subject)
except IndexError:
# Until python 3.6.4, cpython has been crashed on inspection for
# partialmethods not having any arguments.
@ -320,7 +320,7 @@ class Signature:
raise
try:
self.annotations = typing.get_type_hints(subject) # type: ignore
self.annotations = typing.get_type_hints(subject)
except Exception:
# get_type_hints() does not support some kind of objects like partial,
# ForwardRef and so on. For them, it raises an exception. In that case,
@ -355,7 +355,7 @@ class Signature:
if self.has_retval:
return self.signature.return_annotation
else:
return inspect.Parameter.empty # type: ignore
return inspect.Parameter.empty
else:
return None
@ -391,10 +391,10 @@ class Signature:
if param.default is not param.empty:
if param.annotation is param.empty:
arg.write('=')
arg.write(object_description(param.default)) # type: ignore
arg.write(object_description(param.default))
else:
arg.write(' = ')
arg.write(object_description(param.default)) # type: ignore
arg.write(object_description(param.default))
elif param.kind == param.VAR_POSITIONAL:
arg.write('*')
arg.write(param.name)
@ -405,7 +405,7 @@ class Signature:
args.append(arg.getvalue())
last_kind = param.kind
if self.return_annotation is inspect.Parameter.empty: # type: ignore
if self.return_annotation is inspect.Parameter.empty:
return '(%s)' % ', '.join(args)
else:
if 'return' in self.annotations:
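
The ignores removed in this file presumably existed because the py2 typeshed stubs know nothing of inspect.signature, inspect.Parameter, or inspect.FullArgSpec; in py3 mode all of them are visible to mypy. A small self-contained illustration of the calls the getargspec() shim relies on:

    import inspect

    def example(a, b=1, *args, key=None, **kwargs):
        return a

    # in py3 mode mypy knows these names, so no suppressions are required
    sig = inspect.signature(example)
    keyword_only = [p.name for p in sig.parameters.values()
                    if p.kind is inspect.Parameter.KEYWORD_ONLY]
    assert keyword_only == ['key']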

View File

@ -56,7 +56,7 @@ def encode_string(s):
s1 = 0xd800 | ((n >> 10) & 0x3ff)
s2 = 0xdc00 | (n & 0x3ff)
return '\\u%04x\\u%04x' % (s1, s2)
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"' # type: ignore
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
def decode_string(s):

View File

@ -454,7 +454,7 @@ class MessagePrefixFilter(logging.Filter):
def filter(self, record):
# type: (logging.LogRecord) -> bool
if self.prefix:
record.msg = self.prefix + ' ' + record.msg # type: ignore
record.msg = self.prefix + ' ' + record.msg
return True
@ -526,7 +526,7 @@ class ColorizeFormatter(logging.Formatter):
color = COLOR_MAP.get(record.levelno)
if color:
return colorize(color, message) # type: ignore
return colorize(color, message)
else:
return message

View File

@ -451,7 +451,7 @@ def set_source_info(directive, node):
def set_role_source_info(inliner, lineno, node):
# type: (Inliner, int, nodes.Node) -> None
node.source, node.line = inliner.reporter.get_source_and_line(lineno)
node.source, node.line = inliner.reporter.get_source_and_line(lineno) # type: ignore
NON_SMARTQUOTABLE_PARENT_NODES = (
@ -510,4 +510,4 @@ def _new_copy(self):
return newnode
nodes.Element.copy = _new_copy
nodes.Element.copy = _new_copy # type: ignore

View File

@ -83,7 +83,7 @@ def relative_uri(base, to):
def ensuredir(path):
# type: (unicode) -> None
"""Ensure that a path exists."""
os.makedirs(path, exist_ok=True) # type: ignore
os.makedirs(path, exist_ok=True)
def walk(top, topdown=True, followlinks=False):
@ -169,9 +169,9 @@ def ustrftime(format, *args):
# On Windows, time.strftime() and Unicode characters will raise UnicodeEncodeError.
# https://bugs.python.org/issue8304
try:
return time.strftime(format, *args) # type: ignore
return time.strftime(format, *args)
except UnicodeEncodeError:
r = time.strftime(format.encode('unicode-escape').decode(), *args) # type: ignore
r = time.strftime(format.encode('unicode-escape').decode(), *args)
return r.encode().decode('unicode-escape')
@ -200,7 +200,7 @@ def abspath(pathdir):
try:
pathdir = pathdir.decode(fs_encoding)
except UnicodeDecodeError:
raise UnicodeDecodeError('multibyte filename not supported on ' # type: ignore
raise UnicodeDecodeError('multibyte filename not supported on '
'this filesystem encoding '
'(%r)' % fs_encoding)
return pathdir

View File

@ -12,7 +12,7 @@
import sys
from html import escape as htmlescape # NOQA
from io import TextIOWrapper # NOQA
from textwrap import indent # type: ignore # NOQA
from textwrap import indent # NOQA
from six import text_type

View File

@ -281,7 +281,7 @@ class HTMLTranslator(SphinxTranslator, BaseTranslator):
self.depart_reference(node)
# overwritten -- we don't want source comments to show up in the HTML
def visit_comment(self, node):
def visit_comment(self, node): # type: ignore
# type: (nodes.Element) -> None
raise nodes.SkipNode
@ -840,7 +840,7 @@ class HTMLTranslator(SphinxTranslator, BaseTranslator):
else:
node['classes'].append('row-odd')
self.body.append(self.starttag(node, 'tr', ''))
node.column = 0
node.column = 0 # type: ignore
def visit_entry(self, node):
# type: (nodes.Element) -> None

View File

@ -250,7 +250,7 @@ class HTML5Translator(SphinxTranslator, BaseTranslator):
self.depart_reference(node)
# overwritten -- we don't want source comments to show up in the HTML
def visit_comment(self, node):
def visit_comment(self, node): # type: ignore
# type: (nodes.Element) -> None
raise nodes.SkipNode
@ -791,7 +791,7 @@ class HTML5Translator(SphinxTranslator, BaseTranslator):
else:
node['classes'].append('row-odd')
self.body.append(self.starttag(node, 'tr', ''))
node.column = 0
node.column = 0 # type: ignore
def visit_field_list(self, node):
# type: (nodes.Element) -> None

View File

@ -40,7 +40,7 @@ try:
from docutils.utils.roman import toRoman
except ImportError:
# In Debain/Ubuntu, roman package is provided as roman, not as docutils.utils.roman
from roman import toRoman
from roman import toRoman # type: ignore
if False:
# For type annotation
@ -2664,7 +2664,7 @@ class LaTeXTranslator(SphinxTranslator):
return ('%s\\def%s{%s}%s\n' % (prefix, name, definition, suffix))
def _make_visit_admonition(name):
def _make_visit_admonition(name): # type: ignore
# type: (unicode) -> Callable[[LaTeXTranslator, nodes.Element], None]
warnings.warn('LaTeXTranslator._make_visit_admonition() is deprecated.',
RemovedInSphinx30Warning)

View File

@ -116,7 +116,7 @@ class ManualPageTranslator(SphinxTranslator, BaseTranslator):
# Overwrite admonition label translations with our own
for label, translation in admonitionlabels.items():
self.language.labels[label] = self.deunicode(translation)
self.language.labels[label] = self.deunicode(translation) # type: ignore
# overwritten -- added quotes around all .TH arguments
def header(self):
@ -253,7 +253,7 @@ class ManualPageTranslator(SphinxTranslator, BaseTranslator):
super(ManualPageTranslator, self).visit_term(node)
# overwritten -- we don't want source comments to show up
def visit_comment(self, node):
def visit_comment(self, node): # type: ignore
# type: (nodes.Element) -> None
raise nodes.SkipNode
@ -333,7 +333,7 @@ class ManualPageTranslator(SphinxTranslator, BaseTranslator):
self.body.append(self.defs['reference'][0])
# avoid repeating escaping code... fine since
# visit_Text calls astext() and only works on that afterwards
self.visit_Text(node)
self.visit_Text(node) # type: ignore
self.body.append(self.defs['reference'][1])
uri = node.get('refuri', '')

View File

@ -1745,7 +1745,7 @@ class TexinfoTranslator(SphinxTranslator):
self.escape_arg(node.astext()))
raise nodes.SkipNode
def _make_visit_admonition(name):
def _make_visit_admonition(name): # type: ignore
# type: (unicode) -> Callable[[TexinfoTranslator, nodes.Element], None]
warnings.warn('TexinfoTranslator._make_visit_admonition() is deprecated.',
RemovedInSphinx30Warning)

View File

@ -337,7 +337,7 @@ class TextWrapper(textwrap.TextWrapper):
"""
def split(t):
# type: (unicode) -> List[unicode]
return super(TextWrapper, self)._split(t) # type: ignore
return super(TextWrapper, self)._split(t)
chunks = [] # type: List[unicode]
for chunk in split(text):
for w, g in groupby(chunk, column_width):
@ -1372,7 +1372,7 @@ class TextTranslator(SphinxTranslator):
# type: (nodes.Node) -> None
raise NotImplementedError('Unknown node: ' + node.__class__.__name__)
def _make_depart_admonition(name):
def _make_depart_admonition(name): # type: ignore
# type: (unicode) -> Callable[[TextTranslator, nodes.Element], None]
warnings.warn('TextTranslator._make_depart_admonition() is deprecated.',
RemovedInSphinx30Warning)

View File

@ -55,6 +55,7 @@ description =
Run type checks.
deps =
mypy
docutils-stubs
commands=
mypy sphinx/
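
With docutils-stubs added to the deps of the type-check environment, tox runs get the same stubs as the setup.py extra. The environment name is not visible in this hunk, so the [testenv:mypy] header below is a guess; the section presumably ends up roughly as:

    [testenv:mypy]
    description =
        Run type checks.
    deps =
        mypy
        docutils-stubs
    commands=
        mypy sphinx/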