Mirror of https://github.com/sphinx-doc/sphinx.git, synced 2025-02-25 18:55:22 -06:00
Refactor code using `with` syntax
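The change is mechanical throughout: file handles that were opened by hand and closed in a try/finally block are rewritten to use a with statement, which closes the handle automatically even when the block raises. A minimal sketch of the pattern (illustrative names, not a specific line of this diff):

    # before: explicit cleanup
    f = open(fname, 'w')
    try:
        f.write(text)
    finally:
        f.close()

    # after: the context manager closes f on any exit
    with open(fname, 'w') as f:
        f.write(text)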
@@ -62,11 +62,8 @@ def write_file(name, text, opts):
         print('File %s already exists, skipping.' % fname)
     else:
         print('Creating file %s.' % fname)
-        f = open(fname, 'w')
-        try:
+        with open(fname, 'w') as f:
             f.write(text)
-        finally:
-            f.close()
 
 
 def format_heading(level, text):

@@ -178,14 +178,11 @@ class AppleHelpBuilder(StandaloneHTMLBuilder):
 
         # Build the access page
         self.info(bold('building access page...'), nonl=True)
-        f = codecs.open(path.join(language_dir, '_access.html'), 'w')
-        try:
+        with codecs.open(path.join(language_dir, '_access.html'), 'w') as f:
             f.write(access_page_template % {
                 'toc': htmlescape(toc, quote=True),
                 'title': htmlescape(self.config.applehelp_title)
             })
-        finally:
-            f.close()
         self.info('done')
 
         # Generate the help index

@@ -101,16 +101,10 @@ class ChangesBuilder(Builder):
             'show_copyright': self.config.html_show_copyright,
             'show_sphinx': self.config.html_show_sphinx,
         }
-        f = codecs.open(path.join(self.outdir, 'index.html'), 'w', 'utf8')
-        try:
+        with codecs.open(path.join(self.outdir, 'index.html'), 'w', 'utf8') as f:
             f.write(self.templates.render('changes/frameset.html', ctx))
-        finally:
-            f.close()
-        f = codecs.open(path.join(self.outdir, 'changes.html'), 'w', 'utf8')
-        try:
+        with codecs.open(path.join(self.outdir, 'changes.html'), 'w', 'utf8') as f:
             f.write(self.templates.render('changes/versionchanges.html', ctx))
-        finally:
-            f.close()
 
         hltext = ['.. versionadded:: %s' % version,
                   '.. versionchanged:: %s' % version,
@@ -126,27 +120,22 @@ class ChangesBuilder(Builder):
 
         self.info(bold('copying source files...'))
         for docname in self.env.all_docs:
-            f = codecs.open(self.env.doc2path(docname), 'r',
-                            self.env.config.source_encoding)
-            try:
-                lines = f.readlines()
-            except UnicodeDecodeError:
-                self.warn('could not read %r for changelog creation' % docname)
-                continue
-            finally:
-                f.close()
+            with codecs.open(self.env.doc2path(docname), 'r',
+                             self.env.config.source_encoding) as f:
+                try:
+                    lines = f.readlines()
+                except UnicodeDecodeError:
+                    self.warn('could not read %r for changelog creation' % docname)
+                    continue
             targetfn = path.join(self.outdir, 'rst', os_path(docname)) + '.html'
             ensuredir(path.dirname(targetfn))
-            f = codecs.open(targetfn, 'w', 'utf-8')
-            try:
+            with codecs.open(targetfn, 'w', 'utf-8') as f:
                 text = ''.join(hl(i+1, line) for (i, line) in enumerate(lines))
                 ctx = {
                     'filename': self.env.doc2path(docname, None),
                     'text': text
                 }
                 f.write(self.templates.render('changes/rstsource.html', ctx))
-            finally:
-                f.close()
         themectx = dict(('theme_' + key, val) for (key, val) in
                         iteritems(self.theme.get_options({})))
         copy_static_entry(path.join(package_dir, 'themes', 'default',
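In the source-copying loop above, the UnicodeDecodeError handler moves inside the with block, and the continue that skips unreadable documents now exits the block normally, so the file is still closed before the next iteration. A runnable sketch of that shape (read_sources and its arguments are assumed names, not Sphinx API):

    import codecs

    def read_sources(filenames, encoding='utf-8'):
        collected = {}
        for filename in filenames:
            with codecs.open(filename, 'r', encoding) as f:
                try:
                    lines = f.readlines()
                except UnicodeDecodeError:
                    continue  # leaving the with block closes f first
            collected[filename] = lines
        return collected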
@@ -127,8 +127,5 @@ class DevhelpBuilder(StandaloneHTMLBuilder):
                 write_index(title, refs, subitems)
 
         # Dump the XML file
-        f = comp_open(path.join(outdir, outname + '.devhelp'), 'w')
-        try:
+        with comp_open(path.join(outdir, outname + '.devhelp'), 'w') as f:
             tree.write(f, 'utf-8')
-        finally:
-            f.close()
@@ -496,11 +496,8 @@ class EpubBuilder(StandaloneHTMLBuilder):
     def build_mimetype(self, outdir, outname):
         """Write the metainfo file mimetype."""
         self.info('writing %s file...' % outname)
-        f = codecs.open(path.join(outdir, outname), 'w', 'utf-8')
-        try:
+        with codecs.open(path.join(outdir, outname), 'w', 'utf-8') as f:
             f.write(self.mimetype_template)
-        finally:
-            f.close()
 
     def build_container(self, outdir, outname):
         """Write the metainfo file META-INF/container.xml."""
@@ -511,11 +508,8 @@ class EpubBuilder(StandaloneHTMLBuilder):
         except OSError as err:
             if err.errno != EEXIST:
                 raise
-        f = codecs.open(path.join(outdir, outname), 'w', 'utf-8')
-        try:
+        with codecs.open(path.join(outdir, outname), 'w', 'utf-8') as f:
             f.write(self.container_template)
-        finally:
-            f.close()
 
     def content_metadata(self, files, spine, guide):
         """Create a dictionary with all metadata for the content.opf

@@ -652,12 +646,9 @@ class EpubBuilder(StandaloneHTMLBuilder):
         guide = '\n'.join(guide)
 
         # write the project file
-        f = codecs.open(path.join(outdir, outname), 'w', 'utf-8')
-        try:
+        with codecs.open(path.join(outdir, outname), 'w', 'utf-8') as f:
             f.write(content_tmpl %
                     self.content_metadata(projectfiles, spine, guide))
-        finally:
-            f.close()
 
     def new_navpoint(self, node, level, incr=True):
         """Create a new entry in the toc from the node at given level."""

@@ -749,11 +740,8 @@ class EpubBuilder(StandaloneHTMLBuilder):
         navpoints = self.build_navpoints(refnodes)
         level = max(item['level'] for item in self.refnodes)
         level = min(level, self.config.epub_tocdepth)
-        f = codecs.open(path.join(outdir, outname), 'w', 'utf-8')
-        try:
+        with codecs.open(path.join(outdir, outname), 'w', 'utf-8') as f:
             f.write(self.toc_template % self.toc_metadata(level, navpoints))
-        finally:
-            f.close()
 
     def build_epub(self, outdir, outname):
         """Write the epub file.
@@ -203,11 +203,9 @@ class Epub3Builder(EpubBuilder):
         # 'includehidden'
         refnodes = self.refnodes
         navlist = self.build_navlist(refnodes)
-        f = codecs.open(path.join(outdir, outname), 'w', 'utf-8')
-        try:
+        with codecs.open(path.join(outdir, outname), 'w', 'utf-8') as f:
             f.write(self.navigation_doc_template %
                     self.navigation_doc_metadata(navlist))
-        finally:
-            f.close()
+            # Add nav.xhtml to epub file
+            self.files.append(outname)
+
-        # Add nav.xhtml to epub file
-        self.files.append(outname)
@@ -211,8 +211,7 @@ class MessageCatalogBuilder(I18nBuilder):
             ensuredir(path.join(self.outdir, path.dirname(textdomain)))
 
             pofn = path.join(self.outdir, textdomain + '.pot')
-            pofile = open(pofn, 'w', encoding='utf-8')
-            try:
+            with open(pofn, 'w', encoding='utf-8') as pofile:
                 pofile.write(POHEADER % data)
 
                 for message in catalog.messages:

@@ -234,6 +233,3 @@ class MessageCatalogBuilder(I18nBuilder):
                         replace('"', r'\"'). \
                         replace('\n', '\\n"\n"')
                     pofile.write('msgid "%s"\nmsgstr ""\n\n' % message)
-
-            finally:
-                pofile.close()
@@ -175,8 +175,7 @@ class StandaloneHTMLBuilder(Builder):
         self.tags_hash = get_stable_hash(sorted(self.tags))
         old_config_hash = old_tags_hash = ''
         try:
-            fp = open(path.join(self.outdir, '.buildinfo'))
-            try:
+            with open(path.join(self.outdir, '.buildinfo')) as fp:
                 version = fp.readline()
                 if version.rstrip() != '# Sphinx build info version 1':
                     raise ValueError
@@ -187,8 +186,6 @@ class StandaloneHTMLBuilder(Builder):
                 tag, old_tags_hash = fp.readline().strip().split(': ')
                 if tag != 'tags':
                     raise ValueError
-            finally:
-                fp.close()
         except ValueError:
             self.warn('unsupported build info format in %r, building all' %
                       path.join(self.outdir, '.buildinfo'))

@@ -657,15 +654,12 @@ class StandaloneHTMLBuilder(Builder):
 
     def write_buildinfo(self):
         # write build info file
-        fp = open(path.join(self.outdir, '.buildinfo'), 'w')
-        try:
+        with open(path.join(self.outdir, '.buildinfo'), 'w') as fp:
             fp.write('# Sphinx build info version 1\n'
                      '# This file hashes the configuration used when building'
                      ' these files. When it is not found, a full rebuild will'
                      ' be done.\nconfig: %s\ntags: %s\n' %
                      (self.config_hash, self.tags_hash))
-        finally:
-            fp.close()
 
     def cleanup(self):
         # clean up theme stuff
@@ -705,10 +699,8 @@ class StandaloneHTMLBuilder(Builder):
                     f = codecs.open(searchindexfn, 'r', encoding='utf-8')
                 else:
                     f = open(searchindexfn, 'rb')
-                try:
+                with f:
                     self.indexer.load(f, self.indexer_format)
-                finally:
-                    f.close()
             except (IOError, OSError, ValueError):
                 if keep:
                     self.warn('search index couldn\'t be loaded, but not all '
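The search-index spots differ from the rest of the diff: the file object is chosen by a conditional, so the refactor wraps the already-open handle with a bare `with f:` instead of opening it in the with statement itself. This works because objects returned by open() and codecs.open() support the context-manager protocol. A sketch under assumed names (load_index is illustrative):

    import codecs

    def load_index(path, as_text):
        # pick the handle first, then let the with statement own it
        if as_text:
            f = codecs.open(path, 'r', encoding='utf-8')
        else:
            f = open(path, 'rb')
        with f:  # closes whichever handle the branch produced
            return f.read()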
@@ -812,11 +804,8 @@ class StandaloneHTMLBuilder(Builder):
         # outfilename's path is in general different from self.outdir
         ensuredir(path.dirname(outfilename))
         try:
-            f = codecs.open(outfilename, 'w', encoding, 'xmlcharrefreplace')
-            try:
+            with codecs.open(outfilename, 'w', encoding, 'xmlcharrefreplace') as f:
                 f.write(output)
-            finally:
-                f.close()
         except (IOError, OSError) as err:
             self.warn("error writing file %s: %s" % (outfilename, err))
         if self.copysource and ctx.get('sourcename'):

@@ -833,8 +822,7 @@ class StandaloneHTMLBuilder(Builder):
 
     def dump_inventory(self):
         self.info(bold('dumping object inventory... '), nonl=True)
-        f = open(path.join(self.outdir, INVENTORY_FILENAME), 'wb')
-        try:
+        with open(path.join(self.outdir, INVENTORY_FILENAME), 'wb') as f:
             f.write((u'# Sphinx inventory version 2\n'
                      u'# Project: %s\n'
                      u'# Version: %s\n'

@@ -856,8 +844,6 @@ class StandaloneHTMLBuilder(Builder):
                     (u'%s %s:%s %s %s %s\n' % (name, domainname, type,
                                                prio, uri, dispname)).encode('utf-8')))
             f.write(compressor.flush())
-        finally:
-            f.close()
         self.info('done')
 
     def dump_search_index(self):

@@ -872,10 +858,8 @@ class StandaloneHTMLBuilder(Builder):
             f = codecs.open(searchindexfn + '.tmp', 'w', encoding='utf-8')
         else:
             f = open(searchindexfn + '.tmp', 'wb')
-        try:
+        with f:
             self.indexer.dump(f, self.indexer_format)
-        finally:
-            f.close()
         movefile(searchindexfn + '.tmp', searchindexfn)
         self.info('done')
 

@@ -1086,10 +1070,8 @@ class SerializingHTMLBuilder(StandaloneHTMLBuilder):
             f = codecs.open(filename, 'w', encoding='utf-8')
         else:
             f = open(filename, 'wb')
-        try:
+        with f:
             self.implementation.dump(context, f, *self.additional_dump_args)
-        finally:
-            f.close()
 
     def handle_page(self, pagename, ctx, templatename='page.html',
                     outfilename=None, event_arg=None):

@@ -198,16 +198,12 @@ class HTMLHelpBuilder(StandaloneHTMLBuilder):
 
     def build_hhx(self, outdir, outname):
         self.info('dumping stopword list...')
-        f = self.open_file(outdir, outname+'.stp')
-        try:
+        with self.open_file(outdir, outname+'.stp') as f:
             for word in sorted(stopwords):
                 print(word, file=f)
-        finally:
-            f.close()
 
         self.info('writing project file...')
-        f = self.open_file(outdir, outname+'.hhp')
-        try:
+        with self.open_file(outdir, outname+'.hhp') as f:
             f.write(project_template % {'outname': outname,
                                         'title': self.config.html_title,
                                         'version': self.config.version,

@@ -223,12 +219,9 @@ class HTMLHelpBuilder(StandaloneHTMLBuilder):
                        fn.endswith('.html'):
                         print(path.join(root, fn)[olen:].replace(os.sep, '\\'),
                               file=f)
-        finally:
-            f.close()
 
         self.info('writing TOC file...')
-        f = self.open_file(outdir, outname+'.hhc')
-        try:
+        with self.open_file(outdir, outname+'.hhc') as f:
             f.write(contents_header)
             # special books
             f.write('<LI> ' + object_sitemap % (self.config.html_short_title,

@@ -266,13 +259,10 @@ class HTMLHelpBuilder(StandaloneHTMLBuilder):
             for node in tocdoc.traverse(istoctree):
                 write_toc(node)
             f.write(contents_footer)
-        finally:
-            f.close()
 
         self.info('writing index file...')
         index = self.env.create_index(self)
-        f = self.open_file(outdir, outname+'.hhk')
-        try:
+        with self.open_file(outdir, outname+'.hhk') as f:
             f.write('<UL>\n')
 
             def write_index(title, refs, subitems):

@@ -302,5 +292,3 @@ class HTMLHelpBuilder(StandaloneHTMLBuilder):
                 for title, (refs, subitems, key_) in group:
                     write_index(title, refs, subitems)
             f.write('</UL>\n')
-        finally:
-            f.close()
@@ -179,8 +179,7 @@ class QtHelpBuilder(StandaloneHTMLBuilder):
         nspace = nspace.lower()
 
         # write the project file
-        f = codecs.open(path.join(outdir, outname+'.qhp'), 'w', 'utf-8')
-        try:
+        with codecs.open(path.join(outdir, outname+'.qhp'), 'w', 'utf-8') as f:
             f.write(project_template % {
                 'outname': htmlescape(outname),
                 'title': htmlescape(self.config.html_title),

@@ -191,23 +190,18 @@ class QtHelpBuilder(StandaloneHTMLBuilder):
                 'sections': sections,
                 'keywords': keywords,
                 'files': projectfiles})
-        finally:
-            f.close()
 
         homepage = 'qthelp://' + posixpath.join(
             nspace, 'doc', self.get_target_uri(self.config.master_doc))
         startpage = 'qthelp://' + posixpath.join(nspace, 'doc', 'index.html')
 
         self.info('writing collection project file...')
-        f = codecs.open(path.join(outdir, outname+'.qhcp'), 'w', 'utf-8')
-        try:
+        with codecs.open(path.join(outdir, outname+'.qhcp'), 'w', 'utf-8') as f:
             f.write(collection_template % {
                 'outname': htmlescape(outname),
                 'title': htmlescape(self.config.html_short_title),
                 'homepage': htmlescape(homepage),
                 'startpage': htmlescape(startpage)})
-        finally:
-            f.close()
 
     def isdocnode(self, node):
         if not isinstance(node, nodes.list_item):

@@ -220,11 +220,8 @@ class TexinfoBuilder(Builder):
         fn = path.join(self.outdir, 'Makefile')
         self.info(fn, nonl=1)
         try:
-            mkfile = open(fn, 'w')
-            try:
+            with open(fn, 'w') as mkfile:
                 mkfile.write(TEXINFO_MAKEFILE)
-            finally:
-                mkfile.close()
         except (IOError, OSError) as err:
             self.warn("error writing file %s: %s" % (fn, err))
         self.info(' done')

@@ -60,11 +60,8 @@ class TextBuilder(Builder):
         outfilename = path.join(self.outdir, os_path(docname) + self.out_suffix)
         ensuredir(path.dirname(outfilename))
         try:
-            f = codecs.open(outfilename, 'w', 'utf-8')
-            try:
+            with codecs.open(outfilename, 'w', 'utf-8') as f:
                 f.write(self.writer.output)
-            finally:
-                f.close()
         except (IOError, OSError) as err:
             self.warn("error writing file %s: %s" % (outfilename, err))
 

@@ -77,11 +77,8 @@ class XMLBuilder(Builder):
         outfilename = path.join(self.outdir, os_path(docname) + self.out_suffix)
         ensuredir(path.dirname(outfilename))
         try:
-            f = codecs.open(outfilename, 'w', 'utf-8')
-            try:
+            with codecs.open(outfilename, 'w', 'utf-8') as f:
                 f.write(self.writer.output)
-            finally:
-                f.close()
         except (IOError, OSError) as err:
             self.warn("error writing file %s: %s" % (outfilename, err))
 
@@ -173,13 +173,12 @@ class LiteralInclude(Directive):
     }
 
     def read_with_encoding(self, filename, document, codec_info, encoding):
-        f = None
         try:
-            f = codecs.StreamReaderWriter(open(filename, 'rb'), codec_info[2],
-                                          codec_info[3], 'strict')
-            lines = f.readlines()
-            lines = dedent_lines(lines, self.options.get('dedent'))
-            return lines
+            with codecs.StreamReaderWriter(open(filename, 'rb'), codec_info[2],
+                                           codec_info[3], 'strict') as f:
+                lines = f.readlines()
+                lines = dedent_lines(lines, self.options.get('dedent'))
+                return lines
         except (IOError, OSError):
             return [document.reporter.warning(
                 'Include file %r not found or reading it failed' % filename,

@@ -189,9 +188,6 @@ class LiteralInclude(Directive):
                 'Encoding %r used for reading included file %r seems to '
                 'be wrong, try giving an :encoding: option' %
                 (encoding, filename))]
-        finally:
-            if f is not None:
-                f.close()
 
     def run(self):
         document = self.state.document
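With the context manager, read_with_encoding no longer needs the `f = None` sentinel that let the finally clause decide whether anything was open: returning from inside the with block runs the cleanup on the way out, and a failed open never reaches the block at all. A simplified, runnable sketch of the same control flow (read_lines is an assumed name):

    import codecs

    def read_lines(filename, encoding='utf-8'):
        try:
            with codecs.open(filename, 'r', encoding) as f:
                return f.readlines()  # f is closed even on this return
        except (IOError, OSError):
            return None  # open() failed; there was nothing to close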
@@ -103,11 +103,8 @@ class BuildEnvironment:
 
     @staticmethod
     def frompickle(srcdir, config, filename):
-        picklefile = open(filename, 'rb')
-        try:
+        with open(filename, 'rb') as picklefile:
             env = pickle.load(picklefile)
-        finally:
-            picklefile.close()
         if env.version != ENV_VERSION:
             raise IOError('build environment version not current')
         if env.srcdir != srcdir:

@@ -123,7 +120,6 @@ class BuildEnvironment:
         del self.config.values
         domains = self.domains
         del self.domains
-        picklefile = open(filename, 'wb')
         # remove potentially pickling-problematic values from config
         for key, val in list(vars(self.config).items()):
             if key.startswith('_') or \

@@ -131,10 +127,8 @@ class BuildEnvironment:
                     isinstance(val, types.FunctionType) or \
                     isinstance(val, class_types):
                 del self.config[key]
-        try:
+        with open(filename, 'wb') as picklefile:
             pickle.dump(self, picklefile, pickle.HIGHEST_PROTOCOL)
-        finally:
-            picklefile.close()
         # reset attributes
         self.domains = domains
         self.config.values = values
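In topickle the open() call also moves: the old code created the pickle file before pruning unpicklable config values, holding it open through work that could fail, while the rewrite opens the file only for the duration of the dump. A sketch of that ordering (save_state and the pruning rule are illustrative, not the Sphinx method):

    import pickle

    def save_state(obj, filename):
        # prune first; acquire the file only when it is actually needed
        state = {k: v for k, v in vars(obj).items() if not k.startswith('_')}
        with open(filename, 'wb') as picklefile:
            pickle.dump(state, picklefile, pickle.HIGHEST_PROTOCOL)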
@@ -751,12 +745,9 @@ class BuildEnvironment:
         if self.versioning_compare:
             # get old doctree
             try:
-                f = open(self.doc2path(docname,
-                                       self.doctreedir, '.doctree'), 'rb')
-                try:
+                with open(self.doc2path(docname,
+                                        self.doctreedir, '.doctree'), 'rb') as f:
                     old_doctree = pickle.load(f)
-                finally:
-                    f.close()
             except EnvironmentError:
                 pass
 

@@ -786,11 +777,8 @@ class BuildEnvironment:
         doctree_filename = self.doc2path(docname, self.doctreedir,
                                          '.doctree')
         ensuredir(path.dirname(doctree_filename))
-        f = open(doctree_filename, 'wb')
-        try:
+        with open(doctree_filename, 'wb') as f:
             pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL)
-        finally:
-            f.close()
 
     # utilities to use while reading a document
 

@@ -1226,11 +1214,8 @@ class BuildEnvironment:
     def get_doctree(self, docname):
         """Read the doctree for a file from the pickle and return it."""
         doctree_filename = self.doc2path(docname, self.doctreedir, '.doctree')
-        f = open(doctree_filename, 'rb')
-        try:
+        with open(doctree_filename, 'rb') as f:
             doctree = pickle.load(f)
-        finally:
-            f.close()
         doctree.settings.env = self
         doctree.reporter = Reporter(self.doc2path(docname), 2, 5,
                                     stream=WarningStream(self._warnfunc))
@@ -87,8 +87,7 @@ class CoverageBuilder(Builder):
         c_objects = self.env.domaindata['c']['objects']
         for filename in self.c_sourcefiles:
             undoc = set()
-            f = open(filename, 'r')
-            try:
+            with open(filename, 'r') as f:
                 for line in f:
                     for key, regex in self.c_regexes:
                         match = regex.match(line)

@@ -101,15 +100,12 @@ class CoverageBuilder(Builder):
                                 else:
                                     undoc.add((key, name))
                             continue
-            finally:
-                f.close()
             if undoc:
                 self.c_undoc[filename] = undoc
 
     def write_c_coverage(self):
         output_file = path.join(self.outdir, 'c.txt')
-        op = open(output_file, 'w')
-        try:
+        with open(output_file, 'w') as op:
             if self.config.coverage_write_headline:
                 write_header(op, 'Undocumented C API elements', '=')
             op.write('\n')

@@ -119,8 +115,6 @@ class CoverageBuilder(Builder):
                 for typ, name in sorted(undoc):
                     op.write(' * %-50s [%9s]\n' % (name, typ))
                 op.write('\n')
-        finally:
-            op.close()
 
     def build_py_coverage(self):
         objects = self.env.domaindata['py']['objects']

@@ -214,9 +208,8 @@ class CoverageBuilder(Builder):
 
     def write_py_coverage(self):
         output_file = path.join(self.outdir, 'python.txt')
-        op = open(output_file, 'w')
         failed = []
-        try:
+        with open(output_file, 'w') as op:
             if self.config.coverage_write_headline:
                 write_header(op, 'Undocumented Python objects', '=')
             keys = sorted(self.py_undoc.keys())

@@ -247,17 +240,12 @@ class CoverageBuilder(Builder):
             if failed:
                 write_header(op, 'Modules that failed to import')
                 op.writelines(' * %s -- %s\n' % x for x in failed)
-        finally:
-            op.close()
 
     def finish(self):
         # dump the coverage data to a pickle file too
         picklepath = path.join(self.outdir, 'undoc.pickle')
-        dumpfile = open(picklepath, 'wb')
-        try:
+        with open(picklepath, 'wb') as dumpfile:
             pickle.dump((self.py_undoc, self.c_undoc), dumpfile)
-        finally:
-            dumpfile.close()
 
 
 def setup(app):
@@ -82,11 +82,8 @@ class Graphviz(Directive):
             rel_filename, filename = env.relfn2path(argument)
             env.note_dependency(rel_filename)
             try:
-                fp = codecs.open(filename, 'r', 'utf-8')
-                try:
+                with codecs.open(filename, 'r', 'utf-8') as fp:
                     dotcode = fp.read()
-                finally:
-                    fp.close()
             except (IOError, OSError):
                 return [document.reporter.warning(
                     'External Graphviz file %r not found or reading '

@@ -239,11 +236,8 @@ def render_dot_html(self, node, code, options, prefix='graphviz',
 <p class="warning">%s</p></object>\n''' % (fname, alt)
             self.body.append(svgtag)
         else:
-            mapfile = open(outfn + '.map', 'rb')
-            try:
+            with open(outfn + '.map', 'rb') as mapfile:
                 imgmap = mapfile.readlines()
-            finally:
-                mapfile.close()
             if len(imgmap) == 2:
                 # nothing in image map (the lines are <map> and </map>)
                 self.body.append('<img src="%s" alt="%s" %s/>\n' %

@@ -70,10 +70,8 @@ class SphinxFileSystemLoader(FileSystemLoader):
             f = open_if_exists(filename)
             if f is None:
                 continue
-            try:
+            with f:
                 contents = f.read().decode(self.encoding)
-            finally:
-                f.close()
 
             mtime = path.getmtime(filename)
 

@@ -92,11 +92,8 @@ class Driver(object):
 
     def parse_file(self, filename, debug=False):
         """Parse a file and return the syntax tree."""
-        stream = open(filename)
-        try:
+        with open(filename) as stream:
             return self.parse_stream(stream, debug)
-        finally:
-            stream.close()
 
     def parse_string(self, text, debug=False):
         """Parse a string and return the syntax tree."""

@@ -1487,11 +1487,8 @@ def generate(d, overwrite=True, silent=False):
     def write_file(fpath, content, newline=None):
         if overwrite or not path.isfile(fpath):
             print('Creating file %s.' % fpath)
-            f = open(fpath, 'wt', encoding='utf-8', newline=newline)
-            try:
+            with open(fpath, 'wt', encoding='utf-8', newline=newline) as f:
                 f.write(content)
-            finally:
-                f.close()
         else:
             print('File %s already exists, skipping.' % fpath)
 
@@ -23,18 +23,14 @@ IEND_CHUNK = b'\x00\x00\x00\x00IEND\xAE\x42\x60\x82'
 
 def read_png_depth(filename):
     """Read the special tEXt chunk indicating the depth from a PNG file."""
-    result = None
-    f = open(filename, 'rb')
-    try:
+    with open(filename, 'rb') as f:
         f.seek(- (LEN_IEND + LEN_DEPTH), 2)
         depthchunk = f.read(LEN_DEPTH)
         if not depthchunk.startswith(DEPTH_CHUNK_LEN + DEPTH_CHUNK_START):
             # either not a PNG file or not containing the depth chunk
             return None
-        result = struct.unpack('!i', depthchunk[14:18])[0]
-    finally:
-        f.close()
-    return result
+        else:
+            return struct.unpack('!i', depthchunk[14:18])[0]
 
 
 def write_png_depth(filename, depth):
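read_png_depth also drops its accumulator: the old version stashed the value in `result` so the trailing `return result` could run after the finally clause, whereas a with statement lets both outcomes return directly from inside the block. A runnable sketch of the same idea (read_trailer_int and its arguments are made up for illustration):

    import struct

    def read_trailer_int(filename, offset_from_end, fmt='!i'):
        size = struct.calcsize(fmt)
        with open(filename, 'rb') as f:
            f.seek(-offset_from_end, 2)  # whence=2: relative to end of file
            chunk = f.read(size)
            if len(chunk) < size:
                return None              # not enough data; f still gets closed
            return struct.unpack(fmt, chunk)[0]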
@@ -43,8 +39,7 @@ def write_png_depth(filename, depth):
     The chunk is placed immediately before the special IEND chunk.
     """
     data = struct.pack('!i', depth)
-    f = open(filename, 'r+b')
-    try:
+    with open(filename, 'r+b') as f:
         # seek to the beginning of the IEND chunk
         f.seek(-LEN_IEND, 2)
         # overwrite it with the depth chunk

@@ -54,5 +49,3 @@ def write_png_depth(filename, depth):
         f.write(struct.pack('!I', crc))
         # replace the IEND chunk
         f.write(IEND_CHUNK)
-    finally:
-        f.close()

@@ -105,11 +105,8 @@ def execfile_(filepath, _globals, open=open):
     from sphinx.util.osutil import fs_encoding
     # get config source -- 'b' is a no-op under 2.x, while 'U' is
     # ignored under 3.x (but 3.x compile() accepts \r\n newlines)
-    f = open(filepath, 'rbU')
-    try:
+    with open(filepath, 'rbU') as f:
         source = f.read()
-    finally:
-        f.close()
 
     # py26 accept only LF eol instead of CRLF
     if sys.version_info[:2] == (2, 6):

@@ -130,11 +130,8 @@ class WebSupport(object):
         """Load and return the "global context" pickle."""
         if not self._globalcontext:
             infilename = path.join(self.datadir, 'globalcontext.pickle')
-            f = open(infilename, 'rb')
-            try:
+            with open(infilename, 'rb') as f:
                 self._globalcontext = pickle.load(f)
-            finally:
-                f.close()
         return self._globalcontext
 
     def get_document(self, docname, username='', moderator=False):
@@ -185,14 +182,11 @@ class WebSupport(object):
         infilename = docpath + '.fpickle'
 
         try:
-            f = open(infilename, 'rb')
+            with open(infilename, 'rb') as f:
+                document = pickle.load(f)
         except IOError:
             raise errors.DocumentNotFoundError(
                 'The document "%s" could not be found' % docname)
-        try:
-            document = pickle.load(f)
-        finally:
-            f.close()
 
         comment_opts = self._make_comment_options(username, moderator)
         comment_meta = self._make_metadata(
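The last hunk tightens the error handling as well: the IOError guard now covers both the open and the load, and the separate try/finally that did the loading afterwards goes away. A sketch of the resulting flow (load_document and LookupError stand in for the WebSupport method and its DocumentNotFoundError):

    import pickle

    def load_document(infilename):
        try:
            with open(infilename, 'rb') as f:
                return pickle.load(f)
        except IOError:
            raise LookupError('The document %r could not be found' % infilename)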