Mirror of https://github.com/sphinx-doc/sphinx.git, synced 2025-02-25 18:55:22 -06:00

commit 548a6dc22e  (parent 3047a23a66)

    final pep8 fixes; reactivate most warnings in flake8

doc/conf.py  (11 lines changed)
@@ -38,10 +38,11 @@ epub_scheme = 'url'
 epub_identifier = epub_publisher
 epub_pre_files = [('index.html', 'Welcome')]
 epub_post_files = [('install.html', 'Installing Sphinx'),
-    ('develop.html', 'Sphinx development')]
+                   ('develop.html', 'Sphinx development')]
 epub_exclude_files = ['_static/opensearch.xml', '_static/doctools.js',
-    '_static/jquery.js', '_static/searchtools.js', '_static/underscore.js',
-    '_static/basic.css', 'search.html', '_static/websupport.js']
+                      '_static/jquery.js', '_static/searchtools.js',
+                      '_static/underscore.js', '_static/basic.css',
+                      'search.html', '_static/websupport.js']
 epub_fix_images = False
 epub_max_image_width = 0
 epub_show_urls = 'inline'
@@ -93,11 +94,11 @@ gettext_compact = False
 
 # -- Extension interface -------------------------------------------------------
 
-from sphinx import addnodes
+from sphinx import addnodes  # noqa
 
 event_sig_re = re.compile(r'([a-zA-Z-]+)\s*\((.*)\)')
 
 
 def parse_event(env, sig, signode):
     m = event_sig_re.match(sig)
     if not m:
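Note: the "# noqa" markers added throughout this commit are flake8's per-line suppression comments; a bare "# noqa" silences every check reported on that line (any trailing text, like the explanatory notes used below, is ignored by flake8). A minimal sketch of the pattern, with an illustrative module that is not part of the Sphinx sources:

    # demo.py -- illustrative only
    from os import path           # flake8 reports F401: 'path' imported but unused
    from os import sep  # noqa -- equally unused, but the marker keeps flake8 quiet

    print('nothing else in this module uses the imports above')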
@@ -24,6 +24,6 @@ directory = sphinx/locale/
 universal = 1
 
 [flake8]
-max-line-length=90
-ignore=W503,E111,E112,E113,E114,E115,E116,E121,E122,E123,E124,E125,E126,E127,E128,E129,E131,E202,E203,E205,E221,E222,E225,E226,E231,E241,E251,E261,E262,E265,E266,E271,E301,E302,E303,E402,E701,E703,E704,E711,E712,E713,E721,E731,F401,F402,F403,F812,F841
-exclude=utils/*,tests/*,build/*,sphinx/search/*,sphinx/pycode/pgen2/*
+max-line-length=95
+ignore=E116,E221,E226,E241,E251
+exclude=ez_setup.py,utils/*,tests/*,build/*,sphinx/search/*,sphinx/pycode/pgen2/*
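Note: the new [flake8] section raises the line-length limit from 90 to 95 and shrinks the ignore list from roughly fifty codes to five, which is what "reactivate most warnings" refers to. A rough illustration (not taken from the Sphinx sources) of what the five surviving ignores still permit:

    short = 1
    aligned     = 3          # E221: extra spaces before '=' to line up assignments
    coords = (1,  2,  3)     # E241: multiple spaces after a comma, used for alignment
    area = 2*3 + 4*5         # E226: no whitespace around an arithmetic operator


    def scale(value, factor = 2):    # E251: spaces around '=' of a default argument
            # E116: a comment indented deeper than the surrounding code
        return value * factor


    print(scale(area, factor = 3))   # E251 again, at the call site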
setup.py  (4 lines changed)
@@ -157,12 +157,12 @@ else:
 
             outfile = open(js_file, 'wb')
             try:
-                outfile.write('Documentation.addTranslations(');
+                outfile.write('Documentation.addTranslations(')
                 dump(dict(
                     messages=jscatalog,
                     plural_expr=catalog.plural_expr,
                     locale=str(catalog.locale)
-                    ), outfile)
+                ), outfile)
                 outfile.write(');')
             finally:
                 outfile.close()
@@ -36,7 +36,7 @@ from sphinx.domains import ObjType, BUILTIN_DOMAINS
 from sphinx.domains.std import GenericObject, Target, StandardDomain
 from sphinx.builders import BUILTIN_BUILDERS
 from sphinx.environment import BuildEnvironment, SphinxStandaloneReader
-from sphinx.util import pycompat  # imported for side-effects
+from sphinx.util import pycompat  # noqa: imported for side-effects
 from sphinx.util import import_object
 from sphinx.util.tags import Tags
 from sphinx.util.osutil import ENOENT
@@ -27,8 +27,8 @@ from sphinx.util.parallel import ParallelTasks, SerialTasks, make_chunks, \
     parallel_available
 
 # side effect: registers roles and directives
-from sphinx import roles
-from sphinx import directives
+from sphinx import roles  # noqa
+from sphinx import directives  # noqa
 
 
 class Builder(object):
@@ -18,8 +18,8 @@ from sphinx import addnodes
 from sphinx.util.docfields import DocFieldTransformer
 
 # import and register directives
-from sphinx.directives.code import *
-from sphinx.directives.other import *
+from sphinx.directives.code import *  # noqa
+from sphinx.directives.other import *  # noqa
 
 
 # RE to strip backslash escapes
@@ -275,12 +275,12 @@ class Domain(object):
         return _('%s %s') % (self.label, type.lname)
 
 
-from sphinx.domains.c import CDomain
-from sphinx.domains.cpp import CPPDomain
-from sphinx.domains.std import StandardDomain
-from sphinx.domains.python import PythonDomain
-from sphinx.domains.javascript import JavaScriptDomain
-from sphinx.domains.rst import ReSTDomain
+from sphinx.domains.c import CDomain  # noqa
+from sphinx.domains.cpp import CPPDomain  # noqa
+from sphinx.domains.std import StandardDomain  # noqa
+from sphinx.domains.python import PythonDomain  # noqa
+from sphinx.domains.javascript import JavaScriptDomain  # noqa
+from sphinx.domains.rst import ReSTDomain  # noqa
 
 BUILTIN_DOMAINS = {
     'std': StandardDomain,
@@ -201,7 +201,7 @@ def html_visit_math(self, node):
         self.body.append('<span class="math">%s</span>' %
                          self.encode(node['latex']).strip())
     else:
-        c = ('<img class="math" src="%s"' % fname) + get_tooltip(self, node)
+        c = ('<img class="math" src="%s"' % fname) + get_tooltip(self, node)
         if depth is not None:
             c += ' style="vertical-align: %dpx"' % (-depth)
         self.body.append(c + '/>')
@@ -32,7 +32,7 @@ proj_name = os.getenv('SPHINXPROJ', '<project>')
 BUILDERS = [
     ("", "html", "to make standalone HTML files"),
     ("", "dirhtml", "to make HTML files named index.html in directories"),
-    ("", "singlehtml","to make a single large HTML file"),
+    ("", "singlehtml", "to make a single large HTML file"),
     ("", "pickle", "to make pickle files"),
     ("", "json", "to make JSON files"),
     ("", "htmlhelp", "to make HTML files and a HTML help project"),
@@ -41,7 +41,7 @@ BUILDERS = [
     ("", "epub", "to make an epub"),
     ("", "latex", "to make LaTeX files, you can set PAPER=a4 or PAPER=letter"),
     ("posix", "latexpdf", "to make LaTeX files and run them through pdflatex"),
-    ("posix", "latexpdfja","to make LaTeX files and run them through platex/dvipdfmx"),
+    ("posix", "latexpdfja", "to make LaTeX files and run them through platex/dvipdfmx"),
     ("", "text", "to make text files"),
     ("", "man", "to make manual pages"),
     ("", "texinfo", "to make Texinfo files"),
@@ -30,8 +30,10 @@ _grammarfile = path.join(package_dir, 'pycode',
 pygrammar = driver.load_grammar(_grammarfile)
 pydriver = driver.Driver(pygrammar, convert=nodes.convert)
 
+
 # an object with attributes corresponding to token and symbol names
-class sym: pass
+class sym:
+    pass
 for k, v in iteritems(pygrammar.symbol2number):
     setattr(sym, k, v)
 for k, v in iteritems(token.tok_name):
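Note: splitting "class sym: pass" onto two lines fixes pep8's E701 (multiple statements on one line with a colon). The surrounding code uses a bare class purely as a namespace that is filled in at import time; a small sketch of that pattern, with made-up names:

    class Namespace:
        pass

    # populate the namespace dynamically, the way `sym` is filled from the grammar table
    for name, number in {'simple_stmt': 1, 'expr_stmt': 2}.items():
        setattr(Namespace, name, number)

    print(Namespace.expr_stmt)   # -> 2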
@@ -132,7 +134,7 @@ class AttrDocVisitor(nodes.NodeVisitor):
         if not prev:
             return
         if prev.type == sym.simple_stmt and \
-           prev[0].type == sym.expr_stmt and _eq in prev[0].children:
+                prev[0].type == sym.expr_stmt and _eq in prev[0].children:
             # need to "eval" the string because it's returned in its
             # original form
             docstring = literals.evalString(node[0].value, self.encoding)
@@ -145,16 +147,16 @@ class AttrDocVisitor(nodes.NodeVisitor):
             target = node[i]
             if self.in_init and self.number2name[target.type] == 'power':
                 # maybe an attribute assignment -- check necessary conditions
-                if (# node must have two children
-                    len(target) != 2 or
-                    # first child must be "self"
-                    target[0].type != token.NAME or target[0].value != 'self' or
-                    # second child must be a "trailer" with two children
-                    self.number2name[target[1].type] != 'trailer' or
-                    len(target[1]) != 2 or
-                    # first child must be a dot, second child a name
-                    target[1][0].type != token.DOT or
-                    target[1][1].type != token.NAME):
+                if (  # node must have two children
+                        len(target) != 2 or
+                        # first child must be "self"
+                        target[0].type != token.NAME or target[0].value != 'self' or
+                        # second child must be a "trailer" with two children
+                        self.number2name[target[1].type] != 'trailer' or
+                        len(target[1]) != 2 or
+                        # first child must be a dot, second child a name
+                        target[1][0].type != token.DOT or
+                        target[1][1].type != token.NAME):
                     continue
                 name = target[1][1].value
             elif target.type != token.NAME:
@@ -287,6 +289,7 @@ class ModuleAnalyzer(object):
         indent = 0
         defline = False
         expect_indent = False
+
         def tokeniter(ignore = (token.COMMENT, token.NL)):
             for tokentup in self.tokens:
                 if tokentup[0] not in ignore:
@@ -335,17 +338,17 @@ if __name__ == '__main__':
     import time
     import pprint
     x0 = time.time()
-    #ma = ModuleAnalyzer.for_file(__file__.rstrip('c'), 'sphinx.builders.html')
+    # ma = ModuleAnalyzer.for_file(__file__.rstrip('c'), 'sphinx.builders.html')
     ma = ModuleAnalyzer.for_file('sphinx/environment.py',
                                  'sphinx.environment')
     ma.tokenize()
     x1 = time.time()
     ma.parse()
     x2 = time.time()
-    #for (ns, name), doc in iteritems(ma.find_attr_docs()):
-    #    print '>>', ns, name
-    #    print '\n'.join(doc)
+    # for (ns, name), doc in iteritems(ma.find_attr_docs()):
+    #     print '>>', ns, name
+    #     print '\n'.join(doc)
     pprint.pprint(ma.find_tags())
     x3 = time.time()
-    #print nodes.nice_repr(ma.parsetree, number2name)
+    # print nodes.nice_repr(ma.parsetree, number2name)
     print("tokenizing %.4f, parsing %.4f, finding %.4f" % (x1-x0, x2-x1, x3-x2))
@@ -176,6 +176,7 @@ def nice_repr(node, number2name, prefix=False):
         else:
             return "%s(%s)" % (number2name[node.type],
                                ', '.join(map(_repr, node.children)))
+
     def _prepr(node):
         if isinstance(node, Leaf):
             return "%s(%r, %r)" % (number2name[node.type],
@@ -12,7 +12,7 @@
 from pygments.style import Style
 from pygments.styles.friendly import FriendlyStyle
 from pygments.token import Generic, Comment, Number, Whitespace, Keyword, \
-     Operator, Name, String, Error
+    Operator, Name, String, Error
 
 
 class NoneStyle(Style):
@@ -81,7 +81,6 @@ class BuildDoc(Command):
     ]
     boolean_options = ['fresh-env', 'all-files', 'link-index']
 
-
     def initialize_options(self):
         self.fresh_env = self.all_files = False
         self.source_dir = self.build_dir = None
@@ -147,13 +146,13 @@ class BuildDoc(Command):
         status_stream = sys.stdout
         confoverrides = {}
         if self.project:
-           confoverrides['project'] = self.project
+            confoverrides['project'] = self.project
         if self.version:
-           confoverrides['version'] = self.version
+            confoverrides['version'] = self.version
         if self.release:
-           confoverrides['release'] = self.release
+            confoverrides['release'] = self.release
         if self.today:
-           confoverrides['today'] = self.today
+            confoverrides['today'] = self.today
         if self.copyright:
             confoverrides['copyright'] = self.copyright
         app = Sphinx(self.source_dir, self.config_dir,
@@ -35,11 +35,13 @@ from sphinx.util.osutil import fs_encoding
 
 # import other utilities; partly for backwards compatibility, so don't
 # prune unused ones indiscriminately
-from sphinx.util.osutil import SEP, os_path, relative_uri, ensuredir, walk, \
-    mtimes_of_files, movefile, copyfile, copytimes, make_filename, ustrftime
-from sphinx.util.nodes import nested_parse_with_titles, split_explicit_title, \
-    explicit_title_re, caption_ref_re
-from sphinx.util.matching import patfilter
+from sphinx.util.osutil import (  # noqa
+    SEP, os_path, relative_uri, ensuredir, walk, mtimes_of_files, movefile,
+    copyfile, copytimes, make_filename, ustrftime)
+from sphinx.util.nodes import (  # noqa
+    nested_parse_with_titles, split_explicit_title, explicit_title_re,
+    caption_ref_re)
+from sphinx.util.matching import patfilter  # noqa
 
 # Generally useful regular expressions.
 ws_re = re.compile(r'\s+')
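Note: per the comment kept in this hunk, these imports are re-exports retained for backwards compatibility rather than names used by the module itself, so flake8's F401 (unused import) has to be silenced; the hunk also replaces backslash continuations with parenthesized imports carrying the marker on the opening line. A minimal sketch of the resulting form, using an illustrative module and names:

    # re-exported for callers of this module; unused here, hence the bare "# noqa"
    from os.path import (  # noqa
        join, split, basename, dirname,
        abspath, normpath)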
@@ -11,7 +11,7 @@
 import warnings
 
 from docutils import nodes
-from docutils.parsers.rst import Directive
+from docutils.parsers.rst import Directive  # noqa
 
 from docutils import __version__ as _du_version
 docutils_version = tuple(int(x) for x in _du_version.split('.')[:2])
@@ -9,8 +9,14 @@
     :license: BSD, see LICENSE for details.
 """
 
+import io
 import sys
 import codecs
+import warnings
 
+from six import class_types
+from six.moves import zip_longest
+from itertools import product
+
 from six import PY3, text_type, exec_
 
@@ -45,7 +51,7 @@ if PY3:
             # try to match ParseError details with SyntaxError details
             raise SyntaxError(err.msg, (filepath, lineno, offset, err.value))
         return text_type(tree)
-    from html import escape as htmlescape  # >= Python 3.2
+    from html import escape as htmlescape  # noqa: >= Python 3.2
 
     class UnicodeMixin:
         """Mixin class to handle defining the proper __str__/__unicode__
@@ -72,7 +78,7 @@ else:
     # error handler
     sys_encoding = __import__('locale').getpreferredencoding()
     # use Python 3 name
-    from cgi import escape as htmlescape  # 2.6, 2.7
+    from cgi import escape as htmlescape  # noqa: 2.6, 2.7
 
     class UnicodeMixin(object):
         """Mixin class to handle defining the proper __str__/__unicode__
@@ -124,13 +130,6 @@ def execfile_(filepath, _globals, open=open):
 # ------------------------------------------------------------------------------
 # Internal module backwards-compatibility
 
-import warnings
-
-from six import class_types
-from six.moves import zip_longest
-import io
-from itertools import product
-
 
 class _DeprecationWrapper(object):
     def __init__(self, mod, deprecated):
@@ -58,7 +58,7 @@ class Node(Base):
 
         # Filter out all comments that are not moderated yet.
         if not moderator:
-            q = q.filter(Comment.displayed == True)
+            q = q.filter(Comment.displayed == True)  # noqa
 
         # Retrieve all results. Results must be ordered by Comment.path
         # so that we can easily transform them from a flat list to a tree.
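Note: the "== True" that pep8's E712 would normally reject is intentional here. On a SQLAlchemy column, "==" is overloaded to build a SQL expression, so it cannot be rewritten as "is True" or a bare truth test without changing the generated query; the "# noqa" keeps the comparison as written. A rough sketch of why, using an illustrative table rather than the websupport schema:

    from sqlalchemy import Boolean, Column, Integer, MetaData, Table

    comments = Table('comments', MetaData(),
                     Column('id', Integer, primary_key=True),
                     Column('displayed', Boolean))

    # "==" on a Column returns an expression object (rendered as WHERE displayed = true),
    # not a Python bool, which is why the E712-preferred spellings do not apply here.
    criterion = comments.c.displayed == True  # noqa
    print(type(criterion))   # a SQLAlchemy BinaryExpression, not bool
    print(str(criterion))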