Mirror of https://github.com/sphinx-doc/sphinx.git (synced 2025-02-25 18:55:22 -06:00)
use six-provided iteritems(), itervalues() to support py2/py3 in one source. refs #1350.
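For context, a minimal sketch of the pattern applied throughout the diff below (the handlers dict is a hypothetical stand-in, not taken from the Sphinx sources): six exposes iteritems() and itervalues() as plain functions, so a single spelling works on both Python 2 and Python 3 in place of the Python-2-only dict.iteritems()/dict.itervalues() methods.

    from six import iteritems, itervalues

    # hypothetical stand-in for the dict-of-callbacks structures changed below
    handlers = {'html-page-context': {1: 'cb_a', 2: 'cb_b'}}

    # Python 2 only:        handlers.iteritems(), handlers.itervalues()
    # Python 2 and 3 alike: iteritems(handlers), itervalues(handlers)
    for event, listeners in iteritems(handlers):
        assert isinstance(listeners, dict)
    for listeners in itervalues(handlers):
        listeners.pop(99, None)  # no-op pop, mirrors the Sphinx.disconnect() loop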
@@ -20,6 +20,7 @@ import traceback
from os import path

import six
from six import iteritems, itervalues
from docutils import nodes
from docutils.parsers.rst import convert_directive_function, \
directives, roles
@@ -388,7 +389,7 @@ class Sphinx(object):

def disconnect(self, listener_id):
self.debug('[app] disconnecting event: [id=%s]', listener_id)
for event in self._listeners.itervalues():
for event in itervalues(self._listeners):
event.pop(listener_id, None)

def emit(self, event, *args):
@@ -399,7 +400,7 @@ class Sphinx(object):
pass
results = []
if event in self._listeners:
for _, callback in self._listeners[event].iteritems():
for _, callback in iteritems(self._listeners[event]):
results.append(callback(self, *args))
return results

@@ -443,7 +444,7 @@ class Sphinx(object):
def add_node(self, node, **kwds):
self.debug('[app] adding node: %r', (node, kwds))
nodes._add_node_class_names([node.__name__])
for key, val in kwds.iteritems():
for key, val in iteritems(kwds):
try:
visit, depart = val
except ValueError:
@@ -12,6 +12,8 @@
import codecs
from os import path

from six import iteritems

from sphinx import package_dir
from sphinx.util import copy_static_entry
from sphinx.locale import _
@@ -93,9 +95,9 @@ class ChangesBuilder(Builder):
'version': version,
'docstitle': self.config.html_title,
'shorttitle': self.config.html_short_title,
'libchanges': sorted(libchanges.iteritems()),
'libchanges': sorted(iteritems(libchanges)),
'apichanges': sorted(apichanges),
'otherchanges': sorted(otherchanges.iteritems()),
'otherchanges': sorted(iteritems(otherchanges)),
'show_copyright': self.config.html_show_copyright,
'show_sphinx': self.config.html_show_sphinx,
}
@@ -142,7 +144,7 @@ class ChangesBuilder(Builder):
finally:
f.close()
themectx = dict(('theme_' + key, val) for (key, val) in
self.theme.get_options({}).iteritems())
iteritems(self.theme.get_options({})))
copy_static_entry(path.join(package_dir, 'themes', 'default',
'static', 'default.css_t'),
self.outdir, self, themectx)
@@ -18,6 +18,8 @@ from datetime import datetime, tzinfo, timedelta
from collections import defaultdict
from uuid import uuid4

from six import iteritems

from sphinx.builders import Builder
from sphinx.util import split_index_msg
from sphinx.util.nodes import extract_messages, traverse_translatable_index
@@ -186,7 +188,7 @@ class MessageCatalogBuilder(I18nBuilder):
timestamp, ltz).strftime('%Y-%m-%d %H:%M%z'),
)
for textdomain, catalog in self.status_iterator(
self.catalogs.iteritems(), "writing message catalogs... ",
iteritems(self.catalogs), "writing message catalogs... ",
darkgreen, len(self.catalogs),
lambda textdomain__: textdomain__[0]):
# noop if config.gettext_compact is set
@@ -18,6 +18,7 @@ from os import path
from hashlib import md5

import six
from six import iteritems, itervalues
from six.moves import cPickle as pickle
from docutils import nodes
from docutils.io import DocTreeInput, StringOutput
@@ -164,7 +165,7 @@ class StandaloneHTMLBuilder(Builder):

def get_outdated_docs(self):
cfgdict = dict((name, self.config[name])
for (name, desc) in self.config.values.iteritems()
for (name, desc) in iteritems(self.config.values)
if desc[1] == 'html')
self.config_hash = get_stable_hash(cfgdict)
self.tags_hash = get_stable_hash(sorted(self.tags))
@@ -265,7 +266,7 @@ class StandaloneHTMLBuilder(Builder):
# html_domain_indices can be False/True or a list of index names
indices_config = self.config.html_domain_indices
if indices_config:
for domain in self.env.domains.itervalues():
for domain in itervalues(self.env.domains):
for indexcls in domain.indices:
indexname = '%s-%s' % (domain.name, indexcls.name)
if isinstance(indices_config, list):
@@ -346,7 +347,7 @@ class StandaloneHTMLBuilder(Builder):
if self.theme:
self.globalcontext.update(
('theme_' + key, val) for (key, val) in
self.theme.get_options(self.theme_options).iteritems())
iteritems(self.theme.get_options(self.theme_options)))
self.globalcontext.update(self.config.html_context)

def get_doc_context(self, docname, body, metatags):
@@ -697,7 +698,7 @@ class StandaloneHTMLBuilder(Builder):
sidebars = None
matched = None
customsidebar = None
for pattern, patsidebars in self.config.html_sidebars.iteritems():
for pattern, patsidebars in iteritems(self.config.html_sidebars):
if patmatch(pagename, pattern):
if matched:
if has_wildcard(pattern):
@@ -799,7 +800,7 @@ class StandaloneHTMLBuilder(Builder):
% (self.config.project, self.config.version)
).encode('utf-8'))
compressor = zlib.compressobj(9)
for domainname, domain in self.env.domains.iteritems():
for domainname, domain in iteritems(self.env.domains):
for name, dispname, type, docname, anchor, prio in \
domain.get_objects():
if anchor.endswith(name):
@@ -12,6 +12,7 @@
import os
from os import path

from six import iteritems
from docutils import nodes
from docutils.io import FileOutput
from docutils.utils import new_document
@@ -152,7 +153,7 @@ class LaTeXBuilder(Builder):
# copy image files
if self.images:
self.info(bold('copying images...'), nonl=1)
for src, dest in self.images.iteritems():
for src, dest in iteritems(self.images):
self.info(' '+src, nonl=1)
copyfile(path.join(self.srcdir, src),
path.join(self.outdir, dest))
@@ -11,6 +11,7 @@

from os import path

from six import iteritems
from docutils import nodes
from docutils.io import FileOutput
from docutils.utils import new_document
@@ -207,7 +208,7 @@ class TexinfoBuilder(Builder):
# copy image files
if self.images:
self.info(bold('copying images...'), nonl=1)
for src, dest in self.images.iteritems():
for src, dest in iteritems(self.images):
self.info(' '+src, nonl=1)
copyfile(path.join(self.srcdir, src),
path.join(self.outdir, dest))
@@ -14,6 +14,7 @@ import re
from os import path

import six
from six import iteritems

from sphinx.errors import ConfigError
from sphinx.locale import l_
@@ -243,7 +244,7 @@ class Config(object):
def check_unicode(self, warn):
# check all string values for non-ASCII characters in bytestrings,
# since that can result in UnicodeErrors all over the place
for name, value in self._raw_config.iteritems():
for name, value in iteritems(self._raw_config):
if isinstance(value, six.binary_type) and nonascii_re.search(value):
warn('the config value %r is set to a string with non-ASCII '
'characters; this can lead to Unicode errors occurring. '
@@ -252,7 +253,7 @@ class Config(object):

def init_values(self, warn):
config = self._raw_config
for valname, value in self.overrides.iteritems():
for valname, value in iteritems(self.overrides):
if '.' in valname:
realvalname, key = valname.split('.', 1)
config.setdefault(realvalname, {})[key] = value
@@ -11,6 +11,7 @@

import re

from six import itervalues
from docutils.parsers.rst import Directive, directives

from sphinx import addnodes
@@ -178,7 +179,7 @@ class DefaultDomain(Directive):
domain_name = self.arguments[0].lower()
# if domain_name not in env.domains:
# # try searching by label
# for domain in env.domains.itervalues():
# for domain in itervalues(env.domains):
# if domain.label.lower() == domain_name:
# domain_name = domain.name
# break
@@ -10,6 +10,8 @@
:license: BSD, see LICENSE for details.
"""

from six import iteritems

from sphinx.errors import SphinxError
from sphinx.locale import _

@@ -153,7 +155,7 @@ class Domain(object):
self._role_cache = {}
self._directive_cache = {}
self._role2type = {}
for name, obj in self.object_types.iteritems():
for name, obj in iteritems(self.object_types):
for rolename in obj.roles:
self._role2type.setdefault(rolename, []).append(name)
self.objtypes_for_role = self._role2type.get
@@ -12,6 +12,7 @@
import re
import string

from six import iteritems
from docutils import nodes

from sphinx import addnodes
@@ -249,5 +250,5 @@ class CDomain(Domain):
contnode, target)

def get_objects(self):
for refname, (docname, type) in self.data['objects'].iteritems():
for refname, (docname, type) in iteritems(self.data['objects']):
yield (refname, refname, type, docname, 'c.' + refname, 1)
@@ -12,6 +12,7 @@
import re
from copy import deepcopy

from six import iteritems
from docutils import nodes

from sphinx import addnodes
@@ -121,7 +122,7 @@ class DefExpr(object):
if type(self) is not type(other):
return False
try:
for key, value in self.__dict__.iteritems():
for key, value in iteritems(self.__dict__):
if value != getattr(other, key):
return False
except AttributeError:
@@ -1313,5 +1314,5 @@ class CPPDomain(Domain):
return _create_refnode(expr.prefix(parent))

def get_objects(self):
for refname, (docname, type, theid) in self.data['objects'].iteritems():
for refname, (docname, type, theid) in iteritems(self.data['objects']):
yield (refname, refname, type, docname, refname, 1)
@@ -9,6 +9,8 @@
:license: BSD, see LICENSE for details.
"""

from six import iteritems

from sphinx import addnodes
from sphinx.domains import Domain, ObjType
from sphinx.locale import l_, _
@@ -215,6 +217,6 @@ class JavaScriptDomain(Domain):
name.replace('$', '_S_'), contnode, name)

def get_objects(self):
for refname, (docname, type) in self.data['objects'].iteritems():
for refname, (docname, type) in iteritems(self.data['objects']):
yield refname, refname, type, docname, \
refname.replace('$', '_S_'), 1
@@ -11,6 +11,7 @@

import re

from six import iteritems
from docutils import nodes
from docutils.parsers.rst import directives

@@ -514,7 +515,7 @@ class PythonModuleIndex(Index):
ignores = self.domain.env.config['modindex_common_prefix']
ignores = sorted(ignores, key=len, reverse=True)
# list of all modules, sorted by module name
modules = sorted(self.domain.data['modules'].iteritems(),
modules = sorted(iteritems(self.domain.data['modules']),
key=lambda x: x[0].lower())
# sort out collapsable modules
prev_modname = ''
@@ -564,7 +565,7 @@ class PythonModuleIndex(Index):
collapse = len(modules) - num_toplevels < num_toplevels

# sort by first letter
content = sorted(content.iteritems())
content = sorted(iteritems(content))

return content, collapse

@@ -720,8 +721,8 @@ class PythonDomain(Domain):
contnode, name)

def get_objects(self):
for modname, info in self.data['modules'].iteritems():
for modname, info in iteritems(self.data['modules']):
yield (modname, modname, 'module', info[0], 'module-' + modname, 0)
for refname, (docname, type) in self.data['objects'].iteritems():
for refname, (docname, type) in iteritems(self.data['objects']):
if type != 'module': # modules are already handled
yield (refname, refname, type, docname, refname, 1)
@@ -11,6 +11,8 @@

import re

from six import iteritems

from sphinx import addnodes
from sphinx.domains import Domain, ObjType
from sphinx.locale import l_, _
@@ -133,5 +135,5 @@ class ReSTDomain(Domain):
contnode, target + ' ' + objtype)

def get_objects(self):
for (typ, name), docname in self.data['objects'].iteritems():
for (typ, name), docname in iteritems(self.data['objects']):
yield name, name, typ, docname, typ + '-' + name, 1
@@ -12,6 +12,7 @@
import re
import unicodedata

from six import iteritems
from docutils import nodes
from docutils.parsers.rst import directives
from docutils.statemachine import ViewList
@@ -523,7 +524,7 @@ class StandardDomain(Domain):

def process_doc(self, env, docname, document):
labels, anonlabels = self.data['labels'], self.data['anonlabels']
for name, explicit in document.nametypes.iteritems():
for name, explicit in iteritems(document.nametypes):
if not explicit:
continue
labelid = document.nameids[name]
@@ -621,16 +622,16 @@ class StandardDomain(Domain):
labelid, contnode)

def get_objects(self):
for (prog, option), info in self.data['progoptions'].iteritems():
for (prog, option), info in iteritems(self.data['progoptions']):
yield (option, option, 'option', info[0], info[1], 1)
for (type, name), info in self.data['objects'].iteritems():
for (type, name), info in iteritems(self.data['objects']):
yield (name, name, type, info[0], info[1],
self.object_types[type].attrs['searchprio'])
for name, info in self.data['labels'].iteritems():
for name, info in iteritems(self.data['labels']):
yield (name, info[2], 'label', info[0], info[1], -1)
# add anonymous-only labels as well
non_anon_labels = set(self.data['labels'])
for name, info in self.data['anonlabels'].iteritems():
for name, info in iteritems(self.data['anonlabels']):
if name not in non_anon_labels:
yield (name, name, 'label', info[0], info[1], -1)
@@ -23,6 +23,7 @@ from glob import glob
from itertools import groupby

import six
from six import iteritems, itervalues
from six.moves import cPickle as pickle, zip
from docutils import nodes
from docutils.io import FileInput, NullOutput
@@ -421,7 +422,7 @@ class BuildEnvironment:
else:
# check if a config value was changed that affects how
# doctrees are read
for key, descr in config.values.iteritems():
for key, descr in iteritems(config.values):
if descr[1] != 'env':
continue
if self.config[key] != config[key]:
@@ -632,7 +633,7 @@ class BuildEnvironment:
self.note_indexentries_from(docname, doctree)
self.note_citations_from(docname, doctree)
self.build_toc_from(docname, doctree)
for domain in self.domains.itervalues():
for domain in itervalues(self.domains):
domain.process_doc(self, docname, doctree)

# allow extension-specific post-processing
@@ -818,7 +819,7 @@ class BuildEnvironment:
candidates['*'] = rel_imgpath
# map image paths to unique image names (so that they can be put
# into a single directory)
for imgpath in candidates.itervalues():
for imgpath in itervalues(candidates):
self.dependencies.setdefault(docname, set()).add(imgpath)
if not os.access(path.join(self.srcdir, imgpath), os.R_OK):
self.warn_node('image file not readable: %s' % imgpath,
@@ -1522,7 +1523,7 @@ class BuildEnvironment:
else:
entry[0].append((main, uri))

for fn, entries in self.indexentries.iteritems():
for fn, entries in iteritems(self.indexentries):
# new entry types must be listed in directives/other.py!
for type, value, tid, main in entries:
try:
@@ -1597,7 +1598,7 @@ class BuildEnvironment:
def keyfunc2(item, letters=string.ascii_uppercase + '_'):
# hack: mutating the subitems dicts to a list in the keyfunc
k, v = item
v[1] = sorted((si, se) for (si, (se, void)) in v[1].iteritems())
v[1] = sorted((si, se) for (si, (se, void)) in iteritems(v[1]))
# now calculate the key
letter = unicodedata.normalize('NFD', k[0])[0].upper()
if letter in letters:
@@ -18,6 +18,7 @@ import traceback
from types import FunctionType, BuiltinFunctionType, MethodType

import six
from six import iteritems, itervalues
from docutils import nodes
from docutils.utils import assemble_option_dict
from docutils.statemachine import ViewList
@@ -264,7 +265,7 @@ class Documenter(object):
@staticmethod
def get_attr(obj, name, *defargs):
"""getattr() override for types such as Zope interfaces."""
for typ, func in AutoDirective._special_attrgetters.iteritems():
for typ, func in iteritems(AutoDirective._special_attrgetters):
if isinstance(obj, typ):
return func(obj, name, *defargs)
return safe_getattr(obj, name, *defargs)
@@ -550,7 +551,7 @@ class Documenter(object):
if self.analyzer:
attr_docs = self.analyzer.find_attr_docs()
namespace = '.'.join(self.objpath)
for item in attr_docs.iteritems():
for item in iteritems(attr_docs):
if item[0][0] == namespace:
analyzed_member_names.add(item[0][1])
if not want_all:
@@ -691,7 +692,7 @@ class Documenter(object):
# document non-skipped members
memberdocumenters = []
for (mname, member, isattr) in self.filter_members(members, want_all):
classes = [cls for cls in AutoDirective._registry.itervalues()
classes = [cls for cls in itervalues(AutoDirective._registry)
if cls.can_document_member(member, mname, isattr, self)]
if not classes:
# don't know how to document this member
@@ -15,6 +15,7 @@ import glob
import inspect
from os import path

from six import iteritems
from six.moves import cPickle as pickle

from sphinx.builders import Builder
@@ -53,7 +54,7 @@ class CoverageBuilder(Builder):
self.warn('invalid regex %r in coverage_c_regexes' % exp)

self.c_ignorexps = {}
for (name, exps) in self.config.coverage_ignore_c_items.iteritems():
for (name, exps) in iteritems(self.config.coverage_ignore_c_items):
self.c_ignorexps[name] = compile_regex_list(
'coverage_ignore_c_items', exps, self.warn)
self.mod_ignorexps = compile_regex_list(
@@ -110,7 +111,7 @@ class CoverageBuilder(Builder):
write_header(op, 'Undocumented C API elements', '=')
op.write('\n')

for filename, undoc in self.c_undoc.iteritems():
for filename, undoc in iteritems(self.c_undoc):
write_header(op, filename)
for typ, name in undoc:
op.write(' * %-50s [%9s]\n' % (name, typ))
@@ -230,7 +231,7 @@ class CoverageBuilder(Builder):
if undoc['classes']:
op.write('Classes:\n')
for name, methods in sorted(
undoc['classes'].iteritems()):
iteritems(undoc['classes'])):
if not methods:
op.write(' * %s\n' % name)
else:
@@ -19,6 +19,7 @@ from os import path
doctest = __import__('doctest')

import six
from six import itervalues
from docutils import nodes
from docutils.parsers.rst import directives

@@ -311,24 +312,24 @@ Doctest summary
groups[groupname] = TestGroup(groupname)
groups[groupname].add_code(code)
for code in add_to_all_groups:
for group in groups.itervalues():
for group in itervalues(groups):
group.add_code(code)
if self.config.doctest_global_setup:
code = TestCode(self.config.doctest_global_setup,
'testsetup', lineno=0)
for group in groups.itervalues():
for group in itervalues(groups):
group.add_code(code, prepend=True)
if self.config.doctest_global_cleanup:
code = TestCode(self.config.doctest_global_cleanup,
'testcleanup', lineno=0)
for group in groups.itervalues():
for group in itervalues(groups):
group.add_code(code)
if not groups:
return

self._out('\nDocument: %s\n----------%s\n' %
(docname, '-'*len(docname)))
for group in groups.itervalues():
for group in itervalues(groups):
self.test_group(group, self.env.doc2path(docname, base=None))
# Separately count results from setup code
res_f, res_t = self.setup_runner.summarize(self._out, verbose=False)
@@ -24,6 +24,7 @@
:license: BSD, see LICENSE for details.
"""

from six import iteritems
from docutils import nodes, utils

from sphinx.util.nodes import split_explicit_title
@@ -51,7 +52,7 @@ def make_link_role(base_url, prefix):
return role

def setup_link_roles(app):
for name, (base_url, prefix) in app.config.extlinks.iteritems():
for name, (base_url, prefix) in iteritems(app.config.extlinks):
app.add_role(name, make_link_role(base_url, prefix))

def setup(app):
@@ -32,6 +32,7 @@ import posixpath
from os import path
import re

from six import iteritems
from docutils import nodes
from docutils.utils import relative_path

@@ -167,7 +168,7 @@ def load_mappings(app):
env.intersphinx_named_inventory = {}
cache = env.intersphinx_cache
update = False
for key, value in app.config.intersphinx_mapping.iteritems():
for key, value in iteritems(app.config.intersphinx_mapping):
if isinstance(value, tuple):
# new format
name, (uri, inv) = key, value
@@ -202,13 +203,13 @@ def load_mappings(app):
# add the unnamed inventories last. This means that the
# unnamed inventories will shadow the named ones but the named
# ones can still be accessed when the name is specified.
cached_vals = list(cache.itervalues())
cached_vals = list(cache.values())
named_vals = sorted(v for v in cached_vals if v[0])
unnamed_vals = [v for v in cached_vals if not v[0]]
for name, _, invdata in named_vals + unnamed_vals:
if name:
env.intersphinx_named_inventory[name] = invdata
for type, objects in invdata.iteritems():
for type, objects in iteritems(invdata):
env.intersphinx_inventory.setdefault(
type, {}).update(objects)
@@ -12,6 +12,7 @@
import sys

import six
from six import iteritems

from sphinx.ext.napoleon.docstring import GoogleDocstring, NumpyDocstring

@@ -217,9 +218,9 @@ class Config(object):
}

def __init__(self, **settings):
for name, (default, rebuild) in self._config_values.iteritems():
for name, (default, rebuild) in iteritems(self._config_values):
setattr(self, name, default)
for name, value in settings.iteritems():
for name, value in iteritems(settings):
setattr(self, name, value)

@@ -252,7 +253,7 @@ def setup(app):
app.connect('autodoc-process-docstring', _process_docstring)
app.connect('autodoc-skip-member', _skip_member)

for name, (default, rebuild) in Config._config_values.iteritems():
for name, (default, rebuild) in iteritems(Config._config_values):
app.add_config_value(name, default, rebuild)
@@ -9,6 +9,7 @@
:license: BSD, see LICENSE for details.
"""

from six import iteritems
from docutils import nodes

from sphinx import addnodes
@@ -91,7 +92,7 @@ def collect_pages(app):
app.builder.info(' (%d module code pages)' %
len(env._viewcode_modules), nonl=1)

for modname, entry in env._viewcode_modules.iteritems():
for modname, entry in iteritems(env._viewcode_modules):
if not entry:
continue
code, tags, used = entry
@@ -109,7 +110,7 @@ def collect_pages(app):
# the collected tags (HACK: this only works if the tag boundaries are
# properly nested!)
maxindex = len(lines) - 1
for name, docname in used.iteritems():
for name, docname in iteritems(used):
type, start, end = tags[name]
backlink = urito(pagename, docname) + '#' + modname + '.' + name
lines[start] = (
@@ -14,6 +14,7 @@ import sys
from os import path

import six
from six import iteritems

from sphinx import package_dir
from sphinx.errors import PycodeError
@@ -32,9 +33,9 @@ pydriver = driver.Driver(pygrammar, convert=nodes.convert)

# an object with attributes corresponding to token and symbol names
class sym: pass
for k, v in pygrammar.symbol2number.iteritems():
for k, v in iteritems(pygrammar.symbol2number):
setattr(sym, k, v)
for k, v in token.tok_name.iteritems():
for k, v in iteritems(token.tok_name):
setattr(sym, v, k)

# a dict mapping terminal and nonterminal numbers to their names
@@ -341,7 +342,7 @@ if __name__ == '__main__':
x1 = time.time()
ma.parse()
x2 = time.time()
#for (ns, name), doc in ma.find_attr_docs().iteritems():
#for (ns, name), doc in iteritems(ma.find_attr_docs()):
# print '>>', ns, name
# print '\n'.join(doc)
pprint.pprint(ma.find_tags())
@@ -3,6 +3,8 @@

from __future__ import print_function

from six import iteritems

# Pgen imports

from sphinx.pycode.pgen2 import grammar, token, tokenize
@@ -42,7 +44,7 @@ class ParserGenerator(object):
states = []
for state in dfa:
arcs = []
for label, next in state.arcs.iteritems():
for label, next in iteritems(state.arcs):
arcs.append((self.make_label(c, label), dfa.index(next)))
if state.isfinal:
arcs.append((0, dfa.index(state)))
@@ -121,7 +123,7 @@ class ParserGenerator(object):
state = dfa[0]
totalset = {}
overlapcheck = {}
for label, next in state.arcs.iteritems():
for label, next in iteritems(state.arcs):
if label in self.dfas:
if label in self.first:
fset = self.first[label]
@@ -136,7 +138,7 @@ class ParserGenerator(object):
totalset[label] = 1
overlapcheck[label] = {label: 1}
inverse = {}
for label, itsfirst in overlapcheck.iteritems():
for label, itsfirst in iteritems(overlapcheck):
for symbol in itsfirst:
if symbol in inverse:
raise ValueError("rule %s is ambiguous; %s is in the"
@@ -195,7 +197,7 @@ class ParserGenerator(object):
for label, next in nfastate.arcs:
if label is not None:
addclosure(next, arcs.setdefault(label, {}))
for label, nfaset in arcs.iteritems():
for label, nfaset in iteritems(arcs):
for st in states:
if st.nfaset == nfaset:
break
@@ -225,7 +227,7 @@ class ParserGenerator(object):
print("Dump of DFA for", name)
for i, state in enumerate(dfa):
print(" State", i, state.isfinal and "(final)" or "")
for label, next in state.arcs.iteritems():
for label, next in iteritems(state.arcs):
print(" %s -> %d" % (label, dfa.index(next)))

def simplify_dfa(self, dfa):
@@ -364,7 +366,7 @@ class DFAState(object):
self.arcs[label] = next

def unifystate(self, old, new):
for label, next in self.arcs.iteritems():
for label, next in iteritems(self.arcs):
if next is old:
self.arcs[label] = new

@@ -377,7 +379,7 @@ class DFAState(object):
# would invoke this method recursively, with cycles...
if len(self.arcs) != len(other.arcs):
return False
for label, next in self.arcs.iteritems():
for label, next in iteritems(self.arcs):
if next is not other.arcs.get(label):
return False
return True
@@ -11,6 +11,7 @@

import re

from six import iteritems
from docutils import nodes, utils
from docutils.parsers.rst import roles

@@ -34,7 +35,7 @@ generic_docroles = {
'regexp' : nodes.literal,
}

for rolename, nodeclass in generic_docroles.iteritems():
for rolename, nodeclass in iteritems(generic_docroles):
generic = roles.GenericRole(rolename, nodeclass)
role = roles.CustomRole(rolename, generic, {'classes': [rolename]})
roles.register_local_role(rolename, role)
@@ -313,5 +314,5 @@ specific_docroles = {
'index': index_role,
}

for rolename, func in specific_docroles.iteritems():
for rolename, func in iteritems(specific_docroles):
roles.register_local_role(rolename, func)
@@ -13,6 +13,7 @@ from __future__ import with_statement
import re

import six
from six import iteritems, itervalues
from six.moves import cPickle as pickle
from docutils.nodes import raw, comment, title, Text, NodeVisitor, SkipNode

@@ -250,7 +251,7 @@ class IndexBuilder(object):

def load_terms(mapping):
rv = {}
for k, v in mapping.iteritems():
for k, v in iteritems(mapping):
if isinstance(v, int):
rv[k] = set([index2fn[v]])
else:
@@ -271,7 +272,7 @@ class IndexBuilder(object):
rv = {}
otypes = self._objtypes
onames = self._objnames
for domainname, domain in self.env.domains.iteritems():
for domainname, domain in iteritems(self.env.domains):
for fullname, dispname, type, docname, anchor, prio in \
domain.get_objects():
# XXX use dispname?
@@ -305,7 +306,7 @@ class IndexBuilder(object):
def get_terms(self, fn2index):
rvs = {}, {}
for rv, mapping in zip(rvs, (self._mapping, self._title_mapping)):
for k, v in mapping.iteritems():
for k, v in iteritems(mapping):
if len(v) == 1:
fn, = v
if fn in fn2index:
@@ -323,7 +324,7 @@ class IndexBuilder(object):

objects = self.get_objects(fn2index) # populates _objtypes
objtypes = dict((v, k[0] + ':' + k[1])
for (k, v) in self._objtypes.iteritems())
for (k, v) in iteritems(self._objtypes))
objnames = self._objnames
return dict(filenames=filenames, titles=titles, terms=terms,
objects=objects, objtypes=objtypes, objnames=objnames,
@@ -339,9 +340,9 @@ class IndexBuilder(object):
if filename in self._titles:
new_titles[filename] = self._titles[filename]
self._titles = new_titles
for wordnames in self._mapping.itervalues():
for wordnames in itervalues(self._mapping):
wordnames.intersection_update(filenames)
for wordnames in self._title_mapping.itervalues():
for wordnames in itervalues(self._title_mapping):
wordnames.intersection_update(filenames)

def feed(self, filename, title, doctree):
@@ -21,6 +21,8 @@ import os
import re
import sys

from six import iteritems

try:
import MeCab
native_module = True
@@ -91,14 +93,14 @@ class MecabBinder(object):

class TinySegmenter(object):
patterns_ = dict([(re.compile(pattern), value) for pattern, value in {
patterns_ = dict([(re.compile(pattern), value) for pattern, value in iteritems({
u'[一二三四五六七八九十百千万億兆]': u'M',
u'[一-龠々〆ヵヶ]': u'H',
u'[ぁ-ん]': u'I',
u'[ァ-ヴーア-ン゙ー]': u'K',
u'[a-zA-Za-zA-Z]': u'A',
u'[0-90-9]': u'N',
}.iteritems()])
})])
BIAS__ = -332
BC1__ = {u'HH':6,u'II':2461,u'KH':406,u'OH':-1378}
BC2__ = {u'AA':-3267,u'AI':2744,u'AN':-878,u'HH':-4070,u'HM':-1711,u'HN':4012,u'HO':3761,u'IA':1327,u'IH':-1184,u'II':-1332,u'IK':1721,u'IO':5492,u'KI':3831,u'KK':-8741,u'MH':-3132,u'MK':3334,u'OO':-2920}
@@ -145,7 +147,7 @@ class TinySegmenter(object):

# ctype_
def ctype_(self, char):
for pattern, value in self.patterns_.iteritems():
for pattern, value in iteritems(self.patterns_):
if pattern.match(char):
return value
return u'O'
@@ -16,6 +16,7 @@ import tempfile
from os import path

import six
from six import iteritems
from six.moves import configparser

try:
@@ -145,7 +146,7 @@ class Theme(object):
options.update(conf.items('options'))
except configparser.NoSectionError:
pass
for option, value in overrides.iteritems():
for option, value in iteritems(overrides):
if option not in options:
raise ThemeError('unsupported theme option %r given' % option)
options[option] = value
@@ -22,6 +22,7 @@ from codecs import open, BOM_UTF8
from collections import deque

import six
from six import iteritems
from six.moves import range
import docutils
from docutils.utils import relative_path
@@ -190,7 +191,7 @@ def save_traceback(app):
docutils.__version__, docutils.__version_details__,
jinja2.__version__)).encode('utf-8'))
if app is not None:
for extname, extmod in app._extensions.iteritems():
for extname, extmod in iteritems(app._extensions):
os.write(fd, ('# %s from %s\n' % (
extname, getattr(extmod, '__file__', 'unknown'))
).encode('utf-8'))
@@ -13,6 +13,7 @@
import re

import six
from six import iteritems

from sphinx.util.pycompat import u

@@ -92,7 +93,7 @@ def dumps(obj, key=False):
return '{%s}' % ','.join('%s:%s' % (
dumps(key, True),
dumps(value)
) for key, value in obj.iteritems())
) for key, value in iteritems(obj))
elif isinstance(obj, (tuple, list, set)):
return '[%s]' % ','.join(dumps(x) for x in obj)
elif isinstance(obj, six.string_types):
@@ -13,6 +13,7 @@ from uuid import uuid4
from operator import itemgetter
from itertools import product

from six import iteritems
from six.moves import range, zip_longest

@@ -81,7 +82,7 @@ def merge_doctrees(old, new, condition):
# choose the old node with the best ratio for each new node and set the uid
# as long as the ratio is under a certain value, in which case we consider
# them not changed but different
ratios = sorted(ratios.iteritems(), key=itemgetter(1))
ratios = sorted(iteritems(ratios), key=itemgetter(1))
for (old_node, new_node), ratio in ratios:
if new_node in seen:
continue
@@ -16,6 +16,7 @@ import re
import sys
from os import path

from six import itervalues
from docutils import nodes, writers
from docutils.writers.latex2e import Babel

@@ -335,7 +336,7 @@ class LaTeXTranslator(nodes.NodeVisitor):
# latex_domain_indices can be False/True or a list of index names
indices_config = self.builder.config.latex_domain_indices
if indices_config:
for domain in self.builder.env.domains.itervalues():
for domain in itervalues(self.builder.env.domains):
for indexcls in domain.indices:
indexname = '%s-%s' % (domain.name, indexcls.name)
if isinstance(indices_config, list):
@@ -13,6 +13,7 @@ import re
import textwrap
from os import path

from six import itervalues
from six.moves import range
from docutils import nodes, writers

@@ -457,7 +458,7 @@ class TexinfoTranslator(nodes.NodeVisitor):

indices_config = self.builder.config.texinfo_domain_indices
if indices_config:
for domain in self.builder.env.domains.itervalues():
for domain in itervalues(self.builder.env.domains):
for indexcls in domain.indices:
indexname = '%s-%s' % (domain.name, indexcls.name)
if isinstance(indices_config, list):
@@ -10,6 +10,8 @@
"""
import sys

from six import iteritems

from sphinx.ext.autosummary import mangle_signature

from util import with_app, test_roots
@@ -97,6 +99,6 @@ def test_get_items_summary():
'noSentence': "this doesn't start with a",
'emptyLine': "This is the real summary",
}
for key, expected in expected_values.iteritems():
for key, expected in iteritems(expected_values):
assert autosummary_items[key][2] == expected, 'Summary for %s was %r -'\
' expected %r' % (key, autosummary_items[key], expected)
@@ -14,6 +14,7 @@ import re
import htmlentitydefs

import six
from six import iteritems

try:
import pygments
@@ -344,7 +345,7 @@ def test_html(app):
'--- Expected (regex):\n' + html_warnings_exp + \
'--- Got:\n' + html_warnings

for fname, paths in HTML_XPATH.iteritems():
for fname, paths in iteritems(HTML_XPATH):
parser = NslessParser()
parser.entity.update(htmlentitydefs.entitydefs)
fp = open(os.path.join(app.outdir, fname), 'rb')