From 2d1549b35a4adea66aebd0b7d9266731dd7ece6f Mon Sep 17 00:00:00 2001
From: Takayuki Shimizukawa
Date: Wed, 30 Apr 2014 23:25:44 +0900
Subject: [PATCH] wrap py3 iterators with list() in each place that expects a
 list object. refs #1350.

---
 sphinx/builders/gettext.py         |  2 +-
 sphinx/builders/latex.py           |  2 +-
 sphinx/builders/linkcheck.py       |  2 +-
 sphinx/builders/texinfo.py         |  2 +-
 sphinx/domains/cpp.py              |  2 +-
 sphinx/environment.py              |  2 +-
 sphinx/ext/autosummary/generate.py |  5 +----
 sphinx/ext/coverage.py             |  2 +-
 sphinx/ext/inheritance_diagram.py  |  2 +-
 sphinx/jinja2glue.py               |  2 +-
 sphinx/pycode/pgen2/pgen.py        |  2 +-
 sphinx/pycode/pgen2/tokenize.py    |  5 +++--
 sphinx/search/__init__.py          |  4 ++--
 sphinx/util/__init__.py            |  2 +-
 sphinx/writers/latex.py            | 10 +++++-----
 sphinx/writers/text.py             |  2 +-
 tests/coverage.py                  | 10 +++++-----
 tests/etree13/ElementTree.py       |  8 ++++----
 tests/path.py                      |  2 +-
 tests/test_autosummary.py          |  2 +-
 tests/test_intl.py                 |  2 +-
 21 files changed, 35 insertions(+), 37 deletions(-)

diff --git a/sphinx/builders/gettext.py b/sphinx/builders/gettext.py
index f36d0202c..aeb98aaab 100644
--- a/sphinx/builders/gettext.py
+++ b/sphinx/builders/gettext.py
@@ -110,7 +110,7 @@ class I18nBuilder(Builder):
         for node, entries in traverse_translatable_index(doctree):
             for typ, msg, tid, main in entries:
                 for m in split_index_msg(typ, msg):
-                    if typ == 'pair' and m in pairindextypes.values():
+                    if typ == 'pair' and m in list(pairindextypes.values()):
                         # avoid built-in translated message was incorporated
                         # in 'sphinx.util.nodes.process_index_entry'
                         continue
diff --git a/sphinx/builders/latex.py b/sphinx/builders/latex.py
index c9690ebb4..bf7991cfb 100644
--- a/sphinx/builders/latex.py
+++ b/sphinx/builders/latex.py
@@ -57,7 +57,7 @@ class LaTeXBuilder(Builder):
         return self.get_target_uri(to, typ)
 
     def init_document_data(self):
-        preliminary_document_data = map(list, self.config.latex_documents)
+        preliminary_document_data = [list(x) for x in self.config.latex_documents]
         if not preliminary_document_data:
             self.warn('no "latex_documents" config value found; no documents '
                       'will be written')
diff --git a/sphinx/builders/linkcheck.py b/sphinx/builders/linkcheck.py
index 484bd95e3..4d8970efd 100644
--- a/sphinx/builders/linkcheck.py
+++ b/sphinx/builders/linkcheck.py
@@ -89,7 +89,7 @@ class CheckExternalLinksBuilder(Builder):
     name = 'linkcheck'
 
     def init(self):
-        self.to_ignore = map(re.compile, self.app.config.linkcheck_ignore)
+        self.to_ignore = [re.compile(x) for x in self.app.config.linkcheck_ignore]
         self.good = set()
         self.broken = {}
         self.redirected = {}
diff --git a/sphinx/builders/texinfo.py b/sphinx/builders/texinfo.py
index a3152e7cb..53463f3c7 100644
--- a/sphinx/builders/texinfo.py
+++ b/sphinx/builders/texinfo.py
@@ -108,7 +108,7 @@ class TexinfoBuilder(Builder):
         return self.get_target_uri(to, typ)
 
     def init_document_data(self):
-        preliminary_document_data = map(list, self.config.texinfo_documents)
+        preliminary_document_data = [list(x) for x in self.config.texinfo_documents]
        if not preliminary_document_data:
             self.warn('no "texinfo_documents" config value found; no documents '
                       'will be written')
diff --git a/sphinx/domains/cpp.py b/sphinx/domains/cpp.py
index 11fe5ad1b..bac95f0c8 100644
--- a/sphinx/domains/cpp.py
+++ b/sphinx/domains/cpp.py
@@ -1268,7 +1268,7 @@ class CPPDomain(Domain):
     }
 
     def clear_doc(self, docname):
-        for fullname, (fn, _, _) in self.data['objects'].items():
+        for fullname, (fn, _, _) in list(self.data['objects'].items()):
             if fn == docname:
                 del self.data['objects'][fullname]
 
diff --git a/sphinx/environment.py b/sphinx/environment.py
index c251ebf1f..aded55236 100644
--- a/sphinx/environment.py
+++ b/sphinx/environment.py
@@ -1560,7 +1560,7 @@ class BuildEnvironment:
             if lckey[0:1] in lcletters:
                 return chr(127) + lckey
             return lckey
-        newlist = new.items()
+        newlist = list(new.items())
         newlist.sort(key=keyfunc)
 
         if group_entries:
diff --git a/sphinx/ext/autosummary/generate.py b/sphinx/ext/autosummary/generate.py
index 5daf95f5b..7b7c0707d 100644
--- a/sphinx/ext/autosummary/generate.py
+++ b/sphinx/ext/autosummary/generate.py
@@ -110,14 +110,11 @@ def generate_autosummary_docs(sources, output_dir=None, suffix='.rst',
     # read
     items = find_autosummary_in_files(sources)
 
-    # remove possible duplicates
-    items = dict([(item, True) for item in items]).keys()
-
     # keep track of new files
     new_files = []
 
     # write
-    for name, path, template_name in sorted(items, key=str):
+    for name, path, template_name in sorted(set(items), key=str):
         if path is None:
             # The corresponding autosummary:: directive did not have
             # a :toctree: option
diff --git a/sphinx/ext/coverage.py b/sphinx/ext/coverage.py
index 3cf0ff55c..a55da2f5b 100644
--- a/sphinx/ext/coverage.py
+++ b/sphinx/ext/coverage.py
@@ -213,7 +213,7 @@ class CoverageBuilder(Builder):
         try:
             if self.config.coverage_write_headline:
                 write_header(op, 'Undocumented Python objects', '=')
-            keys = self.py_undoc.keys()
+            keys = list(self.py_undoc.keys())
             keys.sort()
             for name in keys:
                 undoc = self.py_undoc[name]
diff --git a/sphinx/ext/inheritance_diagram.py b/sphinx/ext/inheritance_diagram.py
index 6129da0ce..04e9b0f3f 100644
--- a/sphinx/ext/inheritance_diagram.py
+++ b/sphinx/ext/inheritance_diagram.py
@@ -143,7 +143,7 @@ class InheritanceGraph(object):
         displayed node names.
         """
         all_classes = {}
-        builtins = vars(__builtin__).values()
+        builtins = list(vars(__builtin__).values())
 
         def recurse(cls):
             if not show_builtins and cls in builtins:
diff --git a/sphinx/jinja2glue.py b/sphinx/jinja2glue.py
index c939cba02..b161b4270 100644
--- a/sphinx/jinja2glue.py
+++ b/sphinx/jinja2glue.py
@@ -114,7 +114,7 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
         self.pathchain = pathchain
 
         # make the paths into loaders
-        self.loaders = map(SphinxFileSystemLoader, loaderchain)
+        self.loaders = [SphinxFileSystemLoader(x) for x in loaderchain]
 
         use_i18n = builder.app.translator is not None
         extensions = use_i18n and ['jinja2.ext.i18n'] or []
diff --git a/sphinx/pycode/pgen2/pgen.py b/sphinx/pycode/pgen2/pgen.py
index 3c37b1399..f405261ab 100644
--- a/sphinx/pycode/pgen2/pgen.py
+++ b/sphinx/pycode/pgen2/pgen.py
@@ -335,7 +335,7 @@ class ParserGenerator(object):
             try:
                 msg = msg % args
             except:
-                msg = " ".join([msg] + map(str, args))
+                msg = " ".join([msg] + [str(x) for x in args])
         raise SyntaxError(msg, (self.filename, self.end[0],
                                 self.end[1], self.line))
 
diff --git a/sphinx/pycode/pgen2/tokenize.py b/sphinx/pycode/pgen2/tokenize.py
index 4e94fa5c5..f516f78ba 100644
--- a/sphinx/pycode/pgen2/tokenize.py
+++ b/sphinx/pycode/pgen2/tokenize.py
@@ -97,8 +97,9 @@ ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
 PseudoExtras = group(r'\\\r?\n', Comment, Triple)
 PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
 
-tokenprog, pseudoprog, single3prog, double3prog = map(
-    re.compile, (Token, PseudoToken, Single3, Double3))
+tokenprog, pseudoprog, single3prog, double3prog = [
+    re.compile(x) for x in (Token, PseudoToken, Single3, Double3)
+]
 endprogs = {"'": re.compile(Single), '"': re.compile(Double),
             "'''": single3prog, '"""': double3prog,
             "r'''": single3prog, 'r"""': double3prog,
diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py
index fb44cff84..43e3e4048 100644
--- a/sphinx/search/__init__.py
+++ b/sphinx/search/__init__.py
@@ -316,8 +316,8 @@ class IndexBuilder(object):
 
     def freeze(self):
         """Create a usable data structure for serializing."""
-        filenames = self._titles.keys()
-        titles = self._titles.values()
+        filenames = list(self._titles.keys())
+        titles = list(self._titles.values())
         fn2index = dict((f, i) for (i, f) in enumerate(filenames))
         terms, title_terms = self.get_terms(fn2index)
 
diff --git a/sphinx/util/__init__.py b/sphinx/util/__init__.py
index cc3c9fe10..2c6731a22 100644
--- a/sphinx/util/__init__.py
+++ b/sphinx/util/__init__.py
@@ -368,7 +368,7 @@ def rpartition(s, t):
 
 def split_into(n, type, value):
     """Split an index entry into a given number of parts at semicolons."""
-    parts = map(lambda x: x.strip(), value.split(';', n-1))
+    parts = [x.strip() for x in value.split(';', n-1)]
     if sum(1 for part in parts if part) < n:
         raise ValueError('invalid %s index entry %r' % (type, value))
     return parts
diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py
index 21075d6a4..9eeedae35 100644
--- a/sphinx/writers/latex.py
+++ b/sphinx/writers/latex.py
@@ -1137,21 +1137,21 @@ class LaTeXTranslator(nodes.NodeVisitor):
                 p = scre.sub('!', self.encode(string))
                 self.body.append(r'\index{%s%s}' % (p, m))
             elif type == 'pair':
-                p1, p2 = map(self.encode, split_into(2, 'pair', string))
+                p1, p2 = [self.encode(x) for x in split_into(2, 'pair', string)]
                 self.body.append(r'\index{%s!%s%s}\index{%s!%s%s}' %
                                  (p1, p2, m, p2, p1, m))
             elif type == 'triple':
-                p1, p2, p3 = map(self.encode,
-                                 split_into(3, 'triple', string))
+                p1, p2, p3 = [self.encode(x)
+                              for x in split_into(3, 'triple', string)]
                 self.body.append(
                     r'\index{%s!%s %s%s}\index{%s!%s, %s%s}'
                     r'\index{%s!%s %s%s}' %
                     (p1, p2, p3, m, p2, p3, p1, m, p3, p1, p2, m))
             elif type == 'see':
-                p1, p2 = map(self.encode, split_into(2, 'see', string))
+                p1, p2 = [self.encode(x) for x in split_into(2, 'see', string)]
                 self.body.append(r'\index{%s|see{%s}}' % (p1, p2))
             elif type == 'seealso':
-                p1, p2 = map(self.encode, split_into(2, 'seealso', string))
+                p1, p2 = [self.encode(x) for x in split_into(2, 'seealso', string)]
                 self.body.append(r'\index{%s|see{%s}}' % (p1, p2))
             else:
                 self.builder.warn(
diff --git a/sphinx/writers/text.py b/sphinx/writers/text.py
index 62d3791fb..3f2edd145 100644
--- a/sphinx/writers/text.py
+++ b/sphinx/writers/text.py
@@ -488,7 +488,7 @@ class TextTranslator(nodes.NodeVisitor):
             for i, cell in enumerate(line):
                 par = my_wrap(cell, width=colwidths[i])
                 if par:
-                    maxwidth = max(map(column_width, par))
+                    maxwidth = max(column_width(x) for x in par)
                 else:
                     maxwidth = 0
                 realwidths[i] = max(realwidths[i], maxwidth)
diff --git a/tests/coverage.py b/tests/coverage.py
index fa65a7158..2c2ffff17 100755
--- a/tests/coverage.py
+++ b/tests/coverage.py
@@ -350,7 +350,7 @@ class coverage:
             '-o:': 'omit=',
             }
         short_opts = string.join(map(lambda o: o[1:], optmap.keys()), '')
-        long_opts = optmap.values()
+        long_opts = list(optmap.values())
         options, args = getopt.getopt(argv, short_opts, long_opts)
         for o, a in options:
             if o in optmap:
@@ -401,7 +401,7 @@ class coverage:
         if settings.get('collect'):
             self.collect()
         if not args:
-            args = self.cexecuted.keys()
+            args = list(self.cexecuted.keys())
 
         ignore_errors = settings.get('ignore-errors')
         show_missing = settings.get('show-missing')
@@ -743,9 +743,9 @@ class coverage:
         visitor = StatementFindingAstVisitor(statements, excluded, suite_spots)
         compiler.walk(ast, visitor, walker=visitor)
 
-        lines = statements.keys()
+        lines = list(statements.keys())
         lines.sort()
-        excluded_lines = excluded.keys()
+        excluded_lines = list(excluded.keys())
         excluded_lines.sort()
         return lines, excluded_lines, suite_spots
 
@@ -850,7 +850,7 @@ class coverage:
         morfs = self.filter_by_prefix(morfs, omit_prefixes)
         morfs.sort(self.morf_name_compare)
 
-        max_name = max([5,] + map(len, map(self.morf_name, morfs)))
+        max_name = max([5,] + list(map(len, map(self.morf_name, morfs))))
         fmt_name = "%%- %ds " % max_name
         fmt_err = fmt_name + "%s: %s"
         header = fmt_name % "Name" + " Stmts Exec Cover"
diff --git a/tests/etree13/ElementTree.py b/tests/etree13/ElementTree.py
index e9e9d8b5a..5342278c4 100644
--- a/tests/etree13/ElementTree.py
+++ b/tests/etree13/ElementTree.py
@@ -864,7 +864,7 @@ def _serialize_xml(write, elem, encoding, qnames, namespaces):
                 _serialize_xml(write, e, encoding, qnames, None)
         else:
             write("<" + tag)
-            items = elem.items()
+            items = list(elem.items())
             if items or namespaces:
                 items.sort() # lexical order
                 for k, v in items:
@@ -876,7 +876,7 @@ def _serialize_xml(write, elem, encoding, qnames, namespaces):
                         v = _escape_attrib(v, encoding)
                     write(" %s=\"%s\"" % (qnames[k], v))
                 if namespaces:
-                    items = namespaces.items()
+                    items = list(namespaces.items())
                     items.sort(key=lambda x: x[1]) # sort on prefix
                     for v, k in items:
                         if k:
@@ -921,7 +921,7 @@ def _serialize_html(write, elem, encoding, qnames, namespaces):
                 _serialize_html(write, e, encoding, qnames, None)
         else:
             write("<" + tag)
-            items = elem.items()
+            items = list(elem.items())
             if items or namespaces:
                 items.sort() # lexical order
                 for k, v in items:
@@ -934,7 +934,7 @@ def _serialize_html(write, elem, encoding, qnames, namespaces):
                     # FIXME: handle boolean attributes
                     write(" %s=\"%s\"" % (qnames[k], v))
                 if namespaces:
-                    items = namespaces.items()
+                    items = list(namespaces.items())
                     items.sort(key=lambda x: x[1]) # sort on prefix
                     for v, k in items:
                         if k:
diff --git a/tests/path.py b/tests/path.py
index 2a4affe77..1b703afab 100755
--- a/tests/path.py
+++ b/tests/path.py
@@ -190,7 +190,7 @@ class path(text_type):
         """
         Joins the path with the argument given and returns the result.
         """
-        return self.__class__(os.path.join(self, *map(self.__class__, args)))
+        return self.__class__(os.path.join(self, *list(map(self.__class__, args))))
 
     __div__ = __truediv__ = joinpath
 
diff --git a/tests/test_autosummary.py b/tests/test_autosummary.py
index 7504f11cb..f83e5f33d 100644
--- a/tests/test_autosummary.py
+++ b/tests/test_autosummary.py
@@ -35,7 +35,7 @@ def test_mangle_signature():
     (a=1, b=, c=3) :: ([a, b, c])
     """
 
-    TEST = [map(lambda x: x.strip(), x.split("::")) for x in TEST.split("\n")
+    TEST = [[y.strip() for y in x.split("::")] for x in TEST.split("\n")
             if '::' in x]
     for inp, outp in TEST:
         res = mangle_signature(inp).strip().replace(u"\u00a0", " ")
diff --git a/tests/test_intl.py b/tests/test_intl.py
index b4ce96449..bb54e5df9 100644
--- a/tests/test_intl.py
+++ b/tests/test_intl.py
@@ -98,7 +98,7 @@ def assert_elem(elem, texts=None, refs=None, names=None):
         _texts = elem_gettexts(elem)
         assert _texts == texts
     if refs is not None:
-        _refs = map(elem_getref, elem.findall('reference'))
+        _refs = [elem_getref(x) for x in elem.findall('reference')]
         assert _refs == refs
     if names is not None:
        _names = elem.attrib.get('names').split()
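
Note: the snippet below is not part of the patch; it is a minimal sketch,
assuming Python 3, of the behaviour the list() wrapping guards against.
On Python 2, dict.keys()/values()/items() and map() return real lists; on
Python 3 they return views and lazy iterators, so in-place sort(), a second
pass over the result, and deletion while iterating all need an explicit
list() or a list comprehension, as the hunks above use. The optmap/data
values here are made-up sample data.

    # dict views have no .sort(); copy into a list first
    optmap = {'-o:': 'omit=', '-x': 'exclude='}
    keys = optmap.keys()            # dict_keys view, not a list
    # keys.sort()                   # AttributeError on Python 3
    keys = list(optmap.keys())      # snapshot as a real list...
    keys.sort()                     # ...which sorts in place as before

    # map() is single-use: a second pass over the same object yields nothing
    stripped = map(str.strip, [' a ', ' b '])
    first = list(stripped)          # ['a', 'b']
    second = list(stripped)         # [] -- the iterator is exhausted

    # mutating a dict while iterating its live view raises RuntimeError;
    # iterating a list() snapshot, as clear_doc() now does, is safe
    data = {'a': 1, 'b': 2}
    for key, value in list(data.items()):
        if value == 1:
            del data[key]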