From f6bf9b13ff40ae8dfbc9afe19db0da3fcbac8f93 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20Neuh=C3=A4user?=
Date: Sat, 1 May 2010 19:17:52 +0200
Subject: [PATCH] Fixed issue #1

---
 sphinx/environment.py           | 3 ++-
 sphinx/pycode/pgen2/tokenize.py | 4 +++-
 utils/reindent.py               | 3 ++-
 3 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/sphinx/environment.py b/sphinx/environment.py
index 5edcb4d90..fa8460cb3 100644
--- a/sphinx/environment.py
+++ b/sphinx/environment.py
@@ -1488,8 +1488,9 @@ class BuildEnvironment:
             i += 1
 
         # group the entries by letter
-        def keyfunc2((k, v), letters=string.ascii_uppercase + '_'):
+        def keyfunc2(item, letters=string.ascii_uppercase + '_'):
             # hack: mutating the subitems dicts to a list in the keyfunc
+            k, v = item
             v[1] = sorted((si, se) for (si, (se, void)) in v[1].iteritems())
             # now calculate the key
             letter = k[0].upper()
diff --git a/sphinx/pycode/pgen2/tokenize.py b/sphinx/pycode/pgen2/tokenize.py
index 4489db898..7ad9f012c 100644
--- a/sphinx/pycode/pgen2/tokenize.py
+++ b/sphinx/pycode/pgen2/tokenize.py
@@ -143,7 +143,9 @@ class TokenError(Exception): pass
 
 class StopTokenizing(Exception): pass
 
-def printtoken(type, token, (srow, scol), (erow, ecol), line): # for testing
+def printtoken(type, token, scell, ecell, line): # for testing
+    srow, scol = scell
+    erow, ecol = ecell
     print "%d,%d-%d,%d:\t%s\t%s" % \
         (srow, scol, erow, ecol, tok_name[type], repr(token))
 
diff --git a/utils/reindent.py b/utils/reindent.py
index c499f671e..bcb6b4343 100755
--- a/utils/reindent.py
+++ b/utils/reindent.py
@@ -244,12 +244,13 @@ class Reindenter:
         return line
 
     # Line-eater for tokenize.
-    def tokeneater(self, type, token, (sline, scol), end, line,
+    def tokeneater(self, type, token, scell, end, line,
                    INDENT=tokenize.INDENT, DEDENT=tokenize.DEDENT,
                    NEWLINE=tokenize.NEWLINE, COMMENT=tokenize.COMMENT,
                    NL=tokenize.NL):
+        sline, scol = scell
 
         if type == NEWLINE:
             # A program statement, or ENDMARKER, will eventually follow,
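
All three hunks apply the same mechanical change: tuple unpacking is moved out of the parameter list and into an explicit assignment in the function body. Tuple parameters were removed in Python 3 (PEP 3113), so this is presumably a forward-compatibility fix. A minimal, hypothetical sketch of the pattern (not code from the patch):

    # Python 2 only -- tuple unpacking in the parameter list (removed by PEP 3113):
    #     def area((width, height)):
    #         return width * height

    # Portable form -- accept a single parameter and unpack it in the body:
    def area(size):
        width, height = size
        return width * height

    print(area((3, 4)))  # prints 12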