Fixed issue #1

This commit is contained in:
Daniel Neuhäuser 2010-05-01 19:17:52 +02:00
parent 47557af776
commit f6bf9b13ff
3 changed files with 7 additions and 3 deletions

View File

@@ -1488,8 +1488,9 @@ class BuildEnvironment:
i += 1
# group the entries by letter
def keyfunc2((k, v), letters=string.ascii_uppercase + '_'):
def keyfunc2(item, letters=string.ascii_uppercase + '_'):
# hack: mutating the subitems dicts to a list in the keyfunc
k, v = item
v[1] = sorted((si, se) for (si, (se, void)) in v[1].iteritems())
# now calculate the key
letter = k[0].upper()

View File

@@ -143,7 +143,9 @@ class TokenError(Exception): pass
class StopTokenizing(Exception): pass
def printtoken(type, token, (srow, scol), (erow, ecol), line): # for testing
def printtoken(type, token, scell, ecell, line): # for testing
srow, scol = scell
erow, ecol = ecell
print "%d,%d-%d,%d:\t%s\t%s" % \
(srow, scol, erow, ecol, tok_name[type], repr(token))

View File

@@ -244,12 +244,13 @@ class Reindenter:
return line
# Line-eater for tokenize.
def tokeneater(self, type, token, (sline, scol), end, line,
def tokeneater(self, type, token, scell, end, line,
INDENT=tokenize.INDENT,
DEDENT=tokenize.DEDENT,
NEWLINE=tokenize.NEWLINE,
COMMENT=tokenize.COMMENT,
NL=tokenize.NL):
sline, scol = scell
if type == NEWLINE:
# A program statement, or ENDMARKER, will eventually follow,