Remove py3k deprecation warnings from these Unicode tools.

Author: Florent Xicluna
Date:   2010-03-15 14:00:58 +00:00
parent 358e7ff36b
commit dc36472472
3 changed files with 27 additions and 42 deletions

Tools/unicode/gencodec.py

@@ -40,8 +40,7 @@ mapRE = re.compile('((?:0x[0-9a-fA-F]+\+?)+)'
'\s*'
'(#.+)?')
def parsecodes(codes,
len=len, filter=filter,range=range):
def parsecodes(codes, len=len, range=range):
""" Converts code combinations to either a single code integer
or a tuple of integers.
@@ -62,7 +61,7 @@ def parsecodes(codes,
l[i] = int(l[i],16)
except ValueError:
l[i] = None
l = filter(lambda x: x is not None, l)
l = [x for x in l if x is not None]
if len(l) == 1:
return l[0]
else:
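
In Python 3 (and under python -3 warnings) filter() returns a lazy iterator rather than a list, so code that later calls len() on the result or indexes it needs a list comprehension instead; the filter=filter default argument, presumably kept only as a local-lookup micro-optimization, becomes unnecessary once the call is gone. A minimal sketch of the pattern, not taken from the tool itself:

    # Illustrative sketch: dropping failed parses from a list.
    # filter() returns a lazy iterator in Python 3, so a comprehension is used
    # wherever a real list (len(), indexing) is needed afterwards.
    raw = ['0x41', 'junk', '0x42']
    parsed = [int(s, 16) if s.startswith('0x') else None for s in raw]
    cleaned = [x for x in parsed if x is not None]
    assert cleaned == [0x41, 0x42] and len(cleaned) == 2
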
@@ -138,7 +137,7 @@ def python_mapdef_code(varname, map, comments=1, precisions=(2, 4)):
l = []
append = l.append
if map.has_key("IDENTITY"):
if "IDENTITY" in map:
append("%s = codecs.make_identity_dict(range(%d))" %
(varname, map["IDENTITY"]))
append("%s.update({" % varname)
@@ -150,8 +149,7 @@ def python_mapdef_code(varname, map, comments=1, precisions=(2, 4)):
splits = 0
identity = 0
mappings = map.items()
mappings.sort()
mappings = sorted(map.items())
i = 0
key_precision, value_precision = precisions
for mapkey, mapvalue in mappings:
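
In Python 3 dict.items() returns a view with no .sort() method, and sorted() already accepts any iterable in both versions, so the two-step sort-in-place collapses into one call. A minimal sketch with a made-up map:

    # Illustrative sketch: build a sorted list of (key, value) pairs directly
    # instead of calling .sort() on the result of dict.items().
    charmap = {0x42: 0x0411, 0x41: 0x0410}
    mappings = sorted(charmap.items())
    assert mappings == [(0x41, 0x0410), (0x42, 0x0411)]
    for mapkey, mapvalue in mappings:
        pass   # emit one mapping entry per pair, keys in ascending order
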
@@ -199,11 +197,10 @@ def python_tabledef_code(varname, map, comments=1, key_precision=2):
append('%s = (' % varname)
# Analyze map and create table dict
mappings = map.items()
mappings.sort()
mappings = sorted(map.items())
table = {}
maxkey = 0
if map.has_key('IDENTITY'):
if 'IDENTITY' in map:
for key in range(256):
table[key] = (key, '')
maxkey = 255
@@ -421,6 +418,6 @@ if __name__ == '__main__':
import sys
if 1:
apply(convertdir,tuple(sys.argv[1:]))
convertdir(*sys.argv[1:])
else:
apply(rewritepythondir,tuple(sys.argv[1:]))
rewritepythondir(*sys.argv[1:])
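
The apply() builtin is gone in Python 3; unpacking the argument tuple with * is the direct replacement. A minimal sketch, with a hypothetical convert() standing in for the real function:

    # Illustrative sketch: star-unpacking replaces apply(func, args).
    def convert(dirname, prefix=''):     # hypothetical signature
        return dirname, prefix

    args = ('MAPPINGS/VENDORS/MISC',)
    assert convert(*args) == ('MAPPINGS/VENDORS/MISC', '')   # same as apply(convert, args)
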

Tools/unicode/makeunicodedata.py

@@ -156,8 +156,7 @@ def makeunicodedata(unicode, trace):
prefix = i
assert prefix < 256
# content
decomp = [prefix + (len(decomp)<<8)] +\
map(lambda s: int(s, 16), decomp)
decomp = [prefix + (len(decomp)<<8)] + [int(s, 16) for s in decomp]
# Collect NFC pairs
if not prefix and len(decomp) == 3 and \
char not in unicode.exclusions and \
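
Here map() would return an iterator in Python 3, and concatenating an iterator to a list with + raises TypeError, so the lambda is rewritten as a list comprehension. A minimal sketch of the same shape with made-up data:

    # Illustrative sketch: list + list works everywhere; list + map object does not in 3.x.
    decomp = ['0041', '0301']
    prefix = 0
    record = [prefix + (len(decomp) << 8)] + [int(s, 16) for s in decomp]
    assert record == [0x0200, 0x0041, 0x0301]
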
@@ -459,8 +458,7 @@ def makeunicodetype(unicode, trace):
Array("index2", index2).dump(fp, trace)
# Generate code for _PyUnicode_ToNumeric()
numeric_items = numeric.items()
numeric_items.sort()
numeric_items = sorted(numeric.items())
print >>fp, '/* Returns the numeric value as double for Unicode characters'
print >>fp, ' * having this property, -1.0 otherwise.'
print >>fp, ' */'
@@ -506,8 +504,7 @@ def makeunicodetype(unicode, trace):
haswide = False
hasnonewide = False
spaces.sort()
for codepoint in spaces:
for codepoint in sorted(spaces):
if codepoint < 0x10000:
hasnonewide = True
if codepoint >= 0x10000 and not haswide:
@@ -535,8 +532,7 @@ def makeunicodetype(unicode, trace):
print >>fp, ' switch (ch) {'
haswide = False
hasnonewide = False
linebreaks.sort()
for codepoint in linebreaks:
for codepoint in sorted(linebreaks):
if codepoint < 0x10000:
hasnonewide = True
if codepoint >= 0x10000 and not haswide:
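
Iterating over sorted(seq) yields the same ascending order as sorting in place first, but leaves the original list untouched; the same rewrite is applied to both the spaces and linebreaks loops. A minimal sketch:

    # Illustrative sketch: sorted() returns a new list, so the input keeps its order.
    linebreaks = [0x2028, 0x000A, 0x0085]
    for codepoint in sorted(linebreaks):
        pass   # emit one switch case per code point, lowest first
    assert linebreaks == [0x2028, 0x000A, 0x0085]
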
@@ -601,12 +597,10 @@ def makeunicodename(unicode, trace):
wordlist = words.items()
# sort on falling frequency, then by name
def cmpwords((aword, alist),(bword, blist)):
r = -cmp(len(alist),len(blist))
if r:
return r
return cmp(aword, bword)
wordlist.sort(cmpwords)
def word_key(a):
aword, alist = a
return -len(alist), aword
wordlist.sort(key=word_key)
# figure out how many phrasebook escapes we need
escapes = 0
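
Python 3 removes both tuple parameters in def and the cmp argument to sort(), so the comparison function is re-expressed as a key: a tuple of (-frequency, word) sorts on falling frequency with ties broken by name. A minimal sketch with made-up word data:

    # Illustrative sketch: key-based sort replacing a cmp-style comparison.
    words = {'LETTER': [1, 2, 3], 'DIGIT': [4, 5, 6], 'SIGN': [7]}
    wordlist = sorted(words.items(), key=lambda item: (-len(item[1]), item[0]))
    assert [w for w, _ in wordlist] == ['DIGIT', 'LETTER', 'SIGN']
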
@@ -630,7 +624,7 @@ def makeunicodename(unicode, trace):
# length (to maximize overlap)
wordlist, wordtail = wordlist[:short], wordlist[short:]
wordtail.sort(lambda a, b: len(b[0])-len(a[0]))
wordtail.sort(key=lambda a: a[0], reverse=True)
wordlist.extend(wordtail)
# generate lexicon from words
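
For reference, a key that reproduces the old falling-length ordering exactly sorts on the word's length with reverse=True; the new line sorts on the word itself in reverse instead. A minimal sketch of the length-based form:

    # Illustrative sketch: descending-length sort expressed with key= and reverse=.
    wordtail = [('AB', []), ('LONGWORD', []), ('XYZ', [])]
    wordtail.sort(key=lambda a: len(a[0]), reverse=True)
    assert [w for w, _ in wordtail] == ['LONGWORD', 'XYZ', 'AB']
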

Tools/unicode/mkstringprep.py

@@ -1,7 +1,7 @@
import re, unicodedata, sys
if sys.maxunicode == 65535:
raise RuntimeError, "need UCS-4 Python"
raise RuntimeError("need UCS-4 Python")
def gen_category(cats):
for i in range(0, 0x110000):
@@ -63,14 +63,14 @@ for l in data:
if m:
if m.group(1) == "Start":
if curname:
raise "Double Start",(curname, l)
raise RuntimeError("Double Start", (curname, l))
curname = m.group(2)
table = {}
tables.append((curname, table))
continue
else:
if not curname:
raise "End without start", l
raise RuntimeError("End without start", l)
curname = None
continue
if not curname:
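
String exceptions are no longer allowed (removed in Python 2.6), and the "raise Class, value" syntax is gone in Python 3; raising an instantiated exception, RuntimeError here, works everywhere and keeps the extra context in args. A minimal sketch with hypothetical values:

    # Illustrative sketch: raise an exception instance instead of a bare string
    # or the old "raise Class, value" form.
    def start_table(curname, line):
        if curname:
            raise RuntimeError("Double Start", (curname, line))

    try:
        start_table("B.1", "----- Start Table C.1.1 -----")
    except RuntimeError as exc:
        assert exc.args == ("Double Start", ("B.1", "----- Start Table C.1.1 -----"))
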
@@ -87,7 +87,7 @@ for l in data:
try:
start, end = fields
except ValueError:
raise "Unpacking problem", l
raise RuntimeError("Unpacking problem", l)
else:
start = end = fields[0]
start = int(start, 16)
@@ -146,8 +146,7 @@ def in_table_a1(code):
name, table = tables[0]
del tables[0]
assert name == "B.1"
table = table.keys()
table.sort()
table = sorted(table.keys())
print """
b1_set = """ + compact_set(table) + """
def in_table_b1(code):
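
dict.keys() returns a view in Python 3, which has no .sort(); sorted() accepts the view, and also the dict itself, since iterating a dict yields its keys. A minimal sketch with made-up code points:

    # Illustrative sketch: sorted(table.keys()) and sorted(table) give the same list.
    table = {0x00AD: "", 0x200B: "", 0x034F: ""}
    assert sorted(table.keys()) == sorted(table) == [0x00AD, 0x034F, 0x200B]
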
@@ -177,8 +176,7 @@ for k,v in table_b2.items():
if map(ord, unichr(k).lower()) != v:
b3_exceptions[k] = u"".join(map(unichr,v))
b3 = b3_exceptions.items()
b3.sort()
b3 = sorted(b3_exceptions.items())
print """
b3_exceptions = {"""
@@ -353,8 +351,7 @@ name, table = tables[0]
del tables[0]
assert name == "C.6"
table = table.keys()
table.sort()
table = sorted(table.keys())
print """
c6_set = """ + compact_set(table) + """
@@ -367,8 +364,7 @@ name, table = tables[0]
del tables[0]
assert name == "C.7"
table = table.keys()
table.sort()
table = sorted(table.keys())
print """
c7_set = """ + compact_set(table) + """
@@ -381,8 +377,7 @@ name, table = tables[0]
del tables[0]
assert name == "C.8"
table = table.keys()
table.sort()
table = sorted(table.keys())
print """
c8_set = """ + compact_set(table) + """
@@ -395,8 +390,7 @@ name, table = tables[0]
del tables[0]
assert name == "C.9"
table = table.keys()
table.sort()
table = sorted(table.keys())
print """
c9_set = """ + compact_set(table) + """