""" Test script for the unicodedata module.

    Written by Marc-Andre Lemburg (mal@lemburg.com).

    (c) Copyright CNRI, All Rights Reserved. NO WARRANTY.

"""

import sys
import unittest
import hashlib
import subprocess
import test.test_support

encoding = 'utf-8'


### Run tests

class UnicodeMethodsTest(unittest.TestCase):

    # update this, if the database changes
    expectedchecksum = '4504dffd035baea02c5b9de82bebc3d65e0e0baf'

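    # Sanity check: hash the results of the unicode string methods for every
    # BMP code point and compare against the checksum recorded above.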
    def test_method_checksum(self):
        h = hashlib.sha1()
        for i in range(0x10000):
            char = unichr(i)
            data = [
                # Predicates (single char)
                u"01"[char.isalnum()],
                u"01"[char.isalpha()],
                u"01"[char.isdecimal()],
                u"01"[char.isdigit()],
                u"01"[char.islower()],
                u"01"[char.isnumeric()],
                u"01"[char.isspace()],
                u"01"[char.istitle()],
                u"01"[char.isupper()],

                # Predicates (multiple chars)
                u"01"[(char + u'abc').isalnum()],
                u"01"[(char + u'abc').isalpha()],
                u"01"[(char + u'123').isdecimal()],
                u"01"[(char + u'123').isdigit()],
                u"01"[(char + u'abc').islower()],
                u"01"[(char + u'123').isnumeric()],
                u"01"[(char + u' \t').isspace()],
                u"01"[(char + u'abc').istitle()],
                u"01"[(char + u'ABC').isupper()],

                # Mappings (single char)
                char.lower(),
                char.upper(),
                char.title(),

                # Mappings (multiple chars)
                (char + u'abc').lower(),
                (char + u'ABC').upper(),
                (char + u'abc').title(),
                (char + u'ABC').title(),
            ]
            h.update(u''.join(data).encode(encoding))
        result = h.hexdigest()
        self.assertEqual(result, self.expectedchecksum)

class UnicodeDatabaseTest(unittest.TestCase):

    def setUp(self):
        # In case unicodedata is not available, this will raise an ImportError,
        # but the other test cases will still be run
        import unicodedata
        self.db = unicodedata

    def tearDown(self):
        del self.db

class UnicodeFunctionsTest(UnicodeDatabaseTest):

    # update this, if the database changes
    expectedchecksum = '6ccf1b1a36460d2694f9b0b0f0324942fe70ede6'

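    # Same idea as UnicodeMethodsTest.test_method_checksum: hash the
    # unicodedata properties of every BMP code point and compare against
    # the checksum recorded above.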
    def test_function_checksum(self):
        data = []
        h = hashlib.sha1()

        for i in range(0x10000):
            char = unichr(i)
            data = [
                # Properties
                str(self.db.digit(char, -1)),
                str(self.db.numeric(char, -1)),
                str(self.db.decimal(char, -1)),
                self.db.category(char),
                self.db.bidirectional(char),
                self.db.decomposition(char),
                str(self.db.mirrored(char)),
                str(self.db.combining(char)),
            ]
            h.update(''.join(data))
        result = h.hexdigest()
        self.assertEqual(result, self.expectedchecksum)

    def test_digit(self):
        self.assertEqual(self.db.digit(u'A', None), None)
        self.assertEqual(self.db.digit(u'9'), 9)
        self.assertEqual(self.db.digit(u'\u215b', None), None)
        self.assertEqual(self.db.digit(u'\u2468'), 9)
        self.assertEqual(self.db.digit(u'\U00020000', None), None)

        self.assertRaises(TypeError, self.db.digit)
        self.assertRaises(TypeError, self.db.digit, u'xx')
        self.assertRaises(ValueError, self.db.digit, u'x')

    def test_numeric(self):
        self.assertEqual(self.db.numeric(u'A', None), None)
        self.assertEqual(self.db.numeric(u'9'), 9)
        self.assertEqual(self.db.numeric(u'\u215b'), 0.125)
        self.assertEqual(self.db.numeric(u'\u2468'), 9.0)
        self.assertEqual(self.db.numeric(u'\ua627'), 7.0)
        self.assertEqual(self.db.numeric(u'\U00020000', None), None)

        self.assertRaises(TypeError, self.db.numeric)
        self.assertRaises(TypeError, self.db.numeric, u'xx')
        self.assertRaises(ValueError, self.db.numeric, u'x')

    def test_decimal(self):
        self.assertEqual(self.db.decimal(u'A', None), None)
        self.assertEqual(self.db.decimal(u'9'), 9)
        self.assertEqual(self.db.decimal(u'\u215b', None), None)
        self.assertEqual(self.db.decimal(u'\u2468', None), None)
        self.assertEqual(self.db.decimal(u'\U00020000', None), None)

        self.assertRaises(TypeError, self.db.decimal)
        self.assertRaises(TypeError, self.db.decimal, u'xx')
        self.assertRaises(ValueError, self.db.decimal, u'x')

    def test_category(self):
        self.assertEqual(self.db.category(u'\uFFFE'), 'Cn')
        self.assertEqual(self.db.category(u'a'), 'Ll')
        self.assertEqual(self.db.category(u'A'), 'Lu')
        self.assertEqual(self.db.category(u'\U00020000'), 'Lo')

        self.assertRaises(TypeError, self.db.category)
        self.assertRaises(TypeError, self.db.category, u'xx')

    def test_bidirectional(self):
        self.assertEqual(self.db.bidirectional(u'\uFFFE'), '')
        self.assertEqual(self.db.bidirectional(u' '), 'WS')
        self.assertEqual(self.db.bidirectional(u'A'), 'L')
        self.assertEqual(self.db.bidirectional(u'\U00020000'), 'L')

        self.assertRaises(TypeError, self.db.bidirectional)
        self.assertRaises(TypeError, self.db.bidirectional, u'xx')

    def test_decomposition(self):
        self.assertEqual(self.db.decomposition(u'\uFFFE'), '')
        self.assertEqual(self.db.decomposition(u'\u00bc'), '<fraction> 0031 2044 0034')

        self.assertRaises(TypeError, self.db.decomposition)
        self.assertRaises(TypeError, self.db.decomposition, u'xx')

    def test_mirrored(self):
        self.assertEqual(self.db.mirrored(u'\uFFFE'), 0)
        self.assertEqual(self.db.mirrored(u'a'), 0)
        self.assertEqual(self.db.mirrored(u'\u2201'), 1)
        self.assertEqual(self.db.mirrored(u'\U00020000'), 0)

        self.assertRaises(TypeError, self.db.mirrored)
        self.assertRaises(TypeError, self.db.mirrored, u'xx')

    def test_combining(self):
        self.assertEqual(self.db.combining(u'\uFFFE'), 0)
        self.assertEqual(self.db.combining(u'a'), 0)
        self.assertEqual(self.db.combining(u'\u20e1'), 230)
        self.assertEqual(self.db.combining(u'\U00020000'), 0)

        self.assertRaises(TypeError, self.db.combining)
        self.assertRaises(TypeError, self.db.combining, u'xx')

    def test_normalize(self):
        self.assertRaises(TypeError, self.db.normalize)
        self.assertRaises(ValueError, self.db.normalize, 'unknown', u'xx')
        self.assertEqual(self.db.normalize('NFKC', u''), u'')
        # The rest can be found in test_normalization.py
        # which requires an external file.

    def test_pr29(self):
        # http://www.unicode.org/review/pr-29.html
        for text in (u"\u0b47\u0300\u0b3e", u"\u1100\u0300\u1161"):
            self.assertEqual(self.db.normalize('NFC', text), text)

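    # East Asian Width properties (UAX #11): N(eutral), Na(rrow), W(ide),
    # H(alfwidth), F(ullwidth) and A(mbiguous).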
    def test_east_asian_width(self):
        eaw = self.db.east_asian_width
        self.assertRaises(TypeError, eaw, 'a')
        self.assertRaises(TypeError, eaw, u'')
        self.assertRaises(TypeError, eaw, u'ra')
        self.assertEqual(eaw(u'\x1e'), 'N')
        self.assertEqual(eaw(u'\x20'), 'Na')
        self.assertEqual(eaw(u'\uC894'), 'W')
        self.assertEqual(eaw(u'\uFF66'), 'H')
        self.assertEqual(eaw(u'\uFF1F'), 'F')
        self.assertEqual(eaw(u'\u2010'), 'A')
        self.assertEqual(eaw(u'\U00020000'), 'W')

class UnicodeMiscTest(UnicodeDatabaseTest):

    def test_failed_import_during_compiling(self):
        # Issue 4367
        # Decoding \N escapes requires the unicodedata module. If it can't be
        # imported, we shouldn't segfault.

        # This program should raise a SyntaxError in the eval.
        code = "import sys;" \
               "sys.modules['unicodedata'] = None;" \
               """eval("u'\N{SOFT HYPHEN}'")"""
        args = [sys.executable, "-c", code]
        # We use a subprocess because the unicodedata module may already have
        # been loaded in this process.
        popen = subprocess.Popen(args, stderr=subprocess.PIPE)
        popen.wait()
        self.assertEqual(popen.returncode, 1)
        error = "SyntaxError: (unicode error) \N escapes not supported " \
                "(can't load unicodedata module)"
        self.assertIn(error, popen.stderr.read())

    def test_decimal_numeric_consistent(self):
        # Test that decimal and numeric are consistent,
        # i.e. if a character has a decimal value,
        # its numeric value should be the same.
        count = 0
        for i in xrange(0x10000):
            c = unichr(i)
            dec = self.db.decimal(c, -1)
            if dec != -1:
                self.assertEqual(dec, self.db.numeric(c))
                count += 1
        self.assertTrue(count >= 10) # should have tested at least the ASCII digits

    def test_digit_numeric_consistent(self):
        # Test that digit and numeric are consistent,
        # i.e. if a character has a digit value,
        # its numeric value should be the same.
        count = 0
        for i in xrange(0x10000):
            c = unichr(i)
            dec = self.db.digit(c, -1)
            if dec != -1:
                self.assertEqual(dec, self.db.numeric(c))
                count += 1
        self.assertTrue(count >= 10) # should have tested at least the ASCII digits

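    # lookup() must also work for character names outside the BMP (bug 1704793).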
    def test_bug_1704793(self):
        self.assertEqual(self.db.lookup("GOTHIC LETTER FAIHU"), u'\U00010346')

    def test_ucd_510(self):
        import unicodedata
        # In UCD 5.1.0, a mirrored property changed wrt. UCD 3.2.0
        self.assertTrue(unicodedata.mirrored(u"\u0f3a"))
        self.assertTrue(not unicodedata.ucd_3_2_0.mirrored(u"\u0f3a"))
        # Also, we now have two ways of representing
        # the upper-case mapping: as delta, or as absolute value
        self.assertTrue(u"a".upper() == u'A')
        self.assertTrue(u"\u1d79".upper() == u'\ua77d')
        self.assertTrue(u".".upper() == u".")

    def test_bug_5828(self):
        self.assertEqual(u"\u1d79".lower(), u"\u1d79")
        # Only U+0000 should have U+0000 as its upper/lower/titlecase variant
        self.assertEqual(
            [
                c for c in range(sys.maxunicode+1)
                if u"\x00" in unichr(c).lower()+unichr(c).upper()+unichr(c).title()
            ],
            [0]
        )

    def test_bug_4971(self):
        # LETTER DZ WITH CARON: DZ, Dz, dz
        self.assertEqual(u"\u01c4".title(), u"\u01c5")
        self.assertEqual(u"\u01c5".title(), u"\u01c5")
        self.assertEqual(u"\u01c6".title(), u"\u01c5")

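    # Exactly these code points should be treated as line breaks by
    # splitlines() (issue 7643).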
    def test_linebreak_7643(self):
        for i in range(0x10000):
            lines = (unichr(i) + u'A').splitlines()
            if i in (0x0a, 0x0b, 0x0c, 0x0d, 0x85,
                     0x1c, 0x1d, 0x1e, 0x2028, 0x2029):
                self.assertEqual(len(lines), 2,
                                 r"\u%.4x should be a linebreak" % i)
            else:
                self.assertEqual(len(lines), 1,
                                 r"\u%.4x should not be a linebreak" % i)

def test_main():
    test.test_support.run_unittest(
        UnicodeMiscTest,
        UnicodeMethodsTest,
        UnicodeFunctionsTest
    )

if __name__ == "__main__":
    test_main()