More changes needed to make things work once bytes and str are truly divorced.

This commit is contained in:
Guido van Rossum 2007-08-27 23:09:25 +00:00
parent 25a29a9534
commit e22905a06c
7 changed files with 80 additions and 75 deletions

View File

@@ -6,9 +6,9 @@
__doc__ = """hashlib module - A common interface to many hash functions.
new(name, string='') - returns a new hash object implementing the
given hash function; initializing the hash
using the given string data.
new(name, data=b'') - returns a new hash object implementing the
given hash function; initializing the hash
using the given binary data.
Named constructor functions are also available, these are much faster
than using new():
@@ -39,14 +39,14 @@ spammish repetition':
>>> import hashlib
>>> m = hashlib.md5()
>>> m.update("Nobody inspects")
>>> m.update(" the spammish repetition")
>>> m.update(b"Nobody inspects")
>>> m.update(b" the spammish repetition")
>>> m.digest()
'\xbbd\x9c\x83\xdd\x1e\xa5\xc9\xd9\xde\xc9\xa1\x8d\xf0\xff\xe9'
b'\xbbd\x9c\x83\xdd\x1e\xa5\xc9\xd9\xde\xc9\xa1\x8d\xf0\xff\xe9'
More condensed:
>>> hashlib.sha224("Nobody inspects the spammish repetition").hexdigest()
>>> hashlib.sha224(b"Nobody inspects the spammish repetition").hexdigest()
'a4337bc45a8fc544c03f52dc550cd6e1e87021bc896588bd79e901e2'
"""
@@ -77,25 +77,25 @@ def __get_builtin_constructor(name):
raise ValueError, "unsupported hash type"
def __py_new(name, string=''):
"""new(name, string='') - Return a new hashing object using the named algorithm;
optionally initialized with a string.
def __py_new(name, data=b''):
"""new(name, data='') - Return a new hashing object using the named algorithm;
optionally initialized with data (which must be bytes).
"""
return __get_builtin_constructor(name)(string)
return __get_builtin_constructor(name)(data)
def __hash_new(name, string=''):
"""new(name, string='') - Return a new hashing object using the named algorithm;
optionally initialized with a string.
def __hash_new(name, data=b''):
"""new(name, data=b'') - Return a new hashing object using the named algorithm;
optionally initialized with data (which must be bytes).
"""
try:
return _hashlib.new(name, string)
return _hashlib.new(name, data)
except ValueError:
# If the _hashlib module (OpenSSL) doesn't support the named
# hash, try using our builtin implementations.
# This allows for SHA224/256 and SHA384/512 support even though
# the OpenSSL library prior to 0.9.8 doesn't provide them.
return __get_builtin_constructor(name)(string)
return __get_builtin_constructor(name)(data)
try:

View File

@@ -21,7 +21,7 @@ class AutoFileTests(unittest.TestCase):
def testWeakRefs(self):
# verify weak references
p = proxy(self.f)
p.write('teststring')
p.write(b'teststring')
self.assertEquals(self.f.tell(), p.tell())
self.f.close()
self.f = None
@@ -36,7 +36,7 @@ class AutoFileTests(unittest.TestCase):
def testReadinto(self):
# verify readinto
self.f.write('12')
self.f.write(b'12')
self.f.close()
a = array('b', b'x'*10)
self.f = open(TESTFN, 'rb')
@@ -215,7 +215,7 @@ class OtherFileTests(unittest.TestCase):
# Test the complex interaction when mixing file-iteration and the
# various read* methods.
dataoffset = 16384
filler = "ham\n"
filler = b"ham\n"
assert not dataoffset % len(filler), \
"dataoffset must be multiple of len(filler)"
nchunks = dataoffset // len(filler)

View File

@@ -27,7 +27,8 @@ def writeTmp(i, lines, mode='w'):  # opening in text mode is the default
def remove_tempfiles(*names):
for name in names:
safe_unlink(name)
if name:
safe_unlink(name)
class BufferSizesTests(unittest.TestCase):
def test_buffer_sizes(self):
@@ -191,30 +192,33 @@ class FileInputTests(unittest.TestCase):
self.fail("FileInput should reject invalid mode argument")
except ValueError:
pass
t1 = None
try:
# try opening in universal newline mode
t1 = writeTmp(1, ["A\nB\r\nC\rD"], mode="wb")
t1 = writeTmp(1, [b"A\nB\r\nC\rD"], mode="wb")
fi = FileInput(files=t1, mode="U")
lines = list(fi)
self.assertEqual(lines, ["A\n", "B\n", "C\n", "D"])
finally:
remove_tempfiles(t1)
## def test_file_opening_hook(self):
## # XXX The rot13 codec was removed.
## # So this test needs to be changed to use something else.
## try:
## # cannot use openhook and inplace mode
## fi = FileInput(inplace=1, openhook=lambda f, m: None)
## self.fail("FileInput should raise if both inplace "
## "and openhook arguments are given")
## except ValueError:
## pass
## try:
## fi = FileInput(openhook=1)
## self.fail("FileInput should check openhook for being callable")
## except ValueError:
## pass
def test_file_opening_hook(self):
try:
# cannot use openhook and inplace mode
fi = FileInput(inplace=1, openhook=lambda f, m: None)
self.fail("FileInput should raise if both inplace "
"and openhook arguments are given")
except ValueError:
pass
try:
fi = FileInput(openhook=1)
self.fail("FileInput should check openhook for being callable")
except ValueError:
pass
# XXX The rot13 codec was removed.
# So this test needs to be changed to use something else.
# (Or perhaps the API needs to change so we can just pass
# an encoding rather than using a hook?)
## try:
## t1 = writeTmp(1, ["A\nB"], mode="wb")
## fi = FileInput(files=t1, openhook=hook_encoded("rot13"))

View File

@@ -41,9 +41,9 @@ class HashLibTestCase(unittest.TestCase):
def test_large_update(self):
aas = 'a' * 128
bees = 'b' * 127
cees = 'c' * 126
aas = b'a' * 128
bees = b'b' * 127
cees = b'c' * 126
for name in self.supported_hash_names:
m1 = hashlib.new(name)
@@ -104,83 +104,83 @@ class HashLibTestCase(unittest.TestCase):
# http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf
def test_case_sha224_0(self):
self.check('sha224', "",
self.check('sha224', b"",
"d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f")
def test_case_sha224_1(self):
self.check('sha224', "abc",
self.check('sha224', b"abc",
"23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7")
def test_case_sha224_2(self):
self.check('sha224',
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
b"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
"75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525")
def test_case_sha224_3(self):
self.check('sha224', "a" * 1000000,
self.check('sha224', b"a" * 1000000,
"20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67")
def test_case_sha256_0(self):
self.check('sha256', "",
self.check('sha256', b"",
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
def test_case_sha256_1(self):
self.check('sha256', "abc",
self.check('sha256', b"abc",
"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad")
def test_case_sha256_2(self):
self.check('sha256',
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
b"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
"248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1")
def test_case_sha256_3(self):
self.check('sha256', "a" * 1000000,
self.check('sha256', b"a" * 1000000,
"cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0")
def test_case_sha384_0(self):
self.check('sha384', "",
self.check('sha384', b"",
"38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da"+
"274edebfe76f65fbd51ad2f14898b95b")
def test_case_sha384_1(self):
self.check('sha384', "abc",
self.check('sha384', b"abc",
"cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed"+
"8086072ba1e7cc2358baeca134c825a7")
def test_case_sha384_2(self):
self.check('sha384',
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+
"hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
b"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+
b"hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
"09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712"+
"fcc7c71a557e2db966c3e9fa91746039")
def test_case_sha384_3(self):
self.check('sha384', "a" * 1000000,
self.check('sha384', b"a" * 1000000,
"9d0e1809716474cb086e834e310a4a1ced149e9c00f248527972cec5704c2a5b"+
"07b8b3dc38ecc4ebae97ddd87f3d8985")
def test_case_sha512_0(self):
self.check('sha512', "",
self.check('sha512', b"",
"cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce"+
"47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e")
def test_case_sha512_1(self):
self.check('sha512', "abc",
self.check('sha512', b"abc",
"ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a"+
"2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f")
def test_case_sha512_2(self):
self.check('sha512',
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+
"hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
b"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+
b"hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
"8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018"+
"501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909")
def test_case_sha512_3(self):
self.check('sha512', "a" * 1000000,
self.check('sha512', b"a" * 1000000,
"e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+
"de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b")

View File

@@ -39,7 +39,7 @@ else:
# 2**31 == 2147483648
f.seek(2147483649)
# Seeking is not enough of a test: you must write and flush, too!
f.write("x")
f.write(b"x")
f.flush()
except (IOError, OverflowError):
f.close()
@@ -70,10 +70,10 @@ if test_support.verbose:
print('create large file via seek (may be sparse file) ...')
f = open(name, 'wb')
try:
f.write('z')
f.write(b'z')
f.seek(0)
f.seek(size)
f.write('a')
f.write(b'a')
f.flush()
if test_support.verbose:
print('check file size with os.fstat')

View File

@@ -114,27 +114,28 @@ class Test_IncrementalDecoder(unittest.TestCase):
def test_dbcs_keep_buffer(self):
decoder = codecs.getincrementaldecoder('cp949')()
self.assertEqual(decoder.decode(b'\xc6\xc4\xc0'), '\ud30c')
self.assertRaises(UnicodeDecodeError, decoder.decode, '', True)
self.assertRaises(UnicodeDecodeError, decoder.decode, b'', True)
self.assertEqual(decoder.decode(b'\xcc'), '\uc774')
self.assertEqual(decoder.decode(b'\xc6\xc4\xc0'), '\ud30c')
self.assertRaises(UnicodeDecodeError, decoder.decode, '\xcc\xbd', True)
self.assertRaises(UnicodeDecodeError, decoder.decode,
b'\xcc\xbd', True)
self.assertEqual(decoder.decode(b'\xcc'), '\uc774')
def test_iso2022(self):
decoder = codecs.getincrementaldecoder('iso2022-jp')()
ESC = '\x1b'
self.assertEqual(decoder.decode(ESC + '('), '')
self.assertEqual(decoder.decode('B', True), '')
self.assertEqual(decoder.decode(ESC + '$'), '')
self.assertEqual(decoder.decode('B@$'), '\u4e16')
self.assertEqual(decoder.decode('@$@'), '\u4e16')
self.assertEqual(decoder.decode('$', True), '\u4e16')
ESC = b'\x1b'
self.assertEqual(decoder.decode(ESC + b'('), '')
self.assertEqual(decoder.decode(b'B', True), '')
self.assertEqual(decoder.decode(ESC + b'$'), '')
self.assertEqual(decoder.decode(b'B@$'), '\u4e16')
self.assertEqual(decoder.decode(b'@$@'), '\u4e16')
self.assertEqual(decoder.decode(b'$', True), '\u4e16')
self.assertEqual(decoder.reset(), None)
self.assertEqual(decoder.decode('@$'), '@$')
self.assertEqual(decoder.decode(ESC + '$'), '')
self.assertRaises(UnicodeDecodeError, decoder.decode, '', True)
self.assertEqual(decoder.decode('B@$'), '\u4e16')
self.assertEqual(decoder.decode(b'@$'), '@$')
self.assertEqual(decoder.decode(ESC + b'$'), '')
self.assertRaises(UnicodeDecodeError, decoder.decode, b'', True)
self.assertEqual(decoder.decode(b'B@$'), '\u4e16')
class Test_StreamReader(unittest.TestCase):
def test_bug1728403(self):
@@ -213,7 +214,7 @@ class Test_StreamWriter(unittest.TestCase):
class Test_ISO2022(unittest.TestCase):
def test_g2(self):
iso2022jp2 = '\x1b(B:hu4:unit\x1b.A\x1bNi de famille'
iso2022jp2 = b'\x1b(B:hu4:unit\x1b.A\x1bNi de famille'
uni = ':hu4:unit\xe9 de famille'
self.assertEqual(iso2022jp2.decode('iso2022-jp-2'), uni)

View File

@@ -1147,7 +1147,7 @@ getsockaddrarg(PySocketSockObject *s, PyObject *args,
struct sockaddr_un* addr;
char *path;
int len;
if (!PyArg_Parse(args, "t#", &path, &len))
if (!PyArg_Parse(args, "s#", &path, &len))
return 0;
addr = (struct sockaddr_un*)addr_ret;