"""Unit tests for io.py."""
import os
import sys
import time
import array
import threading
import random
import unittest
from itertools import chain, cycle
from test import support
import codecs
import io # The module under test
class MockRawIO(io.RawIOBase):
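    """Minimal raw stream stub: read() serves byte chunks from a scripted
    list (and b"" once it is exhausted), write() records a copy of every
    buffer it is handed, and fileno()/tell() return fixed dummy values."""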
def __init__(self, read_stack=()):
self._read_stack = list(read_stack)
self._write_stack = []
def read(self, n=None):
try:
return self._read_stack.pop(0)
        except IndexError:
return b""
def write(self, b):
self._write_stack.append(b[:])
return len(b)
def writable(self):
return True
def fileno(self):
return 42
def readable(self):
return True
def seekable(self):
return True
    def seek(self, pos, whence=0):
pass
def tell(self):
return 42
class MockFileIO(io.BytesIO):
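    """BytesIO subclass that logs the length of every read() result, so the
    tests can check how the buffered layer hits the underlying raw stream."""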
def __init__(self, data):
self.read_history = []
io.BytesIO.__init__(self, data)
def read(self, n=None):
res = io.BytesIO.read(self, n)
self.read_history.append(None if res is None else len(res))
return res
class MockNonBlockWriterIO(io.RawIOBase):
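    """Raw writer stub driven by a script of return values: a non-negative
    entry means that many bytes were accepted, while a negative entry raises
    BlockingIOError reporting -entry bytes written (simulating EAGAIN)."""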
def __init__(self, blocking_script):
self._blocking_script = list(blocking_script)
self._write_stack = []
def write(self, b):
self._write_stack.append(b[:])
n = self._blocking_script.pop(0)
        if n < 0:
raise io.BlockingIOError(0, "test blocking", -n)
else:
return n
def writable(self):
return True
class IOTest(unittest.TestCase):
def setUp(self):
support.unlink(support.TESTFN)
def tearDown(self):
support.unlink(support.TESTFN)
def write_ops(self, f):
self.assertEqual(f.write(b"blah."), 5)
self.assertEqual(f.seek(0), 0)
self.assertEqual(f.write(b"Hello."), 6)
self.assertEqual(f.tell(), 6)
self.assertEqual(f.seek(-1, 1), 5)
self.assertEqual(f.tell(), 5)
self.assertEqual(f.write(bytearray(b" world\n\n\n")), 9)
self.assertEqual(f.seek(0), 0)
self.assertEqual(f.write(b"h"), 1)
self.assertEqual(f.seek(-1, 2), 13)
self.assertEqual(f.tell(), 13)
self.assertEqual(f.truncate(12), 12)
self.assertEqual(f.tell(), 12)
self.assertRaises(TypeError, f.seek, 0.0)
def read_ops(self, f, buffered=False):
data = f.read(5)
self.assertEqual(data, b"hello")
data = bytearray(data)
self.assertEqual(f.readinto(data), 5)
self.assertEqual(data, b" worl")
self.assertEqual(f.readinto(data), 2)
self.assertEqual(len(data), 5)
self.assertEqual(data[:2], b"d\n")
self.assertEqual(f.seek(0), 0)
self.assertEqual(f.read(20), b"hello world\n")
self.assertEqual(f.read(1), b"")
self.assertEqual(f.readinto(bytearray(b"x")), 0)
self.assertEqual(f.seek(-6, 2), 6)
self.assertEqual(f.read(5), b"world")
self.assertEqual(f.read(0), b"")
self.assertEqual(f.readinto(bytearray()), 0)
self.assertEqual(f.seek(-6, 1), 5)
self.assertEqual(f.read(5), b" worl")
self.assertEqual(f.tell(), 10)
self.assertRaises(TypeError, f.seek, 0.0)
if buffered:
f.seek(0)
self.assertEqual(f.read(), b"hello world\n")
f.seek(6)
self.assertEqual(f.read(), b"world\n")
self.assertEqual(f.read(), b"")
    LARGE = 2**31  # 2 GiB; seeking/writing past this exercises 64-bit file offsets
def large_file_ops(self, f):
assert f.readable()
assert f.writable()
self.assertEqual(f.seek(self.LARGE), self.LARGE)
self.assertEqual(f.tell(), self.LARGE)
self.assertEqual(f.write(b"xxx"), 3)
self.assertEqual(f.tell(), self.LARGE + 3)
self.assertEqual(f.seek(-1, 1), self.LARGE + 2)
self.assertEqual(f.truncate(), self.LARGE + 2)
self.assertEqual(f.tell(), self.LARGE + 2)
self.assertEqual(f.seek(0, 2), self.LARGE + 2)
self.assertEqual(f.truncate(self.LARGE + 1), self.LARGE + 1)
self.assertEqual(f.tell(), self.LARGE + 1)
self.assertEqual(f.seek(0, 2), self.LARGE + 1)
self.assertEqual(f.seek(-1, 2), self.LARGE)
self.assertEqual(f.read(2), b"x")
def test_raw_file_io(self):
f = io.open(support.TESTFN, "wb", buffering=0)
self.assertEqual(f.readable(), False)
self.assertEqual(f.writable(), True)
self.assertEqual(f.seekable(), True)
self.write_ops(f)
f.close()
f = io.open(support.TESTFN, "rb", buffering=0)
self.assertEqual(f.readable(), True)
self.assertEqual(f.writable(), False)
self.assertEqual(f.seekable(), True)
self.read_ops(f)
f.close()
def test_buffered_file_io(self):
f = io.open(support.TESTFN, "wb")
self.assertEqual(f.readable(), False)
self.assertEqual(f.writable(), True)
self.assertEqual(f.seekable(), True)
self.write_ops(f)
f.close()
f = io.open(support.TESTFN, "rb")
self.assertEqual(f.readable(), True)
self.assertEqual(f.writable(), False)
self.assertEqual(f.seekable(), True)
self.read_ops(f, True)
f.close()
def test_readline(self):
f = io.open(support.TESTFN, "wb")
f.write(b"abc\ndef\nxyzzy\nfoo")
f.close()
f = io.open(support.TESTFN, "rb")
self.assertEqual(f.readline(), b"abc\n")
self.assertEqual(f.readline(10), b"def\n")
self.assertEqual(f.readline(2), b"xy")
self.assertEqual(f.readline(4), b"zzy\n")
self.assertEqual(f.readline(), b"foo")
f.close()
def test_raw_bytes_io(self):
f = io.BytesIO()
self.write_ops(f)
data = f.getvalue()
self.assertEqual(data, b"hello world\n")
f = io.BytesIO(data)
self.read_ops(f, True)
def test_large_file_ops(self):
        # On Windows and Mac OS X this test consumes large resources; it takes
        # a long time to build the >2GB file and takes >2GB of disk space,
        # so the resource must be enabled to run this test.
if sys.platform[:3] == 'win' or sys.platform == 'darwin':
if not support.is_resource_enabled("largefile"):
print("\nTesting large file ops skipped on %s." % sys.platform,
file=sys.stderr)
print("It requires %d bytes and a long time." % self.LARGE,
file=sys.stderr)
print("Use 'regrtest.py -u largefile test_io' to run it.",
file=sys.stderr)
return
f = io.open(support.TESTFN, "w+b", 0)
self.large_file_ops(f)
f.close()
f = io.open(support.TESTFN, "w+b")
self.large_file_ops(f)
f.close()
def test_with_open(self):
for bufsize in (0, 1, 100):
f = None
with open(support.TESTFN, "wb", bufsize) as f:
f.write(b"xxx")
self.assertEqual(f.closed, True)
f = None
try:
with open(support.TESTFN, "wb", bufsize) as f:
1/0
except ZeroDivisionError:
self.assertEqual(f.closed, True)
else:
self.fail("1/0 didn't raise an exception")
def test_destructor(self):
record = []
class MyFileIO(io.FileIO):
def __del__(self):
record.append(1)
io.FileIO.__del__(self)
def close(self):
record.append(2)
io.FileIO.close(self)
def flush(self):
record.append(3)
io.FileIO.flush(self)
f = MyFileIO(support.TESTFN, "w")
f.write("xxx")
del f
self.assertEqual(record, [1, 2, 3])
def test_close_flushes(self):
f = io.open(support.TESTFN, "wb")
f.write(b"xxx")
f.close()
f = io.open(support.TESTFN, "rb")
self.assertEqual(f.read(), b"xxx")
f.close()
def test_array_writes(self):
a = array.array('i', range(10))
n = len(memoryview(a))
f = io.open(support.TESTFN, "wb", 0)
self.assertEqual(f.write(a), n)
f.close()
f = io.open(support.TESTFN, "wb")
self.assertEqual(f.write(a), n)
f.close()
def test_closefd(self):
self.assertRaises(ValueError, io.open, support.TESTFN, 'w',
closefd=False)
class MemorySeekTestMixin:
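    """Read/seek/tell tests shared by the BytesIO and StringIO tests.
    Concrete subclasses supply buftype() to build test data, ioclass (the
    class under test) and EOF (the empty value returned at end of stream)."""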
def testInit(self):
buf = self.buftype("1234567890")
bytesIo = self.ioclass(buf)
def testRead(self):
buf = self.buftype("1234567890")
bytesIo = self.ioclass(buf)
self.assertEquals(buf[:1], bytesIo.read(1))
self.assertEquals(buf[1:5], bytesIo.read(4))
self.assertEquals(buf[5:], bytesIo.read(900))
self.assertEquals(self.EOF, bytesIo.read())
def testReadNoArgs(self):
buf = self.buftype("1234567890")
bytesIo = self.ioclass(buf)
self.assertEquals(buf, bytesIo.read())
self.assertEquals(self.EOF, bytesIo.read())
def testSeek(self):
buf = self.buftype("1234567890")
bytesIo = self.ioclass(buf)
bytesIo.read(5)
bytesIo.seek(0)
self.assertEquals(buf, bytesIo.read())
bytesIo.seek(3)
self.assertEquals(buf[3:], bytesIo.read())
self.assertRaises(TypeError, bytesIo.seek, 0.0)
def testTell(self):
buf = self.buftype("1234567890")
bytesIo = self.ioclass(buf)
self.assertEquals(0, bytesIo.tell())
bytesIo.seek(5)
self.assertEquals(5, bytesIo.tell())
bytesIo.seek(10000)
self.assertEquals(10000, bytesIo.tell())
class BytesIOTest(MemorySeekTestMixin, unittest.TestCase):
@staticmethod
def buftype(s):
return s.encode("utf-8")
ioclass = io.BytesIO
EOF = b""
class StringIOTest(MemorySeekTestMixin, unittest.TestCase):
buftype = str
ioclass = io.StringIO
EOF = ""
class BufferedReaderTest(unittest.TestCase):
def testRead(self):
rawio = MockRawIO((b"abc", b"d", b"efg"))
bufio = io.BufferedReader(rawio)
self.assertEquals(b"abcdef", bufio.read(6))
def testBuffering(self):
data = b"abcdefghi"
dlen = len(data)
        # Each entry: [buffer_size, buffered read sizes, expected raw read sizes].
        tests = [
[ 100, [ 3, 1, 4, 8 ], [ dlen, 0 ] ],
[ 100, [ 3, 3, 3], [ dlen ] ],
[ 4, [ 1, 2, 4, 2 ], [ 4, 4, 1 ] ],
]
for bufsize, buf_read_sizes, raw_read_sizes in tests:
rawio = MockFileIO(data)
bufio = io.BufferedReader(rawio, buffer_size=bufsize)
pos = 0
for nbytes in buf_read_sizes:
self.assertEquals(bufio.read(nbytes), data[pos:pos+nbytes])
pos += nbytes
self.assertEquals(rawio.read_history, raw_read_sizes)
def testReadNonBlocking(self):
# Inject some None's in there to simulate EWOULDBLOCK
rawio = MockRawIO((b"abc", b"d", None, b"efg", None, None))
bufio = io.BufferedReader(rawio)
self.assertEquals(b"abcd", bufio.read(6))
self.assertEquals(b"e", bufio.read(1))
self.assertEquals(b"fg", bufio.read())
self.assert_(None is bufio.read())
self.assertEquals(b"", bufio.read())
def testReadToEof(self):
rawio = MockRawIO((b"abc", b"d", b"efg"))
bufio = io.BufferedReader(rawio)
self.assertEquals(b"abcdefg", bufio.read(9000))
def testReadNoArgs(self):
rawio = MockRawIO((b"abc", b"d", b"efg"))
bufio = io.BufferedReader(rawio)
self.assertEquals(b"abcdefg", bufio.read())
def testFileno(self):
rawio = MockRawIO((b"abc", b"d", b"efg"))
bufio = io.BufferedReader(rawio)
self.assertEquals(42, bufio.fileno())
def testFilenoNoFileno(self):
# XXX will we always have fileno() function? If so, kill
# this test. Else, write it.
pass
def testThreads(self):
try:
# Write out many bytes with exactly the same number of 0's,
# 1's... 255's. This will help us check that concurrent reading
# doesn't duplicate or forget contents.
N = 1000
l = list(range(256)) * N
random.shuffle(l)
s = bytes(bytearray(l))
with io.open(support.TESTFN, "wb") as f:
f.write(s)
with io.open(support.TESTFN, "rb", buffering=0) as raw:
bufio = io.BufferedReader(raw, 8)
errors = []
results = []
def f():
try:
# Intra-buffer read then buffer-flushing read
for n in cycle([1, 19]):
s = bufio.read(n)
if not s:
break
# list.append() is atomic
results.append(s)
except Exception as e:
errors.append(e)
raise
threads = [threading.Thread(target=f) for x in range(20)]
for t in threads:
t.start()
time.sleep(0.02) # yield
for t in threads:
t.join()
self.assertFalse(errors,
"the following exceptions were caught: %r" % errors)
s = b''.join(results)
for i in range(256):
c = bytes(bytearray([i]))
self.assertEqual(s.count(c), N)
finally:
support.unlink(support.TESTFN)
class BufferedWriterTest(unittest.TestCase):
def testWrite(self):
# Write to the buffered IO but don't overflow the buffer.
writer = MockRawIO()
bufio = io.BufferedWriter(writer, 8)
bufio.write(b"abc")
self.assertFalse(writer._write_stack)
def testWriteOverflow(self):
writer = MockRawIO()
bufio = io.BufferedWriter(writer, 8)
bufio.write(b"abc")
bufio.write(b"defghijkl")
self.assertEquals(b"abcdefghijkl", writer._write_stack[0])
def testWriteNonBlocking(self):
raw = MockNonBlockWriterIO((9, 2, 22, -6, 10, 12, 12))
bufio = io.BufferedWriter(raw, 8, 16)
bufio.write(b"asdf")
bufio.write(b"asdfa")
self.assertEquals(b"asdfasdfa", raw._write_stack[0])
bufio.write(b"asdfasdfasdf")
self.assertEquals(b"asdfasdfasdf", raw._write_stack[1])
bufio.write(b"asdfasdfasdf")
self.assertEquals(b"dfasdfasdf", raw._write_stack[2])
self.assertEquals(b"asdfasdfasdf", raw._write_stack[3])
bufio.write(b"asdfasdfasdf")
# XXX I don't like this test. It relies too heavily on how the
# algorithm actually works, which we might change. Refactor
# later.
def testFileno(self):
rawio = MockRawIO((b"abc", b"d", b"efg"))
bufio = io.BufferedWriter(rawio)
self.assertEquals(42, bufio.fileno())
def testFlush(self):
writer = MockRawIO()
bufio = io.BufferedWriter(writer, 8)
bufio.write(b"abc")
bufio.flush()
self.assertEquals(b"abc", writer._write_stack[0])
def testThreads(self):
# BufferedWriter should not raise exceptions or crash
# when called from multiple threads.
try:
# We use a real file object because it allows us to
# exercise situations where the GIL is released before
# writing the buffer to the raw streams. This is in addition
# to concurrency issues due to switching threads in the middle
# of Python code.
with io.open(support.TESTFN, "wb", buffering=0) as raw:
bufio = io.BufferedWriter(raw, 8)
errors = []
def f():
try:
# Write enough bytes to flush the buffer
s = b"a" * 19
for i in range(50):
bufio.write(s)
except Exception as e:
errors.append(e)
raise
threads = [threading.Thread(target=f) for x in range(20)]
for t in threads:
t.start()
time.sleep(0.02) # yield
for t in threads:
t.join()
self.assertFalse(errors,
"the following exceptions were caught: %r" % errors)
finally:
support.unlink(support.TESTFN)
class BufferedRWPairTest(unittest.TestCase):
def testRWPair(self):
r = MockRawIO(())
w = MockRawIO()
pair = io.BufferedRWPair(r, w)
# XXX need implementation
class BufferedRandomTest(unittest.TestCase):
def testReadAndWrite(self):
raw = MockRawIO((b"asdf", b"ghjk"))
rw = io.BufferedRandom(raw, 8, 12)
self.assertEqual(b"as", rw.read(2))
rw.write(b"ddd")
rw.write(b"eee")
self.assertFalse(raw._write_stack) # Buffer writes
self.assertEqual(b"ghjk", rw.read()) # This read forces write flush
self.assertEquals(b"dddeee", raw._write_stack[0])
def testSeekAndTell(self):
raw = io.BytesIO(b"asdfghjkl")
rw = io.BufferedRandom(raw)
self.assertEquals(b"as", rw.read(2))
self.assertEquals(2, rw.tell())
rw.seek(0, 0)
self.assertEquals(b"asdf", rw.read(4))
rw.write(b"asdf")
rw.seek(0, 0)
self.assertEquals(b"asdfasdfl", rw.read())
self.assertEquals(9, rw.tell())
rw.seek(-4, 2)
self.assertEquals(5, rw.tell())
rw.seek(2, 1)
self.assertEquals(7, rw.tell())
self.assertEquals(b"fl", rw.read(11))
self.assertRaises(TypeError, rw.seek, 0.0)
# To fully exercise seek/tell, the StatefulIncrementalDecoder has these
# properties:
# - A single output character can correspond to many bytes of input.
# - The number of input bytes to complete the character can be
# undetermined until the last input byte is received.
# - The number of input bytes can vary depending on previous input.
# - A single input byte can correspond to many characters of output.
# - The number of output characters can be undetermined until the
# last input byte is received.
# - The number of output characters can vary depending on previous input.
class StatefulIncrementalDecoder(codecs.IncrementalDecoder):
"""
For testing seek/tell behavior with a stateful, buffering decoder.
Input is a sequence of words. Words may be fixed-length (length set
by input) or variable-length (period-terminated). In variable-length
mode, extra periods are ignored. Possible words are:
- 'i' followed by a number sets the input length, I (maximum 99).
When I is set to 0, words are space-terminated.
- 'o' followed by a number sets the output length, O (maximum 99).
- Any other word is converted into a word followed by a period on
the output. The output word consists of the input word truncated
or padded out with hyphens to make its length equal to O. If O
is 0, the word is output verbatim without truncating or padding.
I and O are initially set to 1. When I changes, any buffered input is
re-scanned according to the new I. EOF also terminates the last word.
"""
def __init__(self, errors='strict'):
codecs.IncrementalDecoder.__init__(self, errors)
self.reset()
def __repr__(self):
return '<SID %x>' % id(self)
def reset(self):
self.i = 1
self.o = 1
self.buffer = bytearray()
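    # The state handed out by getstate() (and restored by setstate()) is the
    # (buffered_bytes, int) pair required by the IncrementalDecoder API: each
    # pending length is XOR-ed with 1 and packed as i*100 + o, so the default
    # state after reset() (I == O == 1) encodes as integer 0.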
def getstate(self):
i, o = self.i ^ 1, self.o ^ 1 # so that flags = 0 after reset()
return bytes(self.buffer), i*100 + o
def setstate(self, state):
        buffer, flags = state
        self.buffer = bytearray(buffer)
        i, o = divmod(flags, 100)
        self.i, self.o = i ^ 1, o ^ 1
def decode(self, input, final=False):
output = ''
for b in input:
if self.i == 0: # variable-length, terminated with period
if b == ord('.'):
if self.buffer:
output += self.process_word()
else:
self.buffer.append(b)
else: # fixed-length, terminate after self.i bytes
self.buffer.append(b)
if len(self.buffer) == self.i:
output += self.process_word()
if final and self.buffer: # EOF terminates the last word
output += self.process_word()
return output
def process_word(self):
output = ''
if self.buffer[0] == ord('i'):
self.i = min(99, int(self.buffer[1:] or 0)) # set input length
elif self.buffer[0] == ord('o'):
self.o = min(99, int(self.buffer[1:] or 0)) # set output length
else:
output = self.buffer.decode('ascii')
if len(output) < self.o:
output += '-'*self.o # pad out with hyphens
if self.o:
output = output[:self.o] # truncate to output length
output += '.'
self.buffer = bytearray()
return output
codecEnabled = False
@classmethod
def lookupTestDecoder(cls, name):
if cls.codecEnabled and name == 'test_decoder':
return codecs.CodecInfo(
name='test_decoder', encode=None, decode=None,
incrementalencoder=None,
streamreader=None, streamwriter=None,
incrementaldecoder=cls)
# Register the test decoder.  It is disabled by default; individual tests
# enable it via the codecEnabled flag when they need it.
codecs.register(StatefulIncrementalDecoder.lookupTestDecoder)
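# While codecEnabled is set, text streams opened with encoding='test_decoder'
# (as testSeekAndTell does below) route their decoding through
# StatefulIncrementalDecoder.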
class StatefulIncrementalDecoderTest(unittest.TestCase):
"""
Make sure the StatefulIncrementalDecoder actually works.
"""
test_cases = [
# I=1, O=1 (fixed-length input == fixed-length output)
(b'abcd', False, 'a.b.c.d.'),
# I=0, O=0 (variable-length input, variable-length output)
(b'oiabcd', True, 'abcd.'),
# I=0, O=0 (should ignore extra periods)
(b'oi...abcd...', True, 'abcd.'),
# I=0, O=6 (variable-length input, fixed-length output)
(b'i.o6.x.xyz.toolongtofit.', False, 'x-----.xyz---.toolon.'),
# I=2, O=6 (fixed-length input < fixed-length output)
(b'i.i2.o6xyz', True, 'xy----.z-----.'),
# I=6, O=3 (fixed-length input > fixed-length output)
(b'i.o3.i6.abcdefghijklmnop', True, 'abc.ghi.mno.'),
# I=0, then 3; O=29, then 15 (with longer output)
(b'i.o29.a.b.cde.o15.abcdefghijabcdefghij.i3.a.b.c.d.ei00k.l.m', True,
'a----------------------------.' +
'b----------------------------.' +
'cde--------------------------.' +
'abcdefghijabcde.' +
'a.b------------.' +
'.c.------------.' +
'd.e------------.' +
'k--------------.' +
'l--------------.' +
'm--------------.')
]
def testDecoder(self):
# Try a few one-shot test cases.
for input, eof, output in self.test_cases:
d = StatefulIncrementalDecoder()
self.assertEquals(d.decode(input, eof), output)
# Also test an unfinished decode, followed by forcing EOF.
d = StatefulIncrementalDecoder()
self.assertEquals(d.decode(b'oiabcd'), '')
self.assertEquals(d.decode(b'', 1), 'abcd.')
class TextIOWrapperTest(unittest.TestCase):
def setUp(self):
self.testdata = b"AAA\r\nBBB\rCCC\r\nDDD\nEEE\r\n"
self.normalized = b"AAA\nBBB\nCCC\nDDD\nEEE\n".decode("ascii")
support.unlink(support.TESTFN)
def tearDown(self):
support.unlink(support.TESTFN)
def testLineBuffering(self):
r = io.BytesIO()
b = io.BufferedWriter(r, 1000)
t = io.TextIOWrapper(b, newline="\n", line_buffering=True)
t.write("X")
self.assertEquals(r.getvalue(), b"") # No flush happened
t.write("Y\nZ")
self.assertEquals(r.getvalue(), b"XY\nZ") # All got flushed
t.write("A\rB")
self.assertEquals(r.getvalue(), b"XY\nZA\rB")
def testEncodingErrorsReading(self):
# (1) default
b = io.BytesIO(b"abc\n\xff\n")
t = io.TextIOWrapper(b, encoding="ascii")
self.assertRaises(UnicodeError, t.read)
# (2) explicit strict
b = io.BytesIO(b"abc\n\xff\n")
t = io.TextIOWrapper(b, encoding="ascii", errors="strict")
self.assertRaises(UnicodeError, t.read)
# (3) ignore
b = io.BytesIO(b"abc\n\xff\n")
t = io.TextIOWrapper(b, encoding="ascii", errors="ignore")
self.assertEquals(t.read(), "abc\n\n")
# (4) replace
b = io.BytesIO(b"abc\n\xff\n")
t = io.TextIOWrapper(b, encoding="ascii", errors="replace")
self.assertEquals(t.read(), "abc\n\ufffd\n")
def testEncodingErrorsWriting(self):
# (1) default
b = io.BytesIO()
t = io.TextIOWrapper(b, encoding="ascii")
self.assertRaises(UnicodeError, t.write, "\xff")
# (2) explicit strict
b = io.BytesIO()
t = io.TextIOWrapper(b, encoding="ascii", errors="strict")
self.assertRaises(UnicodeError, t.write, "\xff")
# (3) ignore
b = io.BytesIO()
t = io.TextIOWrapper(b, encoding="ascii", errors="ignore",
newline="\n")
t.write("abc\xffdef\n")
t.flush()
self.assertEquals(b.getvalue(), b"abcdef\n")
# (4) replace
b = io.BytesIO()
t = io.TextIOWrapper(b, encoding="ascii", errors="replace",
newline="\n")
t.write("abc\xffdef\n")
t.flush()
self.assertEquals(b.getvalue(), b"abc?def\n")
def testNewlinesInput(self):
testdata = b"AAA\nBBB\nCCC\rDDD\rEEE\r\nFFF\r\nGGG"
normalized = testdata.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
for newline, expected in [
(None, normalized.decode("ascii").splitlines(True)),
("", testdata.decode("ascii").splitlines(True)),
("\n", ["AAA\n", "BBB\n", "CCC\rDDD\rEEE\r\n", "FFF\r\n", "GGG"]),
("\r\n", ["AAA\nBBB\nCCC\rDDD\rEEE\r\n", "FFF\r\n", "GGG"]),
("\r", ["AAA\nBBB\nCCC\r", "DDD\r", "EEE\r", "\nFFF\r", "\nGGG"]),
]:
buf = io.BytesIO(testdata)
txt = io.TextIOWrapper(buf, encoding="ascii", newline=newline)
self.assertEquals(txt.readlines(), expected)
txt.seek(0)
self.assertEquals(txt.read(), "".join(expected))
def testNewlinesOutput(self):
testdict = {
"": b"AAA\nBBB\nCCC\nX\rY\r\nZ",
"\n": b"AAA\nBBB\nCCC\nX\rY\r\nZ",
"\r": b"AAA\rBBB\rCCC\rX\rY\r\rZ",
"\r\n": b"AAA\r\nBBB\r\nCCC\r\nX\rY\r\r\nZ",
}
tests = [(None, testdict[os.linesep])] + sorted(testdict.items())
for newline, expected in tests:
buf = io.BytesIO()
txt = io.TextIOWrapper(buf, encoding="ascii", newline=newline)
txt.write("AAA\nB")
txt.write("BB\nCCC\n")
txt.write("X\rY\r\nZ")
txt.flush()
self.assertEquals(buf.closed, False)
self.assertEquals(buf.getvalue(), expected)
def testNewlines(self):
input_lines = [ "unix\n", "windows\r\n", "os9\r", "last\n", "nonl" ]
tests = [
[ None, [ 'unix\n', 'windows\n', 'os9\n', 'last\n', 'nonl' ] ],
[ '', input_lines ],
[ '\n', [ "unix\n", "windows\r\n", "os9\rlast\n", "nonl" ] ],
[ '\r\n', [ "unix\nwindows\r\n", "os9\rlast\nnonl" ] ],
[ '\r', [ "unix\nwindows\r", "\nos9\r", "last\nnonl" ] ],
]
encodings = ('utf-8', 'latin-1')
# Try a range of buffer sizes to test the case where \r is the last
# character in TextIOWrapper._pending_line.
for encoding in encodings:
# XXX: str.encode() should return bytes
data = bytes(''.join(input_lines).encode(encoding))
for do_reads in (False, True):
for bufsize in range(1, 10):
for newline, exp_lines in tests:
bufio = io.BufferedReader(io.BytesIO(data), bufsize)
textio = io.TextIOWrapper(bufio, newline=newline,
encoding=encoding)
if do_reads:
got_lines = []
while True:
c2 = textio.read(2)
if c2 == '':
break
self.assertEquals(len(c2), 2)
got_lines.append(c2 + textio.readline())
else:
got_lines = list(textio)
for got_line, exp_line in zip(got_lines, exp_lines):
self.assertEquals(got_line, exp_line)
self.assertEquals(len(got_lines), len(exp_lines))
def testNewlinesOutput(self):
data = "AAA\nBBB\rCCC\n"
data_lf = b"AAA\nBBB\rCCC\n"
data_cr = b"AAA\rBBB\rCCC\r"
data_crlf = b"AAA\r\nBBB\rCCC\r\n"
save_linesep = os.linesep
try:
for os.linesep, newline, expected in [
("\n", None, data_lf),
("\r\n", None, data_crlf),
("\n", "", data_lf),
("\r\n", "", data_lf),
("\n", "\n", data_lf),
("\r\n", "\n", data_lf),
("\n", "\r", data_cr),
("\r\n", "\r", data_cr),
("\n", "\r\n", data_crlf),
("\r\n", "\r\n", data_crlf),
]:
buf = io.BytesIO()
txt = io.TextIOWrapper(buf, encoding="ascii", newline=newline)
txt.write(data)
txt.close()
self.assertEquals(buf.closed, True)
self.assertRaises(ValueError, buf.getvalue)
finally:
os.linesep = save_linesep
# Systematic tests of the text I/O API
def testBasicIO(self):
for chunksize in (1, 2, 3, 4, 5, 15, 16, 17, 31, 32, 33, 63, 64, 65):
            for enc in "ascii", "latin1", "utf8":  # , "utf-16-be", "utf-16-le"
f = io.open(support.TESTFN, "w+", encoding=enc)
f._CHUNK_SIZE = chunksize
self.assertEquals(f.write("abc"), 3)
f.close()
f = io.open(support.TESTFN, "r+", encoding=enc)
f._CHUNK_SIZE = chunksize
self.assertEquals(f.tell(), 0)
self.assertEquals(f.read(), "abc")
cookie = f.tell()
self.assertEquals(f.seek(0), 0)
self.assertEquals(f.read(2), "ab")
self.assertEquals(f.read(1), "c")
self.assertEquals(f.read(1), "")
self.assertEquals(f.read(), "")
self.assertEquals(f.tell(), cookie)
self.assertEquals(f.seek(0), 0)
self.assertEquals(f.seek(0, 2), cookie)
self.assertEquals(f.write("def"), 3)
self.assertEquals(f.seek(cookie), cookie)
self.assertEquals(f.read(), "def")
if enc.startswith("utf"):
self.multi_line_test(f, enc)
f.close()
def multi_line_test(self, f, enc):
f.seek(0)
f.truncate()
sample = "s\xff\u0fff\uffff"
wlines = []
for size in (0, 1, 2, 3, 4, 5, 30, 31, 32, 33, 62, 63, 64, 65, 1000):
chars = []
for i in range(size):
chars.append(sample[i % len(sample)])
line = "".join(chars) + "\n"
wlines.append((f.tell(), line))
f.write(line)
f.seek(0)
rlines = []
while True:
pos = f.tell()
line = f.readline()
if not line:
break
rlines.append((pos, line))
self.assertEquals(rlines, wlines)
def testTelling(self):
f = io.open(support.TESTFN, "w+", encoding="utf8")
p0 = f.tell()
f.write("\xff\n")
p1 = f.tell()
f.write("\xff\n")
p2 = f.tell()
f.seek(0)
self.assertEquals(f.tell(), p0)
self.assertEquals(f.readline(), "\xff\n")
self.assertEquals(f.tell(), p1)
self.assertEquals(f.readline(), "\xff\n")
self.assertEquals(f.tell(), p2)
f.seek(0)
for line in f:
self.assertEquals(line, "\xff\n")
self.assertRaises(IOError, f.tell)
self.assertEquals(f.tell(), p2)
f.close()
def testSeeking(self):
chunk_size = io.TextIOWrapper._CHUNK_SIZE
prefix_size = chunk_size - 2
u_prefix = "a" * prefix_size
prefix = bytes(u_prefix.encode("utf-8"))
self.assertEquals(len(u_prefix), len(prefix))
u_suffix = "\u8888\n"
suffix = bytes(u_suffix.encode("utf-8"))
line = prefix + suffix
f = io.open(support.TESTFN, "wb")
f.write(line*2)
f.close()
f = io.open(support.TESTFN, "r", encoding="utf-8")
s = f.read(prefix_size)
self.assertEquals(s, str(prefix, "ascii"))
self.assertEquals(f.tell(), prefix_size)
self.assertEquals(f.readline(), u_suffix)
def testSeekingToo(self):
# Regression test for a specific bug
data = b'\xe0\xbf\xbf\n'
f = io.open(support.TESTFN, "wb")
f.write(data)
f.close()
f = io.open(support.TESTFN, "r", encoding="utf-8")
f._CHUNK_SIZE # Just test that it exists
f._CHUNK_SIZE = 2
f.readline()
f.tell()
def testSeekAndTell(self):
"""Test seek/tell using the StatefulIncrementalDecoder."""
def testSeekAndTellWithData(data, min_pos=0):
"""Tell/seek to various points within a data stream and ensure
that the decoded data returned by read() is consistent."""
f = io.open(support.TESTFN, 'wb')
f.write(data)
f.close()
f = io.open(support.TESTFN, encoding='test_decoder')
decoded = f.read()
f.close()
for i in range(min_pos, len(decoded) + 1): # seek positions
for j in [1, 5, len(decoded) - i]: # read lengths
f = io.open(support.TESTFN, encoding='test_decoder')
self.assertEquals(f.read(i), decoded[:i])
cookie = f.tell()
self.assertEquals(f.read(j), decoded[i:i + j])
f.seek(cookie)
self.assertEquals(f.read(), decoded[i:])
f.close()
# Enable the test decoder.
StatefulIncrementalDecoder.codecEnabled = 1
# Run the tests.
try:
# Try each test case.
for input, _, _ in StatefulIncrementalDecoderTest.test_cases:
testSeekAndTellWithData(input)
# Position each test case so that it crosses a chunk boundary.
CHUNK_SIZE = io.TextIOWrapper._CHUNK_SIZE
for input, _, _ in StatefulIncrementalDecoderTest.test_cases:
offset = CHUNK_SIZE - len(input)//2
prefix = b'.'*offset
# Don't bother seeking into the prefix (takes too long).
min_pos = offset*2
testSeekAndTellWithData(prefix + input, min_pos)
# Ensure our test decoder won't interfere with subsequent tests.
finally:
StatefulIncrementalDecoder.codecEnabled = 0
def testEncodedWrites(self):
data = "1234567890"
tests = ("utf-16",
"utf-16-le",
"utf-16-be",
"utf-32",
"utf-32-le",
"utf-32-be")
for encoding in tests:
buf = io.BytesIO()
f = io.TextIOWrapper(buf, encoding=encoding)
# Check if the BOM is written only once (see issue1753).
f.write(data)
f.write(data)
f.seek(0)
self.assertEquals(f.read(), data * 2)
self.assertEquals(buf.getvalue(), (data * 2).encode(encoding))
def timingTest(self):
timer = time.time
enc = "utf8"
line = "\0\x0f\xff\u0fff\uffff\U000fffff\U0010ffff"*3 + "\n"
nlines = 10000
nchars = len(line)
nbytes = len(line.encode(enc))
for chunk_size in (32, 64, 128, 256):
f = io.open(support.TESTFN, "w+", encoding=enc)
f._CHUNK_SIZE = chunk_size
t0 = timer()
for i in range(nlines):
f.write(line)
f.flush()
t1 = timer()
f.seek(0)
for line in f:
pass
t2 = timer()
f.seek(0)
while f.readline():
pass
t3 = timer()
f.seek(0)
while f.readline():
f.tell()
t4 = timer()
f.close()
if support.verbose:
print("\nTiming test: %d lines of %d characters (%d bytes)" %
(nlines, nchars, nbytes))
print("File chunk size: %6s" % f._CHUNK_SIZE)
print("Writing: %6.3f seconds" % (t1-t0))
print("Reading using iteration: %6.3f seconds" % (t2-t1))
print("Reading using readline(): %6.3f seconds" % (t3-t2))
print("Using readline()+tell(): %6.3f seconds" % (t4-t3))
def testReadOneByOne(self):
txt = io.TextIOWrapper(io.BytesIO(b"AA\r\nBB"))
reads = ""
while True:
c = txt.read(1)
if not c:
break
reads += c
self.assertEquals(reads, "AA\nBB")
# read in amounts equal to TextIOWrapper._CHUNK_SIZE which is 128.
def testReadByChunk(self):
# make sure "\r\n" straddles 128 char boundary.
txt = io.TextIOWrapper(io.BytesIO(b"A" * 127 + b"\r\nB"))
reads = ""
while True:
c = txt.read(128)
if not c:
break
reads += c
self.assertEquals(reads, "A"*127+"\nB")
def test_issue1395_1(self):
txt = io.TextIOWrapper(io.BytesIO(self.testdata), encoding="ascii")
# read one char at a time
reads = ""
while True:
c = txt.read(1)
if not c:
break
reads += c
self.assertEquals(reads, self.normalized)
def test_issue1395_2(self):
txt = io.TextIOWrapper(io.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = ""
while True:
c = txt.read(4)
if not c:
break
reads += c
self.assertEquals(reads, self.normalized)
def test_issue1395_3(self):
txt = io.TextIOWrapper(io.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = txt.read(4)
reads += txt.read(4)
reads += txt.readline()
reads += txt.readline()
reads += txt.readline()
self.assertEquals(reads, self.normalized)
def test_issue1395_4(self):
txt = io.TextIOWrapper(io.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = txt.read(4)
reads += txt.read()
self.assertEquals(reads, self.normalized)
def test_issue1395_5(self):
txt = io.TextIOWrapper(io.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = txt.read(4)
pos = txt.tell()
txt.seek(0)
txt.seek(pos)
self.assertEquals(txt.read(4), "BBB\n")
def test_issue2282(self):
buffer = io.BytesIO(self.testdata)
txt = io.TextIOWrapper(buffer, encoding="ascii")
self.assertEqual(buffer.seekable(), txt.seekable())
def test_newline_decoder(self):
import codecs
decoder = codecs.getincrementaldecoder("utf-8")()
decoder = io.IncrementalNewlineDecoder(decoder, translate=True)
self.assertEquals(decoder.decode(b'\xe8\xa2\x88'), "\u8888")
self.assertEquals(decoder.decode(b'\xe8'), "")
self.assertEquals(decoder.decode(b'\xa2'), "")
self.assertEquals(decoder.decode(b'\x88'), "\u8888")
self.assertEquals(decoder.decode(b'\xe8'), "")
self.assertRaises(UnicodeDecodeError, decoder.decode, b'', final=True)
decoder.setstate((b'', 0))
self.assertEquals(decoder.decode(b'\n'), "\n")
self.assertEquals(decoder.decode(b'\r'), "")
self.assertEquals(decoder.decode(b'', final=True), "\n")
self.assertEquals(decoder.decode(b'\r', final=True), "\n")
self.assertEquals(decoder.decode(b'\r'), "")
self.assertEquals(decoder.decode(b'a'), "\na")
self.assertEquals(decoder.decode(b'\r\r\n'), "\n\n")
self.assertEquals(decoder.decode(b'\r'), "")
self.assertEquals(decoder.decode(b'\r'), "\n")
self.assertEquals(decoder.decode(b'\na'), "\na")
self.assertEquals(decoder.decode(b'\xe8\xa2\x88\r\n'), "\u8888\n")
self.assertEquals(decoder.decode(b'\xe8\xa2\x88'), "\u8888")
self.assertEquals(decoder.decode(b'\n'), "\n")
self.assertEquals(decoder.decode(b'\xe8\xa2\x88\r'), "\u8888")
self.assertEquals(decoder.decode(b'\n'), "\n")
decoder = codecs.getincrementaldecoder("utf-8")()
decoder = io.IncrementalNewlineDecoder(decoder, translate=True)
self.assertEquals(decoder.newlines, None)
decoder.decode(b"abc\n\r")
self.assertEquals(decoder.newlines, '\n')
decoder.decode(b"\nabc")
self.assertEquals(decoder.newlines, ('\n', '\r\n'))
decoder.decode(b"abc\r")
self.assertEquals(decoder.newlines, ('\n', '\r\n'))
decoder.decode(b"abc")
self.assertEquals(decoder.newlines, ('\r', '\n', '\r\n'))
decoder.decode(b"abc\r")
decoder.reset()
self.assertEquals(decoder.decode(b"abc"), "abc")
self.assertEquals(decoder.newlines, None)
# XXX Tests for open()
class MiscIOTest(unittest.TestCase):
def testImport__all__(self):
for name in io.__all__:
obj = getattr(io, name, None)
self.assert_(obj is not None, name)
if name == "open":
continue
elif "error" in name.lower():
self.assert_(issubclass(obj, Exception), name)
else:
self.assert_(issubclass(obj, io.IOBase))
def test_fileio_warnings(self):
with support.check_warnings() as w:
self.assertEqual(w.warnings, [])
self.assertRaises(TypeError, io.FileIO, [])
self.assertEqual(w.warnings, [])
self.assertRaises(ValueError, io.FileIO, "/some/invalid/name", "rt")
self.assertEqual(w.warnings, [])
def test_main():
support.run_unittest(IOTest, BytesIOTest, StringIOTest,
BufferedReaderTest, BufferedWriterTest,
BufferedRWPairTest, BufferedRandomTest,
StatefulIncrementalDecoderTest,
TextIOWrapperTest, MiscIOTest)
if __name__ == "__main__":
unittest.main()