Issue #27130: Fix handling of buffers exceeding UINT_MAX in “zlib” module

Patch by Xiang Zhang.
Committed by Martin Panter on 2016-07-23 03:02:07 +00:00
parent 524714eeda
commit 84544c1020
5 changed files with 433 additions and 415 deletions

Doc/howto/clinic.rst

@@ -1249,17 +1249,17 @@ Here's the simplest example of a custom converter, from ``Modules/zlibmodule.c``
 /*[python input]
-class capped_uint_converter(CConverter):
-    type = 'unsigned int'
-    converter = 'capped_uint_converter'
+class ssize_t_converter(CConverter):
+    type = 'Py_ssize_t'
+    converter = 'ssize_t_converter'
 [python start generated code]*/
+/*[python end generated code: output=da39a3ee5e6b4b0d input=35521e4e733823c7]*/
 
-This block adds a converter to Argument Clinic named ``capped_uint``. Parameters
-declared as ``capped_uint`` will be declared as type ``unsigned int``, and will
+This block adds a converter to Argument Clinic named ``ssize_t``. Parameters
+declared as ``ssize_t`` will be declared as type ``Py_ssize_t``, and will
 be parsed by the ``'O&'`` format unit, which will call the
-``capped_uint_converter`` converter function. ``capped_uint`` variables
+``ssize_t_converter`` converter function. ``ssize_t`` variables
 automatically support default values.
 
 More sophisticated custom converters can insert custom C code to
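
As an aside on how this converter behaves from Python (a sketch, not part of the diff): a parameter bound through ``ssize_t_converter`` accepts sizes up to ``sys.maxsize``, while anything larger is rejected with ``OverflowError`` during argument parsing, before zlib runs at all. That is what the updated ``test_overflow`` test in the test changes below exercises; the snippet assumes a 64-bit CPython with this change applied.

import sys
import zlib

# bufsize is now parsed as a Py_ssize_t by ssize_t_converter.
zlib.decompress(zlib.compress(b'data'), 15, 64 * 1024)

# A value that does not fit in a Py_ssize_t fails in the converter,
# before any decompression is attempted.
try:
    zlib.decompress(b'', 15, sys.maxsize + 1)
except OverflowError as exc:
    print(exc)   # message matches 'int too large'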

Lib/test/test_zlib.py

@@ -121,6 +121,8 @@ class ExceptionTestCase(unittest.TestCase):
 
     def test_overflow(self):
         with self.assertRaisesRegex(OverflowError, 'int too large'):
             zlib.decompress(b'', 15, sys.maxsize + 1)
+        with self.assertRaisesRegex(OverflowError, 'int too large'):
+            zlib.decompressobj().decompress(b'', sys.maxsize + 1)
         with self.assertRaisesRegex(OverflowError, 'int too large'):
             zlib.decompressobj().flush(sys.maxsize + 1)
@@ -188,15 +190,6 @@ class CompressTestCase(BaseCompressTestCase, unittest.TestCase):
     def test_big_decompress_buffer(self, size):
         self.check_big_decompress_buffer(size, zlib.decompress)
 
-    @bigmemtest(size=_4G + 100, memuse=1, dry_run=False)
-    def test_length_overflow(self, size):
-        data = b'x' * size
-        try:
-            self.assertRaises(OverflowError, zlib.compress, data, 1)
-            self.assertRaises(OverflowError, zlib.decompress, data)
-        finally:
-            data = None
-
     @bigmemtest(size=_4G, memuse=1)
     def test_large_bufsize(self, size):
         # Test decompress(bufsize) parameter greater than the internal limit
@@ -209,6 +202,16 @@ class CompressTestCase(BaseCompressTestCase, unittest.TestCase):
         compressed = zlib.compress(data, 1)
         self.assertEqual(zlib.decompress(compressed, 15, CustomInt()), data)
 
+    @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform')
+    @bigmemtest(size=_4G + 100, memuse=4)
+    def test_64bit_compress(self, size):
+        data = b'x' * size
+        try:
+            comp = zlib.compress(data, 0)
+            self.assertEqual(zlib.decompress(comp), data)
+        finally:
+            comp = data = None
+
 
 class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):
     # Test compression object
@@ -678,16 +681,45 @@ class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):
         decompress = lambda s: d.decompress(s) + d.flush()
         self.check_big_decompress_buffer(size, decompress)
 
-    @bigmemtest(size=_4G + 100, memuse=1, dry_run=False)
-    def test_length_overflow(self, size):
+    @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform')
+    @bigmemtest(size=_4G + 100, memuse=4)
+    def test_64bit_compress(self, size):
         data = b'x' * size
-        c = zlib.compressobj(1)
-        d = zlib.decompressobj()
+        co = zlib.compressobj(0)
+        do = zlib.decompressobj()
         try:
-            self.assertRaises(OverflowError, c.compress, data)
-            self.assertRaises(OverflowError, d.decompress, data)
+            comp = co.compress(data) + co.flush()
+            uncomp = do.decompress(comp) + do.flush()
+            self.assertEqual(uncomp, data)
         finally:
-            data = None
+            comp = uncomp = data = None
+
+    @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform')
+    @bigmemtest(size=_4G + 100, memuse=3)
+    def test_large_unused_data(self, size):
+        data = b'abcdefghijklmnop'
+        unused = b'x' * size
+        comp = zlib.compress(data) + unused
+        do = zlib.decompressobj()
+        try:
+            uncomp = do.decompress(comp) + do.flush()
+            self.assertEqual(unused, do.unused_data)
+            self.assertEqual(uncomp, data)
+        finally:
+            unused = comp = do = None
+
+    @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform')
+    @bigmemtest(size=_4G + 100, memuse=5)
+    def test_large_unconsumed_tail(self, size):
+        data = b'x' * size
+        do = zlib.decompressobj()
+        try:
+            comp = zlib.compress(data, 0)
+            uncomp = do.decompress(comp, 1) + do.flush()
+            self.assertEqual(uncomp, data)
+            self.assertEqual(do.unconsumed_tail, b'')
+        finally:
+            comp = uncomp = data = None
 
     def test_wbits(self):
         # wbits=0 only supported since zlib v1.2.3.5

Misc/NEWS

@@ -31,6 +31,11 @@ Core and Builtins
 Library
 -------
 
+- Issue #27130: In the "zlib" module, fix handling of large buffers
+  (typically 4 GiB) when compressing and decompressing. Previously, inputs
+  were limited to 4 GiB, and compression and decompression operations did not
+  properly handle results of 4 GiB.
+
 - Issue #27533: Release GIL in nt._isdir
 
 - Issue #17711: Fixed unpickling by the persistent ID with protocol 0.
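
A minimal sketch of the behaviour described in the Issue #27130 entry above, adapted from the ``test_64bit_compress`` test added earlier in this commit; it assumes a 64-bit build with roughly 12 GiB of free memory.

import zlib

# A buffer larger than UINT_MAX now round-trips; before this change,
# compressing or decompressing more than 4 GiB raised OverflowError.
data = b'x' * (2**32 + 100)
comp = zlib.compress(data, 0)   # level 0 (stored) keeps the run cheap
assert zlib.decompress(comp) == data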

Modules/clinic/zlibmodule.c.h

@@ -57,7 +57,7 @@ PyDoc_STRVAR(zlib_decompress__doc__,
 
 static PyObject *
 zlib_decompress_impl(PyObject *module, Py_buffer *data, int wbits,
-                     unsigned int bufsize);
+                     Py_ssize_t bufsize);
 
 static PyObject *
 zlib_decompress(PyObject *module, PyObject *args)
@@ -65,10 +65,10 @@ zlib_decompress(PyObject *module, PyObject *args)
     PyObject *return_value = NULL;
     Py_buffer data = {NULL, NULL};
     int wbits = MAX_WBITS;
-    unsigned int bufsize = DEF_BUF_SIZE;
+    Py_ssize_t bufsize = DEF_BUF_SIZE;
 
     if (!PyArg_ParseTuple(args, "y*|iO&:decompress",
-        &data, &wbits, capped_uint_converter, &bufsize))
+        &data, &wbits, ssize_t_converter, &bufsize))
         goto exit;
     return_value = zlib_decompress_impl(module, &data, wbits, bufsize);
@@ -236,17 +236,17 @@ PyDoc_STRVAR(zlib_Decompress_decompress__doc__,
 
 static PyObject *
 zlib_Decompress_decompress_impl(compobject *self, Py_buffer *data,
-                                unsigned int max_length);
+                                Py_ssize_t max_length);
 
 static PyObject *
 zlib_Decompress_decompress(compobject *self, PyObject *args)
 {
     PyObject *return_value = NULL;
     Py_buffer data = {NULL, NULL};
-    unsigned int max_length = 0;
+    Py_ssize_t max_length = 0;
 
     if (!PyArg_ParseTuple(args, "y*|O&:decompress",
-        &data, capped_uint_converter, &max_length))
+        &data, ssize_t_converter, &max_length))
         goto exit;
     return_value = zlib_Decompress_decompress_impl(self, &data, max_length);
@@ -348,16 +348,16 @@ PyDoc_STRVAR(zlib_Decompress_flush__doc__,
     {"flush", (PyCFunction)zlib_Decompress_flush, METH_VARARGS, zlib_Decompress_flush__doc__},
 
 static PyObject *
-zlib_Decompress_flush_impl(compobject *self, unsigned int length);
+zlib_Decompress_flush_impl(compobject *self, Py_ssize_t length);
 
 static PyObject *
 zlib_Decompress_flush(compobject *self, PyObject *args)
 {
     PyObject *return_value = NULL;
-    unsigned int length = DEF_BUF_SIZE;
+    Py_ssize_t length = DEF_BUF_SIZE;
 
     if (!PyArg_ParseTuple(args, "|O&:flush",
-        capped_uint_converter, &length))
+        ssize_t_converter, &length))
         goto exit;
     return_value = zlib_Decompress_flush_impl(self, length);
@@ -442,4 +442,4 @@ exit:
 #ifndef ZLIB_COMPRESS_COPY_METHODDEF
     #define ZLIB_COMPRESS_COPY_METHODDEF
 #endif /* !defined(ZLIB_COMPRESS_COPY_METHODDEF) */
-/*[clinic end generated code: output=8545565b1a1822de input=a9049054013a1b77]*/
+/*[clinic end generated code: output=7711ef02d1d5776c input=a9049054013a1b77]*/

Modules/zlibmodule.c: file diff suppressed because it is too large.