#3640: Correct a crash in cPickle on 64bit platforms, in the case of deeply nested lists or dicts.

Reviewed by Martin von Loewis.
Amaury Forgeot d'Arc 2008-09-11 21:03:37 +00:00
parent 06974bb1cc
commit fb1a5eb101
2 changed files with 139 additions and 63 deletions
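The new test below exercises exactly this scenario: it keeps wrapping a list in another list and pickling it with the C accelerator until the recursion limit is reached. As a standalone illustration (not part of the commit), here is a minimal sketch of the failure mode, assuming the C pickler is reachable through the regular pickle module:

# Hypothetical standalone reproducer (not from the commit): pickling a deeply
# nested list must fail with a recursion error, never with an interpreter crash.
import io
import pickle
import sys

nested = None
for _ in range(sys.getrecursionlimit() * 2):  # nest far beyond the recursion limit
    nested = [nested]

try:
    pickle.Pickler(io.BytesIO(), protocol=-1).dump(nested)
except RecursionError:  # plain RuntimeError on older Python 3 versions
    print("recursion limit hit cleanly")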


@@ -20,6 +20,7 @@ MemoryError.
 """
 
 import sys
+import itertools
 
 class RecursiveBlowup1:
     def __init__(self):
@@ -59,6 +60,24 @@ def test_getitem():
 def test_recurse():
     return test_recurse()
 
+def test_cpickle(_cache={}):
+    import io
+    try:
+        import _pickle
+    except ImportError:
+        print("cannot import _pickle, skipped!")
+        return
+    l = None
+    for n in itertools.count():
+        try:
+            l = _cache[n]
+            continue  # Already tried and it works, let's save some time
+        except KeyError:
+            for i in range(100):
+                l = [l]
+            _pickle.Pickler(io.BytesIO(), protocol=-1).dump(l)
+            _cache[n] = l
+
 def check_limit(n, test_func_name):
     sys.setrecursionlimit(n)
     if test_func_name.startswith("test_"):
@@ -81,5 +100,6 @@ while 1:
     check_limit(limit, "test_init")
     check_limit(limit, "test_getattr")
     check_limit(limit, "test_getitem")
+    check_limit(limit, "test_cpickle")
     print("Limit of %d is fine" % limit)
     limit = limit + 100


@@ -1353,8 +1353,8 @@ save_tuple(PicklerObject *self, PyObject *obj)
 static int
 batch_list(PicklerObject *self, PyObject *iter)
 {
-    PyObject *obj;
-    PyObject *slice[BATCHSIZE];
+    PyObject *obj = NULL;
+    PyObject *firstitem = NULL;
     int i, n;
 
     const char mark_op = MARK;
@@ -1389,44 +1389,69 @@ batch_list(PicklerObject *self, PyObject *iter)
 
     /* proto > 0: write in batches of BATCHSIZE. */
     do {
-        /* Get next group of (no more than) BATCHSIZE elements. */
-        for (n = 0; n < BATCHSIZE; n++) {
+        /* Get first item */
+        firstitem = PyIter_Next(iter);
+        if (firstitem == NULL) {
+            if (PyErr_Occurred())
+                goto error;
+
+            /* nothing more to add */
+            break;
+        }
+
+        /* Try to get a second item */
+        obj = PyIter_Next(iter);
+        if (obj == NULL) {
+            if (PyErr_Occurred())
+                goto error;
+
+            /* Only one item to write */
+            if (save(self, firstitem, 0) < 0)
+                goto error;
+            if (pickler_write(self, &append_op, 1) < 0)
+                goto error;
+            Py_CLEAR(firstitem);
+            break;
+        }
+
+        /* More than one item to write */
+
+        /* Pump out MARK, items, APPENDS. */
+        if (pickler_write(self, &mark_op, 1) < 0)
+            goto error;
+
+        if (save(self, firstitem, 0) < 0)
+            goto error;
+        Py_CLEAR(firstitem);
+        n = 1;
+
+        /* Fetch and save up to BATCHSIZE items */
+        while (obj) {
+            if (save(self, obj, 0) < 0)
+                goto error;
+            Py_CLEAR(obj);
+            n += 1;
+
+            if (n == BATCHSIZE)
+                break;
+
             obj = PyIter_Next(iter);
             if (obj == NULL) {
                 if (PyErr_Occurred())
                     goto error;
                 break;
             }
-            slice[n] = obj;
         }
 
-        if (n > 1) {
-            /* Pump out MARK, slice[0:n], APPENDS. */
-            if (pickler_write(self, &mark_op, 1) < 0)
-                goto error;
-            for (i = 0; i < n; i++) {
-                if (save(self, slice[i], 0) < 0)
-                    goto error;
-            }
-            if (pickler_write(self, &appends_op, 1) < 0)
-                goto error;
-        }
-        else if (n == 1) {
-            if (save(self, slice[0], 0) < 0 ||
-                pickler_write(self, &append_op, 1) < 0)
-                goto error;
-        }
+        if (pickler_write(self, &appends_op, 1) < 0)
+            goto error;
 
-        for (i = 0; i < n; i++) {
-            Py_DECREF(slice[i]);
-        }
     } while (n == BATCHSIZE);
     return 0;
 
   error:
-    while (--n >= 0) {
-        Py_DECREF(slice[n]);
-    }
+    Py_XDECREF(firstitem);
+    Py_XDECREF(obj);
     return -1;
 }
@@ -1496,8 +1521,8 @@ save_list(PicklerObject *self, PyObject *obj)
 static int
 batch_dict(PicklerObject *self, PyObject *iter)
 {
-    PyObject *obj;
-    PyObject *slice[BATCHSIZE];
+    PyObject *obj = NULL;
+    PyObject *firstitem = NULL;
     int i, n;
 
     const char mark_op = MARK;
@@ -1534,53 +1559,84 @@ batch_dict(PicklerObject *self, PyObject *iter)
 
     /* proto > 0: write in batches of BATCHSIZE. */
     do {
-        /* Get next group of (no more than) BATCHSIZE elements. */
-        for (n = 0; n < BATCHSIZE; n++) {
+        /* Get first item */
+        firstitem = PyIter_Next(iter);
+        if (firstitem == NULL) {
+            if (PyErr_Occurred())
+                goto error;
+
+            /* nothing more to add */
+            break;
+        }
+        if (!PyTuple_Check(firstitem) || PyTuple_Size(firstitem) != 2) {
+            PyErr_SetString(PyExc_TypeError, "dict items "
+                            "iterator must return 2-tuples");
+            goto error;
+        }
+
+        /* Try to get a second item */
+        obj = PyIter_Next(iter);
+        if (obj == NULL) {
+            if (PyErr_Occurred())
+                goto error;
+
+            /* Only one item to write */
+            if (save(self, PyTuple_GET_ITEM(firstitem, 0), 0) < 0)
+                goto error;
+            if (save(self, PyTuple_GET_ITEM(firstitem, 1), 0) < 0)
+                goto error;
+            if (pickler_write(self, &setitem_op, 1) < 0)
+                goto error;
+            Py_CLEAR(firstitem);
+            break;
+        }
+
+        /* More than one item to write */
+
+        /* Pump out MARK, items, SETITEMS. */
+        if (pickler_write(self, &mark_op, 1) < 0)
+            goto error;
+
+        if (save(self, PyTuple_GET_ITEM(firstitem, 0), 0) < 0)
+            goto error;
+        if (save(self, PyTuple_GET_ITEM(firstitem, 1), 0) < 0)
+            goto error;
+        Py_CLEAR(firstitem);
+        n = 1;
+
+        /* Fetch and save up to BATCHSIZE items */
+        while (obj) {
+            if (!PyTuple_Check(obj) || PyTuple_Size(obj) != 2) {
+                PyErr_SetString(PyExc_TypeError, "dict items "
+                                "iterator must return 2-tuples");
+                goto error;
+            }
+            if (save(self, PyTuple_GET_ITEM(obj, 0), 0) < 0 ||
+                save(self, PyTuple_GET_ITEM(obj, 1), 0) < 0)
+                goto error;
+            Py_CLEAR(obj);
+            n += 1;
+
+            if (n == BATCHSIZE)
+                break;
+
             obj = PyIter_Next(iter);
             if (obj == NULL) {
                 if (PyErr_Occurred())
                     goto error;
                 break;
             }
-            if (!PyTuple_Check(obj) || PyTuple_Size(obj) != 2) {
-                PyErr_SetString(PyExc_TypeError, "dict items "
-                                "iterator must return 2-tuples");
-                goto error;
-            }
-            slice[n] = obj;
         }
 
-        if (n > 1) {
-            /* Pump out MARK, slice[0:n], SETITEMS. */
-            if (pickler_write(self, &mark_op, 1) < 0)
-                goto error;
-            for (i = 0; i < n; i++) {
-                obj = slice[i];
-                if (save(self, PyTuple_GET_ITEM(obj, 0), 0) < 0 ||
-                    save(self, PyTuple_GET_ITEM(obj, 1), 0) < 0)
-                    goto error;
-            }
-            if (pickler_write(self, &setitems_op, 1) < 0)
-                goto error;
-        }
-        else if (n == 1) {
-            obj = slice[0];
-            if (save(self, PyTuple_GET_ITEM(obj, 0), 0) < 0 ||
-                save(self, PyTuple_GET_ITEM(obj, 1), 0) < 0 ||
-                pickler_write(self, &setitem_op, 1) < 0)
-                goto error;
-        }
+        if (pickler_write(self, &setitems_op, 1) < 0)
+            goto error;
 
-        for (i = 0; i < n; i++) {
-            Py_DECREF(slice[i]);
-        }
     } while (n == BATCHSIZE);
     return 0;
 
   error:
-    while (--n >= 0) {
-        Py_DECREF(slice[n]);
-    }
+    Py_XDECREF(firstitem);
+    Py_XDECREF(obj);
    return -1;
 }
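
For context (not part of this change), the MARK ... APPENDS / SETITEMS framing that batch_list and batch_dict emit can be inspected from Python with the standard pickletools module:

# Illustration only: show the batch framing for a small list pickled at protocol 2.
import pickle
import pickletools

pickletools.dis(pickle.dumps([1, 2, 3], protocol=2))
# The disassembly shows EMPTY_LIST, then MARK, the three items, and APPENDS,
# the same opcode sequence the batching loops above write for each batch.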