bpo-30341: Improve _PyTrash_thread_destroy_chain() a little bit (#1545)
* add a comment about why we need to increase trash_delete_nesting
* move increase and decrease outside of the loop
parent 8619c5417c
commit a66f9c6bb1
@@ -1029,7 +1029,7 @@ without deallocating anything (and so unbounded call-stack depth is avoided).
 When the call stack finishes unwinding again, code generated by the END macro
 notices this, and calls another routine to deallocate all the objects that
 may have been added to the list of deferred deallocations. In effect, a
-chain of N deallocations is broken into N / PyTrash_UNWIND_LEVEL pieces,
+chain of N deallocations is broken into (N-1)/(PyTrash_UNWIND_LEVEL-1) pieces,
 with the call stack never exceeding a depth of PyTrash_UNWIND_LEVEL.
 */
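To make the revised formula concrete (this illustration is not part of the commit, and it assumes PyTrash_UNWIND_LEVEL keeps its usual CPython value of 50), a minimal standalone C sketch:

    /* Illustration only: how many pieces a chain of N deferred
       deallocations is split into under the updated comment's formula.
       Assumes PyTrash_UNWIND_LEVEL == 50, its usual value in CPython. */
    #include <stdio.h>

    #define PyTrash_UNWIND_LEVEL 50

    int main(void)
    {
        long N = 500000;  /* same N as the reproducer quoted below */
        long pieces = (N - 1) / (PyTrash_UNWIND_LEVEL - 1);
        /* prints: 500000 deallocations -> 10204 pieces, depth <= 50 */
        printf("%ld deallocations -> %ld pieces, depth <= %d\n",
               N, pieces, PyTrash_UNWIND_LEVEL);
        return 0;
    }

Only the divisor in the comment changes; the guarantee that the call stack never exceeds a depth of PyTrash_UNWIND_LEVEL is the same before and after.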
@@ -2093,6 +2093,19 @@ void
 _PyTrash_thread_destroy_chain(void)
 {
     PyThreadState *tstate = PyThreadState_GET();
+    /* We need to increase trash_delete_nesting here, otherwise,
+       _PyTrash_thread_destroy_chain will be called recursively
+       and then possibly crash. An example that may crash without
+       increase:
+           N = 500000  # need to be large enough
+           ob = object()
+           tups = [(ob,) for i in range(N)]
+           for i in range(49):
+               tups = [(tup,) for tup in tups]
+           del tups
+    */
+    assert(tstate->trash_delete_nesting == 0);
+    ++tstate->trash_delete_nesting;
     while (tstate->trash_delete_later) {
         PyObject *op = tstate->trash_delete_later;
         destructor dealloc = Py_TYPE(op)->tp_dealloc;
@@ -2107,10 +2120,10 @@ _PyTrash_thread_destroy_chain(void)
          * up distorting allocation statistics.
          */
         assert(op->ob_refcnt == 0);
-        ++tstate->trash_delete_nesting;
         (*dealloc)(op);
-        --tstate->trash_delete_nesting;
+        assert(tstate->trash_delete_nesting == 1);
     }
+    --tstate->trash_delete_nesting;
 }
 
 #ifndef Py_TRACE_REFS
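For context on why the increment and decrement now sit outside the loop: the trashcan END bookkeeping only calls _PyTrash_thread_destroy_chain() once trash_delete_nesting drains back to zero, so keeping the count at 1 for the whole loop stops the tp_dealloc calls made from the loop from re-entering the function. A rough sketch of that check (paraphrased; this is not the literal Py_TRASHCAN_SAFE_END macro, and the wrapper function below is hypothetical):

    #include <Python.h>

    /* Paraphrase of the trashcan END bookkeeping of this era; the field
       and function names are real CPython identifiers, the wrapper is a
       sketch for illustration only. */
    static void trashcan_end_sketch(PyThreadState *tstate)
    {
        --tstate->trash_delete_nesting;
        /* The deferred chain is only destroyed once nesting drains back
           to zero.  Because _PyTrash_thread_destroy_chain() now holds
           trash_delete_nesting at 1 for the whole loop, this branch
           cannot fire while one of the loop's tp_dealloc calls is
           unwinding, so the function is never re-entered recursively. */
        if (tstate->trash_delete_later
            && tstate->trash_delete_nesting <= 0) {
            _PyTrash_thread_destroy_chain();
        }
    }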