bpo-31857: Make the behavior of USE_STACKCHECK deterministic (#4098)

pdox 2017-10-25 23:03:01 -07:00 committed by Benjamin Peterson
parent 32318930da
commit 1896793520
6 changed files with 15 additions and 11 deletions

@@ -93,21 +93,19 @@ PyAPI_FUNC(int) Py_GetRecursionLimit(void);
       PyThreadState_GET()->overflowed = 0; \
     } while(0)
 PyAPI_FUNC(int) _Py_CheckRecursiveCall(const char *where);
-/* XXX _Py_CheckRecursionLimit should be changed to
-   _PyRuntime.ceval.check_recursion_limit. However, due to the macros
-   in which it's used, _Py_CheckRecursionLimit is stuck in the stable
-   ABI. It should be removed therefrom when possible.
+/* Due to the macros in which it's used, _Py_CheckRecursionLimit is in
+   the stable ABI. It should be removed therefrom when possible.
 */
 PyAPI_DATA(int) _Py_CheckRecursionLimit;
 #ifdef USE_STACKCHECK
-/* With USE_STACKCHECK, we artificially decrement the recursion limit in order
-   to trigger regular stack checks in _Py_CheckRecursiveCall(), except if
-   the "overflowed" flag is set, in which case we need the true value
-   of _Py_CheckRecursionLimit for _Py_MakeEndRecCheck() to function properly.
+/* With USE_STACKCHECK, trigger stack checks in _Py_CheckRecursiveCall()
+   on every 64th call to Py_EnterRecursiveCall.
 */
 # define _Py_MakeRecCheck(x) \
-    (++(x) > (_Py_CheckRecursionLimit += PyThreadState_GET()->overflowed - 1))
+    (++(x) > _Py_CheckRecursionLimit || \
+     ++(PyThreadState_GET()->stackcheck_counter) > 64)
 #else
 # define _Py_MakeRecCheck(x) (++(x) > _Py_CheckRecursionLimit)
 #endif
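
To make the new check concrete, here is a minimal, self-contained sketch of the scheme the macro above implements. It is not the real CPython code: the struct, function, and constant names (fake_thread_state, probe_machine_stack, STACKCHECK_INTERVAL, and so on) are invented for illustration, and the real macros also handle the "overflowed" flag and recursion_critical state. The point it shows is that each thread counts its own calls and forces the slow-path stack probe once its counter passes 64, independently of any other thread.

/* Illustrative model only -- not CPython source.  Shows a per-thread
   counter forcing a real stack probe on roughly every 64th call. */
#include <stdio.h>

#define RECURSION_LIMIT     1000
#define STACKCHECK_INTERVAL 64

struct fake_thread_state {
    int recursion_depth;      /* like tstate->recursion_depth */
    int stackcheck_counter;   /* like the new tstate->stackcheck_counter */
};

static int stack_probes = 0;  /* how often the machine stack was probed */

/* Stand-in for PyOS_CheckStack(); always reports "enough stack" here. */
static int probe_machine_stack(void)
{
    ++stack_probes;
    return 0;
}

/* Slow path, analogous to _Py_CheckRecursiveCall(): reset the counter,
   probe the machine stack, then enforce the recursion limit. */
static int check_recursive_call(struct fake_thread_state *ts)
{
    ts->stackcheck_counter = 0;
    if (probe_machine_stack()) {
        --ts->recursion_depth;
        return -1;            /* stack overflow */
    }
    if (ts->recursion_depth > RECURSION_LIMIT) {
        --ts->recursion_depth;
        return -1;            /* recursion limit exceeded */
    }
    return 0;
}

/* Fast path, analogous to _Py_MakeRecCheck() under USE_STACKCHECK:
   take the slow path when the limit is hit OR every 64th call. */
static int enter_recursive_call(struct fake_thread_state *ts)
{
    if (++ts->recursion_depth > RECURSION_LIMIT ||
        ++ts->stackcheck_counter > STACKCHECK_INTERVAL) {
        return check_recursive_call(ts);
    }
    return 0;
}

static void leave_recursive_call(struct fake_thread_state *ts)
{
    --ts->recursion_depth;
}

int main(void)
{
    struct fake_thread_state ts = {0, 0};
    for (int i = 0; i < 6400; i++) {
        if (enter_recursive_call(&ts) == 0) {
            leave_recursive_call(&ts);
        }
    }
    /* Deterministic: the same call count always yields the same probe count. */
    printf("%d calls, %d stack probes\n", 6400, stack_probes);
    return 0;
}

Under the old macro the probe cadence depended on a process-wide variable that every thread adjusted through its own "overflowed" flag, which is why the behavior was nondeterministic with multiple threads; the per-thread counter removes that shared state.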

@@ -29,7 +29,6 @@ struct _pending_calls {
 struct _ceval_runtime_state {
     int recursion_limit;
-    int check_recursion_limit;
     /* Records whether tracing is on for any thread. Counts the number
        of threads for which tstate->c_tracefunc is non-NULL, so if the
        value is 0, we know we don't have to check this thread's

@@ -151,6 +151,8 @@ typedef struct _ts {
                         to handle the runtime error. */
     char recursion_critical; /* The current calls must not cause
                                 a stack overflow. */
+    int stackcheck_counter;
     /* 'tracing' keeps track of the execution depth when tracing/profiling.
        This is to prevent the actual trace/profile code from being recorded in
        the trace/profile. */

@@ -0,0 +1,2 @@
+Make the behavior of USE_STACKCHECK deterministic in a multi-threaded
+environment.

@@ -469,13 +469,15 @@ _Py_CheckRecursiveCall(const char *where)
     int recursion_limit = _PyRuntime.ceval.recursion_limit;
 #ifdef USE_STACKCHECK
+    tstate->stackcheck_counter = 0;
     if (PyOS_CheckStack()) {
         --tstate->recursion_depth;
         PyErr_SetString(PyExc_MemoryError, "Stack overflow");
         return -1;
     }
-#endif
+    /* Needed for ABI backwards-compatibility (see bpo-31857) */
     _Py_CheckRecursionLimit = recursion_limit;
+#endif
     if (tstate->recursion_critical)
         /* Somebody asked that we don't check for recursion. */
         return 0;
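
For context, _Py_MakeRecCheck() sits behind the public Py_EnterRecursiveCall()/Py_LeaveRecursiveCall() pair, so extension code picks up the deterministic stack check without any changes. A rough usage sketch follows; the function name and the work it does are made up for the example.

/* Hypothetical C extension function; only the recursion-guard pattern
   around Py_EnterRecursiveCall()/Py_LeaveRecursiveCall() matters here. */
#include <Python.h>

static PyObject *
walk_node(PyObject *self, PyObject *obj)
{
    PyObject *result;

    /* Bumps tstate->recursion_depth via _Py_MakeRecCheck(); with
       USE_STACKCHECK this now also triggers PyOS_CheckStack() on every
       64th call.  Returns nonzero with an exception set on overflow. */
    if (Py_EnterRecursiveCall(" while walking an object graph")) {
        return NULL;
    }
    result = PyObject_Repr(obj);   /* stand-in for the real per-node work */
    Py_LeaveRecursiveCall();
    return result;
}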

@@ -245,6 +245,7 @@ new_threadstate(PyInterpreterState *interp, int init)
     tstate->recursion_depth = 0;
     tstate->overflowed = 0;
     tstate->recursion_critical = 0;
+    tstate->stackcheck_counter = 0;
     tstate->tracing = 0;
     tstate->use_tracing = 0;
     tstate->gilstate_counter = 0;