mirror of https://github.com/python/cpython
GH-125174: Make immortal objects more robust, following design from PEP 683 (GH-125251)
parent 01fc3b34cc
commit c9014374c5
@ -11,7 +11,7 @@ extern "C" {
|
|||
#ifdef Py_DEBUG
|
||||
static inline void
|
||||
_PyStaticObject_CheckRefcnt(PyObject *obj) {
|
||||
if (Py_REFCNT(obj) < _Py_IMMORTAL_REFCNT) {
|
||||
if (!_Py_IsImmortal(obj)) {
|
||||
fprintf(stderr, "Immortal Object has less refcnt than expected.\n");
|
||||
_PyObject_Dump(obj);
|
||||
}
|
||||
|
|
|
@ -16,9 +16,6 @@ extern "C" {
|
|||
#include "pycore_pystate.h" // _PyInterpreterState_GET()
|
||||
#include "pycore_uniqueid.h" // _PyType_IncrefSlow
|
||||
|
||||
|
||||
#define _Py_IMMORTAL_REFCNT_LOOSE ((_Py_IMMORTAL_REFCNT >> 1) + 1)
|
||||
|
||||
// This value is added to `ob_ref_shared` for objects that use deferred
|
||||
// reference counting so that they are not immediately deallocated when the
|
||||
// non-deferred reference count drops to zero.
|
||||
|
@ -27,25 +24,8 @@ extern "C" {
|
|||
// `ob_ref_shared` are used for flags.
|
||||
#define _Py_REF_DEFERRED (PY_SSIZE_T_MAX / 8)
|
||||
|
||||
// gh-121528, gh-118997: Similar to _Py_IsImmortal() but be more loose when
|
||||
// comparing the reference count to stay compatible with C extensions built
|
||||
// with the stable ABI 3.11 or older. Such extensions implement INCREF/DECREF
|
||||
// as refcnt++ and refcnt-- without taking in account immortal objects. For
|
||||
// example, the reference count of an immortal object can change from
|
||||
// _Py_IMMORTAL_REFCNT to _Py_IMMORTAL_REFCNT+1 (INCREF) or
|
||||
// _Py_IMMORTAL_REFCNT-1 (DECREF).
|
||||
//
|
||||
// This function should only be used in assertions. Otherwise, _Py_IsImmortal()
|
||||
// must be used instead.
|
||||
static inline int _Py_IsImmortalLoose(PyObject *op)
|
||||
{
|
||||
#if defined(Py_GIL_DISABLED)
|
||||
return _Py_IsImmortal(op);
|
||||
#else
|
||||
return (op->ob_refcnt >= _Py_IMMORTAL_REFCNT_LOOSE);
|
||||
#endif
|
||||
}
|
||||
#define _Py_IsImmortalLoose(op) _Py_IsImmortalLoose(_PyObject_CAST(op))
|
||||
/* For backwards compatibility -- Do not use this */
|
||||
#define _Py_IsImmortalLoose(op) _Py_IsImmortal
|
||||
|
||||
|
||||
/* Check if an object is consistent. For example, ensure that the reference
|
||||
|
@@ -97,7 +77,7 @@ PyAPI_FUNC(int) _PyObject_IsFreed(PyObject *);
 #else
 #define _PyObject_HEAD_INIT(type) \
     { \
-        .ob_refcnt = _Py_IMMORTAL_REFCNT, \
+        .ob_refcnt = _Py_IMMORTAL_INITIAL_REFCNT, \
         .ob_type = (type) \
     }
 #endif
@@ -184,7 +164,7 @@ PyAPI_FUNC(void) _Py_SetImmortalUntracked(PyObject *op);
 static inline void _Py_SetMortal(PyObject *op, Py_ssize_t refcnt)
 {
     if (op) {
-        assert(_Py_IsImmortalLoose(op));
+        assert(_Py_IsImmortal(op));
 #ifdef Py_GIL_DISABLED
         op->ob_tid = _Py_UNOWNED_TID;
         op->ob_ref_local = 0;
@@ -316,7 +296,7 @@ static inline void
 _Py_INCREF_TYPE(PyTypeObject *type)
 {
     if (!_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) {
-        assert(_Py_IsImmortalLoose(type));
+        assert(_Py_IsImmortal(type));
         _Py_INCREF_IMMORTAL_STAT_INC();
         return;
     }
@@ -357,7 +337,7 @@ static inline void
 _Py_DECREF_TYPE(PyTypeObject *type)
 {
     if (!_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) {
-        assert(_Py_IsImmortalLoose(type));
+        assert(_Py_IsImmortal(type));
         _Py_DECREF_IMMORTAL_STAT_INC();
         return;
     }
@@ -393,7 +373,7 @@ _PyObject_Init(PyObject *op, PyTypeObject *typeobj)
 {
     assert(op != NULL);
     Py_SET_TYPE(op, typeobj);
-    assert(_PyType_HasFeature(typeobj, Py_TPFLAGS_HEAPTYPE) || _Py_IsImmortalLoose(typeobj));
+    assert(_PyType_HasFeature(typeobj, Py_TPFLAGS_HEAPTYPE) || _Py_IsImmortal(typeobj));
     _Py_INCREF_TYPE(typeobj);
     _Py_NewReference(op);
 }
@@ -81,7 +81,7 @@ whose size is determined when the object is allocated.
 #else
 #define PyObject_HEAD_INIT(type) \
     { \
-        { _Py_IMMORTAL_REFCNT }, \
+        { _Py_IMMORTAL_INITIAL_REFCNT }, \
         (type) \
     },
 #endif
@@ -21,25 +21,30 @@ cleanup during runtime finalization.
 
 #if SIZEOF_VOID_P > 4
 /*
-In 64+ bit systems, an object will be marked as immortal by setting all of the
-lower 32 bits of the reference count field, which is equal to: 0xFFFFFFFF
+In 64+ bit systems, any object whose 32 bit reference count is >= 2**31
+will be treated as immortal.
 
 Using the lower 32 bits makes the value backwards compatible by allowing
 C-Extensions without the updated checks in Py_INCREF and Py_DECREF to safely
-increase and decrease the objects reference count. The object would lose its
-immortality, but the execution would still be correct.
+increase and decrease the objects reference count.
+
+In order to offer sufficient resilience to C extensions using the stable ABI
+compiled against 3.11 or earlier, we set the initial value near the
+middle of the range (2**31, 2**32). That way the refcount can be
+off by ~1 billion without affecting immortality.
 
 Reference count increases will use saturated arithmetic, taking advantage of
 having all the lower 32 bits set, which will avoid the reference count to go
 beyond the refcount limit. Immortality checks for reference count decreases will
 be done by checking the bit sign flag in the lower 32 bits.
+
 */
-#define _Py_IMMORTAL_REFCNT _Py_CAST(Py_ssize_t, UINT_MAX)
+#define _Py_IMMORTAL_INITIAL_REFCNT ((Py_ssize_t)(3UL << 30))
 
 #else
 /*
-In 32 bit systems, an object will be marked as immortal by setting all of the
-lower 30 bits of the reference count field, which is equal to: 0x3FFFFFFF
+In 32 bit systems, an object will be treated as immortal if its reference
+count equals or exceeds _Py_IMMORTAL_MINIMUM_REFCNT (2**30).
 
 Using the lower 30 bits makes the value backwards compatible by allowing
 C-Extensions without the updated checks in Py_INCREF and Py_DECREF to safely
@@ -47,9 +52,10 @@ increase and decrease the objects reference count. The object would lose its
 immortality, but the execution would still be correct.
 
 Reference count increases and decreases will first go through an immortality
-check by comparing the reference count field to the immortality reference count.
+check by comparing the reference count field to the minimum immortality refcount.
 */
-#define _Py_IMMORTAL_REFCNT _Py_CAST(Py_ssize_t, UINT_MAX >> 2)
+#define _Py_IMMORTAL_INITIAL_REFCNT ((Py_ssize_t)(3L << 29))
+#define _Py_IMMORTAL_MINIMUM_REFCNT ((Py_ssize_t)(1L << 30))
 #endif
 
 // Py_GIL_DISABLED builds indicate immortal objects using `ob_ref_local`, which is
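
The comment above fixes the scheme's two constants. As a quick, hedged illustration (not part of the commit; the DEMO_* names are hypothetical stand-ins for the _Py_IMMORTAL_* constants), the headroom it claims can be checked with plain integer arithmetic:

    #include <assert.h>
    #include <stdint.h>

    /* Hypothetical stand-ins mirroring the constants introduced above. */
    #define DEMO_IMMORTAL_INITIAL_64  ((int64_t)(3ULL << 30))  /* 0xC0000000 */
    #define DEMO_IMMORTAL_MINIMUM_64  ((int64_t)1 << 31)       /* 2**31 */
    #define DEMO_IMMORTAL_INITIAL_32  ((int64_t)(3UL << 29))   /* 0x60000000 */
    #define DEMO_IMMORTAL_MINIMUM_32  ((int64_t)1 << 30)       /* 2**30 */

    int main(void)
    {
        /* 64-bit builds: the initial value sits near the middle of
           (2**31, 2**32), so a drift of about one billion in either
           direction stays inside the immortal range. */
        assert(DEMO_IMMORTAL_INITIAL_64 - 1000000000 >= DEMO_IMMORTAL_MINIMUM_64);
        assert(DEMO_IMMORTAL_INITIAL_64 + 1000000000 < ((int64_t)1 << 32));

        /* 32-bit builds: the margin is roughly half a billion. */
        assert(DEMO_IMMORTAL_INITIAL_32 - 500000000 >= DEMO_IMMORTAL_MINIMUM_32);
        return 0;
    }

The point of the design is that stale stable-ABI INCREF/DECREF pairs cannot realistically push an immortal object's count below the minimum or past the top of the 32-bit field.
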
@@ -90,7 +96,7 @@ PyAPI_FUNC(Py_ssize_t) Py_REFCNT(PyObject *ob);
 #else
     uint32_t local = _Py_atomic_load_uint32_relaxed(&ob->ob_ref_local);
     if (local == _Py_IMMORTAL_REFCNT_LOCAL) {
-        return _Py_IMMORTAL_REFCNT;
+        return _Py_IMMORTAL_INITIAL_REFCNT;
     }
     Py_ssize_t shared = _Py_atomic_load_ssize_relaxed(&ob->ob_ref_shared);
     return _Py_STATIC_CAST(Py_ssize_t, local) +
@@ -109,9 +115,9 @@ static inline Py_ALWAYS_INLINE int _Py_IsImmortal(PyObject *op)
     return (_Py_atomic_load_uint32_relaxed(&op->ob_ref_local) ==
             _Py_IMMORTAL_REFCNT_LOCAL);
 #elif SIZEOF_VOID_P > 4
-    return (_Py_CAST(PY_INT32_T, op->ob_refcnt) < 0);
+    return _Py_CAST(PY_INT32_T, op->ob_refcnt) < 0;
 #else
-    return (op->ob_refcnt == _Py_IMMORTAL_REFCNT);
+    return op->ob_refcnt >= _Py_IMMORTAL_MINIMUM_REFCNT;
 #endif
 }
 #define _Py_IsImmortal(op) _Py_IsImmortal(_PyObject_CAST(op))
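
In the 64-bit branch above, immortality is a sign test on the low 32 bits rather than an equality comparison against a single magic value. A standalone sketch (hypothetical demo_* helpers, assuming the usual two's-complement conversion) of why the sign test agrees with the threshold form used on 32-bit builds:

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Two formulations of the 64-bit immortality test, written against a
       plain integer refcount purely for illustration. */
    static int demo_is_immortal_sign(int64_t refcnt)
    {
        /* negative when the sign bit of the low 32 bits is set */
        return (int32_t)(uint32_t)refcnt < 0;
    }

    static int demo_is_immortal_threshold(int64_t refcnt)
    {
        /* low 32 bits >= 2**31 */
        return (uint32_t)refcnt >= ((uint32_t)1 << 31);
    }

    int main(void)
    {
        int64_t samples[] = {
            1, 42,
            (int64_t)(3ULL << 30),               /* new initial immortal value */
            (int64_t)(3ULL << 30) - 1000000000,  /* drifted but still immortal */
            (int64_t)UINT32_MAX,
        };
        for (size_t i = 0; i < sizeof(samples) / sizeof(samples[0]); i++) {
            assert(demo_is_immortal_sign(samples[i]) ==
                   demo_is_immortal_threshold(samples[i]));
        }
        return 0;
    }
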
@@ -236,7 +242,7 @@ static inline Py_ALWAYS_INLINE void Py_INCREF(PyObject *op)
     uint32_t new_local = local + 1;
     if (new_local == 0) {
         _Py_INCREF_IMMORTAL_STAT_INC();
-        // local is equal to _Py_IMMORTAL_REFCNT: do nothing
+        // local is equal to _Py_IMMORTAL_REFCNT_LOCAL: do nothing
         return;
     }
     if (_Py_IsOwnedByCurrentThread(op)) {
@@ -246,18 +252,14 @@ static inline Py_ALWAYS_INLINE void Py_INCREF(PyObject *op)
         _Py_atomic_add_ssize(&op->ob_ref_shared, (1 << _Py_REF_SHARED_SHIFT));
     }
 #elif SIZEOF_VOID_P > 4
-    // Portable saturated add, branching on the carry flag and set low bits
     PY_UINT32_T cur_refcnt = op->ob_refcnt_split[PY_BIG_ENDIAN];
-    PY_UINT32_T new_refcnt = cur_refcnt + 1;
-    if (new_refcnt == 0) {
+    if (((int32_t)cur_refcnt) < 0) {
+        // the object is immortal
         _Py_INCREF_IMMORTAL_STAT_INC();
-        // cur_refcnt is equal to _Py_IMMORTAL_REFCNT: the object is immortal,
-        // do nothing
        return;
     }
-    op->ob_refcnt_split[PY_BIG_ENDIAN] = new_refcnt;
+    op->ob_refcnt_split[PY_BIG_ENDIAN] = cur_refcnt + 1;
 #else
-    // Explicitly check immortality against the immortal value
     if (_Py_IsImmortal(op)) {
         _Py_INCREF_IMMORTAL_STAT_INC();
         return;
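
The hunk above drops the saturated-add trick in favour of an explicit sign-bit test before incrementing. A rough standalone model of the new 64-bit fast path (hypothetical demo_* names; the real code operates on op->ob_refcnt_split[PY_BIG_ENDIAN]):

    #include <assert.h>
    #include <stdint.h>

    /* Stand-in for the low 32 bits of ob_refcnt on a 64-bit build. */
    typedef struct { uint32_t refcnt_lo; } demo_obj;

    static void demo_incref(demo_obj *op)
    {
        uint32_t cur = op->refcnt_lo;
        if ((int32_t)cur < 0) {
            /* immortal: leave the count untouched, as in the new branch */
            return;
        }
        op->refcnt_lo = cur + 1;
    }

    int main(void)
    {
        demo_obj mortal = { 1 };
        demo_obj immortal = { 3u << 30 };   /* the new initial immortal value */

        demo_incref(&mortal);
        demo_incref(&immortal);
        assert(mortal.refcnt_lo == 2);
        assert(immortal.refcnt_lo == (3u << 30));   /* unchanged */
        return 0;
    }
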
@@ -2574,9 +2574,9 @@ class ShutdownTest(unittest.TestCase):
 class ImmortalTests(unittest.TestCase):
 
     if sys.maxsize < (1 << 32):
-        IMMORTAL_REFCOUNT = (1 << 30) - 1
+        IMMORTAL_REFCOUNT = 3 << 29
     else:
-        IMMORTAL_REFCOUNT = (1 << 32) - 1
+        IMMORTAL_REFCOUNT = 3 << 30
 
     IMMORTALS = (None, True, False, Ellipsis, NotImplemented, *range(-5, 257))
 
@@ -0,0 +1,4 @@
+Make the handling of reference counts of immortal objects more robust.
+Immortal objects with reference counts that deviate from their original
+reference count by up to a billion (half a billion on 32 bit builds) are
+still counted as immortal.
@@ -1387,7 +1387,7 @@ FutureObj_get_state(FutureObj *fut, void *Py_UNUSED(ignored))
     default:
         assert (0);
     }
-    assert(_Py_IsImmortalLoose(ret));
+    assert(_Py_IsImmortal(ret));
     return ret;
 }
 
@@ -46,7 +46,7 @@ Py_LOCAL_INLINE(Py_ssize_t) _PyBytesWriter_GetSize(_PyBytesWriter *writer,
 static inline PyObject* bytes_get_empty(void)
 {
     PyObject *empty = &EMPTY->ob_base.ob_base;
-    assert(_Py_IsImmortalLoose(empty));
+    assert(_Py_IsImmortal(empty));
     return empty;
 }
 
@@ -119,7 +119,7 @@ PyBytes_FromStringAndSize(const char *str, Py_ssize_t size)
     }
     if (size == 1 && str != NULL) {
         op = CHARACTER(*str & 255);
-        assert(_Py_IsImmortalLoose(op));
+        assert(_Py_IsImmortal(op));
         return (PyObject *)op;
     }
     if (size == 0) {
@@ -155,7 +155,7 @@ PyBytes_FromString(const char *str)
     }
     else if (size == 1) {
         op = CHARACTER(*str & 255);
-        assert(_Py_IsImmortalLoose(op));
+        assert(_Py_IsImmortal(op));
         return (PyObject *)op;
     }
 
@@ -416,6 +416,8 @@ _PyDict_DebugMallocStats(FILE *out)
 
 #define DK_MASK(dk) (DK_SIZE(dk)-1)
 
+#define _Py_DICT_IMMORTAL_INITIAL_REFCNT PY_SSIZE_T_MIN
+
 static void free_keys_object(PyDictKeysObject *keys, bool use_qsbr);
 
 /* PyDictKeysObject has refcounts like PyObject does, so we have the
@@ -428,7 +430,8 @@ static void free_keys_object(PyDictKeysObject *keys, bool use_qsbr);
 static inline void
 dictkeys_incref(PyDictKeysObject *dk)
 {
-    if (FT_ATOMIC_LOAD_SSIZE_RELAXED(dk->dk_refcnt) == _Py_IMMORTAL_REFCNT) {
+    if (FT_ATOMIC_LOAD_SSIZE_RELAXED(dk->dk_refcnt) < 0) {
+        assert(FT_ATOMIC_LOAD_SSIZE_RELAXED(dk->dk_refcnt) == _Py_DICT_IMMORTAL_INITIAL_REFCNT);
         return;
     }
 #ifdef Py_REF_DEBUG
@@ -440,7 +443,8 @@ dictkeys_incref(PyDictKeysObject *dk)
 static inline void
 dictkeys_decref(PyInterpreterState *interp, PyDictKeysObject *dk, bool use_qsbr)
 {
-    if (FT_ATOMIC_LOAD_SSIZE_RELAXED(dk->dk_refcnt) == _Py_IMMORTAL_REFCNT) {
+    if (FT_ATOMIC_LOAD_SSIZE_RELAXED(dk->dk_refcnt) < 0) {
+        assert(FT_ATOMIC_LOAD_SSIZE_RELAXED(dk->dk_refcnt) == _Py_DICT_IMMORTAL_INITIAL_REFCNT);
         return;
     }
     assert(FT_ATOMIC_LOAD_SSIZE(dk->dk_refcnt) > 0);
@@ -586,7 +590,7 @@ estimate_log2_keysize(Py_ssize_t n)
  * (which cannot fail and thus can do no allocation).
  */
 static PyDictKeysObject empty_keys_struct = {
-        _Py_IMMORTAL_REFCNT, /* dk_refcnt */
+        _Py_DICT_IMMORTAL_INITIAL_REFCNT, /* dk_refcnt */
         0, /* dk_log2_size */
         0, /* dk_log2_index_bytes */
         DICT_KEYS_UNICODE, /* dk_kind */
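
The dict hunks give key tables their own sentinel: an immortal PyDictKeysObject now starts at PY_SSIZE_T_MIN, so a plain negative check replaces the old equality test against _Py_IMMORTAL_REFCNT. A hedged sketch of that convention (hypothetical demo_* names; the real code uses FT_ATOMIC_* loads and Py_ssize_t):

    #include <assert.h>
    #include <stdint.h>

    #define DEMO_DICT_IMMORTAL_INITIAL INT64_MIN  /* stand-in for PY_SSIZE_T_MIN */

    typedef struct { int64_t dk_refcnt; } demo_keys;

    static void demo_keys_incref(demo_keys *dk)
    {
        if (dk->dk_refcnt < 0) {
            /* immortal keys object: its refcount is never touched */
            assert(dk->dk_refcnt == DEMO_DICT_IMMORTAL_INITIAL);
            return;
        }
        dk->dk_refcnt++;
    }

    int main(void)
    {
        demo_keys empty = { DEMO_DICT_IMMORTAL_INITIAL };  /* like empty_keys_struct */
        demo_keys normal = { 1 };

        demo_keys_incref(&empty);
        demo_keys_incref(&normal);
        assert(empty.dk_refcnt == DEMO_DICT_IMMORTAL_INITIAL);
        assert(normal.dk_refcnt == 2);
        return 0;
    }
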
@@ -2453,7 +2453,7 @@ _Py_SetImmortalUntracked(PyObject *op)
     op->ob_ref_local = _Py_IMMORTAL_REFCNT_LOCAL;
     op->ob_ref_shared = 0;
 #else
-    op->ob_refcnt = _Py_IMMORTAL_REFCNT;
+    op->ob_refcnt = _Py_IMMORTAL_INITIAL_REFCNT;
 #endif
 }
 
@@ -708,7 +708,7 @@ _PyStructSequence_FiniBuiltin(PyInterpreterState *interp, PyTypeObject *type)
     assert(type->tp_name != NULL);
    assert(type->tp_base == &PyTuple_Type);
     assert((type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN));
-    assert(_Py_IsImmortalLoose(type));
+    assert(_Py_IsImmortal(type));
 
     // Cannot delete a type if it still has subclasses
     if (_PyType_HasSubclasses(type)) {
@@ -476,7 +476,7 @@ set_tp_bases(PyTypeObject *self, PyObject *bases, int initial)
         assert(PyTuple_GET_SIZE(bases) == 1);
         assert(PyTuple_GET_ITEM(bases, 0) == (PyObject *)self->tp_base);
         assert(self->tp_base->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN);
-        assert(_Py_IsImmortalLoose(self->tp_base));
+        assert(_Py_IsImmortal(self->tp_base));
     }
     _Py_SetImmortal(bases);
 }
@@ -493,7 +493,7 @@ clear_tp_bases(PyTypeObject *self, int final)
             Py_CLEAR(self->tp_bases);
         }
         else {
-            assert(_Py_IsImmortalLoose(self->tp_bases));
+            assert(_Py_IsImmortal(self->tp_bases));
             _Py_ClearImmortal(self->tp_bases);
         }
     }
@@ -558,7 +558,7 @@ clear_tp_mro(PyTypeObject *self, int final)
             Py_CLEAR(self->tp_mro);
         }
         else {
-            assert(_Py_IsImmortalLoose(self->tp_mro));
+            assert(_Py_IsImmortal(self->tp_mro));
             _Py_ClearImmortal(self->tp_mro);
         }
     }
@@ -5966,7 +5966,7 @@ fini_static_type(PyInterpreterState *interp, PyTypeObject *type,
                  int isbuiltin, int final)
 {
     assert(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN);
-    assert(_Py_IsImmortalLoose((PyObject *)type));
+    assert(_Py_IsImmortal((PyObject *)type));
 
     type_dealloc_common(type);
 
@@ -381,7 +381,7 @@ dummy_func(
             EXIT_IF(!PyLong_CheckExact(value_o));
             STAT_INC(TO_BOOL, hit);
             if (_PyLong_IsZero((PyLongObject *)value_o)) {
-                assert(_Py_IsImmortalLoose(value_o));
+                assert(_Py_IsImmortal(value_o));
                 DEAD(value);
                 res = PyStackRef_False;
             }
@@ -412,7 +412,7 @@ dummy_func(
             EXIT_IF(!PyUnicode_CheckExact(value_o));
             STAT_INC(TO_BOOL, hit);
             if (value_o == &_Py_STR(empty)) {
-                assert(_Py_IsImmortalLoose(value_o));
+                assert(_Py_IsImmortal(value_o));
                 DEAD(value);
                 res = PyStackRef_False;
             }
@@ -402,7 +402,7 @@
             }
             STAT_INC(TO_BOOL, hit);
             if (_PyLong_IsZero((PyLongObject *)value_o)) {
-                assert(_Py_IsImmortalLoose(value_o));
+                assert(_Py_IsImmortal(value_o));
                 res = PyStackRef_False;
             }
             else {
@@ -455,7 +455,7 @@
             }
             STAT_INC(TO_BOOL, hit);
             if (value_o == &_Py_STR(empty)) {
-                assert(_Py_IsImmortalLoose(value_o));
+                assert(_Py_IsImmortal(value_o));
                 res = PyStackRef_False;
             }
             else {
@@ -7840,7 +7840,7 @@
             DEOPT_IF(!PyLong_CheckExact(value_o), TO_BOOL);
             STAT_INC(TO_BOOL, hit);
             if (_PyLong_IsZero((PyLongObject *)value_o)) {
-                assert(_Py_IsImmortalLoose(value_o));
+                assert(_Py_IsImmortal(value_o));
                 res = PyStackRef_False;
             }
             else {
@@ -7902,7 +7902,7 @@
             DEOPT_IF(!PyUnicode_CheckExact(value_o), TO_BOOL);
             STAT_INC(TO_BOOL, hit);
             if (value_o == &_Py_STR(empty)) {
-                assert(_Py_IsImmortalLoose(value_o));
+                assert(_Py_IsImmortal(value_o));
                 res = PyStackRef_False;
             }
             else {
@@ -1051,7 +1051,7 @@ del_cached_def(struct extensions_cache_value *value)
        However, this decref would be problematic if the module def were
        dynamically allocated, it were the last ref, and this function
        were called with an interpreter other than the def's owner. */
-    assert(value->def == NULL || _Py_IsImmortalLoose(value->def));
+    assert(value->def == NULL || _Py_IsImmortal(value->def));
 
     Py_XDECREF(value->def->m_base.m_copy);
     value->def->m_base.m_copy = NULL;
@@ -614,7 +614,6 @@ NON_ESCAPING_FUNCTIONS = (
     "_Py_EnterRecursiveCallTstateUnchecked",
     "_Py_ID",
     "_Py_IsImmortal",
-    "_Py_IsImmortalLoose",
     "_Py_LeaveRecursiveCallPy",
     "_Py_LeaveRecursiveCallTstate",
     "_Py_NewRef",