mirror of https://github.com/python/cpython
gh-123923: Defer refcounting for `f_executable` in `_PyInterpreterFrame` (#123924)
Use a `_PyStackRef` and defer the reference to `f_executable` when possible. This avoids some reference count contention in the common case of executing the same code object from multiple threads concurrently in the free-threaded build.
parent 4ed7d1d6ac
commit b2afe2aae4
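For readers skimming the diff, here is a minimal, self-contained sketch of the tagged-pointer idea behind a deferred reference. The names below (Obj, StackRef, TAG_DEFERRED, ref_strong, ref_deferred, ref_borrow) are illustrative stand-ins, not CPython's real _PyStackRef API: the point is only that a deferred reference stores a tagged pointer and skips the refcount update a strong reference would make, which is what removes cross-thread contention on a hot code object.

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative stand-ins, not CPython's real definitions. */
    typedef struct { int refcount; const char *name; } Obj;   /* stands in for PyObject */
    typedef struct { uintptr_t bits; } StackRef;               /* stands in for _PyStackRef */

    /* The low pointer bit marks a deferred (uncounted) reference; objects here
     * are at least 4-byte aligned, so bit 0 is free to use as a tag. */
    #define TAG_DEFERRED ((uintptr_t)1)

    /* Strong reference: bump the refcount, store an untagged pointer. */
    static StackRef ref_strong(Obj *o) { o->refcount++; return (StackRef){ (uintptr_t)o }; }

    /* Deferred reference: just tag the pointer, no refcount traffic at all.
     * Something else (in CPython, the GC) must keep the object alive. */
    static StackRef ref_deferred(Obj *o) { return (StackRef){ (uintptr_t)o | TAG_DEFERRED }; }

    /* Borrow the underlying object by clearing the tag bit. */
    static Obj *ref_borrow(StackRef r) { return (Obj *)(r.bits & ~TAG_DEFERRED); }

    int main(void) {
        Obj code = { 1, "code object" };
        StackRef a = ref_deferred(&code);  /* common case for f_executable */
        StackRef b = ref_strong(&code);    /* fallback when deferring is not possible */
        assert(ref_borrow(a) == &code && ref_borrow(b) == &code);
        /* Prints 2: the deferred reference added nothing to the count. */
        printf("%s refcount: %d\n", ref_borrow(a)->name, code.refcount);
        return 0;
    }

The helpers this commit actually uses are visible in the hunks below: PyStackRef_FromPyObjectNew (create, deferring when possible), PyStackRef_AsPyObjectBorrow (borrow the PyObject *), and the new PyStackRef_AsStrongReference (convert a possibly deferred reference back into a counted one).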
@@ -60,7 +60,7 @@ enum _frameowner {
 };

 typedef struct _PyInterpreterFrame {
-    PyObject *f_executable; /* Strong reference (code object or None) */
+    _PyStackRef f_executable; /* Deferred or strong reference (code object or None) */
     struct _PyInterpreterFrame *previous;
     PyObject *f_funcobj; /* Strong reference. Only valid if not on C stack */
     PyObject *f_globals; /* Borrowed reference. Only valid if not on C stack */
@@ -79,8 +79,9 @@ typedef struct _PyInterpreterFrame {
     ((int)((IF)->instr_ptr - _PyCode_CODE(_PyFrame_GetCode(IF))))

 static inline PyCodeObject *_PyFrame_GetCode(_PyInterpreterFrame *f) {
-    assert(PyCode_Check(f->f_executable));
-    return (PyCodeObject *)f->f_executable;
+    PyObject *executable = PyStackRef_AsPyObjectBorrow(f->f_executable);
+    assert(PyCode_Check(executable));
+    return (PyCodeObject *)executable;
 }

 static inline _PyStackRef *_PyFrame_Stackbase(_PyInterpreterFrame *f) {
@@ -130,7 +131,7 @@ static inline void _PyFrame_Copy(_PyInterpreterFrame *src, _PyInterpreterFrame *
     dest->previous = NULL;

 #ifdef Py_GIL_DISABLED
-    PyCodeObject *co = (PyCodeObject *)dest->f_executable;
+    PyCodeObject *co = _PyFrame_GetCode(dest);
     for (int i = stacktop; i < co->co_nlocalsplus + co->co_stacksize; i++) {
         dest->localsplus[i] = PyStackRef_NULL;
     }
@@ -148,7 +149,7 @@ _PyFrame_Initialize(
 {
     frame->previous = previous;
     frame->f_funcobj = (PyObject *)func;
-    frame->f_executable = Py_NewRef(code);
+    frame->f_executable = PyStackRef_FromPyObjectNew(code);
     frame->f_builtins = func->func_builtins;
     frame->f_globals = func->func_globals;
     frame->f_locals = locals;
@@ -321,7 +322,7 @@ _PyFrame_PushTrampolineUnchecked(PyThreadState *tstate, PyCodeObject *code, int
     assert(tstate->datastack_top < tstate->datastack_limit);
     frame->previous = previous;
     frame->f_funcobj = Py_None;
-    frame->f_executable = Py_NewRef(code);
+    frame->f_executable = PyStackRef_FromPyObjectNew(code);
 #ifdef Py_DEBUG
     frame->f_builtins = NULL;
     frame->f_globals = NULL;

@@ -381,8 +381,11 @@ extern void _PyGC_ClearAllFreeLists(PyInterpreterState *interp);
 extern void _Py_ScheduleGC(PyThreadState *tstate);
 extern void _Py_RunGC(PyThreadState *tstate);

+union _PyStackRef;
+
 // GC visit callback for tracked interpreter frames
 extern int _PyGC_VisitFrameStack(struct _PyInterpreterFrame *frame, visitproc visit, void *arg);
+extern int _PyGC_VisitStackRef(union _PyStackRef *ref, visitproc visit, void *arg);

 #ifdef __cplusplus
 }

@@ -227,6 +227,13 @@ PyStackRef_DUP(_PyStackRef stackref)
 # define PyStackRef_DUP(stackref) PyStackRef_FromPyObjectSteal(Py_NewRef(PyStackRef_AsPyObjectBorrow(stackref)))
 #endif

+// Convert a possibly deferred reference to a strong reference.
+static inline _PyStackRef
+PyStackRef_AsStrongReference(_PyStackRef stackref)
+{
+    return PyStackRef_FromPyObjectSteal(PyStackRef_AsPyObjectSteal(stackref));
+}
+
 static inline void
 _PyObjectStack_FromStackRefStack(PyObject **dst, const _PyStackRef *src, size_t length)
 {
@@ -261,6 +268,11 @@ PyStackRef_ExceptionInstanceCheck(_PyStackRef stackref)
     return PyExceptionInstance_Check(PyStackRef_AsPyObjectBorrow(stackref));
 }

+static inline bool
+PyStackRef_CodeCheck(_PyStackRef stackref)
+{
+    return PyCode_Check(PyStackRef_AsPyObjectBorrow(stackref));
+}

 static inline bool
 PyStackRef_FunctionCheck(_PyStackRef stackref)

@@ -6,6 +6,7 @@ import doctest
 import unittest
 import weakref
 import inspect
+import textwrap
 import types

 from test import support
@@ -112,6 +113,27 @@ class FinalizationTest(unittest.TestCase):
             gen.send(2)
         self.assertEqual(cm.exception.value, 2)

+    def test_generator_resurrect(self):
+        # Test that a resurrected generator still has a valid gi_code
+        resurrected = []
+
+        # Resurrect a generator in a finalizer
+        exec(textwrap.dedent("""
+            def gen():
+                try:
+                    yield
+                except:
+                    resurrected.append(g)
+
+            g = gen()
+            next(g)
+        """), {"resurrected": resurrected})
+
+        support.gc_collect()
+
+        self.assertEqual(len(resurrected), 1)
+        self.assertIsInstance(resurrected[0].gi_code, types.CodeType)
+

 class GeneratorTest(unittest.TestCase):

@@ -0,0 +1,4 @@
+The ``f_executable`` field in the internal :c:struct:`_PyInterpreterFrame`
+struct now uses a tagged pointer. Profilers and debuggers that use this
+field should clear the least significant bit to recover the
+:c:expr:`PyObject*` pointer.

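To make the note above concrete for out-of-process profilers and debuggers, the sketch below shows only the masking step on a pointer-sized value already read from the target process. STACKREF_TAG_MASK and the sample address are assumptions made for illustration; CPython's own external-inspection code in the next hunk uses Py_TAG_BITS for the same purpose.

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* The least significant bit carries the stack-reference tag (per the note
     * above); a single-bit mask standing in for CPython's Py_TAG_BITS. */
    #define STACKREF_TAG_MASK ((uintptr_t)1)

    int main(void) {
        /* Pretend this value was read out of the traced process at the
         * frame's f_executable offset; the address itself is made up. */
        uintptr_t raw_bits = (uintptr_t)0x55aa10f0 | STACKREF_TAG_MASK;

        if (raw_bits == 0) {
            return 0;                                  /* no executable object to resolve */
        }
        uintptr_t code_addr = raw_bits & ~STACKREF_TAG_MASK;   /* recover the PyObject* address */
        assert((code_addr & STACKREF_TAG_MASK) == 0);
        printf("code object at %#lx\n", (unsigned long)code_addr);
        return 0;
    }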
@@ -510,7 +510,7 @@ parse_frame_object(
         return 0;
     }

-    void* address_of_code_object;
+    uintptr_t address_of_code_object;
     bytes_read = read_memory(
             pid,
             (void*)(address + offsets->interpreter_frame.executable),
@@ -520,10 +520,11 @@ parse_frame_object(
         return -1;
     }

-    if (address_of_code_object == NULL) {
+    if (address_of_code_object == 0) {
         return 0;
     }
-    return parse_code_object(pid, result, offsets, address_of_code_object, previous_frame);
+    address_of_code_object &= ~Py_TAG_BITS;
+    return parse_code_object(pid, result, offsets, (void *)address_of_code_object, previous_frame);
 }

 static PyObject*

@@ -1625,8 +1625,6 @@ frame_dealloc(PyFrameObject *f)
     }

     Py_TRASHCAN_BEGIN(f, frame_dealloc);
-    PyObject *co = NULL;
-
     /* GH-106092: If f->f_frame was on the stack and we reached the maximum
      * nesting depth for deallocations, the trashcan may have delayed this
      * deallocation until after f->f_frame is freed. Avoid dereferencing
@@ -1635,9 +1633,7 @@ frame_dealloc(PyFrameObject *f)

     /* Kill all local variables including specials, if we own them */
     if (f->f_frame == frame && frame->owner == FRAME_OWNED_BY_FRAME_OBJECT) {
-        /* Don't clear code object until the end */
-        co = frame->f_executable;
-        frame->f_executable = NULL;
+        PyStackRef_CLEAR(frame->f_executable);
         Py_CLEAR(frame->f_funcobj);
         Py_CLEAR(frame->f_locals);
         _PyStackRef *locals = _PyFrame_GetLocalsArray(frame);
@@ -1652,7 +1648,6 @@ frame_dealloc(PyFrameObject *f)
     Py_CLEAR(f->f_extra_locals);
     Py_CLEAR(f->f_locals_cache);
     PyObject_GC_Del(f);
-    Py_XDECREF(co);
     Py_TRASHCAN_END;
 }

@@ -55,6 +55,14 @@ gen_traverse(PyGenObject *gen, visitproc visit, void *arg)
             return err;
         }
     }
+    else {
+        // We still need to visit the code object when the frame is cleared to
+        // ensure that it's kept alive if the reference is deferred.
+        int err = _PyGC_VisitStackRef(&gen->gi_iframe.f_executable, visit, arg);
+        if (err) {
+            return err;
+        }
+    }
     /* No need to visit cr_origin, because it's just tuples/str/int, so can't
        participate in a reference cycle. */
     Py_VISIT(gen->gi_exc_state.exc_value);
@@ -139,6 +147,9 @@ gen_dealloc(PyGenObject *gen)
            and GC_Del. */
         Py_CLEAR(((PyAsyncGenObject*)gen)->ag_origin_or_finalizer);
     }
+    if (PyCoro_CheckExact(gen)) {
+        Py_CLEAR(((PyCoroObject *)gen)->cr_origin_or_finalizer);
+    }
     if (gen->gi_frame_state != FRAME_CLEARED) {
         _PyInterpreterFrame *frame = &gen->gi_iframe;
         gen->gi_frame_state = FRAME_CLEARED;
@@ -147,10 +158,7 @@ gen_dealloc(PyGenObject *gen)
         _PyErr_ClearExcState(&gen->gi_exc_state);
     }
     assert(gen->gi_exc_state.exc_value == NULL);
-    if (_PyGen_GetCode(gen)->co_flags & CO_COROUTINE) {
-        Py_CLEAR(((PyCoroObject *)gen)->cr_origin_or_finalizer);
-    }
-    Py_DECREF(_PyGen_GetCode(gen));
+    PyStackRef_CLEAR(gen->gi_iframe.f_executable);
     Py_CLEAR(gen->gi_name);
     Py_CLEAR(gen->gi_qualname);

@@ -3598,7 +3598,7 @@ dummy_func(
         op(_CREATE_INIT_FRAME, (self, init, args[oparg] -- init_frame: _PyInterpreterFrame *)) {
             _PyInterpreterFrame *shim = _PyFrame_PushTrampolineUnchecked(
                 tstate, (PyCodeObject *)&_Py_InitCleanup, 1, frame);
-            assert(_PyCode_CODE((PyCodeObject *)shim->f_executable)[0].op.code == EXIT_INIT_CHECK);
+            assert(_PyCode_CODE(_PyFrame_GetCode(shim))[0].op.code == EXIT_INIT_CHECK);
             /* Push self onto stack of shim */
             shim->localsplus[0] = PyStackRef_DUP(self);
             PyFunctionObject *init_func = (PyFunctionObject *)PyStackRef_AsPyObjectSteal(init);
@@ -4798,7 +4798,7 @@ dummy_func(
 #endif
             _PyExecutorObject *executor;
             if (target->op.code == ENTER_EXECUTOR) {
-                PyCodeObject *code = (PyCodeObject *)frame->f_executable;
+                PyCodeObject *code = _PyFrame_GetCode(frame);
                 executor = code->co_executors->executors[target->op.arg];
                 Py_INCREF(executor);
             }

@@ -194,7 +194,7 @@ static void
 lltrace_resume_frame(_PyInterpreterFrame *frame)
 {
     PyObject *fobj = frame->f_funcobj;
-    if (!PyCode_Check(frame->f_executable) ||
+    if (!PyStackRef_CodeCheck(frame->f_executable) ||
         fobj == NULL ||
         !PyFunction_Check(fobj)
     ) {
@@ -784,7 +784,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
     entry_frame.f_globals = (PyObject*)0xaaa3;
     entry_frame.f_builtins = (PyObject*)0xaaa4;
 #endif
-    entry_frame.f_executable = Py_None;
+    entry_frame.f_executable = PyStackRef_None;
     entry_frame.instr_ptr = (_Py_CODEUNIT *)_Py_INTERPRETER_TRAMPOLINE_INSTRUCTIONS + 1;
     entry_frame.stackpointer = entry_frame.localsplus;
     entry_frame.owner = FRAME_OWNED_BY_CSTACK;
@@ -1681,7 +1681,7 @@ clear_thread_frame(PyThreadState *tstate, _PyInterpreterFrame * frame)
     tstate->c_recursion_remaining--;
     assert(frame->frame_obj == NULL || frame->frame_obj->f_frame == frame);
     _PyFrame_ClearExceptCode(frame);
-    Py_DECREF(frame->f_executable);
+    PyStackRef_CLEAR(frame->f_executable);
     tstate->c_recursion_remaining++;
     _PyThreadState_PopFrame(tstate, frame);
 }

@@ -4143,7 +4143,7 @@
             self = stack_pointer[-2 - oparg];
             _PyInterpreterFrame *shim = _PyFrame_PushTrampolineUnchecked(
                 tstate, (PyCodeObject *)&_Py_InitCleanup, 1, frame);
-            assert(_PyCode_CODE((PyCodeObject *)shim->f_executable)[0].op.code == EXIT_INIT_CHECK);
+            assert(_PyCode_CODE(_PyFrame_GetCode(shim))[0].op.code == EXIT_INIT_CHECK);
             /* Push self onto stack of shim */
             shim->localsplus[0] = PyStackRef_DUP(self);
             PyFunctionObject *init_func = (PyFunctionObject *)PyStackRef_AsPyObjectSteal(init);
@@ -5413,7 +5413,7 @@
 #endif
             _PyExecutorObject *executor;
             if (target->op.code == ENTER_EXECUTOR) {
-                PyCodeObject *code = (PyCodeObject *)frame->f_executable;
+                PyCodeObject *code = _PyFrame_GetCode(frame);
                 executor = code->co_executors->executors[target->op.arg];
                 Py_INCREF(executor);
             }

@@ -14,7 +14,10 @@ _PyFrame_Traverse(_PyInterpreterFrame *frame, visitproc visit, void *arg)
     Py_VISIT(frame->frame_obj);
     Py_VISIT(frame->f_locals);
     Py_VISIT(frame->f_funcobj);
-    Py_VISIT(_PyFrame_GetCode(frame));
+    int err = _PyGC_VisitStackRef(&frame->f_executable, visit, arg);
+    if (err) {
+        return err;
+    }
     return _PyGC_VisitFrameStack(frame, visit, arg);
 }

@@ -53,10 +56,10 @@ take_ownership(PyFrameObject *f, _PyInterpreterFrame *frame)
     assert(frame->owner != FRAME_OWNED_BY_FRAME_OBJECT);
     assert(frame->owner != FRAME_CLEARED);
     Py_ssize_t size = ((char*)frame->stackpointer) - (char *)frame;
-    Py_INCREF(_PyFrame_GetCode(frame));
     memcpy((_PyInterpreterFrame *)f->_f_frame_data, frame, size);
     frame = (_PyInterpreterFrame *)f->_f_frame_data;
     frame->stackpointer = (_PyStackRef *)(((char *)frame) + size);
+    frame->f_executable = PyStackRef_DUP(frame->f_executable);
     f->f_frame = frame;
     frame->owner = FRAME_OWNED_BY_FRAME_OBJECT;
     if (_PyFrame_IsIncomplete(frame)) {
@@ -131,9 +134,7 @@ _PyFrame_ClearExceptCode(_PyInterpreterFrame *frame)
 PyObject *
 PyUnstable_InterpreterFrame_GetCode(struct _PyInterpreterFrame *frame)
 {
-    PyObject *code = frame->f_executable;
-    Py_INCREF(code);
-    return code;
+    return PyStackRef_AsPyObjectNew(frame->f_executable);
 }

 int

@@ -534,6 +534,13 @@ visit_decref(PyObject *op, void *parent)
     return 0;
 }

+int
+_PyGC_VisitStackRef(_PyStackRef *ref, visitproc visit, void *arg)
+{
+    Py_VISIT(PyStackRef_AsPyObjectBorrow(*ref));
+    return 0;
+}
+
 int
 _PyGC_VisitFrameStack(_PyInterpreterFrame *frame, visitproc visit, void *arg)
 {

@@ -161,39 +161,6 @@ gc_decref(PyObject *op)
     op->ob_tid -= 1;
 }

-static void
-disable_deferred_refcounting(PyObject *op)
-{
-    if (!_PyObject_HasDeferredRefcount(op)) {
-        return;
-    }
-
-    op->ob_gc_bits &= ~_PyGC_BITS_DEFERRED;
-    op->ob_ref_shared -= _Py_REF_SHARED(_Py_REF_DEFERRED, 0);
-
-    if (PyType_Check(op)) {
-        // Disable thread-local refcounting for heap types
-        PyTypeObject *type = (PyTypeObject *)op;
-        if (PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) {
-            _PyType_ReleaseId((PyHeapTypeObject *)op);
-        }
-    }
-    else if (PyGen_CheckExact(op) || PyCoro_CheckExact(op) || PyAsyncGen_CheckExact(op)) {
-        // Ensure any non-refcounted pointers in locals are converted to
-        // strong references. This ensures that the generator/coroutine is not
-        // freed before its locals.
-        PyGenObject *gen = (PyGenObject *)op;
-        struct _PyInterpreterFrame *frame = &gen->gi_iframe;
-        assert(frame->stackpointer != NULL);
-        for (_PyStackRef *ref = frame->localsplus; ref < frame->stackpointer; ref++) {
-            if (!PyStackRef_IsNull(*ref) && PyStackRef_IsDeferred(*ref)) {
-                // Convert a deferred reference to a strong reference.
-                *ref = PyStackRef_FromPyObjectSteal(PyStackRef_AsPyObjectSteal(*ref));
-            }
-        }
-    }
-}
-
 static Py_ssize_t
 merge_refcount(PyObject *op, Py_ssize_t extra)
 {
@@ -213,6 +180,51 @@ merge_refcount(PyObject *op, Py_ssize_t extra)
     return refcount;
 }

+static void
+frame_disable_deferred_refcounting(_PyInterpreterFrame *frame)
+{
+    // Convert locals, variables, and the executable object to strong
+    // references from (possibly) deferred references.
+    assert(frame->stackpointer != NULL);
+    frame->f_executable = PyStackRef_AsStrongReference(frame->f_executable);
+    for (_PyStackRef *ref = frame->localsplus; ref < frame->stackpointer; ref++) {
+        if (!PyStackRef_IsNull(*ref) && PyStackRef_IsDeferred(*ref)) {
+            *ref = PyStackRef_AsStrongReference(*ref);
+        }
+    }
+}
+
+static void
+disable_deferred_refcounting(PyObject *op)
+{
+    if (_PyObject_HasDeferredRefcount(op)) {
+        op->ob_gc_bits &= ~_PyGC_BITS_DEFERRED;
+        op->ob_ref_shared -= _Py_REF_SHARED(_Py_REF_DEFERRED, 0);
+        merge_refcount(op, 0);
+    }
+
+    // Heap types also use thread-local refcounting -- disable it here.
+    if (PyType_Check(op)) {
+        // Disable thread-local refcounting for heap types
+        PyTypeObject *type = (PyTypeObject *)op;
+        if (PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) {
+            _PyType_ReleaseId((PyHeapTypeObject *)op);
+        }
+    }
+
+    // Generators and frame objects may contain deferred references to other
+    // objects. If the pointed-to objects are part of cyclic trash, we may
+    // have disabled deferred refcounting on them and need to ensure that we
+    // use strong references, in case the generator or frame object is
+    // resurrected by a finalizer.
+    if (PyGen_CheckExact(op) || PyCoro_CheckExact(op) || PyAsyncGen_CheckExact(op)) {
+        frame_disable_deferred_refcounting(&((PyGenObject *)op)->gi_iframe);
+    }
+    else if (PyFrame_Check(op)) {
+        frame_disable_deferred_refcounting(((PyFrameObject *)op)->f_frame);
+    }
+}
+
 static void
 gc_restore_tid(PyObject *op)
 {
@@ -349,17 +361,19 @@ gc_visit_thread_stacks(PyInterpreterState *interp)
 {
     HEAD_LOCK(&_PyRuntime);
     for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) {
-        _PyInterpreterFrame *f = p->current_frame;
-        while (f != NULL) {
-            if (f->f_executable != NULL && PyCode_Check(f->f_executable)) {
-                PyCodeObject *co = (PyCodeObject *)f->f_executable;
+        for (_PyInterpreterFrame *f = p->current_frame; f != NULL; f = f->previous) {
+            PyObject *executable = PyStackRef_AsPyObjectBorrow(f->f_executable);
+            if (executable == NULL || !PyCode_Check(executable)) {
+                continue;
+            }
+
+            PyCodeObject *co = (PyCodeObject *)executable;
             int max_stack = co->co_nlocalsplus + co->co_stacksize;
+            gc_visit_stackref(f->f_executable);
             for (int i = 0; i < max_stack; i++) {
                 gc_visit_stackref(f->localsplus[i]);
             }
-            }
-            f = f->previous;
         }
     }
     HEAD_UNLOCK(&_PyRuntime);
 }
@@ -623,23 +637,19 @@ scan_heap_visitor(const mi_heap_t *heap, const mi_heap_area_t *area,
         else {
             worklist_push(&state->unreachable, op);
         }
         return true;
     }
-    else if (state->reason == _Py_GC_REASON_SHUTDOWN &&
-             _PyObject_HasDeferredRefcount(op))
-    {
+
+    if (state->reason == _Py_GC_REASON_SHUTDOWN) {
         // Disable deferred refcounting for reachable objects as well during
         // interpreter shutdown. This ensures that these objects are collected
         // immediately when their last reference is removed.
         disable_deferred_refcounting(op);
-        merge_refcount(op, 0);
-        state->long_lived_total++;
     }
-    else {
+
     // object is reachable, restore `ob_tid`; we're done with these objects
     gc_restore_tid(op);
     state->long_lived_total++;
-    }

     return true;
 }
@@ -951,20 +961,29 @@ visit_decref_unreachable(PyObject *op, void *data)
     return 0;
 }

+int
+_PyGC_VisitStackRef(_PyStackRef *ref, visitproc visit, void *arg)
+{
+    // This is a bit tricky! We want to ignore deferred references when
+    // computing the incoming references, but otherwise treat them like
+    // regular references.
+    if (!PyStackRef_IsDeferred(*ref) ||
+        (visit != visit_decref && visit != visit_decref_unreachable))
+    {
+        Py_VISIT(PyStackRef_AsPyObjectBorrow(*ref));
+    }
+    return 0;
+}
+
 int
 _PyGC_VisitFrameStack(_PyInterpreterFrame *frame, visitproc visit, void *arg)
 {
     _PyStackRef *ref = _PyFrame_GetLocalsArray(frame);
     /* locals and stack */
     for (; ref < frame->stackpointer; ref++) {
-        // This is a bit tricky! We want to ignore deferred references when
-        // computing the incoming references, but otherwise treat them like
-        // regular references.
-        if (PyStackRef_IsDeferred(*ref) &&
-            (visit == visit_decref || visit == visit_decref_unreachable)) {
-            continue;
+        if (_PyGC_VisitStackRef(ref, visit, arg) < 0) {
+            return -1;
         }
-        Py_VISIT(PyStackRef_AsPyObjectBorrow(*ref));
     }
     return 0;
 }

@@ -1018,7 +1018,7 @@
         {
             _PyInterpreterFrame *shim = _PyFrame_PushTrampolineUnchecked(
                 tstate, (PyCodeObject *)&_Py_InitCleanup, 1, frame);
-            assert(_PyCode_CODE((PyCodeObject *)shim->f_executable)[0].op.code == EXIT_INIT_CHECK);
+            assert(_PyCode_CODE(_PyFrame_GetCode(shim))[0].op.code == EXIT_INIT_CHECK);
             /* Push self onto stack of shim */
             shim->localsplus[0] = PyStackRef_DUP(self);
             PyFunctionObject *init_func = (PyFunctionObject *)PyStackRef_AsPyObjectSteal(init);

@@ -143,9 +143,8 @@ _PyEval_SetOpcodeTrace(
     bool enable
 ) {
     assert(frame != NULL);
-    assert(PyCode_Check(frame->f_frame->f_executable));

-    PyCodeObject *code = (PyCodeObject *)frame->f_frame->f_executable;
+    PyCodeObject *code = _PyFrame_GetCode(frame->f_frame);
     _PyMonitoringEventSet events = 0;

     if (_PyMonitoring_GetLocalEvents(code, PY_MONITORING_SYS_TRACE_ID, &events) < 0) {

@@ -250,7 +250,7 @@ hashtable_compare_traceback(const void *key1, const void *key2)
 static void
 tracemalloc_get_frame(_PyInterpreterFrame *pyframe, frame_t *frame)
 {
-    assert(PyCode_Check(pyframe->f_executable));
+    assert(PyStackRef_CodeCheck(pyframe->f_executable));
     frame->filename = &_Py_STR(anon_unknown);
     int lineno = PyUnstable_InterpreterFrame_GetLine(pyframe);
     if (lineno < 0) {
@@ -258,7 +258,7 @@ tracemalloc_get_frame(_PyInterpreterFrame *pyframe, frame_t *frame)
     }
     frame->lineno = (unsigned int)lineno;

-    PyObject *filename = filename = ((PyCodeObject *)pyframe->f_executable)->co_filename;
+    PyObject *filename = filename = _PyFrame_GetCode(pyframe)->co_filename;

     if (filename == NULL) {
 #ifdef TRACE_DEBUG

@@ -158,8 +158,11 @@ class PyObjectPtr(object):

     def __init__(self, gdbval, cast_to=None):
         # Clear the tagged pointer
-        gdbval = gdb.Value(int(gdbval) & (~USED_TAGS)).cast(gdbval.type)
-        if cast_to:
+        if gdbval.type.name == '_PyStackRef':
+            if cast_to is None:
+                cast_to = gdb.lookup_type('PyObject').pointer()
+            self._gdbval = gdb.Value(int(gdbval['bits']) & ~USED_TAGS).cast(cast_to)
+        elif cast_to:
             self._gdbval = gdbval.cast(cast_to)
         else:
             self._gdbval = gdbval
@@ -1052,7 +1055,7 @@ class PyFramePtr:

         obj_ptr_ptr = gdb.lookup_type("PyObject").pointer().pointer()

-        localsplus = self._gdbval["localsplus"].cast(obj_ptr_ptr)
+        localsplus = self._gdbval["localsplus"]

         for i in safe_range(self.co_nlocals):
             pyop_value = PyObjectPtr.from_pyobject_ptr(localsplus[i])
@@ -1581,7 +1584,10 @@ class PyObjectPtrPrinter:
         return stringify(proxyval)

 def pretty_printer_lookup(gdbval):
-    type = gdbval.type.unqualified()
+    type = gdbval.type.strip_typedefs().unqualified()
+    if type.code == gdb.TYPE_CODE_UNION and type.tag == '_PyStackRef':
+        return PyObjectPtrPrinter(gdbval)
+
     if type.code != gdb.TYPE_CODE_PTR:
         return None
