#ifndef Py_INTERNAL_OBJECT_H
#define Py_INTERNAL_OBJECT_H
#ifdef __cplusplus
extern "C" {
#endif

#ifndef Py_BUILD_CORE
#  error "this header requires Py_BUILD_CORE define"
#endif

#include <stdbool.h>
#include "pycore_gc.h"            // _PyObject_GC_IS_TRACKED()
#include "pycore_emscripten_trampoline.h" // _PyCFunction_TrampolineCall()
#include "pycore_interp.h"        // PyInterpreterState.gc
#include "pycore_pyatomic_ft_wrappers.h"  // FT_ATOMIC_STORE_PTR_RELAXED
#include "pycore_pystate.h"       // _PyInterpreterState_GET()
#include "pycore_uniqueid.h"      // _PyObject_ThreadIncrefSlow()

// This value is added to `ob_ref_shared` for objects that use deferred
// reference counting so that they are not immediately deallocated when the
// non-deferred reference count drops to zero.
//
// The value is half the maximum shared refcount because the low two bits of
// `ob_ref_shared` are used for flags.
#define _Py_REF_DEFERRED (PY_SSIZE_T_MAX / 8)

/* For backwards compatibility -- Do not use this */
#define _Py_IsImmortalLoose(op) _Py_IsImmortal(op)

/* Check if an object is consistent. For example, ensure that the reference
   counter is greater than or equal to 1, and ensure that ob_type is not NULL.

   Call _PyObject_AssertFailed() if the object is inconsistent.

   If check_content is zero, only check header fields: reduce the overhead.

   The function always returns 1. The return value is just here to be able to
   write:

   assert(_PyObject_CheckConsistency(obj, 1)); */
extern int _PyObject_CheckConsistency(PyObject *op, int check_content);

extern void _PyDebugAllocatorStats(FILE *out, const char *block_name,
                                   int num_blocks, size_t sizeof_block);

extern void _PyObject_DebugTypeStats(FILE *out);

#ifdef Py_TRACE_REFS
// Forget a reference registered by _Py_NewReference(). Function called by
// _Py_Dealloc().
//
// On a free list, the function can be used before modifying an object to
// remove the object from traced objects. Then _Py_NewReference() or
// _Py_NewReferenceNoTotal() should be called again on the object to trace
// it again.
extern void _Py_ForgetReference(PyObject *);
#endif

// Export for shared _testinternalcapi extension
PyAPI_FUNC(int) _PyObject_IsFreed(PyObject *);

/* We need to maintain an internal copy of Py{Var}Object_HEAD_INIT to avoid
   designated initializer conflicts in C++20. If we use the definition in
   object.h, we will be mixing designated and non-designated initializers in
   pycore objects, which is forbidden in C++20. However, if we then use
   designated initializers in object.h, then extensions without designated
   initializers break. Furthermore, we can't use designated initializers in
   extensions since these are not supported pre-C++20. Thus, keeping an
   internal copy here is the most backwards compatible solution */
#if defined(Py_GIL_DISABLED)
#define _PyObject_HEAD_INIT(type)    \
    {                                \
        .ob_ref_local = _Py_IMMORTAL_REFCNT_LOCAL, \
        .ob_type = (type)            \
    }
#else
#define _PyObject_HEAD_INIT(type)    \
    {                                \
        .ob_refcnt = _Py_IMMORTAL_INITIAL_REFCNT, \
        .ob_type = (type)            \
    }
#endif
#define _PyVarObject_HEAD_INIT(type, size)    \
    {                                         \
        .ob_base = _PyObject_HEAD_INIT(type), \
        .ob_size = size                       \
    }
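
// Illustrative sketch (an assumption, not taken from this header;
// "_MyInternal_Type" is a hypothetical name): a statically allocated
// internal type would use the internal copy like so, mixing it with
// designated initializers without tripping the C++20 rules above:
//
//     static PyTypeObject _MyInternal_Type = {
//         _PyVarObject_HEAD_INIT(&PyType_Type, 0),
//         .tp_name = "my_internal",
//     };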

PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalRefcountErrorFunc(
    const char *func,
    const char *message);

#define _Py_FatalRefcountError(message) \
    _Py_FatalRefcountErrorFunc(__func__, (message))

#ifdef Py_REF_DEBUG
/* The symbol is only exposed in the API for the sake of extensions
   built against the pre-3.12 stable ABI. */
PyAPI_DATA(Py_ssize_t) _Py_RefTotal;

extern void _Py_AddRefTotal(PyThreadState *, Py_ssize_t);
extern void _Py_IncRefTotal(PyThreadState *);
extern void _Py_DecRefTotal(PyThreadState *);

#  define _Py_DEC_REFTOTAL(interp) \
    interp->object_state.reftotal--
#endif

// Increment reference count by n
static inline void _Py_RefcntAdd(PyObject* op, Py_ssize_t n)
{
    if (_Py_IsImmortal(op)) {
        return;
    }
#ifdef Py_REF_DEBUG
    _Py_AddRefTotal(_PyThreadState_GET(), n);
#endif
#if !defined(Py_GIL_DISABLED)
    op->ob_refcnt += n;
#else
    if (_Py_IsOwnedByCurrentThread(op)) {
        uint32_t local = op->ob_ref_local;
        Py_ssize_t refcnt = (Py_ssize_t)local + n;
#  if PY_SSIZE_T_MAX > UINT32_MAX
        if (refcnt > (Py_ssize_t)UINT32_MAX) {
            // Make the object immortal if the 32-bit local reference count
            // would overflow.
            refcnt = _Py_IMMORTAL_REFCNT_LOCAL;
        }
#  endif
        _Py_atomic_store_uint32_relaxed(&op->ob_ref_local, (uint32_t)refcnt);
    }
    else {
        _Py_atomic_add_ssize(&op->ob_ref_shared, (n << _Py_REF_SHARED_SHIFT));
    }
#endif
}
#define _Py_RefcntAdd(op, n) _Py_RefcntAdd(_PyObject_CAST(op), n)
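
// Illustrative sketch (assumption): a caller that stores the same object
// into n slots can defer the refcount updates and apply them in one call:
//
//     for (Py_ssize_t i = 0; i < n; i++) {
//         dest[i] = item;
//     }
//     _Py_RefcntAdd(item, n);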

// Checks if an object has a single, unique reference. If the caller holds a
// unique reference, it may be able to safely modify the object in-place.
static inline int
_PyObject_IsUniquelyReferenced(PyObject *ob)
{
#if !defined(Py_GIL_DISABLED)
    return Py_REFCNT(ob) == 1;
#else
    // NOTE: the entire ob_ref_shared field must be zero, including flags, to
    // ensure that other threads cannot concurrently create new references to
    // this object.
    return (_Py_IsOwnedByCurrentThread(ob) &&
            _Py_atomic_load_uint32_relaxed(&ob->ob_ref_local) == 1 &&
            _Py_atomic_load_ssize_relaxed(&ob->ob_ref_shared) == 0);
#endif
}
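
// Illustrative sketch (assumption): an in-place fast path might be guarded as
//
//     if (_PyObject_IsUniquelyReferenced(result)) {
//         /* safe to mutate 'result' instead of making a copy */
//     }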

PyAPI_FUNC(void) _Py_SetImmortal(PyObject *op);
PyAPI_FUNC(void) _Py_SetImmortalUntracked(PyObject *op);

// Makes an immortal object mortal again with the specified refcnt. Should only
// be used during runtime finalization.
static inline void _Py_SetMortal(PyObject *op, Py_ssize_t refcnt)
{
    if (op) {
        assert(_Py_IsImmortal(op));
#ifdef Py_GIL_DISABLED
        op->ob_tid = _Py_UNOWNED_TID;
        op->ob_ref_local = 0;
        op->ob_ref_shared = _Py_REF_SHARED(refcnt, _Py_REF_MERGED);
#else
        op->ob_refcnt = refcnt;
#endif
    }
}

/* _Py_ClearImmortal() should only be used during runtime finalization. */
static inline void _Py_ClearImmortal(PyObject *op)
{
    if (op) {
        _Py_SetMortal(op, 1);
        Py_DECREF(op);
    }
}
#define _Py_ClearImmortal(op) \
    do { \
        _Py_ClearImmortal(_PyObject_CAST(op)); \
        op = NULL; \
    } while (0)

#if !defined(Py_GIL_DISABLED)
static inline void
_Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct)
{
    if (_Py_IsImmortal(op)) {
        _Py_DECREF_IMMORTAL_STAT_INC();
        return;
    }
    _Py_DECREF_STAT_INC();
#ifdef Py_REF_DEBUG
    _Py_DEC_REFTOTAL(PyInterpreterState_Get());
#endif
    if (--op->ob_refcnt != 0) {
        assert(op->ob_refcnt > 0);
    }
    else {
#ifdef Py_TRACE_REFS
        _Py_ForgetReference(op);
#endif
        struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer;
        if (tracer->tracer_func != NULL) {
            void* data = tracer->tracer_data;
            tracer->tracer_func(op, PyRefTracer_DESTROY, data);
        }
        destruct(op);
    }
}

static inline void
_Py_DECREF_NO_DEALLOC(PyObject *op)
{
    if (_Py_IsImmortal(op)) {
        _Py_DECREF_IMMORTAL_STAT_INC();
        return;
    }
    _Py_DECREF_STAT_INC();
#ifdef Py_REF_DEBUG
    _Py_DEC_REFTOTAL(PyInterpreterState_Get());
#endif
    op->ob_refcnt--;
#ifdef Py_DEBUG
    if (op->ob_refcnt <= 0) {
        _Py_FatalRefcountError("Expected a positive remaining refcount");
    }
#endif
}

#else
// TODO: implement Py_DECREF specializations for Py_GIL_DISABLED build
static inline void
_Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct)
{
    Py_DECREF(op);
}

static inline void
_Py_DECREF_NO_DEALLOC(PyObject *op)
{
    Py_DECREF(op);
}

static inline int
_Py_REF_IS_MERGED(Py_ssize_t ob_ref_shared)
{
    return (ob_ref_shared & _Py_REF_SHARED_FLAG_MASK) == _Py_REF_MERGED;
}

static inline int
_Py_REF_IS_QUEUED(Py_ssize_t ob_ref_shared)
{
    return (ob_ref_shared & _Py_REF_SHARED_FLAG_MASK) == _Py_REF_QUEUED;
}

// Merge the local and shared reference count fields and add `extra` to the
// refcount when merging.
Py_ssize_t _Py_ExplicitMergeRefcount(PyObject *op, Py_ssize_t extra);
#endif // !defined(Py_GIL_DISABLED)

#ifdef Py_REF_DEBUG
#  undef _Py_DEC_REFTOTAL
#endif

extern int _PyType_CheckConsistency(PyTypeObject *type);
extern int _PyDict_CheckConsistency(PyObject *mp, int check_content);

/* Update the Python traceback of an object. This function must be called
   when a memory block is reused from a free list.

   Internal function called by _Py_NewReference(). */
extern int _PyTraceMalloc_TraceRef(PyObject *op, PyRefTracerEvent event, void*);

// Fast inlined version of PyType_HasFeature()
static inline int
_PyType_HasFeature(PyTypeObject *type, unsigned long feature) {
    return ((FT_ATOMIC_LOAD_ULONG_RELAXED(type->tp_flags) & feature) != 0);
}
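
// Illustrative sketch (assumption): callers test type flags directly, e.g.
//
//     if (_PyType_HasFeature(Py_TYPE(obj), Py_TPFLAGS_LONG_SUBCLASS)) {
//         /* obj is an int or a subclass of int */
//     }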

extern void _PyType_InitCache(PyInterpreterState *interp);

extern PyStatus _PyObject_InitState(PyInterpreterState *interp);
extern void _PyObject_FiniState(PyInterpreterState *interp);
extern bool _PyRefchain_IsTraced(PyInterpreterState *interp, PyObject *obj);

// Macros used for per-thread reference counting in the free threading build.
// They resolve to normal Py_INCREF/DECREF calls in the default build.
//
// The macros are used for only a few references that would otherwise cause
// scaling bottlenecks in the free threading build:
// - The reference from an object to `ob_type`.
// - The reference from a function to `func_code`.
// - The reference from a function to `func_globals` and `func_builtins`.
//
// It's safe, but not performant or necessary, to use these macros for other
// references to code, type, or dict objects. It's also safe to mix their
// usage with normal Py_INCREF/DECREF calls.
//
// See also Include/internal/pycore_dict.h for _Py_INCREF_DICT/_Py_DECREF_DICT.
#ifndef Py_GIL_DISABLED
#  define _Py_INCREF_TYPE Py_INCREF
#  define _Py_DECREF_TYPE Py_DECREF
#  define _Py_INCREF_CODE Py_INCREF
#  define _Py_DECREF_CODE Py_DECREF
#else
static inline void
_Py_THREAD_INCREF_OBJECT(PyObject *obj, Py_ssize_t unique_id)
{
    _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)_PyThreadState_GET();

    // Unsigned comparison so that `unique_id=-1`, which indicates that
    // per-thread refcounting has been disabled on this object, is handled by
    // the "else".
    if ((size_t)unique_id < (size_t)tstate->refcounts.size) {
#  ifdef Py_REF_DEBUG
        _Py_INCREF_IncRefTotal();
#  endif
        _Py_INCREF_STAT_INC();
        tstate->refcounts.values[unique_id]++;
    }
    else {
        // The slow path resizes the per-thread refcount array if necessary.
        // It handles the unique_id=-1 case to keep the inlinable function
        // smaller.
        _PyObject_ThreadIncrefSlow(obj, unique_id);
    }
}

static inline void
_Py_INCREF_TYPE(PyTypeObject *type)
{
    if (!_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) {
        assert(_Py_IsImmortal(type));
        _Py_INCREF_IMMORTAL_STAT_INC();
        return;
    }

    // gh-122974: GCC 11 warns about the access to PyHeapTypeObject fields when
    // _Py_INCREF_TYPE() is called on a statically allocated type. The
    // _PyType_HasFeature check above ensures that the type is a heap type.
#if defined(__GNUC__) && __GNUC__ >= 11
#  pragma GCC diagnostic push
#  pragma GCC diagnostic ignored "-Warray-bounds"
#endif
    _Py_THREAD_INCREF_OBJECT((PyObject *)type, ((PyHeapTypeObject *)type)->unique_id);
#if defined(__GNUC__) && __GNUC__ >= 11
#  pragma GCC diagnostic pop
#endif
}

static inline void
_Py_INCREF_CODE(PyCodeObject *co)
{
    _Py_THREAD_INCREF_OBJECT((PyObject *)co, co->_co_unique_id);
}

static inline void
_Py_THREAD_DECREF_OBJECT(PyObject *obj, Py_ssize_t unique_id)
{
    _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)_PyThreadState_GET();

    // Unsigned comparison so that `unique_id=-1`, which indicates that
    // per-thread refcounting has been disabled on this object, is handled by
    // the "else".
    if ((size_t)unique_id < (size_t)tstate->refcounts.size) {
#  ifdef Py_REF_DEBUG
        _Py_DECREF_DecRefTotal();
#  endif
        _Py_DECREF_STAT_INC();
        tstate->refcounts.values[unique_id]--;
    }
    else {
        // Directly decref the object if the id is not assigned or if
        // per-thread refcounting has been disabled on this object.
        Py_DECREF(obj);
    }
}

static inline void
_Py_DECREF_TYPE(PyTypeObject *type)
{
    if (!_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) {
        assert(_Py_IsImmortal(type));
        _Py_DECREF_IMMORTAL_STAT_INC();
        return;
    }
    PyHeapTypeObject *ht = (PyHeapTypeObject *)type;
    _Py_THREAD_DECREF_OBJECT((PyObject *)type, ht->unique_id);
}

static inline void
_Py_DECREF_CODE(PyCodeObject *co)
{
    _Py_THREAD_DECREF_OBJECT((PyObject *)co, co->_co_unique_id);
}
#endif

/* Inline functions trading binary compatibility for speed:
   _PyObject_Init() is the fast version of PyObject_Init(), and
   _PyObject_InitVar() is the fast version of PyObject_InitVar().

   These inline functions must not be called with op=NULL. */
static inline void
_PyObject_Init(PyObject *op, PyTypeObject *typeobj)
{
    assert(op != NULL);
    Py_SET_TYPE(op, typeobj);
    assert(_PyType_HasFeature(typeobj, Py_TPFLAGS_HEAPTYPE) || _Py_IsImmortal(typeobj));
    _Py_INCREF_TYPE(typeobj);
    _Py_NewReference(op);
}

static inline void
_PyObject_InitVar(PyVarObject *op, PyTypeObject *typeobj, Py_ssize_t size)
{
    assert(op != NULL);
    assert(typeobj != &PyLong_Type);
    _PyObject_Init((PyObject *)op, typeobj);
    Py_SET_SIZE(op, size);
}
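
// Illustrative sketch (assumption; "MyObject" and "MyObject_Type" are
// hypothetical names): a constructor pairs a raw allocation with
// _PyObject_Init():
//
//     MyObject *op = (MyObject *)PyObject_Malloc(sizeof(MyObject));
//     if (op == NULL) {
//         return PyErr_NoMemory();
//     }
//     _PyObject_Init((PyObject *)op, &MyObject_Type);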

/* Tell the GC to track this object.
 *
 * The object must not be tracked by the GC.
 *
 * NB: While the object is tracked by the collector, it must be safe to call
 * the tp_traverse method.
 *
 * Internal note: interp->gc.generation0->_gc_prev doesn't have any bit flags
 * because it's not an object header. So we don't use _PyGCHead_PREV() and
 * _PyGCHead_SET_PREV() for it to avoid unnecessary bitwise operations.
 *
 * See also the public PyObject_GC_Track() function.
 */
static inline void _PyObject_GC_TRACK(
// The preprocessor removes _PyObject_ASSERT_FROM() calls if NDEBUG is defined
#ifndef NDEBUG
    const char *filename, int lineno,
#endif
    PyObject *op)
{
    _PyObject_ASSERT_FROM(op, !_PyObject_GC_IS_TRACKED(op),
                          "object already tracked by the garbage collector",
                          filename, lineno, __func__);
#ifdef Py_GIL_DISABLED
    _PyObject_SET_GC_BITS(op, _PyGC_BITS_TRACKED);
#else
    PyGC_Head *gc = _Py_AS_GC(op);
    _PyObject_ASSERT_FROM(op,
                          (gc->_gc_prev & _PyGC_PREV_MASK_COLLECTING) == 0,
                          "object is in generation which is garbage collected",
                          filename, lineno, __func__);

    PyInterpreterState *interp = _PyInterpreterState_GET();
    PyGC_Head *generation0 = &interp->gc.young.head;
    PyGC_Head *last = (PyGC_Head*)(generation0->_gc_prev);
    _PyGCHead_SET_NEXT(last, gc);
    _PyGCHead_SET_PREV(gc, last);
    /* Young objects will be moved into the visited space during GC, so set the bit here */
    gc->_gc_next = ((uintptr_t)generation0) | (uintptr_t)interp->gc.visited_space;
    generation0->_gc_prev = (uintptr_t)gc;
#endif
}

/* Tell the GC to stop tracking this object.
 *
 * Internal note: This may be called while a GC collection is running. So the
 * _PyGC_PREV_MASK_COLLECTING bit must be cleared, but the
 * _PyGC_PREV_MASK_FINALIZED bit is kept.
 *
 * The object must be tracked by the GC.
 *
 * See also the public PyObject_GC_UnTrack(), which accepts an object that is
 * not tracked.
 */
static inline void _PyObject_GC_UNTRACK(
// The preprocessor removes _PyObject_ASSERT_FROM() calls if NDEBUG is defined
#ifndef NDEBUG
    const char *filename, int lineno,
#endif
    PyObject *op)
{
    _PyObject_ASSERT_FROM(op, _PyObject_GC_IS_TRACKED(op),
                          "object not tracked by the garbage collector",
                          filename, lineno, __func__);

#ifdef Py_GIL_DISABLED
    _PyObject_CLEAR_GC_BITS(op, _PyGC_BITS_TRACKED);
#else
    PyGC_Head *gc = _Py_AS_GC(op);
    PyGC_Head *prev = _PyGCHead_PREV(gc);
    PyGC_Head *next = _PyGCHead_NEXT(gc);
    _PyGCHead_SET_NEXT(prev, next);
    _PyGCHead_SET_PREV(next, prev);
    gc->_gc_next = 0;
    gc->_gc_prev &= _PyGC_PREV_MASK_FINALIZED;
#endif
}

// Macros to accept any type for the parameter, and to automatically pass
// the filename and the line number (if NDEBUG is not defined) where the
// macro is called.
#ifdef NDEBUG
#  define _PyObject_GC_TRACK(op) \
        _PyObject_GC_TRACK(_PyObject_CAST(op))
#  define _PyObject_GC_UNTRACK(op) \
        _PyObject_GC_UNTRACK(_PyObject_CAST(op))
#else
#  define _PyObject_GC_TRACK(op) \
        _PyObject_GC_TRACK(__FILE__, __LINE__, _PyObject_CAST(op))
#  define _PyObject_GC_UNTRACK(op) \
        _PyObject_GC_UNTRACK(__FILE__, __LINE__, _PyObject_CAST(op))
#endif
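
// Illustrative sketch (assumption): a GC-aware constructor tracks the object
// once all of its fields are initialized, and its deallocator untracks it
// before tearing the fields down:
//
//     _PyObject_GC_TRACK(self);      // end of the tp_new/tp_alloc path
//     ...
//     _PyObject_GC_UNTRACK(self);    // start of tp_dealloc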

#ifdef Py_GIL_DISABLED

/* Tries to increment an object's reference count
 *
 * This is a specialized version of _Py_TryIncref that only succeeds if the
 * object is immortal or local to this thread. It does not handle the case
 * where the reference count modification requires an atomic operation. This
 * allows call sites to specialize for the immortal/local case.
 */
static inline int
_Py_TryIncrefFast(PyObject *op) {
    uint32_t local = _Py_atomic_load_uint32_relaxed(&op->ob_ref_local);
    local += 1;
    if (local == 0) {
        // immortal
        _Py_INCREF_IMMORTAL_STAT_INC();
        return 1;
    }
    if (_Py_IsOwnedByCurrentThread(op)) {
        _Py_INCREF_STAT_INC();
        _Py_atomic_store_uint32_relaxed(&op->ob_ref_local, local);
#ifdef Py_REF_DEBUG
        _Py_IncRefTotal(_PyThreadState_GET());
#endif
        return 1;
    }
    return 0;
}

static inline int
_Py_TryIncRefShared(PyObject *op)
{
    Py_ssize_t shared = _Py_atomic_load_ssize_relaxed(&op->ob_ref_shared);
    for (;;) {
        // If the shared refcount is zero and the object is either merged
        // or may not have weak references, then we cannot incref it.
        if (shared == 0 || shared == _Py_REF_MERGED) {
            return 0;
        }

        if (_Py_atomic_compare_exchange_ssize(
                &op->ob_ref_shared,
                &shared,
                shared + (1 << _Py_REF_SHARED_SHIFT))) {
#ifdef Py_REF_DEBUG
            _Py_IncRefTotal(_PyThreadState_GET());
#endif
            _Py_INCREF_STAT_INC();
            return 1;
        }
    }
}

/* Tries to incref the object op and ensures that *src still points to it. */
static inline int
_Py_TryIncrefCompare(PyObject **src, PyObject *op)
{
    if (_Py_TryIncrefFast(op)) {
        return 1;
    }
    if (!_Py_TryIncRefShared(op)) {
        return 0;
    }
    if (op != _Py_atomic_load_ptr(src)) {
        Py_DECREF(op);
        return 0;
    }
    return 1;
}

/* Loads and increfs an object from ptr, which may contain a NULL value.
   Safe with concurrent (atomic) updates to ptr.
   NOTE: The writer must set maybe-weakref on the stored object! */
static inline PyObject *
_Py_XGetRef(PyObject **ptr)
{
    for (;;) {
        PyObject *value = _Py_atomic_load_ptr(ptr);
        if (value == NULL) {
            return value;
        }
        if (_Py_TryIncrefCompare(ptr, value)) {
            return value;
        }
    }
}

/* Attempts to load and incref an object from ptr. Returns NULL
   on failure, which may be due to a NULL value or a concurrent update. */
static inline PyObject *
_Py_TryXGetRef(PyObject **ptr)
{
    PyObject *value = _Py_atomic_load_ptr(ptr);
    if (value == NULL) {
        return value;
    }
    if (_Py_TryIncrefCompare(ptr, value)) {
        return value;
    }
    return NULL;
}

/* Like Py_NewRef but also optimistically sets _Py_REF_MAYBE_WEAKREF
   on objects owned by a different thread. */
static inline PyObject *
_Py_NewRefWithLock(PyObject *op)
{
    if (_Py_TryIncrefFast(op)) {
        return op;
    }
#ifdef Py_REF_DEBUG
    _Py_IncRefTotal(_PyThreadState_GET());
#endif
    _Py_INCREF_STAT_INC();
    for (;;) {
        Py_ssize_t shared = _Py_atomic_load_ssize_relaxed(&op->ob_ref_shared);
        Py_ssize_t new_shared = shared + (1 << _Py_REF_SHARED_SHIFT);
        if ((shared & _Py_REF_SHARED_FLAG_MASK) == 0) {
            new_shared |= _Py_REF_MAYBE_WEAKREF;
        }
        if (_Py_atomic_compare_exchange_ssize(
                &op->ob_ref_shared,
                &shared,
                new_shared)) {
            return op;
        }
    }
}

static inline PyObject *
_Py_XNewRefWithLock(PyObject *obj)
{
    if (obj == NULL) {
        return NULL;
    }
    return _Py_NewRefWithLock(obj);
}

static inline void
_PyObject_SetMaybeWeakref(PyObject *op)
{
    if (_Py_IsImmortal(op)) {
        return;
    }
    for (;;) {
        Py_ssize_t shared = _Py_atomic_load_ssize_relaxed(&op->ob_ref_shared);
        if ((shared & _Py_REF_SHARED_FLAG_MASK) != 0) {
            // Nothing to do if it's in WEAKREFS, QUEUED, or MERGED states.
            return;
        }
        if (_Py_atomic_compare_exchange_ssize(
                &op->ob_ref_shared, &shared, shared | _Py_REF_MAYBE_WEAKREF)) {
            return;
        }
    }
}

#endif

/* Tries to incref op and returns 1 if successful or 0 otherwise. */
static inline int
_Py_TryIncref(PyObject *op)
{
#ifdef Py_GIL_DISABLED
    return _Py_TryIncrefFast(op) || _Py_TryIncRefShared(op);
#else
    if (Py_REFCNT(op) > 0) {
        Py_INCREF(op);
        return 1;
    }
    return 0;
#endif
}
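
// Illustrative sketch (assumption): promoting a borrowed reference from a
// cache to a strong reference only while the object is still alive:
//
//     if (_Py_TryIncref(cached)) {
//         return cached;   // strong reference acquired
//     }
//     /* object is being destroyed; fall back to creating a new one */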

#ifdef Py_REF_DEBUG
extern void _PyInterpreterState_FinalizeRefTotal(PyInterpreterState *);
extern void _Py_FinalizeRefTotal(_PyRuntimeState *);
extern void _PyDebug_PrintTotalRefs(void);
#endif

#ifdef Py_TRACE_REFS
extern void _Py_AddToAllObjects(PyObject *op);
extern void _Py_PrintReferences(PyInterpreterState *, FILE *);
extern void _Py_PrintReferenceAddresses(PyInterpreterState *, FILE *);
#endif

/* Return the *address* of the object's weaklist. The address may be
 * dereferenced to get the current head of the weaklist. This is useful
 * for iterating over the linked list of weakrefs, especially when the
 * list is being modified externally (e.g. refs getting removed).
 *
 * The returned pointer should not be used to change the head of the list
 * nor should it be used to add, remove, or swap any refs in the list.
 * That is the sole responsibility of the code in weakrefobject.c.
 */
static inline PyObject **
_PyObject_GET_WEAKREFS_LISTPTR(PyObject *op)
{
    if (PyType_Check(op) &&
            ((PyTypeObject *)op)->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) {
        PyInterpreterState *interp = _PyInterpreterState_GET();
        managed_static_type_state *state = _PyStaticType_GetState(
                                                interp, (PyTypeObject *)op);
        return _PyStaticType_GET_WEAKREFS_LISTPTR(state);
    }
    // Essentially _PyObject_GET_WEAKREFS_LISTPTR_FROM_OFFSET():
    Py_ssize_t offset = Py_TYPE(op)->tp_weaklistoffset;
    return (PyObject **)((char *)op + offset);
}

/* This is a special case of _PyObject_GET_WEAKREFS_LISTPTR().
 * Only the most fundamental lookup path is used.
 * Consequently, static types should not be used.
 *
 * For static builtin types the returned pointer will always point
 * to a NULL tp_weaklist. This is fine for any deallocation cases,
 * since static types are never deallocated and static builtin types
 * are only finalized at the end of runtime finalization.
 *
 * If the weaklist for static types is actually needed then use
 * _PyObject_GET_WEAKREFS_LISTPTR().
 */
static inline PyWeakReference **
_PyObject_GET_WEAKREFS_LISTPTR_FROM_OFFSET(PyObject *op)
{
    assert(!PyType_Check(op) ||
            ((PyTypeObject *)op)->tp_flags & Py_TPFLAGS_HEAPTYPE);
    Py_ssize_t offset = Py_TYPE(op)->tp_weaklistoffset;
    return (PyWeakReference **)((char *)op + offset);
}

// Fast inlined version of PyType_IS_GC()
#define _PyType_IS_GC(t) _PyType_HasFeature((t), Py_TPFLAGS_HAVE_GC)

// Fast inlined version of PyObject_IS_GC()
static inline int
_PyObject_IS_GC(PyObject *obj)
{
    PyTypeObject *type = Py_TYPE(obj);
    return (_PyType_IS_GC(type)
            && (type->tp_is_gc == NULL || type->tp_is_gc(obj)));
}

// Fast inlined version of PyObject_Hash()
static inline Py_hash_t
_PyObject_HashFast(PyObject *op)
{
    if (PyUnicode_CheckExact(op)) {
        Py_hash_t hash = FT_ATOMIC_LOAD_SSIZE_RELAXED(
            _PyASCIIObject_CAST(op)->hash);
        if (hash != -1) {
            return hash;
        }
    }
    return PyObject_Hash(op);
}
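
// Illustrative sketch (assumption): hot paths such as dict lookups can use
// the fast helper in place of PyObject_Hash():
//
//     Py_hash_t hash = _PyObject_HashFast(key);
//     if (hash == -1) {
//         return NULL;   // hashing raised an exception
//     }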

static inline size_t
_PyType_PreHeaderSize(PyTypeObject *tp)
{
    return (
#ifndef Py_GIL_DISABLED
        (size_t)_PyType_IS_GC(tp) * sizeof(PyGC_Head) +
#endif
        (size_t)_PyType_HasFeature(tp, Py_TPFLAGS_PREHEADER) * 2 * sizeof(PyObject *)
    );
}

void _PyObject_GC_Link(PyObject *op);

// Usage: assert(_Py_CheckSlotResult(obj, "__getitem__", result != NULL));
extern int _Py_CheckSlotResult(
    PyObject *obj,
    const char *slot_name,
    int success);

// Test if a type supports weak references
static inline int _PyType_SUPPORTS_WEAKREFS(PyTypeObject *type) {
    return (type->tp_weaklistoffset != 0);
}

extern PyObject* _PyType_AllocNoTrack(PyTypeObject *type, Py_ssize_t nitems);
PyAPI_FUNC(PyObject *) _PyType_NewManagedObject(PyTypeObject *type);

extern PyTypeObject* _PyType_CalculateMetaclass(PyTypeObject *, PyObject *);
extern PyObject* _PyType_GetDocFromInternalDoc(const char *, const char *);
extern PyObject* _PyType_GetTextSignatureFromInternalDoc(const char *, const char *, int);
extern int _PyObject_SetAttributeErrorContext(PyObject *v, PyObject* name);

void _PyObject_InitInlineValues(PyObject *obj, PyTypeObject *tp);
extern int _PyObject_StoreInstanceAttribute(PyObject *obj,
                                            PyObject *name, PyObject *value);
extern bool _PyObject_TryGetInstanceAttribute(PyObject *obj, PyObject *name,
                                              PyObject **attr);

#ifdef Py_GIL_DISABLED
#  define MANAGED_DICT_OFFSET    (((Py_ssize_t)sizeof(PyObject *))*-1)
#  define MANAGED_WEAKREF_OFFSET (((Py_ssize_t)sizeof(PyObject *))*-2)
#else
#  define MANAGED_DICT_OFFSET    (((Py_ssize_t)sizeof(PyObject *))*-3)
#  define MANAGED_WEAKREF_OFFSET (((Py_ssize_t)sizeof(PyObject *))*-4)
#endif
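
// Illustrative layout sketch (an assumption for the default build, not a
// definition from this header): counting in units of sizeof(PyObject *)
// backwards from the object pointer, the pre-header looks like
//
//     obj - 4 : managed weakref pointer  (MANAGED_WEAKREF_OFFSET)
//     obj - 3 : managed dict pointer     (MANAGED_DICT_OFFSET)
//     obj - 2 : PyGC_Head._gc_next
//     obj - 1 : PyGC_Head._gc_prev
//     obj + 0 : PyObject header (ob_refcnt, ob_type, ...)
//
// In the free threading build the GC state lives in the object itself, which
// is why the offsets shrink to -1 and -2 there.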

typedef union {
    PyDictObject *dict;
} PyManagedDictPointer;

static inline PyManagedDictPointer *
_PyObject_ManagedDictPointer(PyObject *obj)
{
    assert(Py_TYPE(obj)->tp_flags & Py_TPFLAGS_MANAGED_DICT);
    return (PyManagedDictPointer *)((char *)obj + MANAGED_DICT_OFFSET);
}

static inline PyDictObject *
_PyObject_GetManagedDict(PyObject *obj)
{
    PyManagedDictPointer *dorv = _PyObject_ManagedDictPointer(obj);
    return (PyDictObject *)FT_ATOMIC_LOAD_PTR_ACQUIRE(dorv->dict);
}

static inline PyDictValues *
_PyObject_InlineValues(PyObject *obj)
{
    PyTypeObject *tp = Py_TYPE(obj);
    assert(tp->tp_basicsize > 0 && (size_t)tp->tp_basicsize % sizeof(PyObject *) == 0);
    assert(Py_TYPE(obj)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
    assert(Py_TYPE(obj)->tp_flags & Py_TPFLAGS_MANAGED_DICT);
    return (PyDictValues *)((char *)obj + tp->tp_basicsize);
}

extern PyObject ** _PyObject_ComputedDictPointer(PyObject *);
extern int _PyObject_IsInstanceDictEmpty(PyObject *);

// Export for 'math' shared extension
PyAPI_FUNC(PyObject*) _PyObject_LookupSpecial(PyObject *, PyObject *);
PyAPI_FUNC(PyObject*) _PyObject_LookupSpecialMethod(PyObject *self, PyObject *attr, PyObject **self_or_null);

extern int _PyObject_IsAbstract(PyObject *);

PyAPI_FUNC(int) _PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method);
extern PyObject* _PyObject_NextNotImplemented(PyObject *);

// Pickle support.
// Export for '_datetime' shared extension
PyAPI_FUNC(PyObject*) _PyObject_GetState(PyObject *);

/* C function call trampolines to mitigate bad function pointer casts.
 *
 * Typical native ABIs ignore additional arguments or fill in missing
 * values with 0/NULL in a function pointer cast. Compilers do not show
 * warnings when a function pointer is explicitly cast to an
 * incompatible type.
 *
 * Bad fpcasts are an issue in WebAssembly. WASM's indirect_call has strict
 * function signature checks. Argument count, types, and return type must
 * match.
 *
 * Third party code unintentionally relies on problematic fpcasts. The call
 * trampoline mitigates common occurrences of bad fpcasts on Emscripten.
 */
#if !(defined(__EMSCRIPTEN__) && defined(PY_CALL_TRAMPOLINE))
#define _PyCFunction_TrampolineCall(meth, self, args) \
    (meth)((self), (args))
#define _PyCFunctionWithKeywords_TrampolineCall(meth, self, args, kw) \
    (meth)((self), (args), (kw))
#endif // __EMSCRIPTEN__ && PY_CALL_TRAMPOLINE
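
// Illustrative sketch (an assumed call site, not defined here): code holding
// a PyCFunctionWithKeywords stored behind a plain PyCFunction pointer
// dispatches through the trampoline instead of calling through the
// mismatched pointer:
//
//     result = _PyCFunctionWithKeywords_TrampolineCall(
//         (PyCFunctionWithKeywords)(void (*)(void))meth, self, args, kwargs);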

// Export these 2 symbols for '_pickle' shared extension
PyAPI_DATA(PyTypeObject) _PyNone_Type;
PyAPI_DATA(PyTypeObject) _PyNotImplemented_Type;

// Maps Py_LT to Py_GT, ..., Py_GE to Py_LE.
// Export for the stable ABI.
PyAPI_DATA(int) _Py_SwappedOp[];

extern void _Py_GetConstant_Init(void);
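
// Illustrative sketch (assumption): reflected rich comparisons swap the
// operator through this table, e.g. evaluating "a < b" via the right-hand
// operand becomes:
//
//     res = (*Py_TYPE(b)->tp_richcompare)(b, a, _Py_SwappedOp[Py_LT]);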

#ifdef __cplusplus
}
#endif
#endif /* !Py_INTERNAL_OBJECT_H */