diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h
index a1898d926ac..ade69be1cf2 100644
--- a/Include/internal/pycore_interp.h
+++ b/Include/internal/pycore_interp.h
@@ -35,7 +35,7 @@ extern "C" {
#include "pycore_qsbr.h" // struct _qsbr_state
#include "pycore_tstate.h" // _PyThreadStateImpl
#include "pycore_tuple.h" // struct _Py_tuple_state
-#include "pycore_typeid.h" // struct _Py_type_id_pool
+#include "pycore_uniqueid.h" // struct _Py_unique_id_pool
#include "pycore_typeobject.h" // struct types_state
#include "pycore_unicodeobject.h" // struct _Py_unicode_state
#include "pycore_warnings.h" // struct _warnings_runtime_state
@@ -221,7 +221,7 @@ struct _is {
#if defined(Py_GIL_DISABLED)
struct _mimalloc_interp_state mimalloc;
struct _brc_state brc; // biased reference counting state
- struct _Py_type_id_pool type_ids;
+ struct _Py_unique_id_pool unique_ids; // object ids for per-thread refcounts
PyMutex weakref_locks[NUM_WEAKREF_LIST_LOCKS];
#endif
diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h
index 80b588815bc..0af13b1bcda 100644
--- a/Include/internal/pycore_object.h
+++ b/Include/internal/pycore_object.h
@@ -14,7 +14,7 @@ extern "C" {
#include "pycore_interp.h" // PyInterpreterState.gc
#include "pycore_pyatomic_ft_wrappers.h" // FT_ATOMIC_STORE_PTR_RELAXED
#include "pycore_pystate.h" // _PyInterpreterState_GET()
-#include "pycore_typeid.h" // _PyType_IncrefSlow
+#include "pycore_uniqueid.h" // _PyType_IncrefSlow
#define _Py_IMMORTAL_REFCNT_LOOSE ((_Py_IMMORTAL_REFCNT >> 1) + 1)
@@ -335,12 +335,12 @@ _Py_INCREF_TYPE(PyTypeObject *type)
// Unsigned comparison so that `unique_id=-1`, which indicates that
// per-thread refcounting has been disabled on this type, is handled by
// the "else".
- if ((size_t)ht->unique_id < (size_t)tstate->types.size) {
+ if ((size_t)ht->unique_id < (size_t)tstate->refcounts.size) {
# ifdef Py_REF_DEBUG
_Py_INCREF_IncRefTotal();
# endif
_Py_INCREF_STAT_INC();
- tstate->types.refcounts[ht->unique_id]++;
+ tstate->refcounts.values[ht->unique_id]++;
}
else {
// The slow path resizes the thread-local refcount array if necessary.
@@ -368,12 +368,12 @@ _Py_DECREF_TYPE(PyTypeObject *type)
// Unsigned comparison so that `unique_id=-1`, which indicates that
// per-thread refcounting has been disabled on this type, is handled by
// the "else".
- if ((size_t)ht->unique_id < (size_t)tstate->types.size) {
+ if ((size_t)ht->unique_id < (size_t)tstate->refcounts.size) {
# ifdef Py_REF_DEBUG
_Py_DECREF_DecRefTotal();
# endif
_Py_DECREF_STAT_INC();
- tstate->types.refcounts[ht->unique_id]--;
+ tstate->refcounts.values[ht->unique_id]--;
}
else {
// Directly decref the type if the type id is not assigned or if
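As an aside on the fast path in _Py_INCREF_TYPE/_Py_DECREF_TYPE above: the unsigned comparison works because casting a unique_id of -1 (per-thread refcounting disabled) to size_t yields the largest size_t value, which can never be below the array size, so disabled types automatically fall through to the slow path. A minimal standalone sketch of that check, using illustrative names (refcount_values, refcount_size) rather than the real _PyThreadStateImpl fields:

#include <stddef.h>

/* Illustrative stand-ins for the per-thread refcount array; these are
 * not the actual _PyThreadStateImpl fields. */
static ptrdiff_t refcount_values[64];
static ptrdiff_t refcount_size = 64;

/* Returns 1 if the fast per-thread path applied, 0 if the caller must
 * fall back to the shared-refcount slow path. */
static int
incref_fast(ptrdiff_t unique_id)
{
    /* (size_t)-1 is the largest size_t value, so a disabled id (-1) and
     * any id beyond the locally sized array both fail this test. */
    if ((size_t)unique_id < (size_t)refcount_size) {
        refcount_values[unique_id]++;
        return 1;
    }
    return 0;
}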
diff --git a/Include/internal/pycore_tstate.h b/Include/internal/pycore_tstate.h
index f681b644c9a..a72ef4493b7 100644
--- a/Include/internal/pycore_tstate.h
+++ b/Include/internal/pycore_tstate.h
@@ -32,15 +32,15 @@ typedef struct _PyThreadStateImpl {
struct _Py_freelists freelists;
struct _brc_thread_state brc;
struct {
- // The thread-local refcounts for heap type objects
- Py_ssize_t *refcounts;
+ // The per-thread refcounts
+ Py_ssize_t *values;
// Size of the refcounts array.
Py_ssize_t size;
- // If set, don't use thread-local refcounts
+ // If set, don't use per-thread refcounts
int is_finalized;
- } types;
+ } refcounts;
#endif
#if defined(Py_REF_DEBUG) && defined(Py_GIL_DISABLED)
diff --git a/Include/internal/pycore_typeid.h b/Include/internal/pycore_typeid.h
deleted file mode 100644
index e64d1447f6b..00000000000
--- a/Include/internal/pycore_typeid.h
+++ /dev/null
@@ -1,75 +0,0 @@
-#ifndef Py_INTERNAL_TYPEID_H
-#define Py_INTERNAL_TYPEID_H
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-#ifndef Py_BUILD_CORE
-# error "this header requires Py_BUILD_CORE define"
-#endif
-
-#ifdef Py_GIL_DISABLED
-
-// This contains code for allocating unique ids to heap type objects
-// and re-using those ids when the type is deallocated.
-//
-// The type ids are used to implement per-thread reference counts of
-// heap type objects to avoid contention on the reference count fields
-// of heap type objects. Static type objects are immortal, so contention
-// is not an issue for those types.
-//
-// Type id of -1 is used to indicate a type doesn't use thread-local
-// refcounting. This value is used when a type object is finalized by the GC
-// and during interpreter shutdown to allow the type object to be
-// deallocated promptly when the object's refcount reaches zero.
-//
-// Each entry implicitly represents a type id based on it's offset in the
-// table. Non-allocated entries form a free-list via the 'next' pointer.
-// Allocated entries store the corresponding PyTypeObject.
-typedef union _Py_type_id_entry {
- // Points to the next free type id, when part of the freelist
- union _Py_type_id_entry *next;
-
- // Stores the type object when the id is assigned
- PyHeapTypeObject *type;
-} _Py_type_id_entry;
-
-struct _Py_type_id_pool {
- PyMutex mutex;
-
- // combined table of types with allocated type ids and unallocated
- // type ids.
- _Py_type_id_entry *table;
-
- // Next entry to allocate inside 'table' or NULL
- _Py_type_id_entry *freelist;
-
- // size of 'table'
- Py_ssize_t size;
-};
-
-// Assigns the next id from the pool of type ids.
-extern void _PyType_AssignId(PyHeapTypeObject *type);
-
-// Releases the allocated type id back to the pool.
-extern void _PyType_ReleaseId(PyHeapTypeObject *type);
-
-// Merges the thread-local reference counts into the corresponding types.
-extern void _PyType_MergeThreadLocalRefcounts(_PyThreadStateImpl *tstate);
-
-// Like _PyType_MergeThreadLocalRefcounts, but also frees the thread-local
-// array of refcounts.
-extern void _PyType_FinalizeThreadLocalRefcounts(_PyThreadStateImpl *tstate);
-
-// Frees the interpreter's pool of type ids.
-extern void _PyType_FinalizeIdPool(PyInterpreterState *interp);
-
-// Increfs the type, resizing the thread-local refcount array if necessary.
-PyAPI_FUNC(void) _PyType_IncrefSlow(PyHeapTypeObject *type);
-
-#endif /* Py_GIL_DISABLED */
-
-#ifdef __cplusplus
-}
-#endif
-#endif /* !Py_INTERNAL_TYPEID_H */
diff --git a/Include/internal/pycore_uniqueid.h b/Include/internal/pycore_uniqueid.h
new file mode 100644
index 00000000000..8f3b4418408
--- /dev/null
+++ b/Include/internal/pycore_uniqueid.h
@@ -0,0 +1,72 @@
+#ifndef Py_INTERNAL_UNIQUEID_H
+#define Py_INTERNAL_UNIQUEID_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+#ifdef Py_GIL_DISABLED
+
+// This contains code for allocating unique ids to objects for per-thread
+// reference counting.
+//
+// Per-thread reference counting is used along with deferred reference
+// counting to avoid scaling bottlenecks due to reference count contention.
+//
+// An id of -1 is used to indicate that an object doesn't use per-thread
+// refcounting. This value is used when the object is finalized by the GC
+// and during interpreter shutdown to allow the object to be
+// deallocated promptly when the object's refcount reaches zero.
+//
+// Each entry implicitly represents a unique id based on its offset in the
+// table. Non-allocated entries form a free-list via the 'next' pointer.
+// Allocated entries store the corresponding PyObject.
+typedef union _Py_unique_id_entry {
+    // Points to the next free id, when part of the freelist
+ union _Py_unique_id_entry *next;
+
+ // Stores the object when the id is assigned
+ PyObject *obj;
+} _Py_unique_id_entry;
+
+struct _Py_unique_id_pool {
+ PyMutex mutex;
+
+    // combined table of objects with allocated unique ids and unallocated ids.
+ _Py_unique_id_entry *table;
+
+ // Next entry to allocate inside 'table' or NULL
+ _Py_unique_id_entry *freelist;
+
+ // size of 'table'
+ Py_ssize_t size;
+};
+
+// Assigns the next id from the pool of ids.
+extern Py_ssize_t _PyObject_AssignUniqueId(PyObject *obj);
+
+// Releases the allocated id back to the pool.
+extern void _PyObject_ReleaseUniqueId(Py_ssize_t unique_id);
+
+// Merges the per-thread reference counts into the corresponding objects.
+extern void _PyObject_MergePerThreadRefcounts(_PyThreadStateImpl *tstate);
+
+// Like _PyObject_MergePerThreadRefcounts, but also frees the per-thread
+// array of refcounts.
+extern void _PyObject_FinalizePerThreadRefcounts(_PyThreadStateImpl *tstate);
+
+// Frees the interpreter's pool of unique ids.
+extern void _PyObject_FinalizeUniqueIdPool(PyInterpreterState *interp);
+
+// Increfs the type, resizing the per-thread refcount array if necessary.
+PyAPI_FUNC(void) _PyType_IncrefSlow(PyHeapTypeObject *type);
+
+#endif /* Py_GIL_DISABLED */
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_UNIQUEID_H */
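A self-contained sketch of the table-plus-freelist scheme the header describes may help; all names here (id_entry, id_pool, assign_id, release_id) are illustrative and not part of CPython, and the real pool guards these operations with a PyMutex, which is omitted. Each table slot either stores a live object pointer or links to the next free slot, and the id is simply the slot's offset in the table:

#include <stddef.h>
#include <stdlib.h>

typedef union id_entry {
    union id_entry *next;   /* when free: next free slot in the pool */
    void *obj;              /* when allocated: the object owning this id */
} id_entry;

typedef struct {
    id_entry *table;
    id_entry *freelist;
    ptrdiff_t size;
} id_pool;

/* Grow the table and thread the new slots onto the free list. Called only
 * when the free list is empty, so no stale 'next' pointers into the old
 * allocation can survive the realloc. */
static int
pool_grow(id_pool *pool)
{
    ptrdiff_t new_size = pool->size ? 2 * pool->size : 8;
    id_entry *table = realloc(pool->table, new_size * sizeof(id_entry));
    if (table == NULL) {
        return -1;
    }
    for (ptrdiff_t i = new_size - 1; i >= pool->size; i--) {
        table[i].next = pool->freelist;
        pool->freelist = &table[i];
    }
    pool->table = table;
    pool->size = new_size;
    return 0;
}

/* Returns the assigned id, or -1 on allocation failure. */
static ptrdiff_t
assign_id(id_pool *pool, void *obj)
{
    if (pool->freelist == NULL && pool_grow(pool) < 0) {
        return -1;
    }
    id_entry *entry = pool->freelist;
    pool->freelist = entry->next;
    entry->obj = obj;
    return entry - pool->table;     /* the id is the slot's offset */
}

/* Put the slot back on the free list; -1 means no id was ever assigned. */
static void
release_id(id_pool *pool, ptrdiff_t id)
{
    if (id < 0) {
        return;
    }
    id_entry *entry = &pool->table[id];
    entry->next = pool->freelist;
    pool->freelist = entry;
}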
diff --git a/Makefile.pre.in b/Makefile.pre.in
index a4d99262702..07c8a4d2014 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -490,7 +490,7 @@ PYTHON_OBJS= \
Python/thread.o \
Python/traceback.o \
Python/tracemalloc.o \
- Python/typeid.o \
+ Python/uniqueid.o \
Python/getopt.o \
Python/pystrcmp.o \
Python/pystrtod.o \
@@ -1279,7 +1279,7 @@ PYTHON_HEADERS= \
$(srcdir)/Include/internal/pycore_tracemalloc.h \
$(srcdir)/Include/internal/pycore_tstate.h \
$(srcdir)/Include/internal/pycore_tuple.h \
- $(srcdir)/Include/internal/pycore_typeid.h \
+ $(srcdir)/Include/internal/pycore_uniqueid.h \
$(srcdir)/Include/internal/pycore_typeobject.h \
$(srcdir)/Include/internal/pycore_typevarobject.h \
$(srcdir)/Include/internal/pycore_ucnhash.h \
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
index 0e2d9758a5f..6484e8921f8 100644
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -3932,7 +3932,7 @@ type_new_alloc(type_new_ctx *ctx)
et->ht_token = NULL;
#ifdef Py_GIL_DISABLED
- _PyType_AssignId(et);
+ et->unique_id = _PyObject_AssignUniqueId((PyObject *)et);
#endif
return type;
@@ -5026,7 +5026,7 @@ PyType_FromMetaclass(
#ifdef Py_GIL_DISABLED
// Assign a type id to enable thread-local refcounting
- _PyType_AssignId(res);
+ res->unique_id = _PyObject_AssignUniqueId((PyObject *)res);
#endif
/* Ready the type (which includes inheritance).
@@ -6080,7 +6080,7 @@ type_dealloc(PyObject *self)
Py_XDECREF(et->ht_module);
PyMem_Free(et->_ht_tpname);
#ifdef Py_GIL_DISABLED
- _PyType_ReleaseId(et);
+ _PyObject_ReleaseUniqueId(et->unique_id);
#endif
et->ht_token = NULL;
Py_TYPE(type)->tp_free((PyObject *)type);
diff --git a/PCbuild/_freeze_module.vcxproj b/PCbuild/_freeze_module.vcxproj
index 743e6e2a66a..a3c2d32c454 100644
--- a/PCbuild/_freeze_module.vcxproj
+++ b/PCbuild/_freeze_module.vcxproj
@@ -268,7 +268,7 @@
-    <ClCompile Include="..\Python\typeid.c" />
+    <ClCompile Include="..\Python\uniqueid.c" />
diff --git a/PCbuild/_freeze_module.vcxproj.filters b/PCbuild/_freeze_module.vcxproj.filters
index 0887a47917a..91b1d75fb8d 100644
--- a/PCbuild/_freeze_module.vcxproj.filters
+++ b/PCbuild/_freeze_module.vcxproj.filters
@@ -467,7 +467,7 @@
      <Filter>Source Files</Filter>
-    <ClCompile Include="..\Python\typeid.c">
+    <ClCompile Include="..\Python\uniqueid.c">
      <Filter>Source Files</Filter>
diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj
index 19b982db7f5..3b33c6bf6bb 100644
--- a/PCbuild/pythoncore.vcxproj
+++ b/PCbuild/pythoncore.vcxproj
@@ -304,13 +304,13 @@
-    <ClInclude Include="..\Include\internal\pycore_typeid.h" />
+    <ClInclude Include="..\Include\internal\pycore_uniqueid.h" />
@@ -657,7 +657,7 @@
-    <ClCompile Include="..\Python\typeid.c" />
+    <ClCompile Include="..\Python\uniqueid.c" />
diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters
index 23f2e9c8bc0..ee2930b1043 100644
--- a/PCbuild/pythoncore.vcxproj.filters
+++ b/PCbuild/pythoncore.vcxproj.filters
@@ -831,9 +831,6 @@
      <Filter>Include\internal</Filter>
-    <ClInclude Include="..\Include\internal\pycore_typeid.h">
-      <Filter>Include\internal</Filter>
-    </ClInclude>
      <Filter>Include\internal</Filter>
@@ -846,6 +843,9 @@
      <Filter>Include\internal</Filter>
+    <ClInclude Include="..\Include\internal\pycore_uniqueid.h">
+      <Filter>Include\internal</Filter>
+    </ClInclude>
      <Filter>Include\internal\mimalloc</Filter>
@@ -1499,7 +1499,7 @@
      <Filter>Python</Filter>
-    <ClCompile Include="..\Python\typeid.c">
+    <ClCompile Include="..\Python\uniqueid.c">
      <Filter>Python</Filter>
diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c
index a5bc9b9b578..38564d9d9b0 100644
--- a/Python/gc_free_threading.c
+++ b/Python/gc_free_threading.c
@@ -15,7 +15,7 @@
#include "pycore_tstate.h" // _PyThreadStateImpl
#include "pycore_weakref.h" // _PyWeakref_ClearRef()
#include "pydtrace.h"
-#include "pycore_typeid.h" // _PyType_MergeThreadLocalRefcounts
+#include "pycore_uniqueid.h" // _PyType_MergeThreadLocalRefcounts
#ifdef Py_GIL_DISABLED
@@ -217,12 +217,12 @@ disable_deferred_refcounting(PyObject *op)
merge_refcount(op, 0);
}
- // Heap types also use thread-local refcounting -- disable it here.
+ // Heap types also use per-thread refcounting -- disable it here.
if (PyType_Check(op)) {
- // Disable thread-local refcounting for heap types
- PyTypeObject *type = (PyTypeObject *)op;
- if (PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) {
- _PyType_ReleaseId((PyHeapTypeObject *)op);
+ if (PyType_HasFeature((PyTypeObject *)op, Py_TPFLAGS_HEAPTYPE)) {
+ PyHeapTypeObject *ht = (PyHeapTypeObject *)op;
+ _PyObject_ReleaseUniqueId(ht->unique_id);
+ ht->unique_id = -1;
}
}
@@ -1221,7 +1221,7 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state,
_PyThreadStateImpl *tstate = (_PyThreadStateImpl *)p;
// merge per-thread refcount for types into the type's actual refcount
- _PyType_MergeThreadLocalRefcounts(tstate);
+ _PyObject_MergePerThreadRefcounts(tstate);
// merge refcounts for all queued objects
merge_queued_objects(tstate, state);
diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c
index 8aebbe5c405..d9e89edf5dd 100644
--- a/Python/pylifecycle.c
+++ b/Python/pylifecycle.c
@@ -28,7 +28,7 @@
#include "pycore_sliceobject.h" // _PySlice_Fini()
#include "pycore_sysmodule.h" // _PySys_ClearAuditHooks()
#include "pycore_traceback.h" // _Py_DumpTracebackThreads()
-#include "pycore_typeid.h" // _PyType_FinalizeIdPool()
+#include "pycore_uniqueid.h" // _PyType_FinalizeIdPool()
#include "pycore_typeobject.h" // _PyTypes_InitTypes()
#include "pycore_typevarobject.h" // _Py_clear_generic_types()
#include "pycore_unicodeobject.h" // _PyUnicode_InitTypes()
@@ -1834,7 +1834,7 @@ finalize_interp_types(PyInterpreterState *interp)
_PyTypes_Fini(interp);
#ifdef Py_GIL_DISABLED
- _PyType_FinalizeIdPool(interp);
+ _PyObject_FinalizeUniqueIdPool(interp);
#endif
_PyCode_Fini(interp);
diff --git a/Python/pystate.c b/Python/pystate.c
index 6b85e5a64fe..9d11e2d2549 100644
--- a/Python/pystate.c
+++ b/Python/pystate.c
@@ -20,7 +20,7 @@
#include "pycore_runtime_init.h" // _PyRuntimeState_INIT
#include "pycore_sysmodule.h" // _PySys_Audit()
#include "pycore_obmalloc.h" // _PyMem_obmalloc_state_on_heap()
-#include "pycore_typeid.h" // _PyType_FinalizeThreadLocalRefcounts()
+#include "pycore_uniqueid.h" // _PyType_FinalizeThreadLocalRefcounts()
/* --------------------------------------------------------------------------
CAUTION
@@ -1745,7 +1745,7 @@ PyThreadState_Clear(PyThreadState *tstate)
// Merge our thread-local refcounts into the type's own refcount and
// free our local refcount array.
- _PyType_FinalizeThreadLocalRefcounts((_PyThreadStateImpl *)tstate);
+ _PyObject_FinalizePerThreadRefcounts((_PyThreadStateImpl *)tstate);
// Remove ourself from the biased reference counting table of threads.
_Py_brc_remove_thread(tstate);
@@ -1805,7 +1805,7 @@ tstate_delete_common(PyThreadState *tstate, int release_gil)
_PyThreadStateImpl *tstate_impl = (_PyThreadStateImpl *)tstate;
tstate->interp->object_state.reftotal += tstate_impl->reftotal;
tstate_impl->reftotal = 0;
- assert(tstate_impl->types.refcounts == NULL);
+ assert(tstate_impl->refcounts.values == NULL);
#endif
HEAD_UNLOCK(runtime);
diff --git a/Python/typeid.c b/Python/uniqueid.c
similarity index 51%
rename from Python/typeid.c
rename to Python/uniqueid.c
index 83a68723ded..9a9ee2f3946 100644
--- a/Python/typeid.c
+++ b/Python/uniqueid.c
@@ -3,12 +3,14 @@
#include "pycore_lock.h" // PyMutex_LockFlags()
#include "pycore_pystate.h" // _PyThreadState_GET()
#include "pycore_object.h" // _Py_IncRefTotal
-#include "pycore_typeid.h"
+#include "pycore_uniqueid.h"
-// This contains code for allocating unique ids to heap type objects
-// and re-using those ids when the type is deallocated.
+// This contains code for allocating unique ids for per-thread reference
+// counting and re-using those ids when an object is deallocated.
//
-// See Include/internal/pycore_typeid.h for more details.
+// Currently, per-thread reference counting is only used for heap types.
+//
+// See Include/internal/pycore_uniqueid.h for more details.
#ifdef Py_GIL_DISABLED
@@ -18,7 +20,7 @@
#define UNLOCK_POOL(pool) PyMutex_Unlock(&pool->mutex)
static int
-resize_interp_type_id_pool(struct _Py_type_id_pool *pool)
+resize_interp_type_id_pool(struct _Py_unique_id_pool *pool)
{
if ((size_t)pool->size > PY_SSIZE_T_MAX / (2 * sizeof(*pool->table))) {
return -1;
@@ -29,8 +31,8 @@ resize_interp_type_id_pool(struct _Py_type_id_pool *pool)
new_size = POOL_MIN_SIZE;
}
- _Py_type_id_entry *table = PyMem_Realloc(pool->table,
- new_size * sizeof(*pool->table));
+ _Py_unique_id_entry *table = PyMem_Realloc(pool->table,
+ new_size * sizeof(*pool->table));
if (table == NULL) {
return -1;
}
@@ -50,70 +52,67 @@ resize_interp_type_id_pool(struct _Py_type_id_pool *pool)
static int
resize_local_refcounts(_PyThreadStateImpl *tstate)
{
- if (tstate->types.is_finalized) {
+ if (tstate->refcounts.is_finalized) {
return -1;
}
- struct _Py_type_id_pool *pool = &tstate->base.interp->type_ids;
+ struct _Py_unique_id_pool *pool = &tstate->base.interp->unique_ids;
Py_ssize_t size = _Py_atomic_load_ssize(&pool->size);
- Py_ssize_t *refcnts = PyMem_Realloc(tstate->types.refcounts,
+ Py_ssize_t *refcnts = PyMem_Realloc(tstate->refcounts.values,
size * sizeof(Py_ssize_t));
if (refcnts == NULL) {
return -1;
}
- Py_ssize_t old_size = tstate->types.size;
+ Py_ssize_t old_size = tstate->refcounts.size;
if (old_size < size) {
memset(refcnts + old_size, 0, (size - old_size) * sizeof(Py_ssize_t));
}
- tstate->types.refcounts = refcnts;
- tstate->types.size = size;
+ tstate->refcounts.values = refcnts;
+ tstate->refcounts.size = size;
return 0;
}
-void
-_PyType_AssignId(PyHeapTypeObject *type)
+Py_ssize_t
+_PyObject_AssignUniqueId(PyObject *obj)
{
PyInterpreterState *interp = _PyInterpreterState_GET();
- struct _Py_type_id_pool *pool = &interp->type_ids;
+ struct _Py_unique_id_pool *pool = &interp->unique_ids;
LOCK_POOL(pool);
if (pool->freelist == NULL) {
if (resize_interp_type_id_pool(pool) < 0) {
- type->unique_id = -1;
UNLOCK_POOL(pool);
- return;
+ return -1;
}
}
- _Py_type_id_entry *entry = pool->freelist;
+ _Py_unique_id_entry *entry = pool->freelist;
pool->freelist = entry->next;
- entry->type = type;
- _PyObject_SetDeferredRefcount((PyObject *)type);
- type->unique_id = (entry - pool->table);
+ entry->obj = obj;
+ _PyObject_SetDeferredRefcount(obj);
+ Py_ssize_t unique_id = (entry - pool->table);
UNLOCK_POOL(pool);
+ return unique_id;
}
void
-_PyType_ReleaseId(PyHeapTypeObject *type)
+_PyObject_ReleaseUniqueId(Py_ssize_t unique_id)
{
PyInterpreterState *interp = _PyInterpreterState_GET();
- struct _Py_type_id_pool *pool = &interp->type_ids;
+ struct _Py_unique_id_pool *pool = &interp->unique_ids;
- if (type->unique_id < 0) {
- // The type doesn't have an id assigned.
+ if (unique_id < 0) {
+ // The id is not assigned
return;
}
LOCK_POOL(pool);
- _Py_type_id_entry *entry = &pool->table[type->unique_id];
- assert(entry->type == type);
+ _Py_unique_id_entry *entry = &pool->table[unique_id];
entry->next = pool->freelist;
pool->freelist = entry;
-
- type->unique_id = -1;
UNLOCK_POOL(pool);
}
@@ -127,8 +126,8 @@ _PyType_IncrefSlow(PyHeapTypeObject *type)
return;
}
- assert(type->unique_id < tstate->types.size);
- tstate->types.refcounts[type->unique_id]++;
+ assert(type->unique_id < tstate->refcounts.size);
+ tstate->refcounts.values[type->unique_id]++;
#ifdef Py_REF_DEBUG
_Py_IncRefTotal((PyThreadState *)tstate);
#endif
@@ -136,59 +135,64 @@ _PyType_IncrefSlow(PyHeapTypeObject *type)
}
void
-_PyType_MergeThreadLocalRefcounts(_PyThreadStateImpl *tstate)
+_PyObject_MergePerThreadRefcounts(_PyThreadStateImpl *tstate)
{
- if (tstate->types.refcounts == NULL) {
+ if (tstate->refcounts.values == NULL) {
return;
}
- struct _Py_type_id_pool *pool = &tstate->base.interp->type_ids;
+ struct _Py_unique_id_pool *pool = &tstate->base.interp->unique_ids;
LOCK_POOL(pool);
- for (Py_ssize_t i = 0, n = tstate->types.size; i < n; i++) {
- Py_ssize_t refcnt = tstate->types.refcounts[i];
+ for (Py_ssize_t i = 0, n = tstate->refcounts.size; i < n; i++) {
+ Py_ssize_t refcnt = tstate->refcounts.values[i];
if (refcnt != 0) {
- PyObject *type = (PyObject *)pool->table[i].type;
- assert(PyType_Check(type));
-
- _Py_atomic_add_ssize(&type->ob_ref_shared,
+ PyObject *obj = pool->table[i].obj;
+ _Py_atomic_add_ssize(&obj->ob_ref_shared,
refcnt << _Py_REF_SHARED_SHIFT);
- tstate->types.refcounts[i] = 0;
+ tstate->refcounts.values[i] = 0;
}
}
UNLOCK_POOL(pool);
}
void
-_PyType_FinalizeThreadLocalRefcounts(_PyThreadStateImpl *tstate)
+_PyObject_FinalizePerThreadRefcounts(_PyThreadStateImpl *tstate)
{
- _PyType_MergeThreadLocalRefcounts(tstate);
+ _PyObject_MergePerThreadRefcounts(tstate);
- PyMem_Free(tstate->types.refcounts);
- tstate->types.refcounts = NULL;
- tstate->types.size = 0;
- tstate->types.is_finalized = 1;
+ PyMem_Free(tstate->refcounts.values);
+ tstate->refcounts.values = NULL;
+ tstate->refcounts.size = 0;
+ tstate->refcounts.is_finalized = 1;
}
void
-_PyType_FinalizeIdPool(PyInterpreterState *interp)
+_PyObject_FinalizeUniqueIdPool(PyInterpreterState *interp)
{
- struct _Py_type_id_pool *pool = &interp->type_ids;
+ struct _Py_unique_id_pool *pool = &interp->unique_ids;
// First, set the free-list to NULL values
while (pool->freelist) {
- _Py_type_id_entry *next = pool->freelist->next;
- pool->freelist->type = NULL;
+ _Py_unique_id_entry *next = pool->freelist->next;
+ pool->freelist->obj = NULL;
pool->freelist = next;
}
// Now everything non-NULL is a type. Set the type's id to -1 in case it
// outlives the interpreter.
for (Py_ssize_t i = 0; i < pool->size; i++) {
- PyHeapTypeObject *ht = pool->table[i].type;
- if (ht) {
- ht->unique_id = -1;
- pool->table[i].type = NULL;
+ PyObject *obj = pool->table[i].obj;
+ pool->table[i].obj = NULL;
+ if (obj == NULL) {
+ continue;
+ }
+ if (PyType_Check(obj)) {
+ assert(PyType_HasFeature((PyTypeObject *)obj, Py_TPFLAGS_HEAPTYPE));
+ ((PyHeapTypeObject *)obj)->unique_id = -1;
+ }
+ else {
+ Py_UNREACHABLE();
}
}
PyMem_Free(pool->table);
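One detail in the merge loop above that may not be obvious: in the free-threaded build the low bits of ob_ref_shared hold state flags, so a reference count of N is stored as N shifted left by _Py_REF_SHARED_SHIFT, and merging a per-thread count adds (or, for a negative net-decref count, subtracts) the shifted value. A tiny illustration, with an assumed flag width of two bits rather than the real constant:

#include <stdint.h>

#define SHARED_SHIFT 2   /* illustrative flag width; not CPython's constant */

/* Fold one per-thread count into a shared refcount field whose low
 * SHARED_SHIFT bits hold flags. Multiplication is used instead of a
 * shift so a negative (net-decref) count stays well defined. */
static void
merge_one(int64_t *ob_ref_shared, int64_t per_thread_count)
{
    if (per_thread_count != 0) {
        /* e.g. a per-thread count of +3 adds 3 * 4 == 12, leaving the
         * two flag bits untouched. */
        *ob_ref_shared += per_thread_count * (1 << SHARED_SHIFT);
    }
}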