gh-114271: Make `thread._rlock` thread-safe in free-threaded builds (#115102)

The ID of the owning thread (`rlock_owner`) may be accessed by
multiple threads without holding the underlying lock; relaxed
atomic loads and stores replace the previous plain, non-atomic
accesses.

The number of times that the lock has been acquired (`rlock_count`)
is only ever accessed by the thread that holds the lock; we do not
need to use atomics to access it.
commit f366e21504
parent 13addd2bbd
Author: mpage, 2024-02-16 10:29:25 -08:00 (committed by GitHub)
5 changed files with 68 additions and 10 deletions
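
The locking pattern the diff below adopts can be summarized in a small standalone sketch. This is not CPython code: it uses C11 `<stdatomic.h>` directly instead of the `_Py_atomic_*` wrappers, and the names `toy_rlock`, `tid_t`, and `toy_rlock_is_owned_by` are made up for illustration.

    /*
     * Minimal sketch (not CPython code): the owner thread ID is the only
     * field that other threads may read without holding the lock, so it is
     * accessed with relaxed atomics; the recursion count is touched only by
     * the thread that currently holds the lock.
     */
    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    typedef unsigned long long tid_t;

    typedef struct {
        _Atomic tid_t owner;   /* read by any thread, hence atomic (relaxed) */
        unsigned long count;   /* read/written only by the owning thread */
    } toy_rlock;

    /* Safe to call from any thread. The relaxed load avoids a data race on
     * `owner`; `count` is only read after `owner == tid` succeeds, and only
     * the calling thread ever stores its own ID there, so no other thread
     * can be mutating `count` at that point. */
    static bool
    toy_rlock_is_owned_by(toy_rlock *lock, tid_t tid)
    {
        tid_t owner = atomic_load_explicit(&lock->owner, memory_order_relaxed);
        return owner == tid && lock->count > 0;
    }

    int main(void)
    {
        toy_rlock lock = { .count = 0 };
        tid_t me = 1;

        /* Simulate this thread acquiring the lock twice. */
        atomic_store_explicit(&lock.owner, me, memory_order_relaxed);
        lock.count = 2;

        printf("owned by me:    %d\n", toy_rlock_is_owned_by(&lock, me));  /* 1 */
        printf("owned by other: %d\n", toy_rlock_is_owned_by(&lock, 2));   /* 0 */
        return 0;
    }

In the diff, `rlock_is_owned_by()` plays the same role using `_Py_atomic_load_ullong_relaxed()`.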


@@ -360,6 +360,8 @@ _Py_atomic_load_ssize_relaxed(const Py_ssize_t *obj);
 static inline void *
 _Py_atomic_load_ptr_relaxed(const void *obj);
+static inline unsigned long long
+_Py_atomic_load_ullong_relaxed(const unsigned long long *obj);
 // --- _Py_atomic_store ------------------------------------------------------
 // Atomically performs `*obj = value` (sequential consistency)
@@ -452,6 +454,10 @@ _Py_atomic_store_ptr_relaxed(void *obj, void *value);
 static inline void
 _Py_atomic_store_ssize_relaxed(Py_ssize_t *obj, Py_ssize_t value);
+static inline void
+_Py_atomic_store_ullong_relaxed(unsigned long long *obj,
+                                unsigned long long value);
 // --- _Py_atomic_load_ptr_acquire / _Py_atomic_store_ptr_release ------------


@@ -358,6 +358,10 @@ static inline void *
 _Py_atomic_load_ptr_relaxed(const void *obj)
 { return (void *)__atomic_load_n((const void **)obj, __ATOMIC_RELAXED); }
+static inline unsigned long long
+_Py_atomic_load_ullong_relaxed(const unsigned long long *obj)
+{ return __atomic_load_n(obj, __ATOMIC_RELAXED); }
 // --- _Py_atomic_store ------------------------------------------------------
@@ -476,6 +480,11 @@ static inline void
 _Py_atomic_store_ssize_relaxed(Py_ssize_t *obj, Py_ssize_t value)
 { __atomic_store_n(obj, value, __ATOMIC_RELAXED); }
+static inline void
+_Py_atomic_store_ullong_relaxed(unsigned long long *obj,
+                                unsigned long long value)
+{ __atomic_store_n(obj, value, __ATOMIC_RELAXED); }
 // --- _Py_atomic_load_ptr_acquire / _Py_atomic_store_ptr_release ------------


@@ -712,6 +712,12 @@ _Py_atomic_load_ptr_relaxed(const void *obj)
     return *(void * volatile *)obj;
 }
+static inline unsigned long long
+_Py_atomic_load_ullong_relaxed(const unsigned long long *obj)
+{
+    return *(volatile unsigned long long *)obj;
+}
 // --- _Py_atomic_store ------------------------------------------------------
@@ -886,6 +892,14 @@ _Py_atomic_store_ssize_relaxed(Py_ssize_t *obj, Py_ssize_t value)
     *(volatile Py_ssize_t *)obj = value;
 }
+static inline void
+_Py_atomic_store_ullong_relaxed(unsigned long long *obj,
+                                unsigned long long value)
+{
+    *(volatile unsigned long long *)obj = value;
+}
 // --- _Py_atomic_load_ptr_acquire / _Py_atomic_store_ptr_release ------------
 static inline void *


@@ -619,6 +619,14 @@ _Py_atomic_load_ptr_relaxed(const void *obj)
                                 memory_order_relaxed);
 }
+static inline unsigned long long
+_Py_atomic_load_ullong_relaxed(const unsigned long long *obj)
+{
+    _Py_USING_STD;
+    return atomic_load_explicit((const _Atomic(unsigned long long)*)obj,
+                                memory_order_relaxed);
+}
 // --- _Py_atomic_store ------------------------------------------------------
@@ -835,6 +843,15 @@ _Py_atomic_store_ssize_relaxed(Py_ssize_t *obj, Py_ssize_t value)
                           memory_order_relaxed);
 }
+static inline void
+_Py_atomic_store_ullong_relaxed(unsigned long long *obj,
+                                unsigned long long value)
+{
+    _Py_USING_STD;
+    atomic_store_explicit((_Atomic(unsigned long long)*)obj, value,
+                          memory_order_relaxed);
+}
 // --- _Py_atomic_load_ptr_acquire / _Py_atomic_store_ptr_release ------------


@@ -11,6 +11,7 @@
 #include "pycore_sysmodule.h" // _PySys_GetAttr()
 #include "pycore_weakref.h" // _PyWeakref_GET_REF()
+#include <stdbool.h>
 #include <stddef.h> // offsetof()
 #ifdef HAVE_SIGNAL_H
 # include <signal.h> // SIGINT
@@ -489,6 +490,14 @@ rlock_dealloc(rlockobject *self)
     Py_DECREF(tp);
 }
+static bool
+rlock_is_owned_by(rlockobject *self, PyThread_ident_t tid)
+{
+    PyThread_ident_t owner_tid =
+        _Py_atomic_load_ullong_relaxed(&self->rlock_owner);
+    return owner_tid == tid && self->rlock_count > 0;
+}
 static PyObject *
 rlock_acquire(rlockobject *self, PyObject *args, PyObject *kwds)
 {
@@ -500,7 +509,7 @@ rlock_acquire(rlockobject *self, PyObject *args, PyObject *kwds)
         return NULL;
     tid = PyThread_get_thread_ident_ex();
-    if (self->rlock_count > 0 && tid == self->rlock_owner) {
+    if (rlock_is_owned_by(self, tid)) {
         unsigned long count = self->rlock_count + 1;
         if (count <= self->rlock_count) {
             PyErr_SetString(PyExc_OverflowError,
@@ -513,7 +522,7 @@ rlock_acquire(rlockobject *self, PyObject *args, PyObject *kwds)
     r = acquire_timed(self->rlock_lock, timeout);
     if (r == PY_LOCK_ACQUIRED) {
         assert(self->rlock_count == 0);
-        self->rlock_owner = tid;
+        _Py_atomic_store_ullong_relaxed(&self->rlock_owner, tid);
         self->rlock_count = 1;
     }
     else if (r == PY_LOCK_INTR) {
@@ -544,13 +553,13 @@ rlock_release(rlockobject *self, PyObject *Py_UNUSED(ignored))
 {
     PyThread_ident_t tid = PyThread_get_thread_ident_ex();
-    if (self->rlock_count == 0 || self->rlock_owner != tid) {
+    if (!rlock_is_owned_by(self, tid)) {
         PyErr_SetString(PyExc_RuntimeError,
                         "cannot release un-acquired lock");
         return NULL;
     }
     if (--self->rlock_count == 0) {
-        self->rlock_owner = 0;
+        _Py_atomic_store_ullong_relaxed(&self->rlock_owner, 0);
         PyThread_release_lock(self->rlock_lock);
     }
     Py_RETURN_NONE;
@@ -589,7 +598,7 @@ rlock_acquire_restore(rlockobject *self, PyObject *args)
         return NULL;
     }
     assert(self->rlock_count == 0);
-    self->rlock_owner = owner;
+    _Py_atomic_store_ullong_relaxed(&self->rlock_owner, owner);
     self->rlock_count = count;
     Py_RETURN_NONE;
 }
@@ -614,7 +623,7 @@ rlock_release_save(rlockobject *self, PyObject *Py_UNUSED(ignored))
     owner = self->rlock_owner;
     count = self->rlock_count;
     self->rlock_count = 0;
-    self->rlock_owner = 0;
+    _Py_atomic_store_ullong_relaxed(&self->rlock_owner, 0);
     PyThread_release_lock(self->rlock_lock);
     return Py_BuildValue("k" Py_PARSE_THREAD_IDENT_T, count, owner);
 }
@@ -628,8 +637,9 @@ static PyObject *
 rlock_recursion_count(rlockobject *self, PyObject *Py_UNUSED(ignored))
 {
     PyThread_ident_t tid = PyThread_get_thread_ident_ex();
-    return PyLong_FromUnsignedLong(
-        self->rlock_owner == tid ? self->rlock_count : 0UL);
+    PyThread_ident_t owner =
+        _Py_atomic_load_ullong_relaxed(&self->rlock_owner);
+    return PyLong_FromUnsignedLong(owner == tid ? self->rlock_count : 0UL);
 }
 PyDoc_STRVAR(rlock_recursion_count_doc,
@@ -642,7 +652,7 @@ rlock_is_owned(rlockobject *self, PyObject *Py_UNUSED(ignored))
 {
     PyThread_ident_t tid = PyThread_get_thread_ident_ex();
-    if (self->rlock_count > 0 && self->rlock_owner == tid) {
+    if (rlock_is_owned_by(self, tid)) {
         Py_RETURN_TRUE;
     }
     Py_RETURN_FALSE;
@@ -676,10 +686,12 @@ rlock_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 static PyObject *
 rlock_repr(rlockobject *self)
 {
+    PyThread_ident_t owner =
+        _Py_atomic_load_ullong_relaxed(&self->rlock_owner);
     return PyUnicode_FromFormat(
         "<%s %s object owner=%" PY_FORMAT_THREAD_IDENT_T " count=%lu at %p>",
         self->rlock_count ? "locked" : "unlocked",
-        Py_TYPE(self)->tp_name, self->rlock_owner,
+        Py_TYPE(self)->tp_name, owner,
         self->rlock_count, self);
 }