From a4b32c25761e3de55d42a4799a303f36aa198fb7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Martin=20Storsj=C3=B6?=
Date: Wed, 5 Jun 2024 22:49:58 +0300
Subject: [PATCH] Revert "[compiler-rt][builtins] Switch libatomic locks to
 pthread_mutex_t (#94374)"

This reverts commit b62b7a42bbee4a3bbf9094808f460fdc9c119bd7 and
a5729b71d844c1444f7d348dc2d4ea5b98de5ec5.

This commit broke compilation for systems that lack pthreads.
---
 compiler-rt/lib/builtins/atomic.c | 31 +++++++++++++------------------
 1 file changed, 13 insertions(+), 18 deletions(-)

diff --git a/compiler-rt/lib/builtins/atomic.c b/compiler-rt/lib/builtins/atomic.c
index c3a36a9aaba6..852bb20f0867 100644
--- a/compiler-rt/lib/builtins/atomic.c
+++ b/compiler-rt/lib/builtins/atomic.c
@@ -51,14 +51,6 @@
 #endif
 static const long SPINLOCK_MASK = SPINLOCK_COUNT - 1;
 
-#ifndef CACHE_LINE_SIZE
-#define CACHE_LINE_SIZE 64
-#endif
-
-#ifdef __clang__
-#pragma clang diagnostic ignored "-Wgnu-designator"
-#endif
-
 ////////////////////////////////////////////////////////////////////////////////
 // Platform-specific lock implementation.  Falls back to spinlocks if none is
 // defined.  Each platform should define the Lock type, and corresponding
@@ -102,18 +94,21 @@ static Lock locks[SPINLOCK_COUNT]; // initialized to OS_SPINLOCK_INIT which is 0
 #else
 _Static_assert(__atomic_always_lock_free(sizeof(uintptr_t), 0),
                "Implementation assumes lock-free pointer-size cmpxchg");
-#include <pthread.h>
-#include <stdalign.h>
-typedef struct {
-  alignas(CACHE_LINE_SIZE) pthread_mutex_t m;
-} Lock;
+typedef _Atomic(uintptr_t) Lock;
 /// Unlock a lock.  This is a release operation.
-__inline static void unlock(Lock *l) { pthread_mutex_unlock(&l->m); }
-/// Locks a lock.
-__inline static void lock(Lock *l) { pthread_mutex_lock(&l->m); }
+__inline static void unlock(Lock *l) {
+  __c11_atomic_store(l, 0, __ATOMIC_RELEASE);
+}
+/// Locks a lock.  In the current implementation, this is potentially
+/// unbounded in the contended case.
+__inline static void lock(Lock *l) {
+  uintptr_t old = 0;
+  while (!__c11_atomic_compare_exchange_weak(l, &old, 1, __ATOMIC_ACQUIRE,
+                                             __ATOMIC_RELAXED))
+    old = 0;
+}
 /// locks for atomic operations
-static Lock locks[SPINLOCK_COUNT] = {
-    [0 ... SPINLOCK_COUNT - 1] = {PTHREAD_MUTEX_INITIALIZER}};
+static Lock locks[SPINLOCK_COUNT];
 #endif
 
 /// Returns a lock to use for a given pointer.
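
Note (not part of the patch): the lock scheme this revert restores is a plain C11
acquire/release spinlock. The sketch below expresses the same idea with the standard
<stdatomic.h> API instead of the clang-only __c11_atomic_* builtins used in atomic.c;
the demo_lock variable and main() are illustrative additions only, under the
assumption that a freestanding demonstration is wanted.

/* Standalone sketch of the restored spinlock, using standard C11 atomics. */
#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

typedef _Atomic(uintptr_t) Lock;

/* Unlock: a release store of 0 publishes the critical section's writes. */
static void unlock(Lock *l) {
  atomic_store_explicit(l, 0, memory_order_release);
}

/* Lock: spin on a weak 0 -> 1 compare-exchange with acquire ordering.
 * As the patch comment notes, this is unbounded under contention. */
static void lock(Lock *l) {
  uintptr_t old = 0;
  while (!atomic_compare_exchange_weak_explicit(l, &old, 1,
                                                memory_order_acquire,
                                                memory_order_relaxed))
    old = 0; /* the failed CAS wrote the observed value; reset expected */
}

static Lock demo_lock; /* zero-initialized == unlocked; no pthreads needed */

int main(void) {
  lock(&demo_lock);
  puts("in critical section");
  unlock(&demo_lock);
  return 0;
}

This zero-initialized design is what lets compiler-rt drop the designated-initializer
array of PTHREAD_MUTEX_INITIALIZER values and build on systems without pthreads.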