#if OPAL_HAVE_ATOMIC_CMPSET_32
#if !defined(OPAL_HAVE_ATOMIC_SWAP_32)
#define OPAL_HAVE_ATOMIC_SWAP_32 1
/* Emulated swap: CAS-retry until we successfully replace *addr. */
static inline int32_t opal_atomic_swap_32(volatile int32_t *addr, int32_t newval)
{
    int32_t old;
    do {
        old = *addr;
    } while (0 == opal_atomic_cmpset_32(addr, old, newval));
    return old;
}
#endif /* OPAL_HAVE_ATOMIC_SWAP_32 */
#if !defined(OPAL_HAVE_ATOMIC_ADD_32)
#define OPAL_HAVE_ATOMIC_ADD_32 1
/* Emulated atomic add: CAS-retry loop, returns the new value. */
static inline int32_t
opal_atomic_add_32(volatile int32_t *addr, int delta)
{
    int32_t oldval;
    do {
        oldval = *addr;
    } while (0 == opal_atomic_cmpset_32(addr, oldval, oldval + delta));
    return (oldval + delta);
}
#endif /* OPAL_HAVE_ATOMIC_ADD_32 */
#if !defined(OPAL_HAVE_ATOMIC_SUB_32)
#define OPAL_HAVE_ATOMIC_SUB_32 1
/* Emulated atomic subtract: CAS-retry loop, returns the new value. */
static inline int32_t
opal_atomic_sub_32(volatile int32_t *addr, int delta)
{
    int32_t oldval;
    do {
        oldval = *addr;
    } while (0 == opal_atomic_cmpset_32(addr, oldval, oldval - delta));
    return (oldval - delta);
}
#endif /* OPAL_HAVE_ATOMIC_SUB_32 */

#endif /* OPAL_HAVE_ATOMIC_CMPSET_32 */
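/*
 * Illustrative sketch (added, not part of the original file): a hypothetical
 * reference-count helper built on the emulated 32-bit math above.  The add
 * and subtract helpers return the updated value, so a caller can detect when
 * the count reaches zero.
 */
#if defined(OPAL_HAVE_ATOMIC_SUB_32) && OPAL_HAVE_ATOMIC_SUB_32
static inline int opal_example_release(volatile int32_t *refcount)
{
    /* Non-zero when this call dropped the last reference. */
    return (0 == opal_atomic_sub_32(refcount, 1));
}
#endif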
#if OPAL_HAVE_ATOMIC_CMPSET_64
#if !defined(OPAL_HAVE_ATOMIC_SWAP_64)
#define OPAL_HAVE_ATOMIC_SWAP_64 1
static inline int64_t opal_atomic_swap_64(volatile int64_t *addr, int64_t newval)
{
    int64_t old;
    do {
        old = *addr;
    } while (0 == opal_atomic_cmpset_64(addr, old, newval));
    return old;
}
#endif /* OPAL_HAVE_ATOMIC_SWAP_64 */
#if !defined(OPAL_HAVE_ATOMIC_ADD_64)
#define OPAL_HAVE_ATOMIC_ADD_64 1
static inline int64_t
opal_atomic_add_64(volatile int64_t *addr, int64_t delta)
{
    int64_t oldval;
    do {
        oldval = *addr;
    } while (0 == opal_atomic_cmpset_64(addr, oldval, oldval + delta));
    return (oldval + delta);
}
#endif /* OPAL_HAVE_ATOMIC_ADD_64 */
#if !defined(OPAL_HAVE_ATOMIC_SUB_64)
#define OPAL_HAVE_ATOMIC_SUB_64 1
static inline int64_t
opal_atomic_sub_64(volatile int64_t *addr, int64_t delta)
{
    int64_t oldval;
    do {
        oldval = *addr;
    } while (0 == opal_atomic_cmpset_64(addr, oldval, oldval - delta));
    return (oldval - delta);
}
#endif /* OPAL_HAVE_ATOMIC_SUB_64 */

#else
/* No 64-bit compare-and-set: mark 64-bit math as unavailable. */
#if !defined(OPAL_HAVE_ATOMIC_ADD_64)
#define OPAL_HAVE_ATOMIC_ADD_64 0
#endif
#if !defined(OPAL_HAVE_ATOMIC_SUB_64)
#define OPAL_HAVE_ATOMIC_SUB_64 0
#endif

#endif /* OPAL_HAVE_ATOMIC_CMPSET_64 */
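/*
 * Illustrative (added, not part of the original file): consumers can test the
 * feature macros set above at compile time.  The function name is hypothetical.
 */
static inline int opal_example_have_64bit_math(void)
{
#if OPAL_HAVE_ATOMIC_ADD_64 && OPAL_HAVE_ATOMIC_SUB_64
    return 1;   /* 64-bit add/sub available (native or emulated above) */
#else
    return 0;   /* caller would fall back to some slower path */
#endif
}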
#if (OPAL_HAVE_ATOMIC_CMPSET_32 || OPAL_HAVE_ATOMIC_CMPSET_64)
/* Dispatch a compare-and-set to the right width based on operand size. */
static inline int
opal_atomic_cmpset_xx(volatile void* addr, int64_t oldval,
                      int64_t newval, size_t length)
{
    switch (length) {
#if OPAL_HAVE_ATOMIC_CMPSET_32
    case 4:
        return opal_atomic_cmpset_32((volatile int32_t*)addr,
                                     (int32_t)oldval, (int32_t)newval);
#endif /* OPAL_HAVE_ATOMIC_CMPSET_32 */
#if OPAL_HAVE_ATOMIC_CMPSET_64
    case 8:
        return opal_atomic_cmpset_64((volatile int64_t*)addr,
                                     (int64_t)oldval, (int64_t)newval);
#endif /* OPAL_HAVE_ATOMIC_CMPSET_64 */
    }
    /* Unsupported operand size: should never happen. */
    abort();
}
static inline int
opal_atomic_cmpset_acq_xx(volatile void* addr, int64_t oldval,
                          int64_t newval, size_t length)
{
    switch (length) {
#if OPAL_HAVE_ATOMIC_CMPSET_32
    case 4:
        return opal_atomic_cmpset_acq_32((volatile int32_t*)addr,
                                         (int32_t)oldval, (int32_t)newval);
#endif /* OPAL_HAVE_ATOMIC_CMPSET_32 */
#if OPAL_HAVE_ATOMIC_CMPSET_64
    case 8:
        return opal_atomic_cmpset_acq_64((volatile int64_t*)addr,
                                         (int64_t)oldval, (int64_t)newval);
#endif /* OPAL_HAVE_ATOMIC_CMPSET_64 */
    }
    abort();
}
static inline int
opal_atomic_cmpset_rel_xx(volatile void* addr, int64_t oldval,
                          int64_t newval, size_t length)
{
    switch (length) {
#if OPAL_HAVE_ATOMIC_CMPSET_32
    case 4:
        return opal_atomic_cmpset_rel_32((volatile int32_t*)addr,
                                         (int32_t)oldval, (int32_t)newval);
#endif /* OPAL_HAVE_ATOMIC_CMPSET_32 */
#if OPAL_HAVE_ATOMIC_CMPSET_64
    case 8:
        return opal_atomic_cmpset_rel_64((volatile int64_t*)addr,
                                         (int64_t)oldval, (int64_t)newval);
#endif /* OPAL_HAVE_ATOMIC_CMPSET_64 */
    }
    abort();
}
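/*
 * Illustrative (added, not part of the original file): the _xx helpers above
 * are designed to sit behind a type-generic wrapper that dispatches on
 * operand size with sizeof.  "opal_example_cmpset" is a hypothetical name;
 * OPAL's public atomic.h provides the real wrapper macros.
 */
#define opal_example_cmpset(ADDR, OLDVAL, NEWVAL)                        \
    opal_atomic_cmpset_xx((volatile void *)(ADDR), (int64_t)(OLDVAL),    \
                          (int64_t)(NEWVAL), sizeof(*(ADDR)))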
static inline int opal_atomic_cmpset_ptr(volatile void* addr,
                                         void* oldval, void* newval)
{
#if SIZEOF_VOID_P == 4 && OPAL_HAVE_ATOMIC_CMPSET_32
    return opal_atomic_cmpset_32((int32_t*) addr, (unsigned long) oldval,
                                 (unsigned long) newval);
#elif SIZEOF_VOID_P == 8 && OPAL_HAVE_ATOMIC_CMPSET_64
    return opal_atomic_cmpset_64((int64_t*) addr, (unsigned long) oldval,
                                 (unsigned long) newval);
#else
    abort();
#endif
}
static inline int opal_atomic_cmpset_acq_ptr(volatile void* addr,
                                             void* oldval, void* newval)
{
#if SIZEOF_VOID_P == 4 && OPAL_HAVE_ATOMIC_CMPSET_32
    return opal_atomic_cmpset_acq_32((int32_t*) addr, (unsigned long) oldval,
                                     (unsigned long) newval);
#elif SIZEOF_VOID_P == 8 && OPAL_HAVE_ATOMIC_CMPSET_64
    return opal_atomic_cmpset_acq_64((int64_t*) addr, (unsigned long) oldval,
                                     (unsigned long) newval);
#else
    abort();
#endif
}
static inline int opal_atomic_cmpset_rel_ptr(volatile void* addr,
                                             void* oldval, void* newval)
{
#if SIZEOF_VOID_P == 4 && OPAL_HAVE_ATOMIC_CMPSET_32
    return opal_atomic_cmpset_rel_32((int32_t*) addr, (unsigned long) oldval,
                                     (unsigned long) newval);
#elif SIZEOF_VOID_P == 8 && OPAL_HAVE_ATOMIC_CMPSET_64
    return opal_atomic_cmpset_rel_64((int64_t*) addr, (unsigned long) oldval,
                                     (unsigned long) newval);
#else
    abort();
#endif
}
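/*
 * Illustrative (added, not part of the original file): a minimal lock-free
 * stack push built on opal_atomic_cmpset_ptr.  The node type and function
 * name are hypothetical.
 */
struct opal_example_node { struct opal_example_node *next; };

static inline void
opal_example_push(struct opal_example_node *volatile *head,
                  struct opal_example_node *node)
{
    struct opal_example_node *old;
    do {
        old = *head;
        node->next = old;
        /* Retry until the head still holds the value we read. */
    } while (!opal_atomic_cmpset_ptr((volatile void *) head, old, node));
}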
#endif /* (OPAL_HAVE_ATOMIC_CMPSET_32 || OPAL_HAVE_ATOMIC_CMPSET_64) */

#if (OPAL_HAVE_ATOMIC_SWAP_32 || OPAL_HAVE_ATOMIC_SWAP_64)
#if SIZEOF_VOID_P == 4 && OPAL_HAVE_ATOMIC_SWAP_32
#define opal_atomic_swap_ptr(addr, value) opal_atomic_swap_32((int32_t *) addr, value)
#elif SIZEOF_VOID_P == 8 && OPAL_HAVE_ATOMIC_SWAP_64
#define opal_atomic_swap_ptr(addr, value) opal_atomic_swap_64((int64_t *) addr, value)
#endif

#endif /* (OPAL_HAVE_ATOMIC_SWAP_32 || OPAL_HAVE_ATOMIC_SWAP_64) */
#if OPAL_HAVE_ATOMIC_MATH_32 || OPAL_HAVE_ATOMIC_MATH_64
static inline void
opal_atomic_add_xx(volatile void* addr, int32_t value, size_t length)
{
    switch (length) {
#if OPAL_HAVE_ATOMIC_ADD_32
    case 4:
        opal_atomic_add_32((volatile int32_t*)addr, (int32_t)value);
        break;
#endif /* OPAL_HAVE_ATOMIC_ADD_32 */
#if OPAL_HAVE_ATOMIC_ADD_64
    case 8:
        opal_atomic_add_64((volatile int64_t*)addr, (int64_t)value);
        break;
#endif /* OPAL_HAVE_ATOMIC_ADD_64 */
    default:
        abort();
    }
}
static inline void
opal_atomic_sub_xx(volatile void* addr, int32_t value, size_t length)
{
    switch (length) {
#if OPAL_HAVE_ATOMIC_SUB_32
    case 4:
        opal_atomic_sub_32((volatile int32_t*)addr, (int32_t)value);
        break;
#endif /* OPAL_HAVE_ATOMIC_SUB_32 */
#if OPAL_HAVE_ATOMIC_SUB_64
    case 8:
        opal_atomic_sub_64((volatile int64_t*)addr, (int64_t)value);
        break;
#endif /* OPAL_HAVE_ATOMIC_SUB_64 */
    default:
        abort();
    }
}
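/*
 * Illustrative (added, not part of the original file): the _xx math helpers
 * are likewise meant to back sizeof-dispatching wrappers.  The names
 * "opal_example_add" / "opal_example_sub" are hypothetical.
 */
#define opal_example_add(ADDR, VALUE) \
    opal_atomic_add_xx((volatile void *)(ADDR), (int32_t)(VALUE), sizeof(*(ADDR)))
#define opal_example_sub(ADDR, VALUE) \
    opal_atomic_sub_xx((volatile void *)(ADDR), (int32_t)(VALUE), sizeof(*(ADDR)))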
#if SIZEOF_VOID_P == 4 && OPAL_HAVE_ATOMIC_ADD_32
static inline int32_t opal_atomic_add_ptr(volatile void* addr, void* delta)
{
    return opal_atomic_add_32((int32_t*) addr, (unsigned long) delta);
}
#elif SIZEOF_VOID_P == 8 && OPAL_HAVE_ATOMIC_ADD_64
static inline int64_t opal_atomic_add_ptr(volatile void* addr, void* delta)
{
    return opal_atomic_add_64((int64_t*) addr, (unsigned long) delta);
}
#else
static inline int32_t opal_atomic_add_ptr(volatile void* addr, void* delta)
{
    abort();
    return 0;
}
#endif
#if SIZEOF_VOID_P == 4 && OPAL_HAVE_ATOMIC_SUB_32
static inline int32_t opal_atomic_sub_ptr(volatile void* addr, void* delta)
{
    return opal_atomic_sub_32((int32_t*) addr, (unsigned long) delta);
}
#elif SIZEOF_VOID_P == 8 && OPAL_HAVE_ATOMIC_SUB_64
static inline int64_t opal_atomic_sub_ptr(volatile void* addr, void* delta)
{
    return opal_atomic_sub_64((int64_t*) addr, (unsigned long) delta);
}
#else
static inline int32_t opal_atomic_sub_ptr(volatile void* addr, void* delta)
{
    abort();
    return 0;
}
#endif

#endif /* OPAL_HAVE_ATOMIC_MATH_32 || OPAL_HAVE_ATOMIC_MATH_64 */
#ifdef OPAL_NEED_INLINE_ATOMIC_SPINLOCKS
static inline void opal_atomic_init(opal_atomic_lock_t* lock, int32_t value)
{
    lock->u.lock = value;
}
static inline int opal_atomic_trylock(opal_atomic_lock_t *lock)
{
    int ret = opal_atomic_cmpset_acq_32(&(lock->u.lock),
                                        OPAL_ATOMIC_UNLOCKED, OPAL_ATOMIC_LOCKED);
    return (ret == 0) ? 1 : 0;   /* 1 means the lock was NOT acquired */
}
static inline void opal_atomic_lock(opal_atomic_lock_t *lock)
{
    while (!opal_atomic_cmpset_acq_32(&(lock->u.lock),
                                      OPAL_ATOMIC_UNLOCKED, OPAL_ATOMIC_LOCKED)) {
        /* Spin on a plain read until the lock looks free, then retry the CAS. */
        while (lock->u.lock == OPAL_ATOMIC_LOCKED) {
            /* spin */ ;
        }
    }
}

static inline void opal_atomic_unlock(opal_atomic_lock_t *lock)
{
    opal_atomic_wmb();   /* make prior writes visible before releasing */
    lock->u.lock = OPAL_ATOMIC_UNLOCKED;
}

#endif /* OPAL_NEED_INLINE_ATOMIC_SPINLOCKS */
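/*
 * Illustrative (added, not part of the original file): typical use of the
 * spinlock API above.  The lock object and critical-section contents are
 * hypothetical; a lock is normally created with
 * opal_atomic_init(&lk, OPAL_ATOMIC_UNLOCKED), and opal_atomic_trylock(&lk)
 * returns 0 when the lock was acquired.
 */
static inline void opal_example_locked_increment(opal_atomic_lock_t *lk,
                                                 volatile int32_t *counter)
{
    opal_atomic_lock(lk);       /* spin until acquired */
    *counter = *counter + 1;    /* critical section */
    opal_atomic_unlock(lk);     /* write barrier + release */
}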
/*
 * Declarations referenced above (documented in atomic.h):
 *   opal_atomic_lock_t                 - volatile lock object (with optional padding)
 *   volatile int32_t u.lock            - the lock word itself (an integer)
 *   opal_atomic_init(lock, value)      - initialize a lock to value
 *   opal_atomic_trylock(lock)          - try to acquire a lock
 *   opal_atomic_lock(lock)             - acquire a lock by spinning
 *   opal_atomic_unlock(lock)           - release a lock
 *   opal_atomic_wmb()                  - write memory barrier
 */