20 #ifndef OMPI_SYS_ARCH_ATOMIC_H
21 #define OMPI_SYS_ARCH_ATOMIC_H 1
27 #if OPAL_WANT_SMP_LOCKS
28 #define SMPLOCK "lock; "
29 #define MB() __asm__ __volatile__("": : :"memory")
41 #define OPAL_HAVE_ATOMIC_MEM_BARRIER 1
43 #define OPAL_HAVE_ATOMIC_CMPSET_32 1
45 #define OPAL_HAVE_ATOMIC_MATH_32 1
46 #define OPAL_HAVE_ATOMIC_ADD_32 1
47 #define OPAL_HAVE_ATOMIC_SUB_32 1
49 #define OPAL_HAVE_ATOMIC_CMPSET_64 1
51 #undef OPAL_HAVE_INLINE_ATOMIC_CMPSET_64
52 #define OPAL_HAVE_INLINE_ATOMIC_CMPSET_64 0
59 #if OMPI_GCC_INLINE_ASSEMBLY
#if OMPI_GCC_INLINE_ASSEMBLY

/**
 * Atomic 32-bit compare-and-set.
 *
 * cmpxchgl compares %eax (loaded with oldval) against *addr: if they
 * are equal it stores newval into *addr and sets ZF; otherwise it
 * loads the current *addr into %eax and clears ZF.  sete then captures
 * ZF as the success flag.
 *
 * @param addr    location to update
 * @param oldval  value expected at *addr
 * @param newval  value to store if *addr still holds oldval
 * @return non-zero if the store was performed, 0 otherwise
 */
static inline int opal_atomic_cmpset_32( volatile int32_t *addr,
                                         int32_t oldval,
                                         int32_t newval)
{
   unsigned char ret;
   __asm__ __volatile__ (
                       SMPLOCK "cmpxchgl %3,%2   \n\t"
                               "sete     %0      \n\t"
                       : "=qm" (ret), "+a" (oldval), "+m" (*addr)
                       : "q"(newval)
                       : "memory", "cc");

   return (int)ret;
}

#endif /* OMPI_GCC_INLINE_ASSEMBLY */

/* ia32 cmpxchg is a full barrier, so the acquire/release variants are
 * simply the plain version. */
#define opal_atomic_cmpset_acq_32 opal_atomic_cmpset_32
#define opal_atomic_cmpset_rel_32 opal_atomic_cmpset_32
#if OMPI_GCC_INLINE_ASSEMBLY

/* Access the low/high 32-bit words of a 64-bit value.  ia32 is
 * little-endian, so word 0 is the low half. */
#define ll_low(x) *(((unsigned int*)&(x))+0)
#define ll_high(x) *(((unsigned int*)&(x))+1)

/**
 * Atomic 64-bit compare-and-set via cmpxchg8b.
 *
 * cmpxchg8b compares EDX:EAX with the 8-byte memory operand; if equal
 * it stores ECX:EBX there and sets ZF, otherwise it loads the operand
 * into EDX:EAX and clears ZF.
 *
 * %ebx is the PIC base register under -fPIC, so it must not appear in
 * the clobber list; instead the low word of newval is passed in an
 * arbitrary register and moved into %ebx around the cmpxchg8b, with
 * %ebx saved and restored on the stack.
 *
 * @param addr    location to update
 * @param oldval  value expected at *addr
 * @param newval  value to store if *addr still holds oldval
 * @return non-zero if the store was performed, 0 otherwise
 */
static inline int opal_atomic_cmpset_64( volatile int64_t *addr,
                                         int64_t oldval,
                                         int64_t newval)
{
   unsigned char ret;

   __asm__ __volatile__(
                    "push %%ebx            \n\t"
                    "movl %4, %%ebx        \n\t"
                    SMPLOCK "cmpxchg8b (%1)  \n\t"
                    "sete %0               \n\t"
                    "pop %%ebx             \n\t"
                    : "=qm"(ret)
                    : "D"(addr),
                      "a"(ll_low(oldval)),
                      "d"(ll_high(oldval)),
                      "r"(ll_low(newval)),
                      "c"(ll_high(newval))
                    : "cc",
                      "memory");
   return (ret == 1);
}

#endif /* OMPI_GCC_INLINE_ASSEMBLY */

/* ia32 cmpxchg8b is a full barrier, so the acquire/release variants
 * are simply the plain version. */
#define opal_atomic_cmpset_acq_64 opal_atomic_cmpset_64
#define opal_atomic_cmpset_rel_64 opal_atomic_cmpset_64
156 #if OMPI_GCC_INLINE_ASSEMBLY
165 static inline int32_t opal_atomic_add_32(
volatile int32_t* v,
int i)
168 __asm__ __volatile__(
169 SMPLOCK
"xaddl %1,%0"
170 :
"=m" (*v),
"+r" (ret)
185 static inline int32_t opal_atomic_sub_32(
volatile int32_t* v,
int i)
188 __asm__ __volatile__(
189 SMPLOCK
"xaddl %1,%0"
190 :
"=m" (*v),
"+r" (ret)
/*
 * Memory barrier primitives provided by this header:
 *
 *   void opal_atomic_rmb(void)  -- Read memory barrier.
 *   void opal_atomic_mb(void)   -- Full memory barrier.
 *   void opal_atomic_wmb(void)  -- Write memory barrier.
 */