#ifndef OMPI_SYS_ARCH_ATOMIC_H
#define OMPI_SYS_ARCH_ATOMIC_H 1
#define MB() __asm__ __volatile__("mf": : :"memory")
#define OPAL_HAVE_ATOMIC_MEM_BARRIER 1

#define OPAL_HAVE_ATOMIC_CMPSET_32 1
#define OPAL_HAVE_ATOMIC_CMPSET_64 1
#if OMPI_GCC_INLINE_ASSEMBLY

/* Memory barrier.  On ia64 the single "mf" instruction defined by MB()
 * serves as a full, read, and write barrier. */
static inline void opal_atomic_mb(void)
{
    MB();
}

/* Read memory barrier. */
static inline void opal_atomic_rmb(void)
{
    MB();
}

/* Write memory barrier. */
static inline void opal_atomic_wmb(void)
{
    MB();
}

#endif /* OMPI_GCC_INLINE_ASSEMBLY */
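/* Illustrative sketch, not part of the original header: how the
 * barriers above are typically paired in a producer/consumer handoff.
 * The function and parameter names here are hypothetical examples. */
#if OMPI_GCC_INLINE_ASSEMBLY
static inline void example_publish(volatile int32_t *data,
                                   volatile int32_t *flag, int32_t value)
{
    *data = value;
    opal_atomic_wmb();   /* publish the data before raising the flag */
    *flag = 1;
}

static inline int32_t example_consume(volatile int32_t *data,
                                      volatile int32_t *flag)
{
    while (*flag == 0) {
        ;                /* spin until the producer raises the flag */
    }
    opal_atomic_rmb();   /* order the flag read before the data read */
    return *data;
}
#endif /* OMPI_GCC_INLINE_ASSEMBLY */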
#if OMPI_GCC_INLINE_ASSEMBLY
#define ia64_cmpxchg4_acq(ptr, new, old)                                \
({                                                                      \
    __u64 ia64_intri_res;                                               \
    __asm__ __volatile__ ("mov ar.ccv=%0;;" :: "rO"(old));              \
    __asm__ __volatile__ ("cmpxchg4.acq %0=[%1],%2,ar.ccv":             \
                          "=r"(ia64_intri_res) :                        \
                          "r"(ptr), "r"(new) : "memory");               \
    ia64_intri_res;                                                     \
})
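/* Illustrative sketch, not part of the original header: the macro above
 * evaluates to the prior contents of *ptr and takes its arguments as
 * (ptr, new, old), so success is detected by comparing the result with
 * the expected old value.  The function name is a hypothetical example;
 * it assumes the kernel-style __u64 type used by the macro is defined. */
static inline int example_cmpxchg4_succeeded(volatile uint32_t *ptr,
                                             uint32_t oldval, uint32_t newval)
{
    return ((uint32_t) ia64_cmpxchg4_acq(ptr, newval, oldval) == oldval);
}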
static inline int opal_atomic_cmpset_acq_32(volatile int32_t *addr,
                                            int32_t oldval, int32_t newval)
{
    int64_t ret;

    /* Seed the compare value register, then compare-and-exchange with
     * acquire semantics. */
    __asm__ __volatile__ ("mov ar.ccv=%0;;" :: "rO"(oldval));
    __asm__ __volatile__ ("cmpxchg4.acq %0=[%1],%2,ar.ccv":
                          "=r"(ret) : "r"(addr), "r"(newval) : "memory");

    return ((int32_t) ret == oldval);
}
static inline int opal_atomic_cmpset_rel_32(volatile int32_t *addr,
                                            int32_t oldval, int32_t newval)
{
    int64_t ret;

    /* Same as above, but with release semantics on the exchange. */
    __asm__ __volatile__ ("mov ar.ccv=%0;;" :: "rO"(oldval));
    __asm__ __volatile__ ("cmpxchg4.rel %0=[%1],%2,ar.ccv":
                          "=r"(ret) : "r"(addr), "r"(newval) : "memory");

    return ((int32_t) ret == oldval);
}

#endif /* OMPI_GCC_INLINE_ASSEMBLY */

#define opal_atomic_cmpset_32 opal_atomic_cmpset_acq_32
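/* Illustrative sketch, not part of the original header: higher-level
 * atomics are typically built as compare-and-set retry loops on top of
 * opal_atomic_cmpset_32.  The function name is a hypothetical example. */
#if OMPI_GCC_INLINE_ASSEMBLY
static inline int32_t example_atomic_add_32(volatile int32_t *addr,
                                            int32_t delta)
{
    int32_t oldval;

    do {
        oldval = *addr;   /* snapshot the current value */
    } while (!opal_atomic_cmpset_32(addr, oldval, oldval + delta));
                          /* retry if another thread updated *addr first */
    return oldval + delta;
}
#endif /* OMPI_GCC_INLINE_ASSEMBLY */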
#if OMPI_GCC_INLINE_ASSEMBLY
static inline int opal_atomic_cmpset_acq_64(volatile int64_t *addr,
                                            int64_t oldval, int64_t newval)
{
    int64_t ret;

    /* 64-bit variant: cmpxchg8 with acquire semantics. */
    __asm__ __volatile__ ("mov ar.ccv=%0;;" :: "rO"(oldval));
    __asm__ __volatile__ ("cmpxchg8.acq %0=[%1],%2,ar.ccv":
                          "=r"(ret) : "r"(addr), "r"(newval) : "memory");

    return (ret == oldval);
}
static inline int opal_atomic_cmpset_rel_64(volatile int64_t *addr,
                                            int64_t oldval, int64_t newval)
{
    int64_t ret;

    /* 64-bit variant: cmpxchg8 with release semantics. */
    __asm__ __volatile__ ("mov ar.ccv=%0;;" :: "rO"(oldval));
    __asm__ __volatile__ ("cmpxchg8.rel %0=[%1],%2,ar.ccv":
                          "=r"(ret) : "r"(addr), "r"(newval) : "memory");

    return (ret == oldval);
}

#endif /* OMPI_GCC_INLINE_ASSEMBLY */

#define opal_atomic_cmpset_64 opal_atomic_cmpset_acq_64
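/* Illustrative sketch, not part of the original header: the acquire and
 * release variants map naturally onto lock operations; taking a lock
 * wants acquire semantics and releasing it wants release semantics.
 * The type and function names are hypothetical examples. */
#if OMPI_GCC_INLINE_ASSEMBLY
typedef volatile int32_t example_lock_t;   /* 0 = unlocked, 1 = locked */

static inline void example_lock_acquire(example_lock_t *lock)
{
    /* cmpxchg4.acq keeps the critical section's loads and stores from
     * moving above the lock acquisition. */
    while (!opal_atomic_cmpset_acq_32(lock, 0, 1)) {
        ;   /* spin until we win the compare-and-set */
    }
}

static inline void example_lock_release(example_lock_t *lock)
{
    /* cmpxchg4.rel publishes the critical section's stores before the
     * lock is seen as free again. */
    opal_atomic_cmpset_rel_32(lock, 1, 0);
}
#endif /* OMPI_GCC_INLINE_ASSEMBLY */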
#endif /* ! OMPI_SYS_ARCH_ATOMIC_H */