48 #include <citrusleaf/cf_arch.h>
// Atomic types for 32-bit targets: the native atomic word is 4 bytes, so the
// pointer-sized and "int" atomics are all 32 bits wide.
// NOTE(review): presumably the non-x86_64 branch of an arch #if whose guard
// lines are outside this view — confirm against the full header.
#define SIZEOF_ATOMIC_INT 4
typedef volatile uint32_t cf_atomic32;
typedef volatile uint32_t cf_atomic_p;   // pointer-sized atomic
typedef volatile uint32_t cf_atomic_int; // natural-word atomic
typedef uint32_t cf_atomic_int_t;        // plain (non-volatile) value type
// Atomic types for 64-bit targets: pointer-sized and "int" atomics are 8
// bytes; cf_atomic32 remains available as an explicitly 32-bit atomic.
#define SIZEOF_ATOMIC_INT 8
typedef volatile uint64_t cf_atomic64;
typedef volatile uint32_t cf_atomic32;
typedef volatile uint64_t cf_atomic_p;   // pointer-sized atomic
typedef volatile uint64_t cf_atomic_int; // natural-word atomic
typedef uint64_t cf_atomic_int_t;        // plain (non-volatile) value type
// Basic cf_atomic32 operations. Note the asymmetry: get takes the VALUE
// itself (callers pass *a — the macro expands to its argument unchanged),
// while set takes a POINTER. These are plain volatile accesses, not
// barriers. sub/incr/decr are built on cf_atomic32_add (defined elsewhere
// in this header), with sub expressed as adding the negation.
#define cf_atomic32_get(a) (a)
#define cf_atomic32_set(a, b) (*(a) = (b))
#define cf_atomic32_sub(a,b) (cf_atomic32_add((a), (0 - (b))))
#define cf_atomic32_incr(a) (cf_atomic32_add((a), 1))
#define cf_atomic32_decr(a) (cf_atomic32_add((a), -1))
// Basic cf_atomic64 operations — same conventions as the 32-bit set:
// get takes the value (callers pass *a, as cf_atomic64_get(*a) below
// demonstrates), set takes a pointer; sub/incr/decr build on
// cf_atomic64_add.
#define cf_atomic64_get(a) (a)
#define cf_atomic64_set(a, b) (*(a) = (b))
#define cf_atomic64_sub(a,b) (cf_atomic64_add((a), (0 - (b))))
#define cf_atomic64_incr(a) (cf_atomic64_add((a), 1))
#define cf_atomic64_decr(a) (cf_atomic64_add((a), -1))
// Pointer-sized atomics map onto the 64-bit primitives on this branch.
// NOTE(review): no cf_atomic_p_sub is visible in either arch branch of this
// header — confirm whether its absence is intentional before relying on it.
#define cf_atomic_p_get(_a) cf_atomic64_get(_a)
#define cf_atomic_p_set(_a, _b) cf_atomic64_set(_a, _b)
#define cf_atomic_p_add(_a, _b) cf_atomic64_add(_a, _b)
#define cf_atomic_p_incr(_a) cf_atomic64_add((_a), 1)
#define cf_atomic_p_decr(_a) cf_atomic64_add((_a), -1)
// Natural-word ("int") atomics map onto the 64-bit primitives on this
// branch. incr/decr expand directly to cf_atomic64_add with +/-1 rather
// than going through cf_atomic64_incr/decr — equivalent by definition.
#define cf_atomic_int_get(_a) cf_atomic64_get(_a)
#define cf_atomic_int_set(_a, _b) cf_atomic64_set(_a, _b)
#define cf_atomic_int_add(_a, _b) cf_atomic64_add(_a, _b)
#define cf_atomic_int_sub(_a, _b) cf_atomic64_sub(_a, _b)
#define cf_atomic_int_incr(_a) cf_atomic64_add((_a), 1)
#define cf_atomic_int_decr(_a) cf_atomic64_add((_a), -1)
110 #else // ifndef MARCH_x86_64
// 32-bit branch: pointer-sized atomics map onto the 32-bit primitives
// (pointers are 4 bytes here).
#define cf_atomic_p_get(_a) cf_atomic32_get(_a)
#define cf_atomic_p_set(_a, _b) cf_atomic32_set(_a, _b)
#define cf_atomic_p_add(_a, _b) cf_atomic32_add(_a, _b)
#define cf_atomic_p_incr(_a) cf_atomic32_add((_a), 1)
#define cf_atomic_p_decr(_a) cf_atomic32_add((_a), -1)
// 32-bit branch: natural-word atomics map onto the 32-bit primitives,
// mirroring the 64-bit mappings above one-for-one.
#define cf_atomic_int_get(_a) cf_atomic32_get(_a)
#define cf_atomic_int_set(_a, _b) cf_atomic32_set(_a, _b)
#define cf_atomic_int_add(_a, _b) cf_atomic32_add(_a, _b)
#define cf_atomic_int_sub(_a, _b) cf_atomic32_sub(_a, _b)
#define cf_atomic_int_incr(_a) cf_atomic32_add((_a), 1)
#define cf_atomic_int_decr(_a) cf_atomic32_add((_a), -1)
125 #endif // ifdef MARCH_x86_64
// Pointer-sized CAS/FAS and helpers, 64-bit branch.
// cas = compare-and-swap, fas = fetch-and-store (exchange); the _m suffix
// marks the statement-expression macro forms. addunless/setmax are the
// CAS-retry helpers implemented further down.
#define cf_atomic_p_cas(_a, _b, _x) cf_atomic64_cas(_a, _b, _x)
#define cf_atomic_p_cas_m(_a, _b, _x) cf_atomic64_cas_m(_a, _b, _x)
#define cf_atomic_p_fas(_a, _b) cf_atomic64_fas(_a, _b)
#define cf_atomic_p_fas_m(_a, _b) cf_atomic64_fas_m(_a, _b)
#define cf_atomic_p_addunless(_a, _b, _x) cf_atomic64_addunless(_a, _b, _x)
#define cf_atomic_p_setmax(_a, _x) cf_atomic64_setmax(_a, _x)
// Natural-word CAS/FAS and helpers, 64-bit branch — same mapping pattern
// as the cf_atomic_p_* set above.
#define cf_atomic_int_cas(_a, _b, _x) cf_atomic64_cas(_a, _b, _x)
#define cf_atomic_int_cas_m(_a, _b, _x) cf_atomic64_cas_m(_a, _b, _x)
#define cf_atomic_int_fas(_a, _b) cf_atomic64_fas(_a, _b)
#define cf_atomic_int_fas_m(_a, _b) cf_atomic64_fas_m(_a, _b)
#define cf_atomic_int_addunless(_a, _b, _x) cf_atomic64_addunless(_a, _b, _x)
#define cf_atomic_int_setmax(_a, _x) cf_atomic64_setmax(_a, _x)
145 #else // ifndef CF_WINDOWS && ifndef MARCH_x86_64
// Pointer-sized CAS/FAS and helpers, 32-bit branch.
#define cf_atomic_p_cas(_a, _b, _x) cf_atomic32_cas(_a, _b, _x)
#define cf_atomic_p_cas_m(_a, _b, _x) cf_atomic32_cas_m(_a, _b, _x)
#define cf_atomic_p_fas(_a, _b) cf_atomic32_fas(_a, _b)
#define cf_atomic_p_fas_m(_a, _b) cf_atomic32_fas_m(_a, _b)
#define cf_atomic_p_addunless(_a, _b, _x) cf_atomic32_addunless(_a, _b, _x)
#define cf_atomic_p_setmax(_a, _x) cf_atomic32_setmax(_a, _x)
// Natural-word CAS/FAS and helpers, 32-bit branch.
#define cf_atomic_int_cas(_a, _b, _x) cf_atomic32_cas(_a, _b, _x)
#define cf_atomic_int_cas_m(_a, _b, _x) cf_atomic32_cas_m(_a, _b, _x)
#define cf_atomic_int_fas(_a, _b) cf_atomic32_fas(_a, _b)
#define cf_atomic_int_fas_m(_a, _b) cf_atomic32_fas_m(_a, _b)
#define cf_atomic_int_addunless(_a, _b, _x) cf_atomic32_addunless(_a, _b, _x)
#define cf_atomic_int_setmax(_a, _x) cf_atomic32_setmax(_a, _x)
161 #endif // ifdef MARCH_x86_64
162 #endif // ifndef CF_WINDOWS
// Windows variant: _ReadWriteBarrier() is a COMPILER-only barrier — it
// prevents compile-time reordering but emits no fence instruction, unlike
// the mfence used on the non-Windows side.
// NOTE(review): if a full hardware barrier is intended, MemoryBarrier()
// would be needed here — confirm intent before changing.
#define smb_mb() _ReadWriteBarrier()
169 #else // ifndef CF_WINDOWS
// Full hardware memory barrier (x86 mfence); the "memory" clobber also
// makes it a compiler barrier.
#define smb_mb() asm volatile("mfence":::"memory")
// Full barrier via a LOCK-prefixed RMW on the stack pointer (a classic
// pre-mfence idiom). Uses %%esp, so this form is 32-bit-only.
// NOTE(review): presumably guarded by an arch #if not visible in this view —
// confirm it is never compiled for x86_64.
#define CF_MEMORY_BARRIER() __asm__ __volatile__ ("lock; addl $0,0(%%esp)" : : : "memory")
// Read barrier is simply the full barrier.
#define CF_MEMORY_BARRIER_READ() CF_MEMORY_BARRIER()
// Write barrier is compiler-only (empty asm with "memory" clobber) —
// presumably relying on x86's in-order stores; NOTE(review): confirm this
// is never used where a hardware store fence is required.
#define CF_MEMORY_BARRIER_WRITE() __asm__ __volatile__ ("" : : : "memory")
203 b = _InterlockedExchangeAdd64((LONGLONG *)a, b);
207 #endif // ifdef MARCH_x86_64
208 #endif // ifdef CF_WINDOWS
219 __asm__ __volatile__ (
"lock; xaddq %0, %1" :
"+r" (b),
"+m" (*a) : :
"memory");
// Macro (statement-expression) form of 64-bit compare-and-swap: if
// *_a == _b, store _x into *_a; the expression yields the value previously
// in *_a ("=a"/"0" tie __b to rax for cmpxchgq). Macro continues below.
#define cf_atomic64_cas_m(_a, _b, _x) ({ \
	__typeof__(_b) __b = _b; \
	__asm__ __volatile__ ("lock; cmpxchgq %1,%2" : "=a"(__b) : "q"(_x), "m"(*(_a)), "0"(_b) : "memory"); \
// Macro form of 64-bit fetch-and-store (atomic exchange): store _b into
// *_a, yield the old value. NOTE(review): unlike the cas form above, no
// "memory" clobber is declared here — confirm callers don't depend on it
// acting as a compiler barrier. Macro continues below.
#define cf_atomic64_fas_m(_a, _b) ({ \
	__typeof__(_b) __b; \
	__asm__ __volatile__ ("lock; xchgq %0,%1" : "=r"(__b) : "m"(*(_a)), "0"(_b)); \
// 64-bit compare-and-swap: if *a == b, store x into *a. Returns the prior
// value of *a (== b exactly when the swap happened). lock cmpxchgq with a
// "memory" clobber — full barrier semantics.
static inline int64_t cf_atomic64_cas(cf_atomic64 *a, int64_t b, int64_t x) {
237 __asm__ __volatile__ (
"lock; cmpxchgq %1,%2" :
"=a"(p) :
"q"(x),
"m"(*(a)),
"0"(b) :
"memory");
// 64-bit fetch-and-store: atomically writes *b into *a and returns the old
// *a. Note b is passed by POINTER and dereferenced (asm input "0"(*(b))) —
// unusual relative to cf_atomic64_cas, which takes values.
static inline int64_t cf_atomic64_fas(cf_atomic64 *a, cf_atomic64 *b) {
244 __asm__ __volatile__ (
"lock; xchgq %0,%1" :
"=r"(p) :
"m"(*(a)),
"0"(*(b)) :
"memory");
// Atomically add x to *a unless the current value equals b, via a CAS-retry
// loop (reads cur, attempts cas(a, cur, cur + x), retries on contention).
// NOTE(review): return value not fully visible here — presumably a
// success/failure indicator; confirm against the full body.
static inline int64_t cf_atomic64_addunless(cf_atomic64 *a, int64_t b, int64_t x) {
252 cur = cf_atomic64_get(*a);
261 prior = cf_atomic64_cas(a, cur, cur + x);
// Atomically raise *a to x via a CAS-retry loop — presumably only when x
// exceeds the current value (the comparison is outside this view; confirm).
static inline int64_t cf_atomic64_setmax(cf_atomic64 *a, int64_t x) {
277 cur = cf_atomic64_get(*a);
286 prior = cf_atomic64_cas(a, cur, x);
298 #endif // ifdef MARCH_x86_64
299 #endif // ifndef CF_WINDOWS
309 b = _InterlockedExchangeAdd((
volatile long *)a, b);
313 #endif // ifdef CF_WINDOWS
323 __asm__ __volatile__ (
"lock; xadd %0, %1" :
"+r" (b),
"+m" (*a) : :
"memory");
// Macro form of 32-bit fetch-and-store (atomic exchange) — 32-bit twin of
// cf_atomic64_fas_m; same NOTE applies: no "memory" clobber declared.
// Macro continues below.
#define cf_atomic32_fas_m(_a, _b) ({ \
	__typeof__(_b) __b; \
	__asm__ __volatile__ ("lock; xchg %0,%1" : "=r"(__b) : "m"(*(_a)), "0"(_b)); \
// Macro (statement-expression) form of 32-bit compare-and-swap: if
// *_a == _b, store _x into *_a; the expression yields the value previously
// in *_a. 32-bit twin of cf_atomic64_cas_m. Macro continues below.
// FIX(review): removed a stray "\" before "({" — a backslash that is not
// immediately followed by a newline is not a valid line continuation, so
// the original line could not preprocess (compare the well-formed
// cf_atomic64_cas_m definition).
#define cf_atomic32_cas_m(_a, _b, _x) ({ \
	__typeof__(_b) __b = _b; \
	__asm__ __volatile__ ("lock; cmpxchg %1,%2" : "=a"(__b) : "q"(_x), "m"(*(_a)), "0"(_b) : "memory"); \
341 __asm__ __volatile__ (
"lock; cmpxchg %1,%2" :
"=a"(p) :
"q"(x),
"m"(*(a)),
"0"(b) :
"memory");
347 __asm__ __volatile__ (
"lock; xchg %0,%1" :
"=r"(p) :
"m"(*(a)),
"0"(*(b)) :
"memory");
367 if (prior == cur)
break;
390 if (prior == cur)
break;
398 #endif // ifndef CF_WINDOWS