#ifndef __TOOLS_LINUX_ATOMIC_H
#define __TOOLS_LINUX_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>

typedef struct {
	int		counter;
} atomic_t;

typedef struct {
	long		counter;
} atomic_long_t;

typedef struct {
	s64		counter;
} atomic64_t;
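/*
 * Userspace shim for the kernel's atomic_t API.  Two backends are
 * provided: liburcu's uatomic operations by default, or the compiler's
 * C11-style __atomic builtins when C11_ATOMICS is defined.
 */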
#ifndef C11_ATOMICS

#include <urcu/uatomic.h>
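/*
 * liburcu's 64-bit uatomic ops are only available on 64-bit targets;
 * 32-bit builds route atomic64_t through the spinlock-backed
 * implementation declared at the bottom of this file.
 */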
#if (CAA_BITS_PER_LONG != 64)
#define ATOMIC64_SPINLOCK
#endif
#define __ATOMIC_READ(p)		uatomic_read(p)
#define __ATOMIC_SET(p, v)		uatomic_set(p, v)
#define __ATOMIC_ADD_RETURN(v, p)	uatomic_add_return(p, v)
#define __ATOMIC_SUB_RETURN(v, p)	uatomic_sub_return(p, v)
#define __ATOMIC_ADD(v, p)		uatomic_add(p, v)
#define __ATOMIC_SUB(v, p)		uatomic_sub(p, v)
#define __ATOMIC_INC(p)			uatomic_inc(p)
#define __ATOMIC_DEC(p)			uatomic_dec(p)
#define __ATOMIC_AND(v, p)		uatomic_and(p, v)
#define __ATOMIC_OR(v, p)		uatomic_or(p, v)
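/*
 * Note the argument order: the __ATOMIC_*() wrappers take
 * (value, pointer) to match the kernel's atomic_*() calling
 * convention, while uatomic_*() takes (pointer, value).
 */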
#define xchg(p, v)			uatomic_xchg(p, v)
#define xchg_acquire(p, v)		uatomic_xchg(p, v)
#define cmpxchg(p, old, new)		uatomic_cmpxchg(p, old, new)
#define cmpxchg_acquire(p, old, new)	uatomic_cmpxchg(p, old, new)
#define cmpxchg_release(p, old, new)	uatomic_cmpxchg(p, old, new)
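/*
 * uatomic_xchg() and uatomic_cmpxchg() imply full memory barriers, so
 * aliasing the _acquire/_release variants to them is safe: the ordering
 * provided is stronger than the ordering requested.
 */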
#define smp_mb__before_atomic()		cmm_smp_mb__before_uatomic_add()
#define smp_mb__after_atomic()		cmm_smp_mb__after_uatomic_add()
#define smp_wmb()			cmm_smp_wmb()
#define smp_rmb()			cmm_smp_rmb()
#define smp_mb()			cmm_smp_mb()
#define smp_read_barrier_depends()	cmm_smp_read_barrier_depends()
#else /* C11_ATOMICS */
#define __ATOMIC_READ(p)		__atomic_load_n(p, __ATOMIC_RELAXED)
#define __ATOMIC_SET(p, v)		__atomic_store_n(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_ADD_RETURN(v, p)	__atomic_add_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_ADD_RETURN_RELEASE(v, p)				\
					__atomic_add_fetch(p, v, __ATOMIC_RELEASE)
#define __ATOMIC_SUB_RETURN(v, p)	__atomic_sub_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_SUB_RETURN_RELEASE(v, p)				\
					__atomic_sub_fetch(p, v, __ATOMIC_RELEASE)
#define __ATOMIC_AND(v, p)		__atomic_and_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_OR(v, p)		__atomic_or_fetch(p, v, __ATOMIC_RELAXED)
#define xchg(p, v)			__atomic_exchange_n(p, v, __ATOMIC_SEQ_CST)
#define xchg_acquire(p, v)		__atomic_exchange_n(p, v, __ATOMIC_ACQUIRE)
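/*
 * The kernel's cmpxchg() returns the value *p held before the attempt,
 * while __atomic_compare_exchange_n() returns a bool and writes the old
 * value back through its "expected" argument; the wrappers below
 * convert the latter convention into the former.  C11 forbids
 * __ATOMIC_RELEASE as a failure ordering, hence cmpxchg_release() uses
 * a relaxed failure order.
 */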
#define cmpxchg(p, old, new)					\
({								\
	typeof(*(p)) __old = (old);				\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_SEQ_CST,		\
				    __ATOMIC_SEQ_CST);		\
	__old;							\
})
#define cmpxchg_acquire(p, old, new)				\
({								\
	typeof(*(p)) __old = (old);				\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_ACQUIRE,		\
				    __ATOMIC_ACQUIRE);		\
	__old;							\
})
#define cmpxchg_release(p, old, new)				\
({								\
	typeof(*(p)) __old = (old);				\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_RELEASE,		\
				    __ATOMIC_RELAXED);		\
	__old;							\
})
#define smp_mb__before_atomic()	__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_mb__after_atomic()	__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_wmb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_rmb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_mb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_read_barrier_depends()

#endif /* C11_ATOMICS */
#define smp_store_mb(var, value) do { WRITE_ONCE(var, value); smp_mb(); } while (0)
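/*
 * smp_load_acquire()/smp_store_release() below are built from a full
 * smp_mb(), which is stronger than acquire/release semantics require,
 * but always correct.
 */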
#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	smp_mb();							\
	___p1;								\
})
#define smp_store_release(p, v)						\
do {									\
	smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)
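/*
 * Illustrative pairing (the variables are hypothetical, not part of
 * this header): release/acquire publishes data to another thread.
 *
 *	// writer			// reader
 *	data = compute();		if (smp_load_acquire(&ready))
 *	smp_store_release(&ready, 1);		use(data); // sees the write
 */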
/* atomic interface: */
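/*
 * Anything the backend above did not provide is synthesized here: void
 * add/sub/inc/dec reuse the value-returning forms, and the _release
 * variants fall back to a full barrier before a relaxed operation.
 */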
#ifndef __ATOMIC_ADD
#define __ATOMIC_ADD(i, v) __ATOMIC_ADD_RETURN(i, v)
#endif
#ifndef __ATOMIC_ADD_RETURN_RELEASE
#define __ATOMIC_ADD_RETURN_RELEASE(i, v)				\
	({ smp_mb__before_atomic(); __ATOMIC_ADD_RETURN(i, v); })
#endif
#ifndef __ATOMIC_SUB_RETURN_RELEASE
#define __ATOMIC_SUB_RETURN_RELEASE(i, v)				\
	({ smp_mb__before_atomic(); __ATOMIC_SUB_RETURN(i, v); })
#endif
#ifndef __ATOMIC_SUB
#define __ATOMIC_SUB(i, v) __ATOMIC_SUB_RETURN(i, v)
#endif
#ifndef __ATOMIC_INC_RETURN
#define __ATOMIC_INC_RETURN(v) __ATOMIC_ADD_RETURN(1, v)
#endif
#ifndef __ATOMIC_DEC_RETURN
#define __ATOMIC_DEC_RETURN(v) __ATOMIC_SUB_RETURN(1, v)
#endif
#ifndef __ATOMIC_INC
#define __ATOMIC_INC(v) __ATOMIC_ADD(1, v)
#endif
#ifndef __ATOMIC_DEC
#define __ATOMIC_DEC(v) __ATOMIC_SUB(1, v)
#endif
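/*
 * DEF_ATOMIC_OPS() stamps out the kernel-style accessors
 * (a_type##_read(), a_type##_add_return(), a_type##_cmpxchg(), ...)
 * for one counter type; it is instantiated below for atomic_t,
 * atomic_long_t and, on 64-bit hosts, atomic64_t.
 */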
#define DEF_ATOMIC_OPS(a_type, i_type)					\
static inline i_type a_type##_read(const a_type##_t *v)		\
{									\
	return __ATOMIC_READ(&v->counter);				\
}									\
static inline void a_type##_set(a_type##_t *v, i_type i)		\
{									\
	__ATOMIC_SET(&v->counter, i);					\
}									\
static inline i_type a_type##_add_return(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_ADD_RETURN(i, &v->counter);			\
}									\
static inline i_type a_type##_add_return_release(i_type i, a_type##_t *v)\
{									\
	return __ATOMIC_ADD_RETURN_RELEASE(i, &v->counter);		\
}									\
static inline i_type a_type##_sub_return_release(i_type i, a_type##_t *v)\
{									\
	return __ATOMIC_SUB_RETURN_RELEASE(i, &v->counter);		\
}									\
static inline i_type a_type##_sub_return(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN(i, &v->counter);			\
}									\
static inline void a_type##_add(i_type i, a_type##_t *v)		\
{									\
	__ATOMIC_ADD(i, &v->counter);					\
}									\
static inline void a_type##_sub(i_type i, a_type##_t *v)		\
{									\
	__ATOMIC_SUB(i, &v->counter);					\
}									\
static inline i_type a_type##_inc_return(a_type##_t *v)		\
{									\
	return __ATOMIC_INC_RETURN(&v->counter);			\
}									\
static inline i_type a_type##_dec_return(a_type##_t *v)		\
{									\
	return __ATOMIC_DEC_RETURN(&v->counter);			\
}									\
static inline void a_type##_inc(a_type##_t *v)				\
{									\
	__ATOMIC_INC(&v->counter);					\
}									\
static inline void a_type##_dec(a_type##_t *v)				\
{									\
	__ATOMIC_DEC(&v->counter);					\
}									\
static inline bool a_type##_add_negative(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_ADD_RETURN(i, &v->counter) < 0;		\
}									\
static inline bool a_type##_sub_and_test(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN(i, &v->counter) == 0;		\
}									\
static inline bool a_type##_inc_and_test(a_type##_t *v)		\
{									\
	return __ATOMIC_INC_RETURN(&v->counter) == 0;			\
}									\
static inline bool a_type##_dec_and_test(a_type##_t *v)		\
{									\
	return __ATOMIC_DEC_RETURN(&v->counter) == 0;			\
}									\
static inline i_type a_type##_add_unless(a_type##_t *v, i_type a, i_type u)\
{									\
	i_type old, c = __ATOMIC_READ(&v->counter);			\
	while (c != u && (old = cmpxchg(&v->counter, c, c + a)) != c)	\
		c = old;						\
	return c != u;							\
}									\
static inline bool a_type##_inc_not_zero(a_type##_t *v)		\
{									\
	return a_type##_add_unless(v, 1, 0);				\
}									\
static inline void a_type##_and(i_type a, a_type##_t *v)		\
{									\
	__ATOMIC_AND(a, &v->counter);					\
}									\
static inline void a_type##_or(i_type a, a_type##_t *v)			\
{									\
	__ATOMIC_OR(a, &v->counter);					\
}									\
static inline i_type a_type##_xchg(a_type##_t *v, i_type i)		\
{									\
	return xchg(&v->counter, i);					\
}									\
static inline i_type a_type##_cmpxchg(a_type##_t *v, i_type old, i_type new)\
{									\
	return cmpxchg(&v->counter, old, new);				\
}									\
static inline i_type a_type##_cmpxchg_acquire(a_type##_t *v, i_type old, i_type new)\
{									\
	return cmpxchg_acquire(&v->counter, old, new);			\
}									\
static inline bool a_type##_try_cmpxchg_acquire(a_type##_t *v, i_type *old, i_type new)\
{									\
	i_type prev = *old;						\
	*old = cmpxchg_acquire(&v->counter, *old, new);			\
	return prev == *old;						\
}
DEF_ATOMIC_OPS(atomic, int)
DEF_ATOMIC_OPS(atomic_long, long)
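/*
 * Example (illustrative only; struct obj and its helpers are
 * hypothetical): a minimal refcount built from the generated ops.
 *
 *	struct obj { atomic_t ref; };
 *
 *	static bool obj_get(struct obj *o)
 *	{
 *		return atomic_inc_not_zero(&o->ref); // fails once ref == 0
 *	}
 *
 *	static void obj_put(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->ref))
 *			free(o);
 *	}
 */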
#ifndef ATOMIC64_SPINLOCK
DEF_ATOMIC_OPS(atomic64, s64)
#else
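/*
 * 32-bit host: atomic64_t falls back to an out-of-line, lock-based
 * implementation.
 */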
s64 atomic64_read(const atomic64_t *v);
void atomic64_set(atomic64_t *v, s64);

s64 atomic64_add_return(s64, atomic64_t *);
s64 atomic64_sub_return(s64, atomic64_t *);
void atomic64_add(s64, atomic64_t *);
void atomic64_sub(s64, atomic64_t *);

s64 atomic64_xchg(atomic64_t *, s64);
s64 atomic64_cmpxchg(atomic64_t *, s64, s64);
s64 atomic64_add_unless(atomic64_t *, s64, s64);
#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
#define atomic64_inc(v)			atomic64_add(1LL, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1LL, (v))
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec(v)			atomic64_sub(1LL, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)
static inline s64 atomic64_add_return_release(s64 i, atomic64_t *v)
{
	smp_mb__before_atomic();
	return atomic64_add_return(i, v);
}
static inline s64 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	return atomic64_cmpxchg(v, old, new);
}

#endif
#endif /* __TOOLS_LINUX_ATOMIC_H */