#ifndef __TOOLS_LINUX_ATOMIC_H
#define __TOOLS_LINUX_ATOMIC_H
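
/*
 * Userspace shim for the kernel atomic_t/atomic64_t API. Two backends are
 * provided: liburcu's uatomic primitives, or (when C11_ATOMICS is defined)
 * the compiler's C11-style __atomic builtins.
 */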

#include <linux/compiler.h>
#include <linux/types.h>

typedef struct {
	int		counter;
} atomic_t;

typedef struct {
	long		counter;
} atomic_long_t;

typedef struct {
	s64		counter;
} atomic64_t;

#ifndef C11_ATOMICS

#include <urcu/uatomic.h>
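
/*
 * liburcu's uatomic ops only cover the native word size, so on 32-bit
 * targets 64-bit atomics cannot be done locklessly here; ATOMIC64_SPINLOCK
 * selects the out-of-line, lock-based atomic64 implementation below.
 */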
#if (CAA_BITS_PER_LONG != 64)
#define ATOMIC64_SPINLOCK
#endif

#define __ATOMIC_READ(p)		uatomic_read(p)
#define __ATOMIC_SET(p, v)		uatomic_set(p, v)
#define __ATOMIC_ADD_RETURN(v, p)	uatomic_add_return(p, v)
#define __ATOMIC_SUB_RETURN(v, p)	uatomic_sub_return(p, v)
#define __ATOMIC_ADD(v, p)		uatomic_add(p, v)
#define __ATOMIC_SUB(v, p)		uatomic_sub(p, v)
#define __ATOMIC_INC(p)			uatomic_inc(p)
#define __ATOMIC_DEC(p)			uatomic_dec(p)
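
/*
 * liburcu's uatomic_xchg()/uatomic_cmpxchg() imply full memory barriers,
 * so the _acquire/_release variants below can safely alias the plain
 * primitives; they are stronger than strictly required.
 */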
#define xchg(p, v)			uatomic_xchg(p, v)
#define xchg_acquire(p, v)		uatomic_xchg(p, v)
#define cmpxchg(p, old, new)		uatomic_cmpxchg(p, old, new)
#define cmpxchg_acquire(p, old, new)	uatomic_cmpxchg(p, old, new)
#define cmpxchg_release(p, old, new)	uatomic_cmpxchg(p, old, new)

#define smp_mb__before_atomic()		cmm_smp_mb__before_uatomic_add()
#define smp_mb__after_atomic()		cmm_smp_mb__after_uatomic_add()
#define smp_wmb()			cmm_smp_wmb()
#define smp_rmb()			cmm_smp_rmb()
#define smp_mb()			cmm_smp_mb()
#define smp_read_barrier_depends()	cmm_smp_read_barrier_depends()

#else /* C11_ATOMICS */

#define __ATOMIC_READ(p)		__atomic_load_n(p, __ATOMIC_RELAXED)
#define __ATOMIC_SET(p, v)		__atomic_store_n(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_ADD_RETURN(v, p)	__atomic_add_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_ADD_RETURN_RELEASE(v, p)				\
					__atomic_add_fetch(p, v, __ATOMIC_RELEASE)
#define __ATOMIC_SUB_RETURN(v, p)	__atomic_sub_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_SUB_RETURN_RELEASE(v, p)				\
					__atomic_sub_fetch(p, v, __ATOMIC_RELEASE)

#define xchg(p, v)			__atomic_exchange_n(p, v, __ATOMIC_SEQ_CST)
#define xchg_acquire(p, v)		__atomic_exchange_n(p, v, __ATOMIC_ACQUIRE)
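
/*
 * __atomic_compare_exchange_n() writes the value actually found into
 * __old on failure, so evaluating to __old afterwards gives kernel
 * cmpxchg() semantics: the previous value, equal to 'old' iff the swap
 * took place.
 */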
#define cmpxchg(p, old, new)					\
({								\
	typeof(*(p)) __old = (old);				\
								\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_SEQ_CST,		\
				    __ATOMIC_SEQ_CST);		\
	__old;							\
})

#define cmpxchg_acquire(p, old, new)				\
({								\
	typeof(*(p)) __old = (old);				\
								\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_ACQUIRE,		\
				    __ATOMIC_ACQUIRE);		\
	__old;							\
})

/* the failure memory order may not be RELEASE, hence RELAXED there: */
#define cmpxchg_release(p, old, new)				\
({								\
	typeof(*(p)) __old = (old);				\
								\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_RELEASE,		\
				    __ATOMIC_RELAXED);		\
	__old;							\
})

#define smp_mb__before_atomic()	__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_mb__after_atomic()	__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_wmb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_rmb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_mb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_read_barrier_depends()

#endif /* C11_ATOMICS */

#define smp_store_mb(var, value)	do { WRITE_ONCE(var, value); smp_mb(); } while (0)
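
/*
 * smp_load_acquire()/smp_store_release() are implemented with a full
 * smp_mb() rather than true acquire/release operations: stronger than
 * necessary, but simple and portable across both backends.
 */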
#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	smp_mb();							\
	___p1;								\
})

#define smp_store_release(p, v)						\
do {									\
	smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)

/* atomic interface: */
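
/*
 * Fallbacks: anything the selected backend didn't define above is built
 * here from the primitives it did provide, e.g. inc/dec from add/sub and
 * the _release variants from a full barrier plus the relaxed op.
 */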

#ifndef __ATOMIC_ADD
#define __ATOMIC_ADD(i, v) __ATOMIC_ADD_RETURN(i, v)
#endif

#ifndef __ATOMIC_ADD_RETURN_RELEASE
#define __ATOMIC_ADD_RETURN_RELEASE(i, v)				\
	({ smp_mb__before_atomic(); __ATOMIC_ADD_RETURN(i, v); })
#endif

#ifndef __ATOMIC_SUB_RETURN_RELEASE
#define __ATOMIC_SUB_RETURN_RELEASE(i, v)				\
	({ smp_mb__before_atomic(); __ATOMIC_SUB_RETURN(i, v); })
#endif

#ifndef __ATOMIC_SUB
#define __ATOMIC_SUB(i, v) __ATOMIC_SUB_RETURN(i, v)
#endif

#ifndef __ATOMIC_INC_RETURN
#define __ATOMIC_INC_RETURN(v) __ATOMIC_ADD_RETURN(1, v)
#endif

#ifndef __ATOMIC_DEC_RETURN
#define __ATOMIC_DEC_RETURN(v) __ATOMIC_SUB_RETURN(1, v)
#endif

#ifndef __ATOMIC_INC
#define __ATOMIC_INC(v) __ATOMIC_ADD(1, v)
#endif

#ifndef __ATOMIC_DEC
#define __ATOMIC_DEC(v) __ATOMIC_SUB(1, v)
#endif

#define DEF_ATOMIC_OPS(a_type, i_type)					\
static inline i_type a_type##_read(const a_type##_t *v)		\
{									\
	return __ATOMIC_READ(&v->counter);				\
}									\
									\
static inline void a_type##_set(a_type##_t *v, i_type i)		\
{									\
	__ATOMIC_SET(&v->counter, i);					\
}									\
									\
static inline i_type a_type##_add_return(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_ADD_RETURN(i, &v->counter);			\
}									\
									\
static inline i_type a_type##_add_return_release(i_type i, a_type##_t *v)\
{									\
	return __ATOMIC_ADD_RETURN_RELEASE(i, &v->counter);		\
}									\
									\
static inline i_type a_type##_sub_return_release(i_type i, a_type##_t *v)\
{									\
	return __ATOMIC_SUB_RETURN_RELEASE(i, &v->counter);		\
}									\
									\
static inline i_type a_type##_sub_return(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN(i, &v->counter);			\
}									\
									\
static inline void a_type##_add(i_type i, a_type##_t *v)		\
{									\
	__ATOMIC_ADD(i, &v->counter);					\
}									\
									\
static inline void a_type##_sub(i_type i, a_type##_t *v)		\
{									\
	__ATOMIC_SUB(i, &v->counter);					\
}									\
									\
static inline i_type a_type##_inc_return(a_type##_t *v)		\
{									\
	return __ATOMIC_INC_RETURN(&v->counter);			\
}									\
									\
static inline i_type a_type##_dec_return(a_type##_t *v)		\
{									\
	return __ATOMIC_DEC_RETURN(&v->counter);			\
}									\
									\
static inline void a_type##_inc(a_type##_t *v)				\
{									\
	__ATOMIC_INC(&v->counter);					\
}									\
									\
static inline void a_type##_dec(a_type##_t *v)				\
{									\
	__ATOMIC_DEC(&v->counter);					\
}									\
									\
static inline bool a_type##_add_negative(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_ADD_RETURN(i, &v->counter) < 0;		\
}									\
									\
static inline bool a_type##_sub_and_test(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN(i, &v->counter) == 0;		\
}									\
									\
static inline bool a_type##_inc_and_test(a_type##_t *v)		\
{									\
	return __ATOMIC_INC_RETURN(&v->counter) == 0;			\
}									\
									\
static inline bool a_type##_dec_and_test(a_type##_t *v)		\
{									\
	return __ATOMIC_DEC_RETURN(&v->counter) == 0;			\
}									\
									\
static inline i_type a_type##_add_unless(a_type##_t *v, i_type a, i_type u)\
{									\
	i_type old, c = __ATOMIC_READ(&v->counter);			\
	while (c != u && (old = cmpxchg(&v->counter, c, c + a)) != c)	\
		c = old;						\
	return c;							\
}									\
									\
static inline bool a_type##_inc_not_zero(a_type##_t *v)		\
{									\
	return a_type##_add_unless(v, 1, 0);				\
}									\
									\
static inline i_type a_type##_xchg(a_type##_t *v, i_type i)		\
{									\
	return xchg(&v->counter, i);					\
}									\
									\
static inline i_type a_type##_cmpxchg(a_type##_t *v, i_type old, i_type new)\
{									\
	return cmpxchg(&v->counter, old, new);				\
}									\
									\
static inline i_type a_type##_cmpxchg_acquire(a_type##_t *v, i_type old, i_type new)\
{									\
	return cmpxchg_acquire(&v->counter, old, new);			\
}

DEF_ATOMIC_OPS(atomic,		int)
DEF_ATOMIC_OPS(atomic_long,	long)
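
/*
 * Example (illustrative): DEF_ATOMIC_OPS(atomic, int) generates
 * atomic_read(), atomic_set(), atomic_inc_return(), ... over atomic_t,
 * e.g. for refcounting:
 *
 *	atomic_t ref = { .counter = 1 };
 *
 *	if (atomic_inc_not_zero(&ref)) {
 *		...
 *		if (atomic_dec_and_test(&ref))
 *			free_object();		// hypothetical cleanup
 *	}
 */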

#ifndef ATOMIC64_SPINLOCK
DEF_ATOMIC_OPS(atomic64,	s64)
#else
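/*
 * Lock-based atomic64 for 32-bit targets: declarations only; the
 * definitions are expected to live in a companion .c file, presumably
 * guarding the 64-bit counter with a spinlock.
 */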
s64 atomic64_read(const atomic64_t *v);
void atomic64_set(atomic64_t *v, s64);

s64 atomic64_add_return(s64, atomic64_t *);
s64 atomic64_sub_return(s64, atomic64_t *);
void atomic64_add(s64, atomic64_t *);
void atomic64_sub(s64, atomic64_t *);

s64 atomic64_xchg(atomic64_t *, s64);
s64 atomic64_cmpxchg(atomic64_t *, s64, s64);
s64 atomic64_add_unless(atomic64_t *, s64, s64);

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
#define atomic64_inc(v)			atomic64_add(1LL, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1LL, (v))
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec(v)			atomic64_sub(1LL, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)

static inline s64 atomic64_add_return_release(s64 i, atomic64_t *v)
{
	smp_mb__before_atomic();
	return atomic64_add_return(i, v);
}

static inline s64 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	return atomic64_cmpxchg(v, old, new);
}

#endif /* ATOMIC64_SPINLOCK */

#endif /* __TOOLS_LINUX_ATOMIC_H */