/*
 * include/linux/atomic.h - userspace shim for the kernel atomic API
 * (bcachefs-tools)
 */
1 #ifndef __TOOLS_LINUX_ATOMIC_H
2 #define __TOOLS_LINUX_ATOMIC_H
3
4 #include <linux/compiler.h>
5 #include <linux/types.h>
6
/*
 * Kernel-style atomic counter wrapped in a struct so the raw value can
 * only (sanely) be reached through the atomic_*() helpers below.
 */
typedef struct {
	int		counter;
} atomic_t;
10
/* Long-sized variant of atomic_t; accessed via atomic_long_*(). */
typedef struct {
	long		counter;
} atomic_long_t;
14
15 typedef struct {
16         u64             counter;
17 } atomic64_t;
18
19 #ifndef C11_ATOMICS
20
21 #include <urcu/uatomic.h>
22
23 #if (CAA_BITS_PER_LONG != 64)
24 #define ATOMIC64_SPINLOCK
25 #endif
26
27 #define __ATOMIC_READ(p)                uatomic_read(p)
28 #define __ATOMIC_SET(p, v)              uatomic_set(p, v)
29 #define __ATOMIC_ADD_RETURN(v, p)       uatomic_add_return(p, v)
30 #define __ATOMIC_SUB_RETURN(v, p)       uatomic_sub_return(p, v)
31 #define __ATOMIC_ADD(v, p)              uatomic_add(p, v)
32 #define __ATOMIC_SUB(v, p)              uatomic_sub(p, v)
33 #define __ATOMIC_INC(p)                 uatomic_inc(p)
34 #define __ATOMIC_DEC(p)                 uatomic_dec(p)
35
36 #define xchg(p, v)                      uatomic_xchg(p, v)
37 #define xchg_acquire(p, v)              uatomic_xchg(p, v)
38 #define cmpxchg(p, old, new)            uatomic_cmpxchg(p, old, new)
39 #define cmpxchg_acquire(p, old, new)    uatomic_cmpxchg(p, old, new)
40 #define cmpxchg_release(p, old, new)    uatomic_cmpxchg(p, old, new)
41
42 #define smp_mb__before_atomic()         cmm_smp_mb__before_uatomic_add()
43 #define smp_mb__after_atomic()          cmm_smp_mb__after_uatomic_add()
44 #define smp_wmb()                       cmm_smp_wmb()
45 #define smp_rmb()                       cmm_smp_rmb()
46 #define smp_mb()                        cmm_smp_mb()
47 #define smp_read_barrier_depends()      cmm_smp_read_barrier_depends()
48
49 #else /* C11_ATOMICS */
50
51 #define __ATOMIC_READ(p)                __atomic_load_n(p,      __ATOMIC_RELAXED)
52 #define __ATOMIC_SET(p, v)              __atomic_store_n(p, v,  __ATOMIC_RELAXED)
53 #define __ATOMIC_ADD_RETURN(v, p)       __atomic_add_fetch(p, v, __ATOMIC_RELAXED)
54 #define __ATOMIC_ADD_RETURN_RELEASE(v, p)                               \
55                                         __atomic_add_fetch(p, v, __ATOMIC_RELEASE)
56 #define __ATOMIC_SUB_RETURN(v, p)       __atomic_sub_fetch(p, v, __ATOMIC_RELAXED)
57 #define __ATOMIC_SUB_RETURN_RELEASE(v, p)                               \
58                                         __atomic_sub_fetch(p, v, __ATOMIC_RELEASE)
59
60 #define xchg(p, v)                      __atomic_exchange_n(p, v, __ATOMIC_SEQ_CST)
61 #define xchg_acquire(p, v)              __atomic_exchange_n(p, v, __ATOMIC_ACQUIRE)
62
63 #define cmpxchg(p, old, new)                                    \
64 ({                                                              \
65         typeof(*(p)) __old = (old);                             \
66                                                                 \
67         __atomic_compare_exchange_n((p), &__old, new, false,    \
68                                     __ATOMIC_SEQ_CST,           \
69                                     __ATOMIC_SEQ_CST);          \
70         __old;                                                  \
71 })
72
73 #define cmpxchg_acquire(p, old, new)                            \
74 ({                                                              \
75         typeof(*(p)) __old = (old);                             \
76                                                                 \
77         __atomic_compare_exchange_n((p), &__old, new, false,    \
78                                     __ATOMIC_ACQUIRE,           \
79                                     __ATOMIC_ACQUIRE);          \
80         __old;                                                  \
81 })
82
83 #define cmpxchg_release(p, old, new)                            \
84 ({                                                              \
85         typeof(*(p)) __old = (old);                             \
86                                                                 \
87         __atomic_compare_exchange_n((p), &__old, new, false,    \
88                                     __ATOMIC_RELEASE,           \
89                                     __ATOMIC_RELEASE);          \
90         __old;                                                  \
91 })
92
93 #define smp_mb__before_atomic() __atomic_thread_fence(__ATOMIC_SEQ_CST)
94 #define smp_mb__after_atomic()  __atomic_thread_fence(__ATOMIC_SEQ_CST)
95 #define smp_wmb()               __atomic_thread_fence(__ATOMIC_SEQ_CST)
96 #define smp_rmb()               __atomic_thread_fence(__ATOMIC_SEQ_CST)
97 #define smp_mb()                __atomic_thread_fence(__ATOMIC_SEQ_CST)
98 #define smp_read_barrier_depends()
99
100 #endif
101
102 #define smp_store_mb(var, value)  do { WRITE_ONCE(var, value); smp_mb(); } while (0)
103
104 #define smp_load_acquire(p)                                             \
105 ({                                                                      \
106         typeof(*p) ___p1 = READ_ONCE(*p);                               \
107         smp_mb();                                                       \
108         ___p1;                                                          \
109 })
110
111 #define smp_store_release(p, v)                                         \
112 do {                                                                    \
113         smp_mb();                                                       \
114         WRITE_ONCE(*p, v);                                              \
115 } while (0)
116
/* atomic interface: */

/*
 * Fallbacks for primitives the selected backend didn't provide
 * (e.g. the urcu branch defines no *_RELEASE variants, the C11 branch
 * defines no __ATOMIC_ADD/__ATOMIC_INC/...).
 */

#ifndef __ATOMIC_ADD
#define __ATOMIC_ADD(i, v) __ATOMIC_ADD_RETURN(i, v)
#endif

/* Release semantics via an explicit barrier before the relaxed op. */
#ifndef __ATOMIC_ADD_RETURN_RELEASE
#define __ATOMIC_ADD_RETURN_RELEASE(i, v)				\
	({ smp_mb__before_atomic(); __ATOMIC_ADD_RETURN(i, v); })
#endif

#ifndef __ATOMIC_SUB_RETURN_RELEASE
#define __ATOMIC_SUB_RETURN_RELEASE(i, v)				\
	({ smp_mb__before_atomic(); __ATOMIC_SUB_RETURN(i, v); })
#endif

#ifndef __ATOMIC_SUB
#define __ATOMIC_SUB(i, v) __ATOMIC_SUB_RETURN(i, v)
#endif

#ifndef __ATOMIC_INC_RETURN
#define __ATOMIC_INC_RETURN(v) __ATOMIC_ADD_RETURN(1, v)
#endif

#ifndef __ATOMIC_DEC_RETURN
#define __ATOMIC_DEC_RETURN(v) __ATOMIC_SUB_RETURN(1, v)
#endif

#ifndef __ATOMIC_INC
#define __ATOMIC_INC(v) __ATOMIC_ADD(1, v)
#endif

#ifndef __ATOMIC_DEC
#define __ATOMIC_DEC(v) __ATOMIC_SUB(1, v)
#endif
152
/*
 * DEF_ATOMIC_OPS() - generate the kernel atomic_*() API for one type.
 * @a_type:	function-name prefix (atomic, atomic_long, atomic64)
 * @i_type:	underlying integer type (int, long, s64)
 *
 * Built on the __ATOMIC_*()/xchg()/cmpxchg() primitives selected above.
 * Note a_type##_set() must not "return" the (void) store expression -
 * a return-with-expression in a void function is invalid in strict C.
 */
#define DEF_ATOMIC_OPS(a_type, i_type)					\
static inline i_type a_type##_read(const a_type##_t *v)			\
{									\
	return __ATOMIC_READ(&v->counter);				\
}									\
									\
static inline void a_type##_set(a_type##_t *v, i_type i)		\
{									\
	__ATOMIC_SET(&v->counter, i);					\
}									\
									\
static inline i_type a_type##_add_return(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_ADD_RETURN(i, &v->counter);			\
}									\
									\
static inline i_type a_type##_add_return_release(i_type i, a_type##_t *v)\
{									\
	return __ATOMIC_ADD_RETURN_RELEASE(i, &v->counter);		\
}									\
									\
static inline i_type a_type##_sub_return_release(i_type i, a_type##_t *v)\
{									\
	return __ATOMIC_SUB_RETURN_RELEASE(i, &v->counter);		\
}									\
									\
static inline i_type a_type##_sub_return(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN(i, &v->counter);			\
}									\
									\
static inline void a_type##_add(i_type i, a_type##_t *v)		\
{									\
	__ATOMIC_ADD(i, &v->counter);					\
}									\
									\
static inline void a_type##_sub(i_type i, a_type##_t *v)		\
{									\
	__ATOMIC_SUB(i, &v->counter);					\
}									\
									\
static inline i_type a_type##_inc_return(a_type##_t *v)			\
{									\
	return __ATOMIC_INC_RETURN(&v->counter);			\
}									\
									\
static inline i_type a_type##_dec_return(a_type##_t *v)			\
{									\
	return __ATOMIC_DEC_RETURN(&v->counter);			\
}									\
									\
static inline void a_type##_inc(a_type##_t *v)				\
{									\
	__ATOMIC_INC(&v->counter);					\
}									\
									\
static inline void a_type##_dec(a_type##_t *v)				\
{									\
	__ATOMIC_DEC(&v->counter);					\
}									\
									\
static inline bool a_type##_add_negative(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_ADD_RETURN(i, &v->counter) < 0;		\
}									\
									\
static inline bool a_type##_sub_and_test(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN(i, &v->counter) == 0;		\
}									\
									\
static inline bool a_type##_inc_and_test(a_type##_t *v)			\
{									\
	return __ATOMIC_INC_RETURN(&v->counter) == 0;			\
}									\
									\
static inline bool a_type##_dec_and_test(a_type##_t *v)			\
{									\
	return __ATOMIC_DEC_RETURN(&v->counter) == 0;			\
}									\
									\
/* Adds @a unless the value is @u; returns the old value. */		\
static inline i_type a_type##_add_unless(a_type##_t *v, i_type a, i_type u)\
{									\
	i_type old, c = __ATOMIC_READ(&v->counter);			\
	while (c != u && (old = cmpxchg(&v->counter, c, c + a)) != c)	\
		c = old;						\
	return c;							\
}									\
									\
static inline bool a_type##_inc_not_zero(a_type##_t *v)			\
{									\
	return a_type##_add_unless(v, 1, 0);				\
}									\
									\
static inline i_type a_type##_xchg(a_type##_t *v, i_type i)		\
{									\
	return xchg(&v->counter, i);					\
}									\
									\
static inline i_type a_type##_cmpxchg(a_type##_t *v, i_type old, i_type new)\
{									\
	return cmpxchg(&v->counter, old, new);				\
}									\
									\
static inline i_type a_type##_cmpxchg_acquire(a_type##_t *v, i_type old, i_type new)\
{									\
	return cmpxchg_acquire(&v->counter, old, new);			\
}
261
262 DEF_ATOMIC_OPS(atomic,          int)
263 DEF_ATOMIC_OPS(atomic_long,     long)
264
265 #ifndef ATOMIC64_SPINLOCK
266 DEF_ATOMIC_OPS(atomic64,        s64)
267 #else
268 s64 atomic64_read(const atomic64_t *v);
269 void atomic64_set(atomic64_t *v, s64);
270
271 s64 atomic64_add_return(s64, atomic64_t *);
272 s64 atomic64_sub_return(s64, atomic64_t *);
273 void atomic64_add(s64, atomic64_t *);
274 void atomic64_sub(s64, atomic64_t *);
275
276 s64 atomic64_xchg(atomic64_t *, s64);
277 s64 atomic64_cmpxchg(atomic64_t *, s64, s64);
278
279 #define atomic64_add_negative(a, v)     (atomic64_add_return((a), (v)) < 0)
280 #define atomic64_inc(v)                 atomic64_add(1LL, (v))
281 #define atomic64_inc_return(v)          atomic64_add_return(1LL, (v))
282 #define atomic64_inc_and_test(v)        (atomic64_inc_return(v) == 0)
283 #define atomic64_sub_and_test(a, v)     (atomic64_sub_return((a), (v)) == 0)
284 #define atomic64_dec(v)                 atomic64_sub(1LL, (v))
285 #define atomic64_dec_return(v)          atomic64_sub_return(1LL, (v))
286 #define atomic64_dec_and_test(v)        (atomic64_dec_return((v)) == 0)
287 #define atomic64_inc_not_zero(v)        atomic64_add_unless((v), 1LL, 0LL)
288
289 static inline s64 atomic64_add_return_release(s64 i, atomic64_t *v)
290 {
291         smp_mb__before_atomic();
292         return atomic64_add_return(i, v);
293 }
294
295 static inline s64 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
296 {
297         return atomic64_cmpxchg(v, old, new);
298 }
299
300 #endif
301
302 #endif /* __TOOLS_LINUX_ATOMIC_H */