#ifndef __TOOLS_LINUX_ATOMIC_H
#define __TOOLS_LINUX_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>

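/*
 * Userspace implementations of the kernel's atomic_t/atomic_long_t/atomic64_t
 * API, so that kernel code can be built as part of bcachefs-tools.
 */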
typedef struct {
	int		counter;
} atomic_t;

typedef struct {
	long		counter;
} atomic_long_t;

typedef struct {
	s64		counter;
} atomic64_t;

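/*
 * Backend selection: by default the primitives come from liburcu's uatomic
 * layer; defining C11_ATOMICS switches to the compiler's __atomic builtins
 * instead.
 */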
#ifndef C11_ATOMICS

#include <urcu/uatomic.h>

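/*
 * Without native 64-bit atomics (i.e. on 32-bit), atomic64_t falls back to
 * the spinlock-based out-of-line implementation declared at the bottom of
 * this header:
 */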
#if (CAA_BITS_PER_LONG != 64)
#define ATOMIC64_SPINLOCK
#endif

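/*
 * Note the argument order: these wrappers take (value, pointer) to match the
 * kernel's atomic_add(i, v) convention, while liburcu's uatomic_*() take
 * (pointer, value):
 */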
#define __ATOMIC_READ(p)		uatomic_read(p)
#define __ATOMIC_SET(p, v)		uatomic_set(p, v)
#define __ATOMIC_ADD_RETURN(v, p)	uatomic_add_return(p, v)
#define __ATOMIC_SUB_RETURN(v, p)	uatomic_sub_return(p, v)
#define __ATOMIC_ADD(v, p)		uatomic_add(p, v)
#define __ATOMIC_SUB(v, p)		uatomic_sub(p, v)
#define __ATOMIC_INC(p)			uatomic_inc(p)
#define __ATOMIC_DEC(p)			uatomic_dec(p)
#define __ATOMIC_AND(v, p)		uatomic_and(p, v)
#define __ATOMIC_OR(v, p)		uatomic_or(p, v)

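/*
 * liburcu's value-returning uatomic ops (xchg, cmpxchg, *_return) imply full
 * memory barriers, so the acquire/release variants can safely map to the
 * plain ones:
 */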
#define xchg(p, v)			uatomic_xchg(p, v)
#define xchg_acquire(p, v)		uatomic_xchg(p, v)
#define cmpxchg(p, old, new)		uatomic_cmpxchg(p, old, new)
#define cmpxchg_acquire(p, old, new)	uatomic_cmpxchg(p, old, new)
#define cmpxchg_release(p, old, new)	uatomic_cmpxchg(p, old, new)

#define smp_mb__before_atomic()		cmm_smp_mb__before_uatomic_add()
#define smp_mb__after_atomic()		cmm_smp_mb__after_uatomic_add()
#define smp_wmb()			cmm_smp_wmb()
#define smp_rmb()			cmm_smp_rmb()
#define smp_mb()			cmm_smp_mb()
#define smp_read_barrier_depends()	cmm_smp_read_barrier_depends()
#define smp_acquire__after_ctrl_dep()	cmm_smp_mb()

#else /* C11_ATOMICS */

#define __ATOMIC_READ(p)		__atomic_load_n(p,	__ATOMIC_RELAXED)
#define __ATOMIC_SET(p, v)		__atomic_store_n(p, v,	__ATOMIC_RELAXED)
#define __ATOMIC_ADD_RETURN(v, p)	__atomic_add_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_ADD_RETURN_RELEASE(v, p)				\
					__atomic_add_fetch(p, v, __ATOMIC_RELEASE)
#define __ATOMIC_SUB_RETURN(v, p)	__atomic_sub_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_SUB_RETURN_RELEASE(v, p)				\
					__atomic_sub_fetch(p, v, __ATOMIC_RELEASE)
#define __ATOMIC_AND(v, p)		__atomic_and_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_OR(v, p)		__atomic_or_fetch(p, v, __ATOMIC_RELAXED)

#define xchg(p, v)			__atomic_exchange_n(p, v, __ATOMIC_SEQ_CST)
#define xchg_acquire(p, v)		__atomic_exchange_n(p, v, __ATOMIC_ACQUIRE)

#define cmpxchg(p, old, new)					\
({								\
	typeof(*(p)) __old = (old);				\
								\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_SEQ_CST,		\
				    __ATOMIC_SEQ_CST);		\
	__old;							\
})

#define cmpxchg_acquire(p, old, new)				\
({								\
	typeof(*(p)) __old = (old);				\
								\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_ACQUIRE,		\
				    __ATOMIC_ACQUIRE);		\
	__old;							\
})

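/*
 * __atomic_compare_exchange_n()'s failure memory order may not be
 * __ATOMIC_RELEASE (nor stronger than the success order), so the release
 * variant uses a relaxed failure order:
 */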
#define cmpxchg_release(p, old, new)				\
({								\
	typeof(*(p)) __old = (old);				\
								\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_RELEASE,		\
				    __ATOMIC_RELAXED);		\
	__old;							\
})

#define smp_mb__before_atomic()	__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_mb__after_atomic()	__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_wmb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_rmb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_mb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_read_barrier_depends()

#endif /* C11_ATOMICS */

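/*
 * Generic acquire/release helpers, implemented conservatively with full
 * barriers rather than true acquire/release operations:
 */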
#define smp_store_mb(var, value)  do { WRITE_ONCE(var, value); smp_mb(); } while (0)

#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	smp_mb();							\
	___p1;								\
})

#define smp_store_release(p, v)						\
do {									\
	smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)

/* atomic interface: */

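/*
 * Synthesize any primitives the selected backend didn't provide from the
 * ones it did:
 */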
#ifndef __ATOMIC_ADD
#define __ATOMIC_ADD(i, v) __ATOMIC_ADD_RETURN(i, v)
#endif

#ifndef __ATOMIC_ADD_RETURN_RELEASE
#define __ATOMIC_ADD_RETURN_RELEASE(i, v)				\
	({ smp_mb__before_atomic(); __ATOMIC_ADD_RETURN(i, v); })
#endif

#ifndef __ATOMIC_SUB_RETURN_RELEASE
#define __ATOMIC_SUB_RETURN_RELEASE(i, v)				\
	({ smp_mb__before_atomic(); __ATOMIC_SUB_RETURN(i, v); })
#endif

#ifndef __ATOMIC_SUB
#define __ATOMIC_SUB(i, v) __ATOMIC_SUB_RETURN(i, v)
#endif

#ifndef __ATOMIC_INC_RETURN
#define __ATOMIC_INC_RETURN(v) __ATOMIC_ADD_RETURN(1, v)
#endif

#ifndef __ATOMIC_DEC_RETURN
#define __ATOMIC_DEC_RETURN(v) __ATOMIC_SUB_RETURN(1, v)
#endif

#ifndef __ATOMIC_INC
#define __ATOMIC_INC(v) __ATOMIC_ADD(1, v)
#endif

#ifndef __ATOMIC_DEC
#define __ATOMIC_DEC(v) __ATOMIC_SUB(1, v)
#endif

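/*
 * DEF_ATOMIC_OPS() stamps out the full kernel-style atomic API (read, set,
 * add/sub/inc/dec and their _return/_release/_and_test variants, xchg and
 * cmpxchg) for a given atomic type and its counter's integer type:
 */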
#define DEF_ATOMIC_OPS(a_type, i_type)					\
static inline i_type a_type##_read(const a_type##_t *v)		\
{									\
	return __ATOMIC_READ(&v->counter);				\
}									\
									\
static inline i_type a_type##_read_acquire(const a_type##_t *v)	\
{									\
	i_type ret = __ATOMIC_READ(&v->counter);			\
	smp_mb__after_atomic();						\
	return ret;							\
}									\
									\
static inline void a_type##_set(a_type##_t *v, i_type i)		\
{									\
	__ATOMIC_SET(&v->counter, i);					\
}									\
									\
static inline i_type a_type##_add_return(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_ADD_RETURN(i, &v->counter);			\
}									\
									\
static inline i_type a_type##_add_return_release(i_type i, a_type##_t *v)\
{									\
	return __ATOMIC_ADD_RETURN_RELEASE(i, &v->counter);		\
}									\
									\
static inline i_type a_type##_sub_return_release(i_type i, a_type##_t *v)\
{									\
	return __ATOMIC_SUB_RETURN_RELEASE(i, &v->counter);		\
}									\
									\
static inline i_type a_type##_sub_return(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN(i, &v->counter);			\
}									\
									\
static inline void a_type##_add(i_type i, a_type##_t *v)		\
{									\
	__ATOMIC_ADD(i, &v->counter);					\
}									\
									\
static inline void a_type##_sub(i_type i, a_type##_t *v)		\
{									\
	__ATOMIC_SUB(i, &v->counter);					\
}									\
									\
static inline i_type a_type##_inc_return(a_type##_t *v)		\
{									\
	return __ATOMIC_INC_RETURN(&v->counter);			\
}									\
									\
static inline i_type a_type##_dec_return(a_type##_t *v)		\
{									\
	return __ATOMIC_DEC_RETURN(&v->counter);			\
}									\
									\
static inline i_type a_type##_dec_return_release(a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN_RELEASE(1, &v->counter);		\
}									\
									\
static inline void a_type##_inc(a_type##_t *v)				\
{									\
	__ATOMIC_INC(&v->counter);					\
}									\
									\
static inline void a_type##_dec(a_type##_t *v)				\
{									\
	__ATOMIC_DEC(&v->counter);					\
}									\
									\
static inline bool a_type##_add_negative(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_ADD_RETURN(i, &v->counter) < 0;		\
}									\
									\
static inline bool a_type##_sub_and_test(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN(i, &v->counter) == 0;		\
}									\
									\
static inline bool a_type##_inc_and_test(a_type##_t *v)		\
{									\
	return __ATOMIC_INC_RETURN(&v->counter) == 0;			\
}									\
									\
static inline bool a_type##_dec_and_test(a_type##_t *v)		\
{									\
	return __ATOMIC_DEC_RETURN(&v->counter) == 0;			\
}									\
									\
static inline i_type a_type##_add_unless(a_type##_t *v, i_type a, i_type u)\
{									\
	i_type old, c = __ATOMIC_READ(&v->counter);			\
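	/* CAS loop: add @a only while the counter != @u; returns the	\
	 * value observed before any modification took place:		\
	 */								\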
	while (c != u && (old = cmpxchg(&v->counter, c, c + a)) != c)	\
		c = old;						\
	return c;							\
}									\
									\
static inline bool a_type##_inc_not_zero(a_type##_t *v)		\
{									\
	return a_type##_add_unless(v, 1, 0);				\
}									\
									\
static inline void a_type##_and(i_type a, a_type##_t *v)		\
{									\
	__ATOMIC_AND(a, &v->counter);					\
}									\
									\
static inline void a_type##_or(i_type a, a_type##_t *v)		\
{									\
	__ATOMIC_OR(a, &v->counter);					\
}									\
									\
static inline i_type a_type##_xchg(a_type##_t *v, i_type i)		\
{									\
	return xchg(&v->counter, i);					\
}									\
									\
static inline i_type a_type##_cmpxchg(a_type##_t *v, i_type old, i_type new)\
{									\
	return cmpxchg(&v->counter, old, new);				\
}									\
									\
static inline i_type a_type##_cmpxchg_acquire(a_type##_t *v, i_type old, i_type new)\
{									\
	return cmpxchg_acquire(&v->counter, old, new);			\
}									\
									\
static inline bool a_type##_try_cmpxchg_acquire(a_type##_t *v, i_type *old, i_type new)\
{									\
	i_type prev = *old;						\
	*old = cmpxchg_acquire(&v->counter, *old, new);			\
	return prev == *old;						\
}

DEF_ATOMIC_OPS(atomic,		int)
DEF_ATOMIC_OPS(atomic_long,	long)

#ifndef ATOMIC64_SPINLOCK
DEF_ATOMIC_OPS(atomic64,	s64)
#else
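/*
 * 32-bit fallback: these are implemented out of line, with a spinlock
 * protecting the 64-bit counter (see ATOMIC64_SPINLOCK above):
 */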
s64 atomic64_read(const atomic64_t *v);
static inline s64 atomic64_read_acquire(const atomic64_t *v)
{
	s64 ret = atomic64_read(v);
	smp_mb__after_atomic();
	return ret;
}

void atomic64_set(atomic64_t *v, s64);

s64 atomic64_add_return(s64, atomic64_t *);
s64 atomic64_sub_return(s64, atomic64_t *);
void atomic64_add(s64, atomic64_t *);
void atomic64_sub(s64, atomic64_t *);

s64 atomic64_xchg(atomic64_t *, s64);
s64 atomic64_cmpxchg(atomic64_t *, s64, s64);

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
#define atomic64_inc(v)			atomic64_add(1LL, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1LL, (v))
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec(v)			atomic64_sub(1LL, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)

static inline s64 atomic64_add_return_release(s64 i, atomic64_t *v)
{
	smp_mb__before_atomic();
	return atomic64_add_return(i, v);
}

static inline s64 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	return atomic64_cmpxchg(v, old, new);
}

static inline s64 atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	smp_mb__before_atomic();
	return atomic64_sub_return(i, v);
}

#endif /* ATOMIC64_SPINLOCK */

#endif /* __TOOLS_LINUX_ATOMIC_H */