#ifndef __TOOLS_LINUX_ATOMIC_H
#define __TOOLS_LINUX_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>

typedef struct {
	int		counter;
} atomic_t;

typedef struct {
	long		counter;
} atomic_long_t;

typedef struct {
	s64		counter;
} atomic64_t;

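/*
 * Two backends provide the primitive __ATOMIC_* ops below: liburcu's
 * uatomic macros by default, or the C11 __atomic builtins when
 * C11_ATOMICS is defined at build time.
 */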
#ifndef C11_ATOMICS

#include <urcu/uatomic.h>

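/*
 * When long is not 64 bits wide, liburcu's uatomic ops can't be relied
 * on for 64-bit values, so the atomic64 operations are provided out of
 * line instead (see the declarations at the end of this file).
 */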
#if (CAA_BITS_PER_LONG != 64)
#define ATOMIC64_SPINLOCK
#endif

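/*
 * Kernel-style (value, pointer) wrappers over liburcu's
 * (pointer, value) argument order.
 */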
#define __ATOMIC_READ(p)		uatomic_read(p)
#define __ATOMIC_SET(p, v)		uatomic_set(p, v)
#define __ATOMIC_ADD_RETURN(v, p)	uatomic_add_return(p, v)
#define __ATOMIC_SUB_RETURN(v, p)	uatomic_sub_return(p, v)
#define __ATOMIC_ADD(v, p)		uatomic_add(p, v)
#define __ATOMIC_SUB(v, p)		uatomic_sub(p, v)
#define __ATOMIC_INC(p)			uatomic_inc(p)
#define __ATOMIC_DEC(p)			uatomic_dec(p)
#define __ATOMIC_AND(v, p)		uatomic_and(p, v)
#define __ATOMIC_OR(v, p)		uatomic_or(p, v)

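/*
 * liburcu's uatomic_xchg() and uatomic_cmpxchg() imply full memory
 * barriers, so the acquire/release variants can all map to the same
 * (stronger than required, but correct) operations.
 */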
#define xchg(p, v)			uatomic_xchg(p, v)
#define xchg_acquire(p, v)		uatomic_xchg(p, v)
#define cmpxchg(p, old, new)		uatomic_cmpxchg(p, old, new)
#define cmpxchg_acquire(p, old, new)	uatomic_cmpxchg(p, old, new)
#define cmpxchg_release(p, old, new)	uatomic_cmpxchg(p, old, new)

#define smp_mb__before_atomic()		cmm_smp_mb__before_uatomic_add()
#define smp_mb__after_atomic()		cmm_smp_mb__after_uatomic_add()
#define smp_wmb()			cmm_smp_wmb()
#define smp_rmb()			cmm_smp_rmb()
#define smp_mb()			cmm_smp_mb()
#define smp_read_barrier_depends()	cmm_smp_read_barrier_depends()
#define smp_acquire__after_ctrl_dep()	cmm_smp_mb()

#else /* C11_ATOMICS */

#define __ATOMIC_READ(p)		__atomic_load_n(p,	__ATOMIC_RELAXED)
#define __ATOMIC_SET(p, v)		__atomic_store_n(p, v,	__ATOMIC_RELAXED)
#define __ATOMIC_ADD_RETURN(v, p)	__atomic_add_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_ADD_RETURN_RELEASE(v, p)				\
					__atomic_add_fetch(p, v, __ATOMIC_RELEASE)
#define __ATOMIC_SUB_RETURN(v, p)	__atomic_sub_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_SUB_RETURN_RELEASE(v, p)				\
					__atomic_sub_fetch(p, v, __ATOMIC_RELEASE)
#define __ATOMIC_AND(v, p)		__atomic_and_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_OR(v, p)		__atomic_or_fetch(p, v, __ATOMIC_RELAXED)

#define xchg(p, v)			__atomic_exchange_n(p, v, __ATOMIC_SEQ_CST)
#define xchg_acquire(p, v)		__atomic_exchange_n(p, v, __ATOMIC_ACQUIRE)

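/*
 * Kernel cmpxchg() returns the value that was previously in *p; the
 * caller detects success by comparing the result against old.
 * __atomic_compare_exchange_n() writes the observed value back into
 * __old on failure, which gives exactly that behaviour.
 */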
#define cmpxchg(p, old, new)					\
({								\
	typeof(*(p)) __old = (old);				\
								\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_SEQ_CST,		\
				    __ATOMIC_SEQ_CST);		\
	__old;							\
})

#define cmpxchg_acquire(p, old, new)				\
({								\
	typeof(*(p)) __old = (old);				\
								\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_ACQUIRE,		\
				    __ATOMIC_ACQUIRE);		\
	__old;							\
})

#define cmpxchg_release(p, old, new)				\
({								\
	typeof(*(p)) __old = (old);				\
								\
	/* C11 forbids RELEASE as a failure order; use RELAXED */\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_RELEASE,		\
				    __ATOMIC_RELAXED);		\
	__old;							\
})

#define smp_mb__before_atomic()	__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_mb__after_atomic()	__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_wmb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_rmb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_mb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_read_barrier_depends()
/* defined by the urcu branch above; mirrored here so both backends agree */
#define smp_acquire__after_ctrl_dep()	__atomic_thread_fence(__ATOMIC_SEQ_CST)

#endif

#define smp_store_mb(var, value)  do { WRITE_ONCE(var, value); smp_mb(); } while (0)

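/*
 * Portable fallbacks: a full smp_mb() is stronger than the acquire and
 * release orderings these actually require, but always correct.
 */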
#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	smp_mb();							\
	___p1;								\
})

#define smp_store_release(p, v)						\
do {									\
	smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)

/* atomic interface: */

#ifndef __ATOMIC_ADD
#define __ATOMIC_ADD(i, v) __ATOMIC_ADD_RETURN(i, v)
#endif

#ifndef __ATOMIC_ADD_RETURN_RELEASE
#define __ATOMIC_ADD_RETURN_RELEASE(i, v)				\
	({ smp_mb__before_atomic(); __ATOMIC_ADD_RETURN(i, v); })
#endif

#ifndef __ATOMIC_SUB_RETURN_RELEASE
#define __ATOMIC_SUB_RETURN_RELEASE(i, v)				\
	({ smp_mb__before_atomic(); __ATOMIC_SUB_RETURN(i, v); })
#endif

#ifndef __ATOMIC_SUB
#define __ATOMIC_SUB(i, v) __ATOMIC_SUB_RETURN(i, v)
#endif

#ifndef __ATOMIC_INC_RETURN
#define __ATOMIC_INC_RETURN(v) __ATOMIC_ADD_RETURN(1, v)
#endif

#ifndef __ATOMIC_DEC_RETURN
#define __ATOMIC_DEC_RETURN(v) __ATOMIC_SUB_RETURN(1, v)
#endif

#ifndef __ATOMIC_INC
#define __ATOMIC_INC(v) __ATOMIC_ADD(1, v)
#endif

#ifndef __ATOMIC_DEC
#define __ATOMIC_DEC(v) __ATOMIC_SUB(1, v)
#endif

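/*
 * DEF_ATOMIC_OPS(atomic, int) expands to the usual kernel API:
 * atomic_read(), atomic_set(), atomic_add_return(), atomic_cmpxchg()
 * and friends, all operating on the embedded counter.
 */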
#define DEF_ATOMIC_OPS(a_type, i_type)					\
static inline i_type a_type##_read(const a_type##_t *v)		\
{									\
	return __ATOMIC_READ(&v->counter);				\
}									\
									\
static inline void a_type##_set(a_type##_t *v, i_type i)		\
{									\
	__ATOMIC_SET(&v->counter, i);					\
}									\
									\
static inline i_type a_type##_add_return(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_ADD_RETURN(i, &v->counter);			\
}									\
									\
static inline i_type a_type##_add_return_release(i_type i, a_type##_t *v)\
{									\
	return __ATOMIC_ADD_RETURN_RELEASE(i, &v->counter);		\
}									\
									\
static inline i_type a_type##_sub_return_release(i_type i, a_type##_t *v)\
{									\
	return __ATOMIC_SUB_RETURN_RELEASE(i, &v->counter);		\
}									\
									\
static inline i_type a_type##_sub_return(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN(i, &v->counter);			\
}									\
									\
static inline void a_type##_add(i_type i, a_type##_t *v)		\
{									\
	__ATOMIC_ADD(i, &v->counter);					\
}									\
									\
static inline void a_type##_sub(i_type i, a_type##_t *v)		\
{									\
	__ATOMIC_SUB(i, &v->counter);					\
}									\
									\
static inline i_type a_type##_inc_return(a_type##_t *v)			\
{									\
	return __ATOMIC_INC_RETURN(&v->counter);			\
}									\
									\
static inline i_type a_type##_dec_return(a_type##_t *v)			\
{									\
	return __ATOMIC_DEC_RETURN(&v->counter);			\
}									\
									\
static inline i_type a_type##_dec_return_release(a_type##_t *v)		\
{									\
	return __ATOMIC_SUB_RETURN_RELEASE(1, &v->counter);		\
}									\
									\
static inline void a_type##_inc(a_type##_t *v)				\
{									\
	__ATOMIC_INC(&v->counter);					\
}									\
									\
static inline void a_type##_dec(a_type##_t *v)				\
{									\
	__ATOMIC_DEC(&v->counter);					\
}									\
									\
static inline bool a_type##_add_negative(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_ADD_RETURN(i, &v->counter) < 0;			\
}									\
									\
static inline bool a_type##_sub_and_test(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN(i, &v->counter) == 0;		\
}									\
									\
static inline bool a_type##_inc_and_test(a_type##_t *v)			\
{									\
	return __ATOMIC_INC_RETURN(&v->counter) == 0;			\
}									\
									\
static inline bool a_type##_dec_and_test(a_type##_t *v)			\
{									\
	return __ATOMIC_DEC_RETURN(&v->counter) == 0;			\
}									\
									\
static inline i_type a_type##_add_unless(a_type##_t *v, i_type a, i_type u)\
{									\
	i_type old, c = __ATOMIC_READ(&v->counter);			\
	while (c != u && (old = cmpxchg(&v->counter, c, c + a)) != c)	\
		c = old;						\
	return c;							\
}									\
									\
static inline bool a_type##_inc_not_zero(a_type##_t *v)			\
{									\
	return a_type##_add_unless(v, 1, 0);				\
}									\
									\
static inline void a_type##_and(i_type a, a_type##_t *v)		\
{									\
	__ATOMIC_AND(a, &v->counter);					\
}									\
									\
static inline void a_type##_or(i_type a, a_type##_t *v)			\
{									\
	__ATOMIC_OR(a, &v->counter);					\
}									\
									\
static inline i_type a_type##_xchg(a_type##_t *v, i_type i)		\
{									\
	return xchg(&v->counter, i);					\
}									\
									\
static inline i_type a_type##_cmpxchg(a_type##_t *v, i_type old, i_type new)\
{									\
	return cmpxchg(&v->counter, old, new);				\
}									\
									\
static inline i_type a_type##_cmpxchg_acquire(a_type##_t *v, i_type old, i_type new)\
{									\
	return cmpxchg_acquire(&v->counter, old, new);			\
}									\
									\
static inline bool a_type##_try_cmpxchg_acquire(a_type##_t *v, i_type *old, i_type new)\
{									\
	i_type prev = *old;						\
	*old = cmpxchg_acquire(&v->counter, *old, new);			\
	return prev == *old;						\
}

DEF_ATOMIC_OPS(atomic,		int)
DEF_ATOMIC_OPS(atomic_long,	long)

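/*
 * Example usage (illustrative sketch only; "struct obj" and its
 * helpers are not part of this header): a kernel-style refcount built
 * from the generated ops.
 *
 *	struct obj { atomic_t ref; };
 *
 *	static bool obj_tryget(struct obj *o)
 *	{
 *		return atomic_inc_not_zero(&o->ref);
 *	}
 *
 *	static void obj_put(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->ref))
 *			free(o);
 *	}
 */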
#ifndef ATOMIC64_SPINLOCK
DEF_ATOMIC_OPS(atomic64,	s64)
#else
s64 atomic64_read(const atomic64_t *v);
void atomic64_set(atomic64_t *v, s64);

s64 atomic64_add_return(s64, atomic64_t *);
s64 atomic64_sub_return(s64, atomic64_t *);
void atomic64_add(s64, atomic64_t *);
void atomic64_sub(s64, atomic64_t *);

s64 atomic64_xchg(atomic64_t *, s64);
s64 atomic64_cmpxchg(atomic64_t *, s64, s64);

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
#define atomic64_inc(v)			atomic64_add(1LL, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1LL, (v))
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec(v)			atomic64_sub(1LL, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)

static inline s64 atomic64_add_return_release(s64 i, atomic64_t *v)
{
	smp_mb__before_atomic();
	return atomic64_add_return(i, v);
}

static inline s64 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	return atomic64_cmpxchg(v, old, new);
}

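/*
 * atomic64_inc_not_zero() above expands to atomic64_add_unless(), which
 * this branch does not otherwise provide. A minimal sketch, assuming
 * only the out-of-line atomic64_read()/atomic64_cmpxchg() declared
 * above; it mirrors the generic add_unless loop in DEF_ATOMIC_OPS.
 */
static inline s64 atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 old, c = atomic64_read(v);

	/* retry until the counter hits @u or our cmpxchg wins the race */
	while (c != u && (old = atomic64_cmpxchg(v, c, c + a)) != c)
		c = old;
	return c;
}
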
#endif

#endif /* __TOOLS_LINUX_ATOMIC_H */