#ifndef __TOOLS_LINUX_ATOMIC_H
#define __TOOLS_LINUX_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>

typedef struct {
	int		counter;
} atomic_t;

typedef struct {
	long		counter;
} atomic_long_t;

typedef struct {
	u64		counter;
} atomic64_t;

#ifndef C11_ATOMICS

#include <urcu/uatomic.h>

#if (CAA_BITS_PER_LONG != 64)
#define ATOMIC64_SPINLOCK
#endif
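
/*
 * Without native 64-bit atomics (i.e. on 32-bit hosts), the atomic64
 * operations fall back to the spinlock-based implementation declared
 * near the end of this header.
 */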

#define __ATOMIC_READ(p)		uatomic_read(p)
#define __ATOMIC_SET(p, v)		uatomic_set(p, v)
#define __ATOMIC_ADD_RETURN(v, p)	uatomic_add_return(p, v)
#define __ATOMIC_SUB_RETURN(v, p)	uatomic_sub_return(p, v)
#define __ATOMIC_ADD(v, p)		uatomic_add(p, v)
#define __ATOMIC_SUB(v, p)		uatomic_sub(p, v)
#define __ATOMIC_INC(p)			uatomic_inc(p)
#define __ATOMIC_DEC(p)			uatomic_dec(p)
#define __ATOMIC_AND(v, p)		uatomic_and(p, v)
#define __ATOMIC_OR(v, p)		uatomic_or(p, v)

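/*
 * liburcu's uatomic_xchg()/uatomic_cmpxchg() imply full memory barriers,
 * so mapping the acquire/release variants to them is stronger than
 * strictly required, but correct:
 */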
#define xchg(p, v)			uatomic_xchg(p, v)
#define xchg_acquire(p, v)		uatomic_xchg(p, v)
#define cmpxchg(p, old, new)		uatomic_cmpxchg(p, old, new)
#define cmpxchg_acquire(p, old, new)	uatomic_cmpxchg(p, old, new)
#define cmpxchg_release(p, old, new)	uatomic_cmpxchg(p, old, new)

#define smp_mb__before_atomic()		cmm_smp_mb__before_uatomic_add()
#define smp_mb__after_atomic()		cmm_smp_mb__after_uatomic_add()
#define smp_wmb()			cmm_smp_wmb()
#define smp_rmb()			cmm_smp_rmb()
#define smp_mb()			cmm_smp_mb()
#define smp_read_barrier_depends()	cmm_smp_read_barrier_depends()

#else /* C11_ATOMICS */

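/*
 * This branch uses the compiler's C11-style __atomic builtins; plain
 * (unsuffixed) operations use relaxed memory ordering:
 */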
#define __ATOMIC_READ(p)		__atomic_load_n(p,	__ATOMIC_RELAXED)
#define __ATOMIC_SET(p, v)		__atomic_store_n(p, v,	__ATOMIC_RELAXED)
#define __ATOMIC_ADD_RETURN(v, p)	__atomic_add_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_ADD_RETURN_RELEASE(v, p)				\
					__atomic_add_fetch(p, v, __ATOMIC_RELEASE)
#define __ATOMIC_SUB_RETURN(v, p)	__atomic_sub_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_SUB_RETURN_RELEASE(v, p)				\
					__atomic_sub_fetch(p, v, __ATOMIC_RELEASE)
#define __ATOMIC_AND(v, p)		__atomic_and_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_OR(v, p)		__atomic_or_fetch(p, v, __ATOMIC_RELAXED)

#define xchg(p, v)			__atomic_exchange_n(p, v, __ATOMIC_SEQ_CST)
#define xchg_acquire(p, v)		__atomic_exchange_n(p, v, __ATOMIC_ACQUIRE)

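/*
 * As in the kernel, cmpxchg() returns the value *p held before the
 * operation; the exchange succeeded iff the return value equals @old.
 */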
#define cmpxchg(p, old, new)					\
({								\
	typeof(*(p)) __old = (old);				\
								\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_SEQ_CST,		\
				    __ATOMIC_SEQ_CST);		\
	__old;							\
})

#define cmpxchg_acquire(p, old, new)				\
({								\
	typeof(*(p)) __old = (old);				\
								\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_ACQUIRE,		\
				    __ATOMIC_ACQUIRE);		\
	__old;							\
})

#define cmpxchg_release(p, old, new)				\
({								\
	typeof(*(p)) __old = (old);				\
								\
	/* the failure order may not be __ATOMIC_RELEASE */	\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_RELEASE,		\
				    __ATOMIC_RELAXED);		\
	__old;							\
})

#define smp_mb__before_atomic()	__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_mb__after_atomic()	__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_wmb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_rmb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_mb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
/* dependent loads are ordered on everything but DEC Alpha; no-op here: */
#define smp_read_barrier_depends()

#endif /* C11_ATOMICS */

#define smp_store_mb(var, value)  do { WRITE_ONCE(var, value); smp_mb(); } while (0)

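/*
 * Load-acquire/store-release, implemented with a full barrier: stronger
 * than acquire/release semantics require, but always correct.
 */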
#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	smp_mb();							\
	___p1;								\
})

#define smp_store_release(p, v)						\
do {									\
	smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)

/* atomic interface: */

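/*
 * Generic fallbacks, for backends that don't provide every operation
 * natively: missing ops are synthesized from ADD/SUB_RETURN, and missing
 * release variants from a full barrier followed by the relaxed op.
 */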
#ifndef __ATOMIC_ADD
#define __ATOMIC_ADD(i, v) __ATOMIC_ADD_RETURN(i, v)
#endif

#ifndef __ATOMIC_ADD_RETURN_RELEASE
#define __ATOMIC_ADD_RETURN_RELEASE(i, v)				\
	({ smp_mb__before_atomic(); __ATOMIC_ADD_RETURN(i, v); })
#endif

#ifndef __ATOMIC_SUB_RETURN_RELEASE
#define __ATOMIC_SUB_RETURN_RELEASE(i, v)				\
	({ smp_mb__before_atomic(); __ATOMIC_SUB_RETURN(i, v); })
#endif

#ifndef __ATOMIC_SUB
#define __ATOMIC_SUB(i, v) __ATOMIC_SUB_RETURN(i, v)
#endif

#ifndef __ATOMIC_INC_RETURN
#define __ATOMIC_INC_RETURN(v) __ATOMIC_ADD_RETURN(1, v)
#endif

#ifndef __ATOMIC_DEC_RETURN
#define __ATOMIC_DEC_RETURN(v) __ATOMIC_SUB_RETURN(1, v)
#endif

#ifndef __ATOMIC_INC
#define __ATOMIC_INC(v) __ATOMIC_ADD(1, v)
#endif

#ifndef __ATOMIC_DEC
#define __ATOMIC_DEC(v) __ATOMIC_SUB(1, v)
#endif

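/*
 * DEF_ATOMIC_OPS(a_type, i_type) generates the kernel-style atomic API
 * for one counter type: DEF_ATOMIC_OPS(atomic, int) below produces
 * atomic_read(), atomic_set(), atomic_add_return(), atomic_dec_and_test()
 * and so on, each operating on a_type##_t.counter.
 */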
#define DEF_ATOMIC_OPS(a_type, i_type)					\
static inline i_type a_type##_read(const a_type##_t *v)		\
{									\
	return __ATOMIC_READ(&v->counter);				\
}									\
									\
static inline void a_type##_set(a_type##_t *v, i_type i)		\
{									\
	__ATOMIC_SET(&v->counter, i);					\
}									\
									\
static inline i_type a_type##_add_return(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_ADD_RETURN(i, &v->counter);			\
}									\
									\
static inline i_type a_type##_add_return_release(i_type i, a_type##_t *v)\
{									\
	return __ATOMIC_ADD_RETURN_RELEASE(i, &v->counter);		\
}									\
									\
static inline i_type a_type##_sub_return_release(i_type i, a_type##_t *v)\
{									\
	return __ATOMIC_SUB_RETURN_RELEASE(i, &v->counter);		\
}									\
									\
static inline i_type a_type##_sub_return(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN(i, &v->counter);			\
}									\
									\
static inline void a_type##_add(i_type i, a_type##_t *v)		\
{									\
	__ATOMIC_ADD(i, &v->counter);					\
}									\
									\
static inline void a_type##_sub(i_type i, a_type##_t *v)		\
{									\
	__ATOMIC_SUB(i, &v->counter);					\
}									\
									\
static inline i_type a_type##_inc_return(a_type##_t *v)		\
{									\
	return __ATOMIC_INC_RETURN(&v->counter);			\
}									\
									\
static inline i_type a_type##_dec_return(a_type##_t *v)		\
{									\
	return __ATOMIC_DEC_RETURN(&v->counter);			\
}									\
									\
static inline void a_type##_inc(a_type##_t *v)				\
{									\
	__ATOMIC_INC(&v->counter);					\
}									\
									\
static inline void a_type##_dec(a_type##_t *v)				\
{									\
	__ATOMIC_DEC(&v->counter);					\
}									\
									\
static inline bool a_type##_add_negative(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_ADD_RETURN(i, &v->counter) < 0;			\
}									\
									\
static inline bool a_type##_sub_and_test(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN(i, &v->counter) == 0;		\
}									\
									\
static inline bool a_type##_inc_and_test(a_type##_t *v)			\
{									\
	return __ATOMIC_INC_RETURN(&v->counter) == 0;			\
}									\
									\
static inline bool a_type##_dec_and_test(a_type##_t *v)			\
{									\
	return __ATOMIC_DEC_RETURN(&v->counter) == 0;			\
}									\
									\
static inline i_type a_type##_add_unless(a_type##_t *v, i_type a, i_type u)\
{									\
	i_type old, c = __ATOMIC_READ(&v->counter);			\
	while (c != u && (old = cmpxchg(&v->counter, c, c + a)) != c)	\
		c = old;						\
	return c;							\
}									\
									\
static inline bool a_type##_inc_not_zero(a_type##_t *v)			\
{									\
	return a_type##_add_unless(v, 1, 0);				\
}									\
									\
static inline void a_type##_and(i_type a, a_type##_t *v)		\
{									\
	__ATOMIC_AND(a, &v->counter);					\
}									\
									\
static inline void a_type##_or(i_type a, a_type##_t *v)			\
{									\
	__ATOMIC_OR(a, &v->counter);					\
}									\
									\
static inline i_type a_type##_xchg(a_type##_t *v, i_type i)		\
{									\
	return xchg(&v->counter, i);					\
}									\
									\
static inline i_type a_type##_cmpxchg(a_type##_t *v, i_type old, i_type new)\
{									\
	return cmpxchg(&v->counter, old, new);				\
}									\
									\
static inline i_type a_type##_cmpxchg_acquire(a_type##_t *v, i_type old, i_type new)\
{									\
	return cmpxchg_acquire(&v->counter, old, new);			\
}

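/*
 * A minimal refcounting sketch of the generated API (illustrative only;
 * free_obj() is a hypothetical cleanup function):
 *
 *	atomic_t ref;
 *
 *	atomic_set(&ref, 1);
 *	atomic_inc(&ref);			// take a second reference
 *	if (atomic_dec_and_test(&ref))		// 2 -> 1: false
 *		free_obj();
 *	if (atomic_dec_and_test(&ref))		// 1 -> 0: true
 *		free_obj();			// now it runs
 */
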
DEF_ATOMIC_OPS(atomic,		int)
DEF_ATOMIC_OPS(atomic_long,	long)

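/*
 * On 64-bit hosts atomic64 reuses the generated ops above; 32-bit hosts
 * get the out-of-line, spinlock-based fallback declared below:
 */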
#ifndef ATOMIC64_SPINLOCK
DEF_ATOMIC_OPS(atomic64,	s64)
#else
s64 atomic64_read(const atomic64_t *v);
void atomic64_set(atomic64_t *v, s64);

s64 atomic64_add_return(s64, atomic64_t *);
s64 atomic64_sub_return(s64, atomic64_t *);
void atomic64_add(s64, atomic64_t *);
void atomic64_sub(s64, atomic64_t *);

s64 atomic64_xchg(atomic64_t *, s64);
s64 atomic64_cmpxchg(atomic64_t *, s64, s64);
/* needed by atomic64_inc_not_zero() below; assumed provided by the
 * spinlock-based fallback implementation: */
s64 atomic64_add_unless(atomic64_t *, s64, s64);

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
#define atomic64_inc(v)			atomic64_add(1LL, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1LL, (v))
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec(v)			atomic64_sub(1LL, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)

static inline s64 atomic64_add_return_release(s64 i, atomic64_t *v)
{
	smp_mb__before_atomic();
	return atomic64_add_return(i, v);
}

static inline s64 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	return atomic64_cmpxchg(v, old, new);
}

#endif /* ATOMIC64_SPINLOCK */

#endif /* __TOOLS_LINUX_ATOMIC_H */