/*
 * include/linux/atomic.h - userspace shim for the Linux kernel atomic API
 * (bcachefs-tools)
 */
1 #ifndef __TOOLS_LINUX_ATOMIC_H
2 #define __TOOLS_LINUX_ATOMIC_H
3
4 #include <linux/compiler.h>
5 #include <linux/types.h>
6
7 typedef struct {
8         int             counter;
9 } atomic_t;
10
11 typedef struct {
12         long            counter;
13 } atomic_long_t;
14
15 typedef struct {
16         u64             counter;
17 } atomic64_t;
18
#ifndef C11_ATOMICS

#include <urcu/uatomic.h>

/* Without native 64-bit atomics (32-bit userspace), fall back to the
 * spinlock-based atomic64 implementation selected at the bottom of
 * this file. */
#if (CAA_BITS_PER_LONG != 64)
#define ATOMIC64_SPINLOCK
#endif

/* Kernel-style (value, pointer) wrappers around liburcu's uatomic
 * primitives, which take (pointer, value) -- note the swapped argument
 * order in the add/sub wrappers. */
#define __ATOMIC_READ(p)		uatomic_read(p)
#define __ATOMIC_SET(p, v)		uatomic_set(p, v)
#define __ATOMIC_ADD_RETURN(v, p)	uatomic_add_return(p, v)
#define __ATOMIC_SUB_RETURN(v, p)	uatomic_sub_return(p, v)
#define __ATOMIC_ADD(v, p)		uatomic_add(p, v)
#define __ATOMIC_SUB(v, p)		uatomic_sub(p, v)
#define __ATOMIC_INC(p)			uatomic_inc(p)
#define __ATOMIC_DEC(p)			uatomic_dec(p)

/* Every ordering variant maps to the same plain uatomic op, which
 * provides at least the ordering requested (stronger is safe). */
#define xchg(p, v)			uatomic_xchg(p, v)
#define xchg_acquire(p, v)		uatomic_xchg(p, v)
#define cmpxchg(p, old, new)		uatomic_cmpxchg(p, old, new)
#define cmpxchg_acquire(p, old, new)	uatomic_cmpxchg(p, old, new)
#define cmpxchg_release(p, old, new)	uatomic_cmpxchg(p, old, new)

/* Memory barriers, mapped to their urcu (cmm_*) equivalents. */
#define smp_mb__before_atomic()		cmm_smp_mb__before_uatomic_add()
#define smp_mb__after_atomic()		cmm_smp_mb__after_uatomic_add()
#define smp_wmb()			cmm_smp_wmb()
#define smp_rmb()			cmm_smp_rmb()
#define smp_mb()			cmm_smp_mb()
#define smp_read_barrier_depends()	cmm_smp_read_barrier_depends()

#else /* C11_ATOMICS */

/* Implementation via the GCC/clang __atomic builtins.  The plain
 * __ATOMIC_*() ops are relaxed; callers that need ordering use the
 * explicit barriers or the _release/_acquire variants. */
#define __ATOMIC_READ(p)		__atomic_load_n(p,	__ATOMIC_RELAXED)
#define __ATOMIC_SET(p, v)		__atomic_store_n(p, v,	__ATOMIC_RELAXED)
#define __ATOMIC_ADD_RETURN(v, p)	__atomic_add_fetch(p, v, __ATOMIC_RELAXED)
#define __ATOMIC_ADD_RETURN_RELEASE(v, p)				\
					__atomic_add_fetch(p, v, __ATOMIC_RELEASE)
#define __ATOMIC_SUB_RETURN(v, p)	__atomic_sub_fetch(p, v, __ATOMIC_RELAXED)

/* Exchange *p for v; returns the previous value of *p. */
#define xchg(p, v)			__atomic_exchange_n(p, v, __ATOMIC_SEQ_CST)
#define xchg_acquire(p, v)		__atomic_exchange_n(p, v, __ATOMIC_ACQUIRE)

/*
 * Full-barrier compare-and-swap: if *p == old, store new into *p.
 * Returns the previous value of *p (kernel cmpxchg() semantics).
 *
 * __typeof__ rather than typeof: the bare keyword is a GNU extension
 * that is unavailable under a strict -std=c11.
 */
#define cmpxchg(p, old, new)					\
({								\
	__typeof__(*(p)) __old = (old);				\
								\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_SEQ_CST,		\
				    __ATOMIC_SEQ_CST);		\
	__old;							\
})
70
/*
 * Compare-and-swap with acquire ordering (on both success and failure).
 * Returns the previous value of *p.
 *
 * __typeof__ rather than typeof: the bare keyword is a GNU extension
 * that is unavailable under a strict -std=c11.
 */
#define cmpxchg_acquire(p, old, new)				\
({								\
	__typeof__(*(p)) __old = (old);				\
								\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_ACQUIRE,		\
				    __ATOMIC_ACQUIRE);		\
	__old;							\
})
80
/*
 * Compare-and-swap with release ordering on success.  Returns the
 * previous value of *p.
 *
 * The *failure* memory order may not be __ATOMIC_RELEASE (or ACQ_REL):
 * a failed CAS performs no store, and __atomic_compare_exchange_n
 * forbids release orders there.  Use __ATOMIC_RELAXED on failure.
 *
 * __typeof__ rather than typeof: the bare keyword is a GNU extension
 * that is unavailable under a strict -std=c11.
 */
#define cmpxchg_release(p, old, new)				\
({								\
	__typeof__(*(p)) __old = (old);				\
								\
	__atomic_compare_exchange_n((p), &__old, new, false,	\
				    __ATOMIC_RELEASE,		\
				    __ATOMIC_RELAXED);		\
	__old;							\
})
90
/* Barriers pairing with the relaxed __ATOMIC_*() ops above.  All are
 * implemented as full seq_cst fences -- for wmb/rmb this is stronger
 * than strictly required, but correct. */
#define smp_mb__before_atomic()	__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_mb__after_atomic()	__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_wmb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_rmb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#define smp_mb()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
/* Intentionally a no-op in the C11 build. */
#define smp_read_barrier_depends()

#endif
99
/* Store, then a full barrier. */
#define smp_store_mb(var, value)  do { WRITE_ONCE(var, value); smp_mb(); } while (0)

/* Load-acquire: a plain load followed by a full barrier (stronger than
 * acquire, which only needs to order later accesses after the load). */
#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	smp_mb();							\
	___p1;								\
})

/* Store-release: a full barrier, then a plain store (stronger than
 * release, which only needs to order earlier accesses before the store). */
#define smp_store_release(p, v)						\
do {									\
	smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)
114
/* atomic interface: */

/*
 * Defaults for primitives a backend did not define natively: the void
 * add/sub forms fall back to the value-returning forms, inc/dec to
 * add/sub of 1, and add_return_release to a full barrier followed by a
 * relaxed add_return.  (The urcu branch defines ADD/SUB/INC/DEC itself;
 * the C11 branch relies on these.)
 */
#ifndef __ATOMIC_ADD
#define __ATOMIC_ADD(i, v) __ATOMIC_ADD_RETURN(i, v)
#endif

#ifndef __ATOMIC_ADD_RETURN_RELEASE
#define __ATOMIC_ADD_RETURN_RELEASE(i, v)				\
	({ smp_mb__before_atomic(); __ATOMIC_ADD_RETURN(i, v); })
#endif

#ifndef __ATOMIC_SUB
#define __ATOMIC_SUB(i, v) __ATOMIC_SUB_RETURN(i, v)
#endif

#ifndef __ATOMIC_INC_RETURN
#define __ATOMIC_INC_RETURN(v) __ATOMIC_ADD_RETURN(1, v)
#endif

#ifndef __ATOMIC_DEC_RETURN
#define __ATOMIC_DEC_RETURN(v) __ATOMIC_SUB_RETURN(1, v)
#endif

#ifndef __ATOMIC_INC
#define __ATOMIC_INC(v) __ATOMIC_ADD(1, v)
#endif

#ifndef __ATOMIC_DEC
#define __ATOMIC_DEC(v) __ATOMIC_SUB(1, v)
#endif
145
/*
 * DEF_ATOMIC_OPS() - generate the kernel-style atomic_*() API for one
 * counter type, on top of the __ATOMIC_*()/xchg()/cmpxchg() primitives
 * selected above (urcu or C11 builtins).
 *
 * @a_type: function-name prefix (atomic, atomic_long, atomic64)
 * @i_type: underlying integer type (int, long, s64)
 */
#define DEF_ATOMIC_OPS(a_type, i_type)					\
static inline i_type a_type##_read(const a_type##_t *v)			\
{									\
	return __ATOMIC_READ(&v->counter);				\
}									\
									\
static inline void a_type##_set(a_type##_t *v, i_type i)		\
{									\
	/* void expression: ISO C forbids "return expr;" here */	\
	__ATOMIC_SET(&v->counter, i);					\
}									\
									\
static inline i_type a_type##_add_return(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_ADD_RETURN(i, &v->counter);			\
}									\
									\
static inline i_type a_type##_add_return_release(i_type i, a_type##_t *v)\
{									\
	return __ATOMIC_ADD_RETURN_RELEASE(i, &v->counter);		\
}									\
									\
static inline i_type a_type##_sub_return(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN(i, &v->counter);			\
}									\
									\
static inline void a_type##_add(i_type i, a_type##_t *v)		\
{									\
	__ATOMIC_ADD(i, &v->counter);					\
}									\
									\
static inline void a_type##_sub(i_type i, a_type##_t *v)		\
{									\
	__ATOMIC_SUB(i, &v->counter);					\
}									\
									\
static inline i_type a_type##_inc_return(a_type##_t *v)			\
{									\
	return __ATOMIC_INC_RETURN(&v->counter);			\
}									\
									\
static inline i_type a_type##_dec_return(a_type##_t *v)			\
{									\
	return __ATOMIC_DEC_RETURN(&v->counter);			\
}									\
									\
static inline void a_type##_inc(a_type##_t *v)				\
{									\
	__ATOMIC_INC(&v->counter);					\
}									\
									\
static inline void a_type##_dec(a_type##_t *v)				\
{									\
	__ATOMIC_DEC(&v->counter);					\
}									\
									\
static inline bool a_type##_add_negative(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_ADD_RETURN(i, &v->counter) < 0;			\
}									\
									\
static inline bool a_type##_sub_and_test(i_type i, a_type##_t *v)	\
{									\
	return __ATOMIC_SUB_RETURN(i, &v->counter) == 0;		\
}									\
									\
static inline bool a_type##_inc_and_test(a_type##_t *v)			\
{									\
	return __ATOMIC_INC_RETURN(&v->counter) == 0;			\
}									\
									\
static inline bool a_type##_dec_and_test(a_type##_t *v)			\
{									\
	return __ATOMIC_DEC_RETURN(&v->counter) == 0;			\
}									\
									\
/* Adds a to the counter unless it equals u.  Returns the old value	\
 * (not a bool, unlike the kernel): its truthiness matches kernel	\
 * semantics only when u == 0, as in inc_not_zero below. */		\
static inline i_type a_type##_add_unless(a_type##_t *v, i_type a, i_type u)\
{									\
	i_type old, c = __ATOMIC_READ(&v->counter);			\
	while (c != u && (old = cmpxchg(&v->counter, c, c + a)) != c)	\
		c = old;						\
	return c;							\
}									\
									\
static inline bool a_type##_inc_not_zero(a_type##_t *v)			\
{									\
	return a_type##_add_unless(v, 1, 0);				\
}									\
									\
static inline i_type a_type##_xchg(a_type##_t *v, i_type i)		\
{									\
	return xchg(&v->counter, i);					\
}									\
									\
static inline i_type a_type##_cmpxchg(a_type##_t *v, i_type old, i_type new)\
{									\
	return cmpxchg(&v->counter, old, new);				\
}									\
									\
static inline i_type a_type##_cmpxchg_acquire(a_type##_t *v, i_type old, i_type new)\
{									\
	return cmpxchg_acquire(&v->counter, old, new);			\
}
249
/* Instantiate the API for atomic_t (int) and atomic_long_t (long). */
DEF_ATOMIC_OPS(atomic,		int)
DEF_ATOMIC_OPS(atomic_long,	long)
252
#ifndef ATOMIC64_SPINLOCK
DEF_ATOMIC_OPS(atomic64,	s64)
#else
/*
 * No native 64-bit atomics (ATOMIC64_SPINLOCK set above): out-of-line
 * implementations, presumably lock-based in a companion .c file -- not
 * visible here.
 */
s64 atomic64_read(const atomic64_t *v);
void atomic64_set(atomic64_t *v, s64);

s64 atomic64_add_return(s64, atomic64_t *);
s64 atomic64_sub_return(s64, atomic64_t *);
void atomic64_add(s64, atomic64_t *);
void atomic64_sub(s64, atomic64_t *);

s64 atomic64_xchg(atomic64_t *, s64);
s64 atomic64_cmpxchg(atomic64_t *, s64, s64);

/* Derived ops, built from the primitives declared above.
 * NOTE(review): atomic64_inc_not_zero() expands to
 * atomic64_add_unless(), which has no prototype in this branch --
 * verify it is declared/defined alongside the out-of-line ops. */
#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
#define atomic64_inc(v)			atomic64_add(1LL, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1LL, (v))
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec(v)			atomic64_sub(1LL, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)

/* Release ordering via a full barrier before the add. */
static inline s64 atomic64_add_return_release(s64 i, atomic64_t *v)
{
	smp_mb__before_atomic();
	return atomic64_add_return(i, v);
}

/* Acquire semantics delegated to the (at least as strong) plain
 * cmpxchg implementation. */
static inline s64 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	return atomic64_cmpxchg(v, old, new);
}

#endif
289
290 #endif /* __TOOLS_LINUX_ATOMIC_H */