Move c_src dirs back to toplevel
diff --git a/include/linux/atomic.h b/include/linux/atomic.h
index 7471bd976dc244c75144d54329db3f4107a5cb3a..5313f8509d32749ef84ed178f743557aff4a3795 100644
--- a/include/linux/atomic.h
+++ b/include/linux/atomic.h
@@ -32,11 +32,14 @@ typedef struct {
 #define __ATOMIC_SUB(v, p)             uatomic_sub(p, v)
 #define __ATOMIC_INC(p)                        uatomic_inc(p)
 #define __ATOMIC_DEC(p)                        uatomic_dec(p)
+#define __ATOMIC_AND(v, p)             uatomic_and(p, v)
+#define __ATOMIC_OR(v, p)              uatomic_or(p, v)
 
 #define xchg(p, v)                     uatomic_xchg(p, v)
 #define xchg_acquire(p, v)             uatomic_xchg(p, v)
 #define cmpxchg(p, old, new)           uatomic_cmpxchg(p, old, new)
 #define cmpxchg_acquire(p, old, new)   uatomic_cmpxchg(p, old, new)
+#define cmpxchg_release(p, old, new)   uatomic_cmpxchg(p, old, new)
 
 #define smp_mb__before_atomic()                cmm_smp_mb__before_uatomic_add()
 #define smp_mb__after_atomic()         cmm_smp_mb__after_uatomic_add()
@@ -44,6 +47,7 @@ typedef struct {
 #define smp_rmb()                      cmm_smp_rmb()
 #define smp_mb()                       cmm_smp_mb()
 #define smp_read_barrier_depends()     cmm_smp_read_barrier_depends()
+#define smp_acquire__after_ctrl_dep()  cmm_smp_mb()
 
 #else /* C11_ATOMICS */
 
@@ -53,6 +57,10 @@ typedef struct {
 #define __ATOMIC_ADD_RETURN_RELEASE(v, p)                              \
                                        __atomic_add_fetch(p, v, __ATOMIC_RELEASE)
 #define __ATOMIC_SUB_RETURN(v, p)      __atomic_sub_fetch(p, v, __ATOMIC_RELAXED)
+#define __ATOMIC_SUB_RETURN_RELEASE(v, p)                              \
+                                       __atomic_sub_fetch(p, v, __ATOMIC_RELEASE)
+#define __ATOMIC_AND(v, p)             __atomic_and_fetch(p, v, __ATOMIC_RELAXED)
+#define __ATOMIC_OR(v, p)              __atomic_or_fetch(p, v, __ATOMIC_RELAXED)
 
 #define xchg(p, v)                     __atomic_exchange_n(p, v, __ATOMIC_SEQ_CST)
 #define xchg_acquire(p, v)             __atomic_exchange_n(p, v, __ATOMIC_ACQUIRE)
@@ -77,6 +85,16 @@ typedef struct {
        __old;                                                  \
 })
 
+#define cmpxchg_release(p, old, new)                           \
+({                                                             \
+       typeof(*(p)) __old = (old);                             \
+                                                               \
+       __atomic_compare_exchange_n((p), &__old, new, false,    \
+                                   __ATOMIC_RELEASE,           \
+                                   __ATOMIC_RELAXED);          \
+       __old;                                                  \
+})
+
 #define smp_mb__before_atomic()        __atomic_thread_fence(__ATOMIC_SEQ_CST)
 #define smp_mb__after_atomic() __atomic_thread_fence(__ATOMIC_SEQ_CST)
 #define smp_wmb()              __atomic_thread_fence(__ATOMIC_SEQ_CST)
@@ -112,6 +130,11 @@ do {                                                                       \
        ({ smp_mb__before_atomic(); __ATOMIC_ADD_RETURN(i, v); })
 #endif
 
+#ifndef __ATOMIC_SUB_RETURN_RELEASE
+#define __ATOMIC_SUB_RETURN_RELEASE(i, v)                              \
+       ({ smp_mb__before_atomic(); __ATOMIC_SUB_RETURN(i, v); })
+#endif
+
 #ifndef __ATOMIC_SUB
 #define __ATOMIC_SUB(i, v) __ATOMIC_SUB_RETURN(i, v)
 #endif
@@ -138,6 +161,13 @@ static inline i_type a_type##_read(const a_type##_t *v)                    \
        return __ATOMIC_READ(&v->counter);                              \
 }                                                                      \
                                                                        \
+static inline i_type a_type##_read_acquire(const a_type##_t *v)                \
+{                                                                      \
+       i_type ret = __ATOMIC_READ(&v->counter);                        \
+       smp_mb__after_atomic();                                         \
+       return ret;                                                     \
+}                                                                      \
+                                                                       \
 static inline void a_type##_set(a_type##_t *v, i_type i)               \
 {                                                                      \
        return __ATOMIC_SET(&v->counter, i);                            \
@@ -153,6 +183,11 @@ static inline i_type a_type##_add_return_release(i_type i, a_type##_t *v)\
        return __ATOMIC_ADD_RETURN_RELEASE(i, &v->counter);             \
 }                                                                      \
                                                                        \
+static inline i_type a_type##_sub_return_release(i_type i, a_type##_t *v)\
+{                                                                      \
+       return __ATOMIC_SUB_RETURN_RELEASE(i, &v->counter);             \
+}                                                                      \
+                                                                       \
 static inline i_type a_type##_sub_return(i_type i, a_type##_t *v)      \
 {                                                                      \
        return __ATOMIC_SUB_RETURN(i, &v->counter);                     \
@@ -178,6 +213,11 @@ static inline i_type a_type##_dec_return(a_type##_t *v)                    \
        return __ATOMIC_DEC_RETURN(&v->counter);                        \
 }                                                                      \
                                                                        \
+static inline i_type a_type##_dec_return_release(a_type##_t *v)                \
+{                                                                      \
+       return __ATOMIC_SUB_RETURN_RELEASE(1, &v->counter);             \
+}                                                                      \
+                                                                       \
 static inline void a_type##_inc(a_type##_t *v)                         \
 {                                                                      \
        __ATOMIC_INC(&v->counter);                                      \
@@ -221,6 +261,16 @@ static inline bool a_type##_inc_not_zero(a_type##_t *v)                    \
        return a_type##_add_unless(v, 1, 0);                            \
 }                                                                      \
                                                                        \
+static inline void a_type##_and(i_type a, a_type##_t *v)               \
+{                                                                      \
+       __ATOMIC_AND(a, v);                                             \
+}                                                                      \
+                                                                       \
+static inline void a_type##_or(i_type a, a_type##_t *v)                        \
+{                                                                      \
+       __ATOMIC_OR(a, v);                                              \
+}                                                                      \
+                                                                       \
 static inline i_type a_type##_xchg(a_type##_t *v, i_type i)            \
 {                                                                      \
        return xchg(&v->counter, i);                                    \
@@ -234,6 +284,13 @@ static inline i_type a_type##_cmpxchg(a_type##_t *v, i_type old, i_type new)\
 static inline i_type a_type##_cmpxchg_acquire(a_type##_t *v, i_type old, i_type new)\
 {                                                                      \
        return cmpxchg_acquire(&v->counter, old, new);                  \
+}                                                                      \
+                                                                       \
+static inline bool a_type##_try_cmpxchg_acquire(a_type##_t *v, i_type *old, i_type new)\
+{                                                                      \
+       i_type prev = *old;                                             \
+       *old = cmpxchg_acquire(&v->counter, *old, new);                 \
+       return prev == *old;                                            \
 }
 
 DEF_ATOMIC_OPS(atomic,         int)
@@ -243,6 +300,13 @@ DEF_ATOMIC_OPS(atomic_long,        long)
 DEF_ATOMIC_OPS(atomic64,       s64)
 #else
 s64 atomic64_read(const atomic64_t *v);
+static inline s64 atomic64_read_acquire(const atomic64_t *v)
+{
+       s64 ret = atomic64_read(v);
+       smp_mb__after_atomic();
+       return ret;
+}
+
 void atomic64_set(atomic64_t *v, s64);
 
 s64 atomic64_add_return(s64, atomic64_t *);
@@ -274,6 +338,12 @@ static inline s64 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
        return atomic64_cmpxchg(v, old, new);
 }
 
+static inline s64 atomic64_sub_return_release(s64 i, atomic64_t *v)
+{
+       smp_mb__before_atomic();
+       return atomic64_sub_return(i, v);
+}
+
 #endif
 
 #endif /* __TOOLS_LINUX_ATOMIC_H */
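For reference, a minimal usage sketch (not part of the patch) exercising the helpers this diff introduces: atomic_and()/atomic_or(), atomic_read_acquire(), atomic_try_cmpxchg_acquire() and atomic_sub_return_release(). It assumes the usual bcachefs-tools setup where this header is reachable as <linux/atomic.h> and the DEF_ATOMIC_OPS(atomic, int) instantiation above provides the int-sized wrappers; depending on the build, the operations map either to the liburcu uatomic_* calls or to the __atomic_* builtins.

/* Hypothetical usage sketch; not part of the diff above. */
#include <linux/atomic.h>
#include <stdio.h>

int main(void)
{
	atomic_t flags, ref;

	atomic_set(&flags, 0);
	atomic_set(&ref, 1);

	/* New bitwise helpers: set bits 0-1, then clear bit 0. */
	atomic_or(0x3, &flags);
	atomic_and(~0x1, &flags);

	/* New acquire load; pairs with a release store/RMW elsewhere. */
	int seen = atomic_read_acquire(&flags);

	/*
	 * New try_cmpxchg_acquire: take an extra reference in a CAS loop;
	 * on failure, old is refreshed with the current counter value.
	 */
	int old = atomic_read(&ref);
	while (!atomic_try_cmpxchg_acquire(&ref, &old, old + 1))
		;

	/*
	 * New sub_return_release: drop that reference with release ordering;
	 * a result of 0 would mean the last reference went away.
	 */
	if (atomic_sub_return_release(1, &ref) == 0)
		printf("last reference dropped\n");

	printf("flags=%d ref=%d\n", seen, atomic_read(&ref));
	return 0;
}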