/* Bits in one byte; defined before BITS_PER_TYPE, which expands it. */
#define BITS_PER_BYTE 8
/* Mask selecting bit @nr within its containing long word. */
#define BIT_MASK(nr) (1UL << ((nr) % BITS_PER_LONG))
/* Index of the long word that holds bit @nr. */
#define BIT_WORD(nr) ((nr) / BITS_PER_LONG)
/* Number of bits in an object of the given type. */
#define BITS_PER_TYPE(type) (sizeof(type) * BITS_PER_BYTE)
/* Number of longs needed to store @nr bits (rounded up). */
#define BITS_TO_LONGS(nr) DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(long))
/* Number of u64 words needed to store @nr bits (rounded up). */
#define BITS_TO_U64(nr) DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(u64))
return (old & mask) != 0;
}
/*
 * test_and_clear_bit - atomically clear a bit and return its old value.
 * @nr:   bit number; the containing word is found via BIT_WORD(), the
 *        in-word position via BIT_MASK()
 * @addr: base of the bitmap (array of unsigned long)
 *
 * Returns true if the bit was set before it was cleared.
 *
 * NOTE(review): uses __ATOMIC_RELAXED, so the operation is atomic but
 * imposes no memory ordering on surrounding accesses.
 */
static inline bool test_and_clear_bit(long nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *) addr) + BIT_WORD(nr);
	unsigned long old;

	old = __atomic_fetch_and(p, ~mask, __ATOMIC_RELAXED);

	return (old & mask) != 0;
}

static inline void clear_bit_unlock(long nr, volatile unsigned long *addr)
{
unsigned long mask = BIT_MASK(nr);
__builtin_popcount(w >> 32);
}
+static inline unsigned long hweight32(u32 w)
+{
+ return __builtin_popcount(w);
+}
+
static inline unsigned long hweight8(unsigned long w)
{
return __builtin_popcountl(w);