#define BIT_MASK(nr) (1UL << ((nr) % BITS_PER_LONG))
#define BIT_WORD(nr) ((nr) / BITS_PER_LONG)
+#define BITS_PER_TYPE(type) (sizeof(type) * BITS_PER_BYTE)
#define BITS_PER_BYTE 8
#define BITS_TO_LONGS(nr) DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(long))
#define BITS_TO_U64(nr) DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(u64))
__atomic_or_fetch(p, mask, __ATOMIC_RELAXED);
}
+/*
+ * __clear_bit - non-atomically clear bit @nr in the bitmap at @addr.
+ *
+ * Plain read-modify-write on the containing word; unlike the atomic
+ * clear_bit() below, callers must guarantee exclusion if the word can
+ * be accessed concurrently.
+ */
+static inline void __clear_bit(int nr, volatile unsigned long *addr)
+{
+ unsigned long mask = BIT_MASK(nr);
+ unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
+
+ *p &= ~mask;
+}
+
static inline void clear_bit(long nr, volatile unsigned long *addr)
{
unsigned long mask = BIT_MASK(nr);
return (old & mask) != 0;
}
+/*
+ * test_and_clear_bit - atomically clear bit @nr in @addr and report
+ * whether it was previously set.
+ *
+ * Implemented with a relaxed atomic fetch-and: the update is atomic,
+ * but no ordering with surrounding accesses is provided
+ * (__ATOMIC_RELAXED).
+ */
+static inline bool test_and_clear_bit(long nr, volatile unsigned long *addr)
+{
+ unsigned long mask = BIT_MASK(nr);
+ unsigned long *p = ((unsigned long *) addr) + BIT_WORD(nr);
+ unsigned long old;
+
+ old = __atomic_fetch_and(p, ~mask, __ATOMIC_RELAXED);
+
+ return (old & mask) != 0;
+}
+
static inline void clear_bit_unlock(long nr, volatile unsigned long *addr)
{
unsigned long mask = BIT_MASK(nr);
return __builtin_popcountl(w);
}
+/*
+ * hweight64 - population count (number of set bits) of a 64-bit value,
+ * computed as the sum of popcounts of the low and high 32-bit halves.
+ */
+static inline unsigned long hweight64(u64 w)
+{
+ return __builtin_popcount((u32) w) +
+ __builtin_popcount(w >> 32);
+}
+
+/* hweight32 - population count (number of set bits) of a 32-bit value. */
+static inline unsigned long hweight32(u32 w)
+{
+ return __builtin_popcount(w);
+}
+
+/*
+ * hweight8 - population count of @w.
+ *
+ * NOTE(review): despite the name, this counts every set bit of the full
+ * unsigned long argument (no masking to the low 8 bits) -- presumably
+ * callers only ever pass values <= 0xff; confirm at call sites.
+ */
+static inline unsigned long hweight8(unsigned long w)
+{
+ return __builtin_popcountl(w);
+}
+
/**
* rol64 - rotate a 64-bit value left
* @word: value to rotate
return 1UL << (fls_long(n) - 1);
}
-static inline __attribute_const__
-int __get_order(unsigned long size)
-{
- int order;
-
- size--;
- size >>= PAGE_SHIFT;
-#if BITS_PER_LONG == 32
- order = fls(size);
-#else
- order = fls64(size);
-#endif
- return order;
-}
-
-#define get_order(n) \
-( \
- __builtin_constant_p(n) ? ( \
- ((n) == 0UL) ? BITS_PER_LONG - PAGE_SHIFT : \
- (((n) < (1UL << PAGE_SHIFT)) ? 0 : \
- ilog2((n) - 1) - PAGE_SHIFT + 1) \
- ) : \
- __get_order(n) \
-)
-
#endif