#ifndef _LINUX_BITOPS_H
#define _LINUX_BITOPS_H
#include <asm/types.h>

#ifdef __KERNEL__
#define BIT(nr)			(1UL << (nr))
#define BIT_MASK(nr)		(1UL << ((nr) % BITS_PER_LONG))
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
#define BITS_PER_BYTE		8
#define BITS_TO_LONGS(nr)	DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(long))
#endif
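
/*
 * Worked example: for bit 70 of a multi-word bitmap on a 64-bit
 * machine (BITS_PER_LONG == 64), BIT_WORD(70) selects array word 1
 * and BIT_MASK(70) evaluates to 1UL << 6 within that word.
 */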

extern unsigned int __sw_hweight8(unsigned int w);
extern unsigned int __sw_hweight16(unsigned int w);
extern unsigned int __sw_hweight32(unsigned int w);
extern unsigned long __sw_hweight64(__u64 w);

/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope
 */
#include <asm/bitops.h>

#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))
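
/*
 * Usage sketch, assuming a bitmap declared with DECLARE_BITMAP() from
 * <linux/types.h>:
 *
 *	DECLARE_BITMAP(map, 64);
 *	unsigned int bit;
 *
 *	for_each_set_bit(bit, map, 64)
 *		printk(KERN_DEBUG "bit %u is set\n", bit);
 */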

static inline int get_bitmask_order(unsigned int count)
{
	int order;

	order = fls(count);
	return order;	/* We could be slightly more clever with -1 here... */
}
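
/*
 * Example: get_bitmask_order(5) returns 3, since 5 is 101 binary and
 * fls() reports the 1-based position of the most significant set bit.
 */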

static inline int get_count_order(unsigned int count)
{
	int order;

	order = fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}
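
/*
 * Example: get_count_order() is log2(count) rounded up, so
 * get_count_order(4) returns 2 (an exact power of two) while
 * get_count_order(5) returns 3.
 */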

static inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
}
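
/*
 * Example: hweight ("Hamming weight") counts the set bits in a word,
 * so hweight_long(0xF0UL) returns 4.
 */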

/**
 * rol32 - rotate a 32-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
	return (word << shift) | (word >> (32 - shift));
}
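
/*
 * Example: rol32(0x80000001, 1) returns 0x00000003.  Note that as
 * written a zero shift produces an undefined right shift by the full
 * word width, so callers should keep @shift in the range 1..31; the
 * same caveat applies to the other rotate helpers below.
 */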

/**
 * ror32 - rotate a 32-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 ror32(__u32 word, unsigned int shift)
{
	return (word >> shift) | (word << (32 - shift));
}
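
/*
 * Example: ror32 is the inverse of rol32, so ror32(0x00000003, 1)
 * returns 0x80000001.
 */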

/**
 * rol16 - rotate a 16-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 rol16(__u16 word, unsigned int shift)
{
	return (word << shift) | (word >> (16 - shift));
}

/**
 * ror16 - rotate a 16-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 ror16(__u16 word, unsigned int shift)
{
	return (word >> shift) | (word << (16 - shift));
}

/**
 * rol8 - rotate an 8-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u8 rol8(__u8 word, unsigned int shift)
{
	return (word << shift) | (word >> (8 - shift));
}

/**
 * ror8 - rotate an 8-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u8 ror8(__u8 word, unsigned int shift)
{
	return (word >> shift) | (word << (8 - shift));
}

static inline unsigned fls_long(unsigned long l)
{
	if (sizeof(l) == 4)
		return fls(l);
	return fls64(l);
}
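
/*
 * Example: fls ("find last set") returns the 1-based position of the
 * most significant set bit, or 0 for an all-zero argument, so
 * fls_long(0x80000000UL) returns 32 and fls_long(0) returns 0.
 */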

/**
 * __ffs64 - find first set bit in a 64 bit word
 * @word: The 64 bit word
 *
 * On 64 bit arches this is a synonym for __ffs.
 * The result is not defined if no bits are set, so check that @word
 * is non-zero before calling this.
 */
static inline unsigned long __ffs64(u64 word)
{
#if BITS_PER_LONG == 32
	if (((u32)word) == 0UL)
		return __ffs((u32)(word >> 32)) + 32;
#elif BITS_PER_LONG != 64
#error BITS_PER_LONG not 32 or 64
#endif
	return __ffs((unsigned long)word);
}
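
/*
 * Example: __ffs is 0-based, so __ffs64(0x100000000ULL) returns 32.
 * On a 32-bit machine the low word is zero, so the search falls
 * through to the high word and 32 is added to the result.
 */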

#ifdef __KERNEL__
#ifdef CONFIG_GENERIC_FIND_LAST_BIT
/**
 * find_last_bit - find the last set bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit number of the last set bit, or @size if no bits
 * are set.
 */
extern unsigned long find_last_bit(const unsigned long *addr,
				   unsigned long size);
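
/*
 * Example: with bits 3 and 7 set in a region of size 16, find_last_bit()
 * returns 7; if no bit in the region is set, it returns 16 (@size).
 */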
#endif /* CONFIG_GENERIC_FIND_LAST_BIT */
#endif /* __KERNEL__ */

#endif