bitops.h

#ifndef _LINUX_BITOPS_H
#define _LINUX_BITOPS_H
#include <asm/types.h>

/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope
 */
#include <asm/bitops.h>

#define for_each_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size)); \
	     (bit) < (size); \
	     (bit) = find_next_bit((addr), (size), (bit) + 1))
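
/*
 * Illustrative usage sketch (not part of the original header): walk every
 * set bit in a small bitmap.  The bitmap contents below are made up for
 * the example.
 *
 *	unsigned long map[2] = { 0x11UL, 0x1UL };
 *	int bit;
 *
 *	for_each_bit(bit, map, 2 * BITS_PER_LONG)
 *		printk("bit %d is set\n", bit);	   prints 0, 4, then BITS_PER_LONG
 */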

static __inline__ int get_bitmask_order(unsigned int count)
{
	int order;

	order = fls(count);
	return order;	/* We could be slightly more clever with -1 here... */
}

static __inline__ int get_count_order(unsigned int count)
{
	int order;

	order = fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}
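
/*
 * Worked examples (added for illustration, not in the original file),
 * assuming the usual fls() semantics where fls(0) == 0 and fls(1) == 1:
 *
 *	get_bitmask_order(5) == 3	fls(5): bits needed to hold the value 5
 *	get_count_order(4)   == 2	4 is a power of two, so log2(4)
 *	get_count_order(5)   == 3	rounded up to the next power of two
 */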

static inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
}
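
/*
 * Illustrative example (not part of the original header): hweight_long()
 * returns the population count, i.e. the number of set bits.
 *
 *	hweight_long(0)    == 0
 *	hweight_long(0xf0) == 4
 */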

/**
 * rol32 - rotate a 32-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
	return (word << shift) | (word >> (32 - shift));
}

/**
 * ror32 - rotate a 32-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 ror32(__u32 word, unsigned int shift)
{
	return (word >> shift) | (word << (32 - shift));
}
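
/*
 * Illustrative examples for the rotate helpers (added here, not in the
 * original file); shift is assumed to be in the range 1..31:
 *
 *	rol32(0x80000001, 1) == 0x00000003
 *	ror32(0x00000003, 1) == 0x80000001
 */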

static inline unsigned fls_long(unsigned long l)
{
	if (sizeof(l) == 4)
		return fls(l);
	return fls64(l);
}
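
/*
 * Illustrative example (not part of the original header): on a 64-bit
 * build fls_long() falls through to fls64(), so high bits are seen.
 *
 *	fls_long(0)         == 0
 *	fls_long(1UL << 40) == 41	(64-bit only; the constant would not
 *					 fit an unsigned long on 32-bit)
 */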

#endif