/* $Id: bitops.h,v 1.39 2002/01/30 01:40:00 davem Exp $
 * bitops.h: Bit string operations on the V9.
 *
 * Copyright 1996, 1997 David S. Miller (davem@caip.rutgers.edu)
 */

#ifndef _SPARC64_BITOPS_H
#define _SPARC64_BITOPS_H

#include <linux/config.h>
#include <linux/compiler.h>
#include <asm/byteorder.h>

extern int test_and_set_bit(unsigned long nr, volatile unsigned long *addr);
extern int test_and_clear_bit(unsigned long nr, volatile unsigned long *addr);
extern int test_and_change_bit(unsigned long nr, volatile unsigned long *addr);
extern void set_bit(unsigned long nr, volatile unsigned long *addr);
extern void clear_bit(unsigned long nr, volatile unsigned long *addr);
extern void change_bit(unsigned long nr, volatile unsigned long *addr);
/* "non-atomic" versions... */

static inline void __set_bit(int nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *)addr) + (nr >> 6);

        *m |= (1UL << (nr & 63));
}

static inline void __clear_bit(int nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *)addr) + (nr >> 6);

        *m &= ~(1UL << (nr & 63));
}

static inline void __change_bit(int nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *)addr) + (nr >> 6);

        *m ^= (1UL << (nr & 63));
}

static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *)addr) + (nr >> 6);
        unsigned long old = *m;
        unsigned long mask = (1UL << (nr & 63));

        *m = (old | mask);
        return ((old & mask) != 0);
}

static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *)addr) + (nr >> 6);
        unsigned long old = *m;
        unsigned long mask = (1UL << (nr & 63));

        *m = (old & ~mask);
        return ((old & mask) != 0);
}

static inline int __test_and_change_bit(int nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *)addr) + (nr >> 6);
        unsigned long old = *m;
        unsigned long mask = (1UL << (nr & 63));

        *m = (old ^ mask);
        return ((old & mask) != 0);
}
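/*
 * For illustration only: bit 'nr' lives in 64-bit word 'nr >> 6' at
 * position 'nr & 63' within that word.  With a hypothetical two-word
 * map (names chosen here for the example, not defined by this header):
 *
 *      unsigned long map[2] = { 0UL, 0UL };
 *      __set_bit(70, map);             sets bit 6 of map[1], map[1] == 0x40
 *      __test_and_clear_bit(70, map);  returns 1 and clears the bit again
 */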
#ifdef CONFIG_SMP
#define smp_mb__before_clear_bit()	membar_storeload_loadload()
#define smp_mb__after_clear_bit()	membar_storeload_storestore()
#else
#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()
#endif

static inline int test_bit(int nr, __const__ volatile unsigned long *addr)
{
        return (1UL & (addr[nr >> 6] >> (nr & 63))) != 0UL;
}
/* The easy/cheese version for now. */
static inline unsigned long ffz(unsigned long word)
{
        unsigned long result;

        result = 0;
        while (word & 1) {
                result++;
                word >>= 1;
        }
        return result;
}
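/*
 * For illustration only: ffz() returns the index of the lowest clear
 * bit, e.g. ffz(0x07) == 3 and ffz(0) == 0.
 */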
/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
        unsigned long result = 0;

        while (!(word & 1UL)) {
                result++;
                word >>= 1;
        }
        return result;
}
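/*
 * For illustration only: __ffs() is the 0-based index of the lowest
 * set bit, e.g. __ffs(0x10) == 4; per the comment above, the caller
 * must ensure the word is non-zero.
 */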
/*
 * fls: find last bit set.
 */
#define fls(x) generic_fls(x)
#define fls64(x) generic_fls64(x)

#ifdef __KERNEL__

/*
 * Every architecture must define this function. It's the fastest
 * way of searching a 140-bit bitmap where the first 100 bits are
 * unlikely to be set. It's guaranteed that at least one of the 140
 * bits is set.
 */
static inline int sched_find_first_bit(unsigned long *b)
{
        if (unlikely(b[0]))
                return __ffs(b[0]);
        if (unlikely(((unsigned int)b[1])))
                return __ffs(b[1]) + 64;
        if (b[1] >> 32)
                return __ffs(b[1] >> 32) + 96;
        return __ffs(b[2]) + 128;
}
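/*
 * For illustration only: the 140-bit priority map spans b[0]
 * (bits 0-63), b[1] (bits 64-127) and the low bits of b[2]
 * (bits 128-139).  A hypothetical map with only bit 100 set,
 *
 *      unsigned long prio[3] = { 0UL, 1UL << 36, 0UL };
 *
 * takes the b[1] >> 32 branch and returns
 * __ffs(prio[1] >> 32) + 96 == 4 + 96 == 100.
 */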
/*
 * ffs: find first bit set. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */
static inline int ffs(int x)
{
        if (!x)
                return 0;
        return __ffs((unsigned long)x) + 1;
}
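/*
 * For illustration only: ffs() is 1-based like the libc routine, so
 * ffs(0) == 0 and ffs(0x10) == 5, whereas __ffs(0x10) == 4.
 */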
/*
 * hweightN: returns the hamming weight (i.e. the number
 * of bits set) of a N-bit word
 */

#ifdef ULTRA_HAS_POPULATION_COUNT

static inline unsigned int hweight64(unsigned long w)
{
        unsigned int res;

        __asm__ ("popc %1,%0" : "=r" (res) : "r" (w));
        return res;
}

static inline unsigned int hweight32(unsigned int w)
{
        unsigned int res;

        __asm__ ("popc %1,%0" : "=r" (res) : "r" (w & 0xffffffff));
        return res;
}

static inline unsigned int hweight16(unsigned int w)
{
        unsigned int res;

        __asm__ ("popc %1,%0" : "=r" (res) : "r" (w & 0xffff));
        return res;
}

static inline unsigned int hweight8(unsigned int w)
{
        unsigned int res;

        __asm__ ("popc %1,%0" : "=r" (res) : "r" (w & 0xff));
        return res;
}

#else

#define hweight64(x) generic_hweight64(x)
#define hweight32(x) generic_hweight32(x)
#define hweight16(x) generic_hweight16(x)
#define hweight8(x) generic_hweight8(x)

#endif
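/*
 * For illustration only: hweightN() counts the set bits in the low N
 * bits, e.g. hweight8(0xf0) == 4, hweight32(0xffffffff) == 32 and
 * hweight64(0UL) == 0.
 */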
#endif /* __KERNEL__ */

/**
 * find_next_bit - find the next set bit in a memory region
 * @addr: The address to base the search on
 * @offset: The bitnumber to start searching at
 * @size: The maximum size to search
 */
extern unsigned long find_next_bit(const unsigned long *, unsigned long,
                                   unsigned long);

/**
 * find_first_bit - find the first set bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit-number of the first set bit, not the number of the byte
 * containing a bit.
 */
#define find_first_bit(addr, size) \
	find_next_bit((addr), (size), 0)
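/*
 * For illustration only: a common way to walk every set bit in a
 * bitmap of 'nbits' bits (handle() is a hypothetical consumer, not
 * something defined by this header):
 *
 *      for (bit = find_first_bit(map, nbits);
 *           bit < nbits;
 *           bit = find_next_bit(map, nbits, bit + 1))
 *              handle(bit);
 */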
/* find_next_zero_bit() finds the first zero bit in a bit string of length
 * 'size' bits, starting the search at bit 'offset'. This is largely based
 * on Linus's ALPHA routines, which are pretty portable BTW.
 */
extern unsigned long find_next_zero_bit(const unsigned long *,
                                        unsigned long, unsigned long);

#define find_first_zero_bit(addr, size) \
	find_next_zero_bit((addr), (size), 0)
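/* The *_le_bit() operations below address the bitmap as a little-endian
 * byte stream.  On big-endian sparc64 this is done by XOR-ing the bit
 * number with 0x38 (56): bits 3-5 of 'nr' select the byte within a
 * 64-bit word, and inverting them maps byte i to byte 7 - i while the
 * bit-within-byte and word indices are left untouched.
 */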
#define test_and_set_le_bit(nr,addr) \
	test_and_set_bit((nr) ^ 0x38, (addr))
#define test_and_clear_le_bit(nr,addr) \
	test_and_clear_bit((nr) ^ 0x38, (addr))

static inline int test_le_bit(int nr, __const__ unsigned long * addr)
{
        int mask;
        __const__ unsigned char *ADDR = (__const__ unsigned char *) addr;

        ADDR += nr >> 3;
        mask = 1 << (nr & 0x07);
        return ((mask & *ADDR) != 0);
}

#define find_first_zero_le_bit(addr, size) \
	find_next_zero_le_bit((addr), (size), 0)

extern unsigned long find_next_zero_le_bit(unsigned long *, unsigned long, unsigned long);
#ifdef __KERNEL__

#define __set_le_bit(nr, addr) \
	__set_bit((nr) ^ 0x38, (addr))
#define __clear_le_bit(nr, addr) \
	__clear_bit((nr) ^ 0x38, (addr))
#define __test_and_clear_le_bit(nr, addr) \
	__test_and_clear_bit((nr) ^ 0x38, (addr))
#define __test_and_set_le_bit(nr, addr) \
	__test_and_set_bit((nr) ^ 0x38, (addr))

#define ext2_set_bit(nr,addr) \
	__test_and_set_le_bit((nr),(unsigned long *)(addr))
#define ext2_set_bit_atomic(lock,nr,addr) \
	test_and_set_le_bit((nr),(unsigned long *)(addr))
#define ext2_clear_bit(nr,addr) \
	__test_and_clear_le_bit((nr),(unsigned long *)(addr))
#define ext2_clear_bit_atomic(lock,nr,addr) \
	test_and_clear_le_bit((nr),(unsigned long *)(addr))
#define ext2_test_bit(nr,addr) \
	test_le_bit((nr),(unsigned long *)(addr))
#define ext2_find_first_zero_bit(addr, size) \
	find_first_zero_le_bit((unsigned long *)(addr), (size))
#define ext2_find_next_zero_bit(addr, size, off) \
	find_next_zero_le_bit((unsigned long *)(addr), (size), (off))

/* Bitmap functions for the minix filesystem. */
#define minix_test_and_set_bit(nr,addr) \
	test_and_set_bit((nr),(unsigned long *)(addr))
#define minix_set_bit(nr,addr) \
	set_bit((nr),(unsigned long *)(addr))
#define minix_test_and_clear_bit(nr,addr) \
	test_and_clear_bit((nr),(unsigned long *)(addr))
#define minix_test_bit(nr,addr) \
	test_bit((nr),(unsigned long *)(addr))
#define minix_find_first_zero_bit(addr,size) \
	find_first_zero_bit((unsigned long *)(addr),(size))

#endif /* __KERNEL__ */

#endif /* defined(_SPARC64_BITOPS_H) */