/* $Id: bitops.h,v 1.67 2001/11/19 18:36:34 davem Exp $
 * bitops.h: Bit string operations on the Sparc.
 *
 * Copyright 1995 David S. Miller (davem@caip.rutgers.edu)
 * Copyright 1996 Eddie C. Dost (ecd@skynet.be)
 * Copyright 2001 Anton Blanchard (anton@samba.org)
 */

#ifndef _SPARC_BITOPS_H
#define _SPARC_BITOPS_H

#include <linux/compiler.h>
#include <asm/byteorder.h>

#ifdef __KERNEL__

/*
 * Set bit 'nr' in 32-bit quantity at address 'addr' where bit '0'
 * is in the highest of the four bytes and bit '31' is the high bit
 * within the first byte. Sparc is BIG-Endian. Unless noted otherwise
 * all bit-ops return 0 if bit was previously clear and != 0 otherwise.
 */
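
/*
 * Worked example of the indexing below (annotation, not in the
 * original source): for nr = 35 the word offset is 35 >> 5 == 1 and
 * the mask is 1 << (35 & 31) == 1 << 3, so the operation targets
 * bit 3 of addr[1], and the test_and_* variants report whether that
 * bit was set beforehand.
 */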
static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
{
        register unsigned long mask asm("g2");
        register unsigned long *ADDR asm("g1");
        register int tmp1 asm("g3");
        register int tmp2 asm("g4");
        register int tmp3 asm("g5");
        register int tmp4 asm("g7");

        ADDR = ((unsigned long *) addr) + (nr >> 5);
        mask = 1 << (nr & 31);

        __asm__ __volatile__(
        "mov %%o7, %%g4\n\t"
        "call ___set_bit\n\t"
        " add %%o7, 8, %%o7\n"
        : "=&r" (mask), "=r" (tmp1), "=r" (tmp2), "=r" (tmp3), "=r" (tmp4)
        : "0" (mask), "r" (ADDR)
        : "memory", "cc");

        return mask != 0;
}
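
/*
 * Annotation (not in the original source) on the inline assembly used
 * by all the bit operations in this file: the actual update is done
 * out of line by assembler helpers (___set_bit, ___clear_bit,
 * ___change_bit).  The register variables pin the mask to %g2 and the
 * word address to %g1, which is the helpers' argument convention, and
 * the bit's previous state comes back in %g2 through the "0"
 * constraint.  tmp1..tmp4 exist only to tell the compiler that %g3,
 * %g4, %g5 and %g7 get clobbered.  The return address in %o7 is
 * copied to %g4 and biased by 8 in the call's delay slot to match the
 * helpers' return convention.
 */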

static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
        register unsigned long mask asm("g2");
        register unsigned long *ADDR asm("g1");
        register int tmp1 asm("g3");
        register int tmp2 asm("g4");
        register int tmp3 asm("g5");
        register int tmp4 asm("g7");

        ADDR = ((unsigned long *) addr) + (nr >> 5);
        mask = 1 << (nr & 31);

        __asm__ __volatile__(
        "mov %%o7, %%g4\n\t"
        "call ___set_bit\n\t"
        " add %%o7, 8, %%o7\n"
        : "=&r" (mask), "=r" (tmp1), "=r" (tmp2), "=r" (tmp3), "=r" (tmp4)
        : "0" (mask), "r" (ADDR)
        : "memory", "cc");
}
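
/*
 * Usage sketch (hypothetical names, not from this file): set_bit()
 * discards the old value, while test_and_set_bit() returns it, which
 * makes the latter the usual way to claim a flag exactly once:
 *
 *	static unsigned long init_done;
 *
 *	if (!test_and_set_bit(0, &init_done))
 *		run_one_time_setup();
 *
 * Here init_done and run_one_time_setup() are illustrative only.
 */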

static inline int test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
{
        register unsigned long mask asm("g2");
        register unsigned long *ADDR asm("g1");
        register int tmp1 asm("g3");
        register int tmp2 asm("g4");
        register int tmp3 asm("g5");
        register int tmp4 asm("g7");

        ADDR = ((unsigned long *) addr) + (nr >> 5);
        mask = 1 << (nr & 31);

        __asm__ __volatile__(
        "mov %%o7, %%g4\n\t"
        "call ___clear_bit\n\t"
        " add %%o7, 8, %%o7\n"
        : "=&r" (mask), "=r" (tmp1), "=r" (tmp2), "=r" (tmp3), "=r" (tmp4)
        : "0" (mask), "r" (ADDR)
        : "memory", "cc");

        return mask != 0;
}
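
/*
 * Usage sketch (hypothetical names): test_and_clear_bit() is the
 * common way to consume a pending-work flag atomically, so that only
 * one caller acts on it:
 *
 *	if (test_and_clear_bit(WORK_PENDING, &pending_flags))
 *		handle_pending_work();
 *
 * WORK_PENDING, pending_flags and handle_pending_work() are
 * illustrative only.
 */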

static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
        register unsigned long mask asm("g2");
        register unsigned long *ADDR asm("g1");
        register int tmp1 asm("g3");
        register int tmp2 asm("g4");
        register int tmp3 asm("g5");
        register int tmp4 asm("g7");

        ADDR = ((unsigned long *) addr) + (nr >> 5);
        mask = 1 << (nr & 31);

        __asm__ __volatile__(
        "mov %%o7, %%g4\n\t"
        "call ___clear_bit\n\t"
        " add %%o7, 8, %%o7\n"
        : "=&r" (mask), "=r" (tmp1), "=r" (tmp2), "=r" (tmp3), "=r" (tmp4)
        : "0" (mask), "r" (ADDR)
        : "memory", "cc");
}

static inline int test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
{
        register unsigned long mask asm("g2");
        register unsigned long *ADDR asm("g1");
        register int tmp1 asm("g3");
        register int tmp2 asm("g4");
        register int tmp3 asm("g5");
        register int tmp4 asm("g7");

        ADDR = ((unsigned long *) addr) + (nr >> 5);
        mask = 1 << (nr & 31);

        __asm__ __volatile__(
        "mov %%o7, %%g4\n\t"
        "call ___change_bit\n\t"
        " add %%o7, 8, %%o7\n"
        : "=&r" (mask), "=r" (tmp1), "=r" (tmp2), "=r" (tmp3), "=r" (tmp4)
        : "0" (mask), "r" (ADDR)
        : "memory", "cc");

        return mask != 0;
}
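
/*
 * Worked example (annotation): test_and_change_bit(0, &word) flips
 * bit 0 and reports its previous state, so two successive calls on
 * the same bit return opposite values and leave the bit unchanged
 * overall.
 */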

static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
        register unsigned long mask asm("g2");
        register unsigned long *ADDR asm("g1");
        register int tmp1 asm("g3");
        register int tmp2 asm("g4");
        register int tmp3 asm("g5");
        register int tmp4 asm("g7");

        ADDR = ((unsigned long *) addr) + (nr >> 5);
        mask = 1 << (nr & 31);

        __asm__ __volatile__(
        "mov %%o7, %%g4\n\t"
        "call ___change_bit\n\t"
        " add %%o7, 8, %%o7\n"
        : "=&r" (mask), "=r" (tmp1), "=r" (tmp2), "=r" (tmp3), "=r" (tmp4)
        : "0" (mask), "r" (ADDR)
        : "memory", "cc");
}
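
/*
 * The double-underscore variants (__set_bit(), __test_and_set_bit()
 * and friends) provided by the generic header below perform the same
 * operations without atomicity, so they are only safe when the
 * caller already excludes concurrent access to the word.
 */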
#include <asm-generic/bitops/non-atomic.h>
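
/*
 * These barriers are no-ops here (annotation): presumably because the
 * out-of-line clear_bit() implementation above already provides the
 * ordering the generic callers of these macros expect, nothing extra
 * is needed before or after clearing a bit.
 */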
#define smp_mb__before_clear_bit()	do { } while(0)
#define smp_mb__after_clear_bit()	do { } while(0)

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/ext2-non-atomic.h>
#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/minix.h>

#endif /* __KERNEL__ */

#endif /* defined(_SPARC_BITOPS_H) */