/*
 * Copyright (C) 1996 Paul Mackerras.
 */
#include <linux/kernel.h>
#include <linux/bitops.h>

/*
 * If the bitops are not inlined in bitops.h, they are defined here.
 *  -- paulus
 */
#if !__INLINE_BITOPS
  11. void set_bit(int nr, volatile void * addr)
  12. {
  13. unsigned long old;
  14. unsigned long mask = 1 << (nr & 0x1f);
  15. unsigned long *p = ((unsigned long *)addr) + (nr >> 5);
  16. __asm__ __volatile__(SMP_WMB "\n\
  17. 1: lwarx %0,0,%3 \n\
  18. or %0,%0,%2 \n"
  19. PPC405_ERR77(0,%3)
  20. " stwcx. %0,0,%3 \n\
  21. bne 1b"
  22. SMP_MB
  23. : "=&r" (old), "=m" (*p)
  24. : "r" (mask), "r" (p), "m" (*p)
  25. : "cc" );
  26. }
  27. void clear_bit(int nr, volatile void *addr)
  28. {
  29. unsigned long old;
  30. unsigned long mask = 1 << (nr & 0x1f);
  31. unsigned long *p = ((unsigned long *)addr) + (nr >> 5);
  32. __asm__ __volatile__(SMP_WMB "\n\
  33. 1: lwarx %0,0,%3 \n\
  34. andc %0,%0,%2 \n"
  35. PPC405_ERR77(0,%3)
  36. " stwcx. %0,0,%3 \n\
  37. bne 1b"
  38. SMP_MB
  39. : "=&r" (old), "=m" (*p)
  40. : "r" (mask), "r" (p), "m" (*p)
  41. : "cc");
  42. }
  43. void change_bit(int nr, volatile void *addr)
  44. {
  45. unsigned long old;
  46. unsigned long mask = 1 << (nr & 0x1f);
  47. unsigned long *p = ((unsigned long *)addr) + (nr >> 5);
  48. __asm__ __volatile__(SMP_WMB "\n\
  49. 1: lwarx %0,0,%3 \n\
  50. xor %0,%0,%2 \n"
  51. PPC405_ERR77(0,%3)
  52. " stwcx. %0,0,%3 \n\
  53. bne 1b"
  54. SMP_MB
  55. : "=&r" (old), "=m" (*p)
  56. : "r" (mask), "r" (p), "m" (*p)
  57. : "cc");
  58. }
  59. int test_and_set_bit(int nr, volatile void *addr)
  60. {
  61. unsigned int old, t;
  62. unsigned int mask = 1 << (nr & 0x1f);
  63. volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);
  64. __asm__ __volatile__(SMP_WMB "\n\
  65. 1: lwarx %0,0,%4 \n\
  66. or %1,%0,%3 \n"
  67. PPC405_ERR77(0,%4)
  68. " stwcx. %1,0,%4 \n\
  69. bne 1b"
  70. SMP_MB
  71. : "=&r" (old), "=&r" (t), "=m" (*p)
  72. : "r" (mask), "r" (p), "m" (*p)
  73. : "cc");
  74. return (old & mask) != 0;
  75. }
  76. int test_and_clear_bit(int nr, volatile void *addr)
  77. {
  78. unsigned int old, t;
  79. unsigned int mask = 1 << (nr & 0x1f);
  80. volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);
  81. __asm__ __volatile__(SMP_WMB "\n\
  82. 1: lwarx %0,0,%4 \n\
  83. andc %1,%0,%3 \n"
  84. PPC405_ERR77(0,%4)
  85. " stwcx. %1,0,%4 \n\
  86. bne 1b"
  87. SMP_MB
  88. : "=&r" (old), "=&r" (t), "=m" (*p)
  89. : "r" (mask), "r" (p), "m" (*p)
  90. : "cc");
  91. return (old & mask) != 0;
  92. }
  93. int test_and_change_bit(int nr, volatile void *addr)
  94. {
  95. unsigned int old, t;
  96. unsigned int mask = 1 << (nr & 0x1f);
  97. volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);
  98. __asm__ __volatile__(SMP_WMB "\n\
  99. 1: lwarx %0,0,%4 \n\
  100. xor %1,%0,%3 \n"
  101. PPC405_ERR77(0,%4)
  102. " stwcx. %1,0,%4 \n\
  103. bne 1b"
  104. SMP_MB
  105. : "=&r" (old), "=&r" (t), "=m" (*p)
  106. : "r" (mask), "r" (p), "m" (*p)
  107. : "cc");
  108. return (old & mask) != 0;
  109. }
#endif /* !__INLINE_BITOPS */