atomic.h
#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <asm/synch.h>

#define ATOMIC_INIT(i)		{ (i) }
#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
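/*
 * Illustrative sketch (not part of the original header): how a caller
 * would typically declare and use an atomic_t with the accessors above.
 * The variable name is an assumption for the example.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_events, 10);
 *	if (atomic_read(&nr_events) > 0)
 *		handle_events();	// hypothetical helper
 *
 * Note that atomic_read() and atomic_set() compile to a plain load and
 * store of the counter; only the read-modify-write operations below
 * need the lwarx/stwcx. reservation loop.
 */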
/*
 * Erratum #77 on the 405 means we need a sync or dcbt before every
 * stwcx.  The old ATOMIC_SYNC_FIX covered some but not all of this.
 */
#ifdef CONFIG_IBM405_ERR77
#define PPC405_ERR77(ra,rb)	"dcbt " #ra "," #rb ";"
#else
#define PPC405_ERR77(ra,rb)
#endif
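/*
 * Expansion sketch (for illustration only): with CONFIG_IBM405_ERR77
 * set, a sequence in an asm body such as
 *
 *	PPC405_ERR77(0,%2)
 *	"	stwcx.	%0,0,%2\n"
 *
 * pastes together into the single string
 *
 *	"dcbt 0,%2;	stwcx.	%0,0,%2\n"
 *
 * so the dcbt is emitted immediately before the stwcx., as the erratum
 * workaround requires.  On other CPUs the macro expands to nothing and
 * the stwcx. stands alone.
 */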
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
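/*
 * Usage sketch (illustrative; the variable and helper names are
 * assumptions): atomic_add() is fire-and-forget and implies no memory
 * barrier, while atomic_add_return() hands back the new value and is
 * ordered on SMP (EIEIO_ON_SMP before and ISYNC_ON_SMP after the
 * lwarx/stwcx. loop, as visible above).
 *
 *	atomic_add(len, &total_bytes);		// no result needed
 *	if (atomic_add_return(1, &active_users) == 1)
 *		first_user_came_online();	// hypothetical helper
 */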
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns true if the result is
 * zero, or false for all other cases.
 */
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)
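/*
 * Example for the doc comment above (illustrative only; the variable
 * name is an assumption):
 *
 *	atomic_t pending = ATOMIC_INIT(-1);
 *
 *	if (atomic_inc_and_test(&pending))
 *		// the increment took the counter from -1 to exactly 0
 *		complete_pending_work();	// hypothetical helper
 */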
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
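/*
 * Sketch of the classic reference-count idiom these macros serve
 * (struct obj and obj_free() are assumptions for the example):
 *
 *	static void obj_put(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->refcount))
 *			obj_free(o);	// last reference dropped
 *	}
 *
 * Because the decrement is atomic, exactly one caller observes the
 * transition to zero, so the free runs exactly once.
 */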
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
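/*
 * Usage sketch (illustrative; the variable name is an assumption):
 * atomic_dec_if_positive() performs the store only when the
 * decremented value is still >= 0, so it can claim one unit of a
 * counted resource without driving the counter negative:
 *
 *	if (atomic_dec_if_positive(&free_slots) < 0)
 *		return -EBUSY;		// no slot was available
 *	// a slot is now claimed; release it with atomic_inc(&free_slots)
 */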
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
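/*
 * Illustrative note: atomic_inc()/atomic_dec() above imply no memory
 * barrier, so code that must order other accesses against the counter
 * update pairs them with these macros.  A sketch (shared->flag and
 * shared->seq are assumptions for the example):
 *
 *	shared->flag = 1;
 *	smp_mb__before_atomic_inc();	// order the store before the inc
 *	atomic_inc(&shared->seq);
 */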
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */