atomic.h

/*
 * PowerPC64 atomic operations
 *
 * Copyright (C) 2001 Paul Mackerras <paulus@au.ibm.com>, IBM
 * Copyright (C) 2001 Anton Blanchard <anton@au.ibm.com>, IBM
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */

#ifndef _ASM_PPC64_ATOMIC_H_
#define _ASM_PPC64_ATOMIC_H_

#include <asm/memory.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
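
/*
 * Illustrative sketch of the basic interface (the name `example_count'
 * is hypothetical):
 *
 *	static atomic_t example_count = ATOMIC_INIT(0);
 *
 *	atomic_set(&example_count, 10);
 *	if (atomic_read(&example_count) > 0)
 *		...
 *
 * atomic_read() and atomic_set() are plain accesses to the counter
 * field; only the read-modify-write helpers below use lwarx/stwcx.
 * sequences.
 */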

static __inline__ void atomic_add(int a, atomic_t *v)
{
        int t;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%3         # atomic_add\n\
        add     %0,%2,%0\n\
        stwcx.  %0,0,%3\n\
        bne-    1b"
        : "=&r" (t), "=m" (v->counter)
        : "r" (a), "r" (&v->counter), "m" (v->counter)
        : "cc");
}
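
/*
 * The sequence above is the load-reserve/store-conditional pattern used
 * throughout this file: lwarx loads the counter and establishes a
 * reservation, stwcx. stores the new value only if the reservation
 * still holds (setting CR0[EQ] on success), and "bne- 1b" retries the
 * whole read-modify-write if another CPU modified the word in between.
 * atomic_add() contains no barrier instructions; the *_return variants
 * below additionally wrap the loop in EIEIO_ON_SMP/ISYNC_ON_SMP
 * (expected to be provided by the <asm/memory.h> include above) so
 * that, on SMP, they also order the surrounding memory accesses.
 */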

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        EIEIO_ON_SMP
"1:     lwarx   %0,0,%2         # atomic_add_return\n\
        add     %0,%1,%0\n\
        stwcx.  %0,0,%2\n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");

        return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
        int t;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%3         # atomic_sub\n\
        subf    %0,%2,%0\n\
        stwcx.  %0,0,%3\n\
        bne-    1b"
        : "=&r" (t), "=m" (v->counter)
        : "r" (a), "r" (&v->counter), "m" (v->counter)
        : "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        EIEIO_ON_SMP
"1:     lwarx   %0,0,%2         # atomic_sub_return\n\
        subf    %0,%1,%0\n\
        stwcx.  %0,0,%2\n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");

        return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%2         # atomic_inc\n\
        addic   %0,%0,1\n\
        stwcx.  %0,0,%2\n\
        bne-    1b"
        : "=&r" (t), "=m" (v->counter)
        : "r" (&v->counter), "m" (v->counter)
        : "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        EIEIO_ON_SMP
"1:     lwarx   %0,0,%1         # atomic_inc_return\n\
        addic   %0,%0,1\n\
        stwcx.  %0,0,%1\n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "memory");

        return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)
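
/*
 * Illustrative sketch (the name `example_users' is hypothetical): with
 * a counter initialised to -1, the first atomic_inc_and_test() takes it
 * to 0 and is the only call to return true.
 *
 *	static atomic_t example_users = ATOMIC_INIT(-1);
 *
 *	if (atomic_inc_and_test(&example_users))
 *		...		(first user only)
 */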

static __inline__ void atomic_dec(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%2         # atomic_dec\n\
        addic   %0,%0,-1\n\
        stwcx.  %0,0,%2\n\
        bne-    1b"
        : "=&r" (t), "=m" (v->counter)
        : "r" (&v->counter), "m" (v->counter)
        : "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        EIEIO_ON_SMP
"1:     lwarx   %0,0,%1         # atomic_dec_return\n\
        addic   %0,%0,-1\n\
        stwcx.  %0,0,%1\n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "memory");

        return t;
}

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
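
/*
 * Typical reference-count pattern, sketched with hypothetical names
 * (`obj', `release_obj'):
 *
 *	atomic_inc(&obj->refcount);			(take a reference)
 *	...
 *	if (atomic_dec_and_test(&obj->refcount))	(drop it)
 *		release_obj(obj);			(last reference gone)
 *
 * Since atomic_dec_and_test() is built on atomic_dec_return(), the
 * decrement carries the EIEIO_ON_SMP/ISYNC_ON_SMP ordering above.
 */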

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        EIEIO_ON_SMP
"1:     lwarx   %0,0,%1         # atomic_dec_if_positive\n\
        addic.  %0,%0,-1\n\
        blt-    2f\n\
        stwcx.  %0,0,%1\n\
        bne-    1b"
        ISYNC_ON_SMP
        "\n\
2:"     : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "memory");

        return t;
}
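
/*
 * Illustrative sketch (the name `example_slots' is hypothetical): the
 * store is skipped once the result would be negative, so this helper
 * never drives the counter below zero and can be used to claim one of
 * a limited number of slots; a negative return value means nothing was
 * taken and the counter is unchanged.
 *
 *	if (atomic_dec_if_positive(&example_slots) < 0)
 *		...		(no slot available)
 */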

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
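
/*
 * atomic_inc() and atomic_dec() above contain no barrier instructions,
 * so callers that need ordering around them use these hooks, which on
 * this architecture fall back to a full smp_mb().
 */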

#endif /* _ASM_PPC64_ATOMIC_H_ */