atomic.h

#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

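/*
 * Example (illustrative sketch, not part of the original header; the names
 * nr_events, read_events and reset_events are hypothetical): a statically
 * initialized counter accessed through the macros above.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	static int read_events(void)
 *	{
 *		return atomic_read(&nr_events);
 *	}
 *
 *	static void reset_events(void)
 *	{
 *		atomic_set(&nr_events, 0);
 *	}
 */
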
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

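/*
 * The lwarx/stwcx. pair above is a load-reserve/store-conditional retry
 * loop: lwarx loads the counter and places a reservation on it, stwcx.
 * stores the new value only if the reservation is still held, and bne-
 * retries when the store fails.  Roughly, as a C-level sketch (the
 * reserve/conditional-store helpers below are pseudocode standing in for
 * the hardware primitives, not a real API):
 *
 *	do {
 *		t = load_and_reserve(&v->counter);	// lwarx
 *		t += a;
 *	} while (!store_conditional(&v->counter, t));	// stwcx. / bne- 1b
 */
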
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

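/*
 * Note on barriers: the value-returning variants (atomic_add_return() and
 * friends) wrap the loop in EIEIO_ON_SMP/ISYNC_ON_SMP and clobber "memory",
 * so they are intended to act as memory barriers on SMP; the void variants
 * such as atomic_add() above do not order surrounding accesses.
 */
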
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)			\
({							\
	int c, old;					\
	c = atomic_read(v);				\
	for (;;) {					\
		if (unlikely(c == (u)))			\
			break;				\
		old = atomic_cmpxchg((v), c, c + (a));	\
		if (likely(old == c))			\
			break;				\
		c = old;				\
	}						\
	c != (u);					\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

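/*
 * Example (illustrative sketch; struct obj and obj_get are hypothetical):
 * atomic_inc_not_zero() is the usual way to take a reference only while an
 * object's refcount has not already dropped to zero.
 *
 *	struct obj {
 *		atomic_t refs;
 *	};
 *
 *	static int obj_get(struct obj *o)
 *	{
 *		return atomic_inc_not_zero(&o->refs);	// 0: object already dead
 *	}
 */
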
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

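/*
 * Example (illustrative sketch, hypothetical names, paired with obj_get()
 * above): the classic "drop the last reference" pattern built on
 * atomic_dec_and_test().
 *
 *	static void obj_put(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->refs))
 *			kfree(o);
 *	}
 */
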
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

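/*
 * Example (illustrative sketch, hypothetical names): using
 * atomic_dec_if_positive() to consume a token only when one is available.
 * A return value >= 0 means a token was actually taken.
 *
 *	static atomic_t tokens = ATOMIC_INIT(4);
 *
 *	static int try_take_token(void)
 *	{
 *		return atomic_dec_if_positive(&tokens) >= 0;
 *	}
 */
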
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

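/*
 * The void atomic_inc()/atomic_dec() above contain no barriers, so on this
 * architecture the smp_mb__* hooks expand to a full smp_mb().  Illustrative
 * sketch (hypothetical caller; flag and pending are assumed variables):
 *
 *	flag = 1;			// plain store
 *	smp_mb__before_atomic_inc();	// order the store before the inc
 *	atomic_inc(&pending);
 */
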
#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#endif /* __powerpc64__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */