#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i)          { (i) }

#define atomic_read(v)          ((v)->counter)
#define atomic_set(v,i)         (((v)->counter) = (i))
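
/*
 * All of the routines below are built on the PowerPC load-reserve /
 * store-conditional pair (lwarx/stwcx. here, ldarx/stdcx. in the 64-bit
 * versions): the loop reloads the counter, applies the operation and
 * retries if another processor modified the word while the reservation
 * was held.  The value-returning variants are additionally bracketed by
 * LWSYNC_ON_SMP before and ISYNC_ON_SMP after the loop so that they act
 * as full memory barriers on SMP; the plain add/sub/inc/dec variants
 * impose no ordering.  PPC405_ERR77() expands to a workaround for
 * erratum #77 on affected IBM 405 cores (CONFIG_IBM405_ERR77) and to
 * nothing on everything else.
 */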
static __inline__ void atomic_add(int a, atomic_t *v)
{
        int t;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%3         # atomic_add\n\
        add     %0,%2,%0\n"
        PPC405_ERR77(0,%3)
"       stwcx.  %0,0,%3 \n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (a), "r" (&v->counter)
        : "cc");
}
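
/*
 * atomic_add_return() adds @a to @v and returns the new value.  Unlike
 * atomic_add() above it is a full memory barrier on SMP, hence the
 * LWSYNC_ON_SMP/ISYNC_ON_SMP bracketing and the "memory" clobber.
 */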
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        LWSYNC_ON_SMP
"1:     lwarx   %0,0,%2         # atomic_add_return\n\
        add     %0,%1,%0\n"
        PPC405_ERR77(0,%2)
"       stwcx.  %0,0,%2 \n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");

        return t;
}

#define atomic_add_negative(a, v)       (atomic_add_return((a), (v)) < 0)
static __inline__ void atomic_sub(int a, atomic_t *v)
{
        int t;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%3         # atomic_sub\n\
        subf    %0,%2,%0\n"
        PPC405_ERR77(0,%3)
"       stwcx.  %0,0,%3 \n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (a), "r" (&v->counter)
        : "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        LWSYNC_ON_SMP
"1:     lwarx   %0,0,%2         # atomic_sub_return\n\
        subf    %0,%1,%0\n"
        PPC405_ERR77(0,%2)
"       stwcx.  %0,0,%2 \n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");

        return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%2         # atomic_inc\n\
        addic   %0,%0,1\n"
        PPC405_ERR77(0,%2)
"       stwcx.  %0,0,%2 \n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (&v->counter)
        : "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        LWSYNC_ON_SMP
"1:     lwarx   %0,0,%1         # atomic_inc_return\n\
        addic   %0,%0,1\n"
        PPC405_ERR77(0,%1)
"       stwcx.  %0,0,%1 \n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "memory");

        return t;
}
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%2         # atomic_dec\n\
        addic   %0,%0,-1\n"
        PPC405_ERR77(0,%2)
"       stwcx.  %0,0,%2\n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (&v->counter)
        : "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        LWSYNC_ON_SMP
"1:     lwarx   %0,0,%1         # atomic_dec_return\n\
        addic   %0,%0,-1\n"
        PPC405_ERR77(0,%1)
"       stwcx.  %0,0,%1\n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "memory");

        return t;
}
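
/*
 * atomic_cmpxchg(v, old, new) stores new in *v only if *v currently
 * equals old and returns the value that was found there; atomic_xchg()
 * unconditionally stores new and returns the previous value.  Both are
 * thin wrappers around the generic cmpxchg()/xchg() on the counter word.
 */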
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
        int t;

        __asm__ __volatile__ (
        LWSYNC_ON_SMP
"1:     lwarx   %0,0,%1         # atomic_add_unless\n\
        cmpw    0,%0,%3 \n\
        beq-    2f \n\
        add     %0,%2,%0 \n"
        PPC405_ERR77(0,%1)
"       stwcx.  %0,0,%1 \n\
        bne-    1b \n"
        ISYNC_ON_SMP
"       subf    %0,%2,%0 \n\
2:"
        : "=&r" (t)
        : "r" (&v->counter), "r" (a), "r" (u)
        : "cc", "memory");

        return t != u;
}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_sub_and_test(a, v)       (atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)          (atomic_dec_return((v)) == 0)
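
/*
 * atomic_inc_not_zero() is typically used to take a reference on an
 * object found by a lockless lookup, where a count of zero means the
 * object is already on its way out; the caller only proceeds when the
 * increment actually happened.  A minimal sketch, with an illustrative
 * object type and lookup function that are not part of this header:
 *
 *      struct foo *p = foo_lookup(key);
 *      if (p && !atomic_inc_not_zero(&p->refcnt))
 *              p = NULL;
 */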
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
        int t;

        __asm__ __volatile__(
        LWSYNC_ON_SMP
"1:     lwarx   %0,0,%1         # atomic_dec_if_positive\n\
        cmpwi   %0,1\n\
        addi    %0,%0,-1\n\
        blt-    2f\n"
        PPC405_ERR77(0,%1)
"       stwcx.  %0,0,%1\n\
        bne-    1b"
        ISYNC_ON_SMP
        "\n\
2:"     : "=&b" (t)
        : "r" (&v->counter)
        : "cc", "memory");

        return t;
}
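
/*
 * Because the result above is the decremented value whether or not the
 * store happened, a caller checks its sign to find out whether it really
 * consumed a count, e.g. (illustrative only, not part of this header):
 *
 *      if (atomic_dec_if_positive(&counter) >= 0)
 *              ... got one ...
 *
 * The "b" constraint on the output keeps r0 out of %0, since addi treats
 * r0 in the rA position as the constant 0 rather than a register.
 */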
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
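
/*
 * The four macros above have to be full smp_mb() barriers here because
 * the plain atomic_inc()/atomic_dec() implementations provide no
 * ordering at all; only the value-returning variants are barriers.
 */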
#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)        { (i) }

#define atomic64_read(v)        ((v)->counter)
#define atomic64_set(v,i)       (((v)->counter) = (i))
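
/*
 * The atomic64_t operations below mirror the 32-bit ones, using
 * ldarx/stdcx. on the full 64-bit counter.  They exist only on ppc64,
 * where the 405 erratum workaround is irrelevant (the 405 is a 32-bit
 * core), which is why PPC405_ERR77() does not appear here.
 */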
static __inline__ void atomic64_add(long a, atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%3         # atomic64_add\n\
        add     %0,%2,%0\n\
        stdcx.  %0,0,%3 \n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (a), "r" (&v->counter)
        : "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        LWSYNC_ON_SMP
"1:     ldarx   %0,0,%2         # atomic64_add_return\n\
        add     %0,%1,%0\n\
        stdcx.  %0,0,%2 \n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");

        return t;
}

#define atomic64_add_negative(a, v)     (atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%3         # atomic64_sub\n\
        subf    %0,%2,%0\n\
        stdcx.  %0,0,%3 \n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (a), "r" (&v->counter)
        : "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        LWSYNC_ON_SMP
"1:     ldarx   %0,0,%2         # atomic64_sub_return\n\
        subf    %0,%1,%0\n\
        stdcx.  %0,0,%2 \n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");

        return t;
}
static __inline__ void atomic64_inc(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%2         # atomic64_inc\n\
        addic   %0,%0,1\n\
        stdcx.  %0,0,%2 \n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (&v->counter)
        : "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        LWSYNC_ON_SMP
"1:     ldarx   %0,0,%1         # atomic64_inc_return\n\
        addic   %0,%0,1\n\
        stdcx.  %0,0,%1 \n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "memory");

        return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%2         # atomic64_dec\n\
        addic   %0,%0,-1\n\
        stdcx.  %0,0,%2\n\
        bne-    1b"
        : "=&r" (t), "+m" (v->counter)
        : "r" (&v->counter)
        : "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        LWSYNC_ON_SMP
"1:     ldarx   %0,0,%1         # atomic64_dec_return\n\
        addic   %0,%0,-1\n\
        stdcx.  %0,0,%1\n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "memory");

        return t;
}

#define atomic64_sub_and_test(a, v)     (atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)        (atomic64_dec_return((v)) == 0)
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        LWSYNC_ON_SMP
"1:     ldarx   %0,0,%1         # atomic64_dec_if_positive\n\
        addic.  %0,%0,-1\n\
        blt-    2f\n\
        stdcx.  %0,0,%1\n\
        bne-    1b"
        ISYNC_ON_SMP
        "\n\
2:"     : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "memory");

        return t;
}

#endif /* __powerpc64__ */
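
/*
 * asm-generic/atomic.h supplies the atomic_long_t wrappers, which map
 * onto atomic_t or atomic64_t depending on BITS_PER_LONG.
 */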
#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */