#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
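
/*
 * Usage sketch (illustrative only, not part of the original header):
 * atomic_read() and atomic_set() are plain, unordered accesses; only the
 * read-modify-write helpers below are atomic. The helper names here are
 * hypothetical.
 */
static __inline__ void example_counter_reset(atomic_t *ctr)
{
	atomic_set(ctr, 0);		/* plain store, no barrier implied */
}

static __inline__ int example_counter_peek(atomic_t *ctr)
{
	return atomic_read(ctr);	/* plain load, no barrier implied */
}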

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
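
/*
 * Usage sketch (illustrative only, not part of the original header): with a
 * counter initialised to ATOMIC_INIT(-1), atomic_inc_and_test() tells the
 * caller whose increment reached zero that it was first to arrive. The
 * helper name is hypothetical.
 */
static __inline__ int example_first_user(atomic_t *users)
{
	/* Non-zero only for the single caller whose increment hit zero. */
	return atomic_inc_and_test(users);
}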

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
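
/*
 * Usage sketch (illustrative only, not part of the original header): the
 * usual compare-and-swap retry loop built on atomic_cmpxchg(). This
 * hypothetical helper increments a counter but saturates at "max".
 */
static __inline__ int example_inc_saturating(atomic_t *v, int max)
{
	int old, new;

	do {
		old = atomic_read(v);
		if (old >= max)
			return old;	/* already saturated, store nothing */
		new = old + 1;
		/* atomic_cmpxchg() returns the value it found; retry if we raced */
	} while (atomic_cmpxchg(v, old, new) != old);

	return new;
}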

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	ISYNC_ON_SMP
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
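
/*
 * Usage sketch (illustrative only, not part of the original header): the
 * classic reference-counting pattern built from the helpers above. The
 * helper names and the release callback are hypothetical.
 */
static __inline__ int example_ref_get(atomic_t *refcount)
{
	/* Fails (returns 0) if the object is already on its way to zero. */
	return atomic_inc_not_zero(refcount);
}

static __inline__ void example_ref_put(atomic_t *refcount,
				       void (*release)(atomic_t *))
{
	/* Only the caller that drops the count to zero releases the object. */
	if (atomic_dec_and_test(refcount))
		release(refcount);
}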

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
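
/*
 * Usage sketch (illustrative only, not part of the original header):
 * atomic_dec_if_positive() as a non-blocking "take one token" operation on
 * a counter of available slots; the counter never goes below zero. The
 * helper name is hypothetical.
 */
static __inline__ int example_try_take_slot(atomic_t *free_slots)
{
	/* Old value minus 1 is >= 0 exactly when a slot was taken. */
	return atomic_dec_if_positive(free_slots) >= 0;
}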

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
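
/*
 * Usage sketch (illustrative only, not part of the original header): plain
 * atomic_dec()/atomic_inc() carry no ordering of their own, so the macros
 * above are used to order surrounding ordinary accesses against them. The
 * flag/pending names are hypothetical.
 */
static __inline__ void example_publish_then_dec(int *flag, atomic_t *pending)
{
	*flag = 1;			/* ordinary store to publish first */
	smp_mb__before_atomic_dec();	/* keeps the store above before the dec */
	atomic_dec(pending);
}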

#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#endif /* __powerpc64__ */

#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */