#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

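/*
 * Usage sketch (illustrative only; "nr_widgets" and the helpers are
 * invented names, not kernel symbols): a statically initialised counter
 * driven through the accessors above.
 *
 *	static atomic_t nr_widgets = ATOMIC_INIT(0);
 *
 *	void widget_created(void)
 *	{
 *		atomic_add(1, &nr_widgets);
 *	}
 *
 *	int widgets_outstanding(void)
 *	{
 *		return atomic_read(&nr_widgets);
 *	}
 */
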
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)	(atomic_inc_return(v) == 0)

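/*
 * Usage sketch (illustrative only; the names below are invented):
 * atomic_inc_and_test() is true only for the increment that brings the
 * counter to zero, so a counter biased to -N reports completion of the
 * N-th event.
 *
 *	static atomic_t pending;
 *
 *	void submit_batch(int n)
 *	{
 *		atomic_set(&pending, -n);
 *	}
 *
 *	void one_item_done(void)
 *	{
 *		if (atomic_inc_and_test(&pending))
 *			whole_batch_done();
 *	}
 */
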
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

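/*
 * Sketch (illustrative only, mirroring the asm-generic fallback rather
 * than this file's assembly): atomic_cmpxchg() is enough to build
 * conditional updates in plain C, for example an "add unless equal"
 * operation with the same semantics as atomic_add_unless() below.
 *
 *	static int add_unless_sketch(atomic_t *v, int a, int u)
 *	{
 *		int c = atomic_read(v);
 *
 *		while (c != u) {
 *			int old = atomic_cmpxchg(v, c, c + a);
 *			if (old == c)		// exchange succeeded
 *				return 1;
 *			c = old;		// lost a race, retry with the value seen
 *		}
 *		return 0;
 *	}
 */
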
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	ISYNC_ON_SMP
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

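/*
 * Usage sketch (illustrative only; the struct and function names are
 * invented): the usual reference-count pattern.  atomic_inc_not_zero()
 * takes a reference only while the object is still live, and
 * atomic_dec_and_test() tells exactly one caller that it dropped the
 * last reference.
 *
 *	struct widget {
 *		atomic_t refcnt;
 *	};
 *
 *	static int widget_get(struct widget *w)
 *	{
 *		return atomic_inc_not_zero(&w->refcnt);
 *	}
 *
 *	static void widget_put(struct widget *w)
 *	{
 *		if (atomic_dec_and_test(&w->refcnt))
 *			kfree(w);
 *	}
 */
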
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

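/*
 * Usage sketch (illustrative only; "free_slots" and NR_SLOTS are
 * invented): atomic_dec_if_positive() as a non-blocking "take one if
 * any are left".  A negative return value means the count was already
 * zero (or negative) and nothing was consumed.
 *
 *	static atomic_t free_slots = ATOMIC_INIT(NR_SLOTS);
 *
 *	static int try_take_slot(void)
 *	{
 *		return atomic_dec_if_positive(&free_slots) >= 0;
 *	}
 */
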
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

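/*
 * Usage sketch (illustrative only; "obj" and its fields are invented):
 * atomic_inc() and atomic_dec() imply no memory barrier, so a caller
 * that needs its earlier stores to be visible before the counter moves
 * pairs them with the macros above (which expand to smp_mb() here).
 *
 *	obj->status = DONE;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->outstanding);
 */
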
#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	ISYNC_ON_SMP
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#endif /* __powerpc64__ */

#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */