cmpxchg.h

#ifndef _ASM_POWERPC_CMPXCHG_H_
#define _ASM_POWERPC_CMPXCHG_H_
#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>

/*
 * Atomic exchange
 *
 * Changes the memory location '*ptr' to be val and returns
 * the previous value stored there.
 */
static __always_inline unsigned long
__xchg_u32(volatile void *p, unsigned long val)
{
        unsigned long prev;

        __asm__ __volatile__(
        PPC_RELEASE_BARRIER
"1:     lwarx   %0,0,%2 \n"
        PPC405_ERR77(0,%2)
"       stwcx.  %3,0,%2 \n\
        bne-    1b"
        PPC_ACQUIRE_BARRIER
        : "=&r" (prev), "+m" (*(volatile unsigned int *)p)
        : "r" (p), "r" (val)
        : "cc", "memory");

        return prev;
}
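
/*
 * Editorial note on the pattern above: lwarx loads the word and sets a
 * reservation on it, and stwcx. stores only if the reservation is
 * still held, setting CR0 so that "bne- 1b" retries the whole sequence
 * if another CPU touched the word in between.  PPC_RELEASE_BARRIER and
 * PPC_ACQUIRE_BARRIER (from asm/synch.h) supply the ordering that the
 * generic xchg() contract expects, and PPC405_ERR77() emits a
 * workaround for erratum #77 on the PPC405 core.  The _local variants
 * below are identical except that they omit both barriers.
 */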

/*
 * Atomic exchange, CPU-local flavour
 *
 * Same exchange as __xchg_u32(), but without the release/acquire
 * barriers: the swap itself is still performed atomically, but no
 * ordering against other memory accesses is implied.
 */
static __always_inline unsigned long
__xchg_u32_local(volatile void *p, unsigned long val)
{
        unsigned long prev;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%2 \n"
        PPC405_ERR77(0,%2)
"       stwcx.  %3,0,%2 \n\
        bne-    1b"
        : "=&r" (prev), "+m" (*(volatile unsigned int *)p)
        : "r" (p), "r" (val)
        : "cc", "memory");

        return prev;
}

#ifdef CONFIG_PPC64
static __always_inline unsigned long
__xchg_u64(volatile void *p, unsigned long val)
{
        unsigned long prev;

        __asm__ __volatile__(
        PPC_RELEASE_BARRIER
"1:     ldarx   %0,0,%2 \n"
        PPC405_ERR77(0,%2)
"       stdcx.  %3,0,%2 \n\
        bne-    1b"
        PPC_ACQUIRE_BARRIER
        : "=&r" (prev), "+m" (*(volatile unsigned long *)p)
        : "r" (p), "r" (val)
        : "cc", "memory");

        return prev;
}

static __always_inline unsigned long
__xchg_u64_local(volatile void *p, unsigned long val)
{
        unsigned long prev;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%2 \n"
        PPC405_ERR77(0,%2)
"       stdcx.  %3,0,%2 \n\
        bne-    1b"
        : "=&r" (prev), "+m" (*(volatile unsigned long *)p)
        : "r" (p), "r" (val)
        : "cc", "memory");

        return prev;
}
#endif

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid xchg().
 */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
__xchg(volatile void *ptr, unsigned long x, unsigned int size)
{
        switch (size) {
        case 4:
                return __xchg_u32(ptr, x);
#ifdef CONFIG_PPC64
        case 8:
                return __xchg_u64(ptr, x);
#endif
        }
        __xchg_called_with_bad_pointer();
        return x;
}

static __always_inline unsigned long
__xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
{
        switch (size) {
        case 4:
                return __xchg_u32_local(ptr, x);
#ifdef CONFIG_PPC64
        case 8:
                return __xchg_u64_local(ptr, x);
#endif
        }
        __xchg_called_with_bad_pointer();
        return x;
}
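
/*
 * Editorial note: __xchg() is __always_inline and 'size' is the
 * compile-time constant sizeof(*(ptr)), so the switch folds down to a
 * direct call to the matching helper.  For an unsupported size the
 * call to the deliberately-undefined __xchg_called_with_bad_pointer()
 * survives and the build fails at link time.  Hypothetical example:
 *
 *      u16 flag;
 *      xchg(&flag, 1);   // undefined reference to
 *                        // __xchg_called_with_bad_pointer at link time
 */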

#define xchg(ptr,x)                                                          \
  ({                                                                         \
     __typeof__(*(ptr)) _x_ = (x);                                           \
     (__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
  })
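
/*
 * Illustrative usage (not part of the original header; the names
 * shared_flag and FLAG_BUSY are hypothetical).  xchg() atomically
 * stores the new value and hands back the previous one, cast back to
 * the pointed-to type:
 *
 *      unsigned int old = xchg(&shared_flag, FLAG_BUSY);
 *      if (old != FLAG_BUSY)
 *              ...   // this caller is the one that claimed the flag
 */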

#define xchg_local(ptr,x)                                                    \
  ({                                                                         \
     __typeof__(*(ptr)) _x_ = (x);                                           \
     (__typeof__(*(ptr))) __xchg_local((ptr),                                \
             (unsigned long)_x_, sizeof(*(ptr)));                            \
  })
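
/*
 * Editorial note: xchg_local() routes to the barrier-free
 * __xchg_*_local() helpers.  By kernel convention the _local variants
 * are for data that is not used to synchronize between CPUs (e.g.
 * per-CPU statistics), where the cost of the barriers can be saved.
 */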

/*
 * Compare and exchange - if *p == old, set it to new,
 * and return the old value of *p.
 */
#define __HAVE_ARCH_CMPXCHG 1

static __always_inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
{
        unsigned int prev;

        __asm__ __volatile__ (
        PPC_RELEASE_BARRIER
"1:     lwarx   %0,0,%2         # __cmpxchg_u32\n\
        cmpw    0,%0,%3\n\
        bne-    2f\n"
        PPC405_ERR77(0,%2)
"       stwcx.  %4,0,%2\n\
        bne-    1b"
        PPC_ACQUIRE_BARRIER
        "\n\
2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc", "memory");

        return prev;
}

static __always_inline unsigned long
__cmpxchg_u32_local(volatile unsigned int *p, unsigned long old,
                    unsigned long new)
{
        unsigned int prev;

        __asm__ __volatile__ (
"1:     lwarx   %0,0,%2         # __cmpxchg_u32_local\n\
        cmpw    0,%0,%3\n\
        bne-    2f\n"
        PPC405_ERR77(0,%2)
"       stwcx.  %4,0,%2\n\
        bne-    1b"
        "\n\
2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc", "memory");

        return prev;
}

#ifdef CONFIG_PPC64
static __always_inline unsigned long
__cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
{
        unsigned long prev;

        __asm__ __volatile__ (
        PPC_RELEASE_BARRIER
"1:     ldarx   %0,0,%2         # __cmpxchg_u64\n\
        cmpd    0,%0,%3\n\
        bne-    2f\n\
        stdcx.  %4,0,%2\n\
        bne-    1b"
        PPC_ACQUIRE_BARRIER
        "\n\
2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc", "memory");

        return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_local(volatile unsigned long *p, unsigned long old,
                    unsigned long new)
{
        unsigned long prev;

        __asm__ __volatile__ (
"1:     ldarx   %0,0,%2         # __cmpxchg_u64_local\n\
        cmpd    0,%0,%3\n\
        bne-    2f\n\
        stdcx.  %4,0,%2\n\
        bne-    1b"
        "\n\
2:"
        : "=&r" (prev), "+m" (*p)
        : "r" (p), "r" (old), "r" (new)
        : "cc", "memory");

        return prev;
}
#endif

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
          unsigned int size)
{
        switch (size) {
        case 4:
                return __cmpxchg_u32(ptr, old, new);
#ifdef CONFIG_PPC64
        case 8:
                return __cmpxchg_u64(ptr, old, new);
#endif
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}

static __always_inline unsigned long
__cmpxchg_local(volatile void *ptr, unsigned long old, unsigned long new,
                unsigned int size)
{
        switch (size) {
        case 4:
                return __cmpxchg_u32_local(ptr, old, new);
#ifdef CONFIG_PPC64
        case 8:
                return __cmpxchg_u64_local(ptr, old, new);
#endif
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}

#define cmpxchg(ptr, o, n)                                               \
  ({                                                                     \
     __typeof__(*(ptr)) _o_ = (o);                                       \
     __typeof__(*(ptr)) _n_ = (n);                                       \
     (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,           \
                                    (unsigned long)_n_, sizeof(*(ptr))); \
  })
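
/*
 * Illustrative usage (not part of the original header; 'counter' is a
 * hypothetical shared unsigned long).  cmpxchg() is the building block
 * of the classic lock-free read-modify-write loop: reread and retry
 * until no other CPU changed the value underneath us:
 *
 *      unsigned long old, new;
 *      do {
 *              old = counter;
 *              new = old + 1;
 *      } while (cmpxchg(&counter, old, new) != old);
 */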

#define cmpxchg_local(ptr, o, n)                                         \
  ({                                                                     \
     __typeof__(*(ptr)) _o_ = (o);                                       \
     __typeof__(*(ptr)) _n_ = (n);                                       \
     (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_,     \
                                    (unsigned long)_n_, sizeof(*(ptr))); \
  })

#ifdef CONFIG_PPC64
#define cmpxchg64(ptr, o, n)                                            \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg((ptr), (o), (n));                                       \
  })
#define cmpxchg64_local(ptr, o, n)                                      \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg_local((ptr), (o), (n));                                 \
  })
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif
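
/*
 * Editorial note: 32-bit PowerPC has no 64-bit larx/stcx. pair, so no
 * cmpxchg64() is provided there at all; only cmpxchg64_local() exists,
 * backed by asm-generic's __cmpxchg64_local_generic(), which (as I
 * read it) masks local interrupts around a plain load/compare/store.
 */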

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CMPXCHG_H_ */