cmpxchg.h

/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/mmdebug.h>
#include <linux/types.h>
#include <linux/bug.h>

extern void __xchg_called_with_bad_pointer(void);

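/*
 * __xchg() unconditionally replaces the value at ptr with x and returns the
 * previous contents.  1- and 2-byte accesses are emulated with a 4-byte
 * compare-and-swap loop on the containing aligned word; 4-byte (and, on
 * 64 bit, 8-byte) accesses use a plain cs/csg retry loop on the value
 * itself.  An unsupported size calls __xchg_called_with_bad_pointer(),
 * which is intentionally left undefined so the mistake turns into a
 * link error.
 */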
static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
{
	unsigned long addr, old;
	int shift;

	switch (size) {
	case 1:
		addr = (unsigned long) ptr;
		shift = (3 ^ (addr & 3)) << 3;
		addr ^= addr & 3;
		asm volatile(
			"	l	%0,%4\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%4\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) addr)
			: "d" ((x & 0xff) << shift), "d" (~(0xff << shift)),
			  "Q" (*(int *) addr) : "memory", "cc", "0");
		return old >> shift;
	case 2:
		addr = (unsigned long) ptr;
		shift = (2 ^ (addr & 2)) << 3;
		addr ^= addr & 2;
		asm volatile(
			"	l	%0,%4\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%4\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) addr)
			: "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift)),
			  "Q" (*(int *) addr) : "memory", "cc", "0");
		return old >> shift;
	case 4:
		asm volatile(
			"	l	%0,%3\n"
			"0:	cs	%0,%2,%3\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) ptr)
			: "d" (x), "Q" (*(int *) ptr)
			: "memory", "cc");
		return old;
#ifdef CONFIG_64BIT
	case 8:
		asm volatile(
			"	lg	%0,%3\n"
			"0:	csg	%0,%2,%3\n"
			"	jl	0b\n"
			: "=&d" (old), "=m" (*(long *) ptr)
			: "d" (x), "Q" (*(long *) ptr)
			: "memory", "cc");
		return old;
#endif /* CONFIG_64BIT */
	}
	__xchg_called_with_bad_pointer();
	return x;
}

#define xchg(ptr, x)						\
({								\
	__typeof__(*(ptr)) __ret;				\
	__ret = (__typeof__(*(ptr)))				\
		__xchg((unsigned long)(x), (void *)(ptr), sizeof(*(ptr)));\
	__ret;							\
})

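/*
 * Minimal usage sketch (illustration only, not part of this header; the
 * names below are made up).  xchg() hands back whatever was stored before
 * the new value went in, so the previous state can still be acted upon:
 *
 *	static unsigned int irq_state;
 *	unsigned int old;
 *
 *	old = xchg(&irq_state, IRQ_STATE_DISABLED);
 *	if (old == IRQ_STATE_PENDING)
 *		handle_pending_work();
 */
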
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

#define __HAVE_ARCH_CMPXCHG

extern void __cmpxchg_called_with_bad_pointer(void);

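/*
 * As in __xchg(), 1- and 2-byte operands are emulated with a 4-byte cs
 * loop on the containing aligned word, leaving the neighbouring bytes
 * untouched and retrying only when one of those neighbouring bytes changed
 * underneath us.  4-byte (and, on 64 bit, 8-byte) operands use cs/csg
 * directly.  Unsupported sizes again become a link error via the undefined
 * helper above.
 */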
static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long addr, prev, tmp;
	int shift;

	switch (size) {
	case 1:
		addr = (unsigned long) ptr;
		shift = (3 ^ (addr & 3)) << 3;
		addr ^= addr & 3;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
			: "d" ((old & 0xff) << shift),
			  "d" ((new & 0xff) << shift),
			  "d" (~(0xff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 2:
		addr = (unsigned long) ptr;
		shift = (2 ^ (addr & 2)) << 3;
		addr ^= addr & 2;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
			: "d" ((old & 0xffff) << shift),
			  "d" ((new & 0xffff) << shift),
			  "d" (~(0xffff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 4:
		asm volatile(
			"	cs	%0,%3,%1\n"
			: "=&d" (prev), "=Q" (*(int *) ptr)
			: "0" (old), "d" (new), "Q" (*(int *) ptr)
			: "memory", "cc");
		return prev;
#ifdef CONFIG_64BIT
	case 8:
		asm volatile(
			"	csg	%0,%3,%1\n"
			: "=&d" (prev), "=Q" (*(long *) ptr)
			: "0" (old), "d" (new), "Q" (*(long *) ptr)
			: "memory", "cc");
		return prev;
#endif /* CONFIG_64BIT */
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) __ret;					\
	__ret = (__typeof__(*(ptr)))					\
		__cmpxchg((ptr), (unsigned long)(o), (unsigned long)(n),\
			  sizeof(*(ptr)));				\
	__ret;								\
})

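/*
 * Minimal usage sketch (illustration only, not part of this header; the
 * names below are made up).  As the comment above says, success is
 * detected by comparing the returned value with the expected old value:
 *
 *	static unsigned long owner;
 *
 *	if (cmpxchg(&owner, 0UL, my_id) == 0UL)
 *		... the slot was free and is now claimed by my_id ...
 */
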
#ifdef CONFIG_64BIT
#define cmpxchg64(ptr, o, n)						\
({									\
	cmpxchg((ptr), (o), (n));					\
})
#else /* CONFIG_64BIT */
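/*
 * On 31 bit there is no 64 bit compare-and-swap on a single register, so
 * the 8-byte case uses cds on an even/odd register pair: old and new are
 * placed in register_pair unions and, as with the other variants, the
 * value found in memory is what gets returned.
 */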
static inline unsigned long long __cmpxchg64(void *ptr,
					     unsigned long long old,
					     unsigned long long new)
{
	register_pair rp_old = {.pair = old};
	register_pair rp_new = {.pair = new};

	asm volatile(
		"	cds	%0,%2,%1"
		: "+&d" (rp_old), "=Q" (*(unsigned long long *) ptr)
		: "d" (rp_new), "Q" (*(unsigned long long *) ptr)
		: "memory", "cc");
	return rp_old.pair;
}

#define cmpxchg64(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) __ret;					\
	__ret = (__typeof__(*(ptr)))					\
		__cmpxchg64((ptr),					\
			    (unsigned long long)(o),			\
			    (unsigned long long)(n));			\
	__ret;								\
})
#endif /* CONFIG_64BIT */

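/*
 * The *_double variants compare and swap two adjacent words as one unit.
 * cds/cdsg want the old and new values in even/odd register pairs, hence
 * the explicit register assignments to r2/r3 and r4/r5 below; the
 * condition code is extracted with ipm/srl, so the expression evaluates
 * to 1 on success and 0 on failure.
 */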
#define __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, insn)		\
({									\
	register __typeof__(*(p1)) __old1 asm("2") = (o1);		\
	register __typeof__(*(p2)) __old2 asm("3") = (o2);		\
	register __typeof__(*(p1)) __new1 asm("4") = (n1);		\
	register __typeof__(*(p2)) __new2 asm("5") = (n2);		\
	int cc;								\
	asm volatile(							\
			insn   " %[old],%[new],%[ptr]\n"		\
		"	ipm	%[cc]\n"				\
		"	srl	%[cc],28"				\
		: [cc] "=d" (cc), [old] "+d" (__old1), "+d" (__old2)	\
		: [new] "d" (__new1), "d" (__new2),			\
		  [ptr] "Q" (*(p1)), "Q" (*(p2))			\
		: "memory", "cc");					\
	!cc;								\
})

#define __cmpxchg_double_4(p1, p2, o1, o2, n1, n2)			\
	__cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cds")

#define __cmpxchg_double_8(p1, p2, o1, o2, n1, n2)			\
	__cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cdsg")

extern void __cmpxchg_double_called_with_bad_pointer(void);

#define __cmpxchg_double(p1, p2, o1, o2, n1, n2)			\
({									\
	int __ret;							\
	switch (sizeof(*(p1))) {					\
	case 4:								\
		__ret = __cmpxchg_double_4(p1, p2, o1, o2, n1, n2);	\
		break;							\
	case 8:								\
		__ret = __cmpxchg_double_8(p1, p2, o1, o2, n1, n2);	\
		break;							\
	default:							\
		__cmpxchg_double_called_with_bad_pointer();		\
	}								\
	__ret;								\
})

#define cmpxchg_double(p1, p2, o1, o2, n1, n2)				\
({									\
	__typeof__(p1) __p1 = (p1);					\
	__typeof__(p2) __p2 = (p2);					\
	int __ret;							\
	BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));			\
	BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));			\
	VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\
	if (sizeof(long) == 4)						\
		__ret = __cmpxchg_double_4(__p1, __p2, o1, o2, n1, n2);	\
	else								\
		__ret = __cmpxchg_double_8(__p1, __p2, o1, o2, n1, n2);	\
	__ret;								\
})

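/*
 * Minimal usage sketch (illustration only, not part of this header; the
 * structure and names are made up).  Both words must be long sized and
 * adjacent in memory, e.g. two consecutive members of a struct:
 *
 *	struct { unsigned long first, second; } pair;
 *	unsigned long old1, old2;
 *
 *	old1 = pair.first;
 *	old2 = pair.second;
 *	if (cmpxchg_double(&pair.first, &pair.second,
 *			   old1, old2, old1 + 1, old2 + 1))
 *		... both words were updated atomically ...
 */
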
#define system_has_cmpxchg_double()	1

#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	switch (size) {
	case 1:
	case 2:
	case 4:
#ifdef CONFIG_64BIT
	case 8:
#endif
		return __cmpxchg(ptr, old, new, size);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}

	return old;
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) __ret;					\
	__ret = (__typeof__(*(ptr)))					\
		__cmpxchg_local((ptr), (unsigned long)(o),		\
				(unsigned long)(n), sizeof(*(ptr)));	\
	__ret;								\
})

#define cmpxchg64_local(ptr, o, n)	cmpxchg64((ptr), (o), (n))

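/*
 * Minimal usage sketch (illustration only, not part of this header; the
 * names are made up).  The _local variants are only guaranteed to be
 * atomic with respect to the current CPU, which is enough for data that
 * is only ever updated from the owning CPU, e.g. a local event counter:
 *
 *	unsigned long old, cnt;
 *
 *	do {
 *		old = cnt;
 *	} while (cmpxchg_local(&cnt, old, old + 1) != old);
 */
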
#endif /* __ASM_CMPXCHG_H */