cmpxchg.h

/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/types.h>

extern void __xchg_called_with_bad_pointer(void);

static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
{
        unsigned long addr, old;
        int shift;

        switch (size) {
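        /*
         * Sub-word sizes: the machine only provides word (CS) and
         * doubleword (CSG) compare-and-swap, so the 1- and 2-byte cases
         * operate on the aligned word that contains the target, shifting
         * and masking the new value into place and looping until the
         * compare-and-swap succeeds. General register 0 is used as a
         * scratch register, hence the "0" clobber.
         */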
        case 1:
                addr = (unsigned long) ptr;
                shift = (3 ^ (addr & 3)) << 3;
                addr ^= addr & 3;
                asm volatile(
                        " l %0,%4\n"
                        "0: lr 0,%0\n"
                        " nr 0,%3\n"
                        " or 0,%2\n"
                        " cs %0,0,%4\n"
                        " jl 0b\n"
                        : "=&d" (old), "=Q" (*(int *) addr)
                        : "d" ((x & 0xff) << shift), "d" (~(0xff << shift)),
                          "Q" (*(int *) addr) : "memory", "cc", "0");
                return old >> shift;
        case 2:
                addr = (unsigned long) ptr;
                shift = (2 ^ (addr & 2)) << 3;
                addr ^= addr & 2;
                asm volatile(
                        " l %0,%4\n"
                        "0: lr 0,%0\n"
                        " nr 0,%3\n"
                        " or 0,%2\n"
                        " cs %0,0,%4\n"
                        " jl 0b\n"
                        : "=&d" (old), "=Q" (*(int *) addr)
                        : "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift)),
                          "Q" (*(int *) addr) : "memory", "cc", "0");
                return old >> shift;
        case 4:
                asm volatile(
                        " l %0,%3\n"
                        "0: cs %0,%2,%3\n"
                        " jl 0b\n"
                        : "=&d" (old), "=Q" (*(int *) ptr)
                        : "d" (x), "Q" (*(int *) ptr)
                        : "memory", "cc");
                return old;
#ifdef CONFIG_64BIT
        case 8:
                asm volatile(
                        " lg %0,%3\n"
                        "0: csg %0,%2,%3\n"
                        " jl 0b\n"
                        : "=&d" (old), "=m" (*(long *) ptr)
                        : "d" (x), "Q" (*(long *) ptr)
                        : "memory", "cc");
                return old;
#endif /* CONFIG_64BIT */
        }
        __xchg_called_with_bad_pointer();
        return x;
}

#define xchg(ptr, x)                                                    \
({                                                                      \
        __typeof__(*(ptr)) __ret;                                       \
        __ret = (__typeof__(*(ptr)))                                    \
                __xchg((unsigned long)(x), (void *)(ptr), sizeof(*(ptr)));\
        __ret;                                                          \
})
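/*
 * Illustrative sketch only (the names below are invented for the example):
 * xchg() atomically replaces the value at ptr and returns the previous
 * contents, e.g. to claim a flag exactly once:
 *
 *      static unsigned int pending;
 *      ...
 *      if (xchg(&pending, 1) == 0)
 *              do_first_time_setup();  (hypothetical helper)
 *
 * Works for 1, 2, 4 and, on 64 bit, 8 byte objects, per __xchg() above.
 */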
/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */

#define __HAVE_ARCH_CMPXCHG

extern void __cmpxchg_called_with_bad_pointer(void);

static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
                                      unsigned long new, int size)
{
        unsigned long addr, prev, tmp;
        int shift;

        switch (size) {
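        /*
         * As in __xchg(), the 1- and 2-byte cases work on the aligned
         * word containing the target. The loop retries only when the
         * compare-and-swap failed because the bytes *around* the target
         * changed (checked with the xr/nr sequence after a failed cs);
         * a mismatch in the target byte itself means the caller's
         * comparison failed and the current value is returned.
         */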
        case 1:
                addr = (unsigned long) ptr;
                shift = (3 ^ (addr & 3)) << 3;
                addr ^= addr & 3;
                asm volatile(
                        " l %0,%2\n"
                        "0: nr %0,%5\n"
                        " lr %1,%0\n"
                        " or %0,%3\n"
                        " or %1,%4\n"
                        " cs %0,%1,%2\n"
                        " jnl 1f\n"
                        " xr %1,%0\n"
                        " nr %1,%5\n"
                        " jnz 0b\n"
                        "1:"
                        : "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
                        : "d" ((old & 0xff) << shift),
                          "d" ((new & 0xff) << shift),
                          "d" (~(0xff << shift))
                        : "memory", "cc");
                return prev >> shift;
        case 2:
                addr = (unsigned long) ptr;
                shift = (2 ^ (addr & 2)) << 3;
                addr ^= addr & 2;
                asm volatile(
                        " l %0,%2\n"
                        "0: nr %0,%5\n"
                        " lr %1,%0\n"
                        " or %0,%3\n"
                        " or %1,%4\n"
                        " cs %0,%1,%2\n"
                        " jnl 1f\n"
                        " xr %1,%0\n"
                        " nr %1,%5\n"
                        " jnz 0b\n"
                        "1:"
                        : "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
                        : "d" ((old & 0xffff) << shift),
                          "d" ((new & 0xffff) << shift),
                          "d" (~(0xffff << shift))
                        : "memory", "cc");
                return prev >> shift;
        case 4:
                asm volatile(
                        " cs %0,%3,%1\n"
                        : "=&d" (prev), "=Q" (*(int *) ptr)
                        : "0" (old), "d" (new), "Q" (*(int *) ptr)
                        : "memory", "cc");
                return prev;
#ifdef CONFIG_64BIT
        case 8:
                asm volatile(
                        " csg %0,%3,%1\n"
                        : "=&d" (prev), "=Q" (*(long *) ptr)
                        : "0" (old), "d" (new), "Q" (*(long *) ptr)
                        : "memory", "cc");
                return prev;
#endif /* CONFIG_64BIT */
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}

#define cmpxchg(ptr, o, n)                                              \
({                                                                      \
        __typeof__(*(ptr)) __ret;                                       \
        __ret = (__typeof__(*(ptr)))                                    \
                __cmpxchg((ptr), (unsigned long)(o), (unsigned long)(n), \
                          sizeof(*(ptr)));                              \
        __ret;                                                          \
})
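/*
 * Illustrative sketch only (names invented for the example): success is
 * detected by comparing the returned value with the expected old value,
 * typically in a retry loop:
 *
 *      unsigned int old, new;
 *
 *      do {
 *              old = counter;
 *              new = old + 1;
 *      } while (cmpxchg(&counter, old, new) != old);
 */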
#ifdef CONFIG_64BIT
#define cmpxchg64(ptr, o, n)                                            \
({                                                                      \
        cmpxchg((ptr), (o), (n));                                       \
})
#else /* CONFIG_64BIT */
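/*
 * On 31 bit, a 64 bit compare and exchange is provided via the CDS
 * (compare double and swap) instruction, which operates on an even/odd
 * register pair; register_pair maps the 64 bit values onto such a pair.
 */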
static inline unsigned long long __cmpxchg64(void *ptr,
                                             unsigned long long old,
                                             unsigned long long new)
{
        register_pair rp_old = {.pair = old};
        register_pair rp_new = {.pair = new};

        asm volatile(
                " cds %0,%2,%1"
                : "+&d" (rp_old), "+Q" (*(unsigned long long *) ptr)
                : "d" (rp_new)
                : "memory", "cc");
        return rp_old.pair;
}

#define cmpxchg64(ptr, o, n)                                            \
({                                                                      \
        __typeof__(*(ptr)) __ret;                                       \
        __ret = (__typeof__(*(ptr)))                                    \
                __cmpxchg64((ptr),                                      \
                            (unsigned long long)(o),                    \
                            (unsigned long long)(n));                   \
        __ret;                                                          \
})
#endif /* CONFIG_64BIT */
#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(void *ptr,
                                            unsigned long old,
                                            unsigned long new, int size)
{
        switch (size) {
        case 1:
        case 2:
        case 4:
#ifdef CONFIG_64BIT
        case 8:
#endif
                return __cmpxchg(ptr, old, new, size);
        default:
                return __cmpxchg_local_generic(ptr, old, new, size);
        }

        return old;
}
/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)                                        \
({                                                                      \
        __typeof__(*(ptr)) __ret;                                       \
        __ret = (__typeof__(*(ptr)))                                    \
                __cmpxchg_local((ptr), (unsigned long)(o),              \
                                (unsigned long)(n), sizeof(*(ptr)));    \
        __ret;                                                          \
})
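/*
 * Illustrative sketch only (names invented for the example): the local
 * variants follow the same calling convention as cmpxchg(), but are only
 * guaranteed to be atomic with respect to the current CPU, e.g. for
 * per-CPU state:
 *
 *      if (cmpxchg_local(&pcp->state, IDLE, BUSY) == IDLE)
 *              start_work(pcp);        (hypothetical helper)
 */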
#define cmpxchg64_local(ptr, o, n)      cmpxchg64((ptr), (o), (n))

#endif /* __ASM_CMPXCHG_H */