cmpxchg.h

#ifndef ASM_X86_CMPXCHG_H
#define ASM_X86_CMPXCHG_H

#include <linux/compiler.h>
#include <asm/alternative.h> /* Provides LOCK_PREFIX */

/*
 * Non-existent functions to indicate usage errors at link time
 * (or compile-time if the compiler implements __compiletime_error()).
 */
extern void __xchg_wrong_size(void)
        __compiletime_error("Bad argument size for xchg");
extern void __cmpxchg_wrong_size(void)
        __compiletime_error("Bad argument size for cmpxchg");
extern void __xadd_wrong_size(void)
        __compiletime_error("Bad argument size for xadd");

/*
 * Constants for operation sizes. On 32-bit, the 64-bit size is set to
 * -1 because sizeof will never return -1, thereby making those switch
 * case statements guaranteed dead code which the compiler will
 * eliminate, and allowing the "missing symbol in the default case" to
 * indicate a usage error.
 */
#define __X86_CASE_B 1
#define __X86_CASE_W 2
#define __X86_CASE_L 4
#ifdef CONFIG_64BIT
#define __X86_CASE_Q 8
#else
#define __X86_CASE_Q -1 /* sizeof will never return -1 */
#endif
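
/*
 * Sketch of the error path described above (hypothetical variable
 * name): on a 32-bit build a u64 operand has sizeof() == 8, which
 * matches no case label because __X86_CASE_Q is -1 there, so only the
 * default case survives and the leftover reference to the relevant
 * *_wrong_size() function breaks the build at compile or link time.
 *
 *      u64 val;
 *      xchg(&val, 1ULL);       becomes a build error on CONFIG_X86_32
 */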

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Since this is generally used to protect other memory information, we
 * use "asm volatile" and "memory" clobbers to prevent gcc from moving
 * information around.
 */
#define __xchg(x, ptr, size) \
({ \
        __typeof(*(ptr)) __x = (x); \
        switch (size) { \
        case __X86_CASE_B: \
        { \
                volatile u8 *__ptr = (volatile u8 *)(ptr); \
                asm volatile("xchgb %0,%1" \
                             : "=q" (__x), "+m" (*__ptr) \
                             : "0" (__x) \
                             : "memory"); \
                break; \
        } \
        case __X86_CASE_W: \
        { \
                volatile u16 *__ptr = (volatile u16 *)(ptr); \
                asm volatile("xchgw %0,%1" \
                             : "=r" (__x), "+m" (*__ptr) \
                             : "0" (__x) \
                             : "memory"); \
                break; \
        } \
        case __X86_CASE_L: \
        { \
                volatile u32 *__ptr = (volatile u32 *)(ptr); \
                asm volatile("xchgl %0,%1" \
                             : "=r" (__x), "+m" (*__ptr) \
                             : "0" (__x) \
                             : "memory"); \
                break; \
        } \
        case __X86_CASE_Q: \
        { \
                volatile u64 *__ptr = (volatile u64 *)(ptr); \
                asm volatile("xchgq %0,%1" \
                             : "=r" (__x), "+m" (*__ptr) \
                             : "0" (__x) \
                             : "memory"); \
                break; \
        } \
        default: \
                __xchg_wrong_size(); \
        } \
        __x; \
})

#define xchg(ptr, v) \
        __xchg((v), (ptr), sizeof(*ptr))
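
/*
 * Usage sketch (hypothetical variable names): xchg() stores the new
 * value and evaluates to whatever was in memory before, so the caller
 * can tell which CPU performed a given transition.  As noted above, no
 * explicit LOCK prefix is needed; xchg is always locked.
 *
 *      unsigned long old, busy = 0;
 *      ...
 *      old = xchg(&busy, 1);
 *      if (old == 0)
 *              this caller owns the 0 -> 1 transition
 */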

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock) \
({ \
        __typeof__(*(ptr)) __ret; \
        __typeof__(*(ptr)) __old = (old); \
        __typeof__(*(ptr)) __new = (new); \
        switch (size) { \
        case __X86_CASE_B: \
        { \
                volatile u8 *__ptr = (volatile u8 *)(ptr); \
                asm volatile(lock "cmpxchgb %2,%1" \
                             : "=a" (__ret), "+m" (*__ptr) \
                             : "q" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        } \
        case __X86_CASE_W: \
        { \
                volatile u16 *__ptr = (volatile u16 *)(ptr); \
                asm volatile(lock "cmpxchgw %2,%1" \
                             : "=a" (__ret), "+m" (*__ptr) \
                             : "r" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        } \
        case __X86_CASE_L: \
        { \
                volatile u32 *__ptr = (volatile u32 *)(ptr); \
                asm volatile(lock "cmpxchgl %2,%1" \
                             : "=a" (__ret), "+m" (*__ptr) \
                             : "r" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        } \
        case __X86_CASE_Q: \
        { \
                volatile u64 *__ptr = (volatile u64 *)(ptr); \
                asm volatile(lock "cmpxchgq %2,%1" \
                             : "=a" (__ret), "+m" (*__ptr) \
                             : "r" (__new), "0" (__old) \
                             : "memory"); \
                break; \
        } \
        default: \
                __cmpxchg_wrong_size(); \
        } \
        __ret; \
})
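
/*
 * Locking variants, mirroring the xadd() notes near the end of this
 * file:
 *
 * __cmpxchg()       is locked when multiple CPUs are online (LOCK_PREFIX)
 * __sync_cmpxchg()  is always locked
 * __cmpxchg_local() is never locked
 */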
#define __cmpxchg(ptr, old, new, size) \
        __raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

#define __sync_cmpxchg(ptr, old, new, size) \
        __raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

#define __cmpxchg_local(ptr, old, new, size) \
        __raw_cmpxchg((ptr), (old), (new), (size), "")

#ifdef CONFIG_X86_32
# include "cmpxchg_32.h"
#else
# include "cmpxchg_64.h"
#endif

#ifdef __HAVE_ARCH_CMPXCHG
#define cmpxchg(ptr, old, new) \
        __cmpxchg((ptr), (old), (new), sizeof(*ptr))

#define sync_cmpxchg(ptr, old, new) \
        __sync_cmpxchg((ptr), (old), (new), sizeof(*ptr))

#define cmpxchg_local(ptr, old, new) \
        __cmpxchg_local((ptr), (old), (new), sizeof(*ptr))
#endif
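
/*
 * Usage sketch (hypothetical variable names): success is detected by
 * comparing the returned value with the expected old value, as the
 * comment above __raw_cmpxchg() describes.
 *
 *      old = counter;
 *      prev = cmpxchg(&counter, old, old + 1);
 *      if (prev == old)
 *              the new value was installed atomically
 *      else
 *              another CPU changed counter first; retry using prev
 */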

#define __xadd(ptr, inc, lock) \
({ \
        __typeof__ (*(ptr)) __ret = (inc); \
        switch (sizeof(*(ptr))) { \
        case __X86_CASE_B: \
                asm volatile (lock "xaddb %b0, %1\n" \
                              : "+r" (__ret), "+m" (*(ptr)) \
                              : : "memory", "cc"); \
                break; \
        case __X86_CASE_W: \
                asm volatile (lock "xaddw %w0, %1\n" \
                              : "+r" (__ret), "+m" (*(ptr)) \
                              : : "memory", "cc"); \
                break; \
        case __X86_CASE_L: \
                asm volatile (lock "xaddl %0, %1\n" \
                              : "+r" (__ret), "+m" (*(ptr)) \
                              : : "memory", "cc"); \
                break; \
        case __X86_CASE_Q: \
                asm volatile (lock "xaddq %q0, %1\n" \
                              : "+r" (__ret), "+m" (*(ptr)) \
                              : : "memory", "cc"); \
                break; \
        default: \
                __xadd_wrong_size(); \
        } \
        __ret; \
})

/*
 * xadd() adds "inc" to "*ptr" and atomically returns the previous
 * value of "*ptr".
 *
 * xadd() is locked when multiple CPUs are online
 * xadd_sync() is always locked
 * xadd_local() is never locked
 */
#define xadd(ptr, inc)          __xadd((ptr), (inc), LOCK_PREFIX)
#define xadd_sync(ptr, inc)     __xadd((ptr), (inc), "lock; ")
#define xadd_local(ptr, inc)    __xadd((ptr), (inc), "")
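
/*
 * Usage sketch (hypothetical variable names): hand out ticket numbers
 * by atomically post-incrementing a counter; xadd() returns the value
 * that was there before the add.
 *
 *      unsigned int ticket = xadd(&next_ticket, 1);
 */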

#endif /* ASM_X86_CMPXCHG_H */