cmpxchg_64.h

#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

static inline void set_64bit(volatile u64 *ptr, u64 val)
{
	*ptr = val;
}

extern void __xchg_wrong_size(void);
extern void __cmpxchg_wrong_size(void);

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Since this is generally used to protect other memory information, we
 * use "asm volatile" and "memory" clobbers to prevent gcc from moving
 * information around.
 */
#define __xchg(x, ptr, size) \
({ \
	__typeof(*(ptr)) __x = (x); \
	switch (size) { \
	case 1: \
	{ \
		volatile u8 *__ptr = (volatile u8 *)(ptr); \
		asm volatile("xchgb %0,%1" \
			     : "=q" (__x), "+m" (*__ptr) \
			     : "0" (__x) \
			     : "memory"); \
		break; \
	} \
	case 2: \
	{ \
		volatile u16 *__ptr = (volatile u16 *)(ptr); \
		asm volatile("xchgw %0,%1" \
			     : "=r" (__x), "+m" (*__ptr) \
			     : "0" (__x) \
			     : "memory"); \
		break; \
	} \
	case 4: \
	{ \
		volatile u32 *__ptr = (volatile u32 *)(ptr); \
		asm volatile("xchgl %0,%1" \
			     : "=r" (__x), "+m" (*__ptr) \
			     : "0" (__x) \
			     : "memory"); \
		break; \
	} \
	case 8: \
	{ \
		volatile u64 *__ptr = (volatile u64 *)(ptr); \
		asm volatile("xchgq %0,%1" \
			     : "=r" (__x), "+m" (*__ptr) \
			     : "0" (__x) \
			     : "memory"); \
		break; \
	} \
	default: \
		__xchg_wrong_size(); \
	} \
	__x; \
})

#define xchg(ptr, v) \
	__xchg((v), (ptr), sizeof(*ptr))
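
/*
 * Illustrative only (not part of the original header): a sketch of how
 * xchg() is typically used, e.g. atomically handing off a pointer.  The
 * structure and field names below are hypothetical.
 *
 *	struct work *old;
 *
 *	old = xchg(&queue->pending, new_work);
 *	if (old)
 *		process(old);
 *
 * Because xchg on x86 is implicitly locked, the caller gets back the
 * previous value and a full memory barrier in a single step, which is
 * why the note above says no explicit "lock" prefix is needed.
 */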

#define __HAVE_ARCH_CMPXCHG 1

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock) \
({ \
	__typeof__(*(ptr)) __ret; \
	__typeof__(*(ptr)) __old = (old); \
	__typeof__(*(ptr)) __new = (new); \
	switch (size) { \
	case 1: \
	{ \
		volatile u8 *__ptr = (volatile u8 *)(ptr); \
		asm volatile(lock "cmpxchgb %2,%1" \
			     : "=a" (__ret), "+m" (*__ptr) \
			     : "q" (__new), "0" (__old) \
			     : "memory"); \
		break; \
	} \
	case 2: \
	{ \
		volatile u16 *__ptr = (volatile u16 *)(ptr); \
		asm volatile(lock "cmpxchgw %2,%1" \
			     : "=a" (__ret), "+m" (*__ptr) \
			     : "r" (__new), "0" (__old) \
			     : "memory"); \
		break; \
	} \
	case 4: \
	{ \
		volatile u32 *__ptr = (volatile u32 *)(ptr); \
		asm volatile(lock "cmpxchgl %2,%1" \
			     : "=a" (__ret), "+m" (*__ptr) \
			     : "r" (__new), "0" (__old) \
			     : "memory"); \
		break; \
	} \
	case 8: \
	{ \
		volatile u64 *__ptr = (volatile u64 *)(ptr); \
		asm volatile(lock "cmpxchgq %2,%1" \
			     : "=a" (__ret), "+m" (*__ptr) \
			     : "r" (__new), "0" (__old) \
			     : "memory"); \
		break; \
	} \
	default: \
		__cmpxchg_wrong_size(); \
	} \
	__ret; \
})

#define __cmpxchg(ptr, old, new, size) \
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

#define __sync_cmpxchg(ptr, old, new, size) \
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

#define __cmpxchg_local(ptr, old, new, size) \
	__raw_cmpxchg((ptr), (old), (new), (size), "")

#define cmpxchg(ptr, old, new) \
	__cmpxchg((ptr), (old), (new), sizeof(*ptr))

#define sync_cmpxchg(ptr, old, new) \
	__sync_cmpxchg((ptr), (old), (new), sizeof(*ptr))

#define cmpxchg_local(ptr, old, new) \
	__cmpxchg_local((ptr), (old), (new), sizeof(*ptr))
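
/*
 * Illustrative only (not part of the original header): the usual
 * compare-and-exchange retry loop, sketched here as a lock-free
 * increment of a hypothetical counter.  Success is detected exactly as
 * the comment above describes: the returned value equals the expected
 * old value.
 *
 *	u32 old, cur = counter;
 *
 *	do {
 *		old = cur;
 *		cur = cmpxchg(&counter, old, old + 1);
 *	} while (cur != old);
 *
 * cmpxchg_local() omits the lock prefix entirely, so it is only safe
 * against the local CPU (e.g. per-cpu data), while sync_cmpxchg() uses
 * a literal "lock" prefix even where LOCK_PREFIX would be patched out
 * on a non-SMP kernel.
 */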

#define cmpxchg64(ptr, o, n) \
({ \
	BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
	cmpxchg((ptr), (o), (n)); \
})

#define cmpxchg64_local(ptr, o, n) \
({ \
	BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
	cmpxchg_local((ptr), (o), (n)); \
})
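
/*
 * Illustrative only (not part of the original header): on 64-bit x86,
 * cmpxchg64() is simply cmpxchg() plus a compile-time check that the
 * target really is 8 bytes wide, so a 64-bit value (the variable below
 * is hypothetical) can be updated directly:
 *
 *	static u64 last_ns;
 *
 *	u64 prev = cmpxchg64(&last_ns, old_ns, new_ns);
 *
 * Passing a pointer to anything other than an 8-byte object trips the
 * BUILD_BUG_ON() and fails the build.
 */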

#endif /* _ASM_X86_CMPXCHG_64_H */