cmpxchg_64.h

#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

#define __xg(x) ((volatile long *)(x))
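
/*
 * On x86-64 a naturally aligned 64-bit store is atomic, so a plain
 * assignment suffices here; no cmpxchg loop is needed as on 32-bit.
 */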
static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
{
	*ptr = val;
}

#define _set_64bit set_64bit

extern void __xchg_wrong_size(void);
extern void __cmpxchg_wrong_size(void);

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway
 * Note 2: xchg has side effect, so that attribute volatile is necessary,
 *	   but generally the primitive is invalid, *ptr is output argument. --ANK
 */
#define __xchg(x, ptr, size)						\
({									\
	__typeof(*(ptr)) __x = (x);					\
	switch (size) {							\
	case 1:								\
		asm volatile("xchgb %b0,%1"				\
			     : "=q" (__x)				\
			     : "m" (*__xg(ptr)), "0" (__x)		\
			     : "memory");				\
		break;							\
	case 2:								\
		asm volatile("xchgw %w0,%1"				\
			     : "=r" (__x)				\
			     : "m" (*__xg(ptr)), "0" (__x)		\
			     : "memory");				\
		break;							\
	case 4:								\
		asm volatile("xchgl %k0,%1"				\
			     : "=r" (__x)				\
			     : "m" (*__xg(ptr)), "0" (__x)		\
			     : "memory");				\
		break;							\
	case 8:								\
		asm volatile("xchgq %0,%1"				\
			     : "=r" (__x)				\
			     : "m" (*__xg(ptr)), "0" (__x)		\
			     : "memory");				\
		break;							\
	default:							\
		__xchg_wrong_size();					\
	}								\
	__x;								\
})
#define xchg(ptr, v)							\
	__xchg((v), (ptr), sizeof(*(ptr)))
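
/*
 * Usage sketch (illustrative only, not part of the original header):
 * xchg() atomically stores a new value and returns the previous
 * contents, e.g. to publish a pointer and reclaim the old object.
 * The names "current_buf" and "new_buf" below are hypothetical.
 *
 *	struct buffer *old_buf;
 *
 *	old_buf = xchg(&current_buf, new_buf);
 *
 * old_buf now holds the value that was swapped out; the load and the
 * store happen as one atomic operation, with no explicit lock prefix
 * needed because xchg with a memory operand always locks the bus.
 */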
#define __HAVE_ARCH_CMPXCHG 1

/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)			\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	switch (size) {							\
	case 1:								\
		asm volatile(lock "cmpxchgb %b1,%2"			\
			     : "=a"(__ret)				\
			     : "q"(__new), "m"(*__xg(ptr)), "0"(__old)	\
			     : "memory");				\
		break;							\
	case 2:								\
		asm volatile(lock "cmpxchgw %w1,%2"			\
			     : "=a"(__ret)				\
			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old)	\
			     : "memory");				\
		break;							\
	case 4:								\
		asm volatile(lock "cmpxchgl %k1,%2"			\
			     : "=a"(__ret)				\
			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old)	\
			     : "memory");				\
		break;							\
	case 8:								\
		asm volatile(lock "cmpxchgq %1,%2"			\
			     : "=a"(__ret)				\
			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old)	\
			     : "memory");				\
		break;							\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	__ret;								\
})

#define __cmpxchg(ptr, old, new, size)					\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

#define __sync_cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

#define __cmpxchg_local(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "")
#define cmpxchg(ptr, old, new)						\
	__cmpxchg((ptr), (old), (new), sizeof(*(ptr)))

#define sync_cmpxchg(ptr, old, new)					\
	__sync_cmpxchg((ptr), (old), (new), sizeof(*(ptr)))

#define cmpxchg_local(ptr, old, new)					\
	__cmpxchg_local((ptr), (old), (new), sizeof(*(ptr)))
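
/*
 * Usage sketch (illustrative only, not part of the original header):
 * the classic lock-free read-modify-write loop built on cmpxchg().
 * As the comment above says, success is detected by comparing the
 * returned value with the old value that was passed in.  "counter"
 * stands for a hypothetical shared unsigned long.
 *
 *	unsigned long old, new;
 *
 *	do {
 *		old = counter;
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 */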
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})

#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
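
/*
 * Usage sketch (illustrative only, not part of the original header):
 * on 64-bit kernels cmpxchg() already handles 8-byte operands, so
 * cmpxchg64() only adds a compile-time check that the operand really
 * is 64 bits wide.  "seq" stands for a hypothetical shared u64.
 *
 *	u64 old = seq;
 *	u64 prev = cmpxchg64(&seq, old, old + 1);
 *
 * The increment took effect if and only if prev == old.
 */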

#endif /* _ASM_X86_CMPXCHG_64_H */