/*
 * cmpxchg_64.h - x86-64 xchg/cmpxchg atomic primitives.
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

/*
 * xchg() - type-generic atomic exchange.
 *
 * Stores (v) into *(ptr) and evaluates to the previous contents, cast
 * back to the pointed-to type.  The operand width handed to __xchg()
 * is taken from sizeof(*(ptr)), so 1/2/4/8-byte objects all work.
 */
#define xchg(ptr, v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v), \
						 (ptr), sizeof(*(ptr))))

/*
 * Helper used inside asm constraints so that *ptr is referenced as a
 * volatile memory operand regardless of the pointer's declared type.
 */
#define __xg(x) ((volatile long *)(x))
  7. static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
  8. {
  9. *ptr = val;
  10. }
  11. #define _set_64bit set_64bit
/*
 * __xchg() - unconditionally swap @x with *@ptr, returning the old value.
 * @x:    value to store; also carries the previous contents back out
 * @ptr:  memory to exchange with
 * @size: operand width in bytes (1, 2, 4 or 8)
 *
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway
 * Note 2: xchg has side effect, so that attribute volatile is necessary,
 * but generally the primitive is invalid, *ptr is output argument. --ANK
 *
 * The %b0/%w0/%k0 modifiers select the byte/word/dword sub-register of
 * operand 0 to match the instruction suffix.  The memory operand is
 * listed as an input; the "memory" clobber is what tells the compiler
 * that *ptr (and memory in general) may have changed.
 *
 * An unsupported @size falls out of the switch and returns @x without
 * touching memory.
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	switch (size) {
	case 1:
		/* "q" restricts to a byte-addressable register */
		asm volatile("xchgb %b0,%1"
			     : "=q" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	case 2:
		asm volatile("xchgw %w0,%1"
			     : "=r" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	case 4:
		asm volatile("xchgl %k0,%1"
			     : "=r" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	case 8:
		asm volatile("xchgq %0,%1"
			     : "=r" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	}
	return x;
}
/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */
#define __HAVE_ARCH_CMPXCHG 1

/*
 * __cmpxchg() - SMP-safe compare-and-exchange of 1/2/4/8 bytes.
 * @ptr:  memory to operate on
 * @old:  expected current value of *@ptr
 * @new:  value to store if the comparison succeeds
 * @size: operand width in bytes (from sizeof(*(ptr)) at the call site)
 *
 * CMPXCHG implicitly compares against and loads into %rax, hence the
 * "=a"(prev) output tied to "0"(old).  LOCK_PREFIX makes the operation
 * atomic across CPUs.  The memory operand appears as an input; the
 * "memory" clobber informs the compiler of the possible store.
 *
 * An unsupported @size performs no access and simply returns @old.
 */
static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		asm volatile(LOCK_PREFIX "cmpxchgb %b1,%2"
			     : "=a"(prev)
			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 2:
		asm volatile(LOCK_PREFIX "cmpxchgw %w1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 4:
		asm volatile(LOCK_PREFIX "cmpxchgl %k1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 8:
		asm volatile(LOCK_PREFIX "cmpxchgq %1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	}
	return old;
}
/*
 * __cmpxchg_local() - compare-and-exchange without the LOCK prefix.
 *
 * Identical to __cmpxchg() except the instructions are not LOCKed, so
 * the operation is NOT atomic with respect to other CPUs.  It remains
 * atomic on the local CPU (cmpxchg is a single instruction and cannot
 * be split by an interrupt), which is what makes it suitable for
 * per-CPU data where the lock prefix would be wasted cost.
 *
 * An unsupported @size performs no access and simply returns @old.
 */
static inline unsigned long __cmpxchg_local(volatile void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		asm volatile("cmpxchgb %b1,%2"
			     : "=a"(prev)
			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 2:
		asm volatile("cmpxchgw %w1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 4:
		asm volatile("cmpxchgl %k1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 8:
		asm volatile("cmpxchgq %1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	}
	return old;
}
/*
 * cmpxchg() - type-generic SMP-safe compare-and-exchange.
 * Evaluates to the previous value of *(ptr); the store happened iff
 * that value compares equal to (o).
 */
#define cmpxchg(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o), \
				       (unsigned long)(n), sizeof(*(ptr))))

/*
 * cmpxchg64() - as cmpxchg(), but the operand must be exactly 8 bytes
 * wide (enforced at compile time).  On 64-bit this is plain cmpxchg().
 */
#define cmpxchg64(ptr, o, n)					\
({								\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);			\
	cmpxchg((ptr), (o), (n));				\
})

/*
 * cmpxchg_local() - non-LOCKed variant: atomic only with respect to
 * the local CPU, for data other CPUs never touch concurrently.
 */
#define cmpxchg_local(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
					     (unsigned long)(n), \
					     sizeof(*(ptr))))

/* 64-bit-only, compile-time width-checked form of cmpxchg_local(). */
#define cmpxchg64_local(ptr, o, n)				\
({								\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);			\
	cmpxchg_local((ptr), (o), (n));				\
})

#endif /* __ASM_CMPXCHG_H */