cmpxchg_64.h

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

#define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))

#define __xg(x) ((volatile long *)(x))
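
/*
 * On x86-64 an aligned 64-bit store is itself atomic, so set_64bit()
 * below can be a plain assignment; no special instruction sequence is
 * needed.
 */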
static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
{
	*ptr = val;
}

#define _set_64bit set_64bit
/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway
 * Note 2: xchg has side effect, so that attribute volatile is necessary,
 *	   but generally the primitive is invalid, *ptr is output argument. --ANK
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__("xchgb %b0,%1"
			:"=q" (x)
			:"m" (*__xg(ptr)), "0" (x)
			:"memory");
		break;
	case 2:
		__asm__ __volatile__("xchgw %w0,%1"
			:"=r" (x)
			:"m" (*__xg(ptr)), "0" (x)
			:"memory");
		break;
	case 4:
		__asm__ __volatile__("xchgl %k0,%1"
			:"=r" (x)
			:"m" (*__xg(ptr)), "0" (x)
			:"memory");
		break;
	case 8:
		__asm__ __volatile__("xchgq %0,%1"
			:"=r" (x)
			:"m" (*__xg(ptr)), "0" (x)
			:"memory");
		break;
	}
	return x;
}
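
/*
 * Usage sketch (illustrative; pending_work and process() are hypothetical
 * names): xchg() atomically swaps a new value into *ptr and returns the
 * previous contents, so a caller can "take" a pending value in a single
 * step without a separate read.
 *
 *	static unsigned long pending_work;
 *
 *	static void consume_pending(void)
 *	{
 *		unsigned long work = xchg(&pending_work, 0UL);
 *
 *		if (work)
 *			process(work);
 *	}
 */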
/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */
#define __HAVE_ARCH_CMPXCHG 1

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long prev;

	switch (size) {
	case 1:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgb %b1,%2"
				     : "=a"(prev)
				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 2:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgw %w1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 4:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgl %k1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 8:
		__asm__ __volatile__(LOCK_PREFIX "cmpxchgq %1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	}
	return old;
}
static inline unsigned long __cmpxchg_local(volatile void *ptr,
					    unsigned long old, unsigned long new, int size)
{
	unsigned long prev;

	switch (size) {
	case 1:
		__asm__ __volatile__("cmpxchgb %b1,%2"
				     : "=a"(prev)
				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 2:
		__asm__ __volatile__("cmpxchgw %w1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 4:
		__asm__ __volatile__("cmpxchgl %k1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	case 8:
		__asm__ __volatile__("cmpxchgq %1,%2"
				     : "=a"(prev)
				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
				     : "memory");
		return prev;
	}
	return old;
}
#define cmpxchg(ptr,o,n)\
	((__typeof__(*(ptr)))__cmpxchg((ptr),(unsigned long)(o),\
				       (unsigned long)(n),sizeof(*(ptr))))

#define cmpxchg_local(ptr,o,n)\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr),(unsigned long)(o),\
					     (unsigned long)(n),sizeof(*(ptr))))
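
/*
 * Usage sketch (illustrative; the counter name is hypothetical): as the
 * comment above __cmpxchg() says, success is detected by comparing the
 * returned value with the expected old value, which gives the usual
 * compare-and-swap retry loop.
 *
 *	static unsigned long counter;
 *
 *	static void counter_add(unsigned long delta)
 *	{
 *		unsigned long old, prev;
 *
 *		do {
 *			old = counter;
 *			prev = cmpxchg(&counter, old, old + delta);
 *		} while (prev != old);
 *	}
 *
 * cmpxchg_local() has the same interface but omits the lock prefix, so it
 * is only safe when no other CPU can modify the location concurrently
 * (typically per-CPU data).
 */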
#endif