/* cmpxchg.h — m32r xchg()/cmpxchg() primitives */
  1. #ifndef _ASM_M32R_CMPXCHG_H
  2. #define _ASM_M32R_CMPXCHG_H
  3. /*
  4. * M32R version:
  5. * Copyright (C) 2001, 2002 Hitoshi Yamamoto
  6. * Copyright (C) 2004 Hirokazu Takata <takata at linux-m32r.org>
  7. */
  8. #include <linux/irqflags.h>
  9. #include <asm/assembler.h>
  10. #include <asm/dcache_clear.h>
/* Never defined; referencing it produces a link-time error for bad sizes. */
extern void __xchg_called_with_bad_pointer(void);

/*
 * __xchg - exchange the value at @ptr with @x, returning the old value.
 * @x:    new value to store
 * @ptr:  address of a 1-, 2- or 4-byte object
 * @size: sizeof(*ptr), supplied by the xchg() macro below
 *
 * On UP builds, atomicity against this CPU comes from disabling local
 * interrupts around the load/store pair.  On SMP only the 4-byte case
 * is provided; it uses the m32r "lock"/"unlock" instruction pair,
 * preceded by DCACHE_CLEAR (which clobbers r4 on CONFIG_CHIP_M32700_TS1
 * parts, hence the extra clobber entry).
 *
 * An unsupported @size falls through to the link-error stub above.
 */
static __always_inline unsigned long
__xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long flags;
	unsigned long tmp = 0;

	local_irq_save(flags);

	switch (size) {
#ifndef CONFIG_SMP
	case 1:
		__asm__ __volatile__ (
			"ldb %0, @%2 \n\t"
			"stb %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 2:
		__asm__ __volatile__ (
			"ldh %0, @%2 \n\t"
			"sth %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 4:
		__asm__ __volatile__ (
			"ld %0, @%2 \n\t"
			"st %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
#else /* CONFIG_SMP */
	case 4:
		__asm__ __volatile__ (
			DCACHE_CLEAR("%0", "r4", "%2")
			"lock %0, @%2; \n\t"
			"unlock %1, @%2; \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr)
			: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
			, "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
		);
		break;
#endif /* CONFIG_SMP */
	default:
		__xchg_called_with_bad_pointer();
	}

	local_irq_restore(flags);

	return (tmp);
}

#define xchg(ptr, x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))
/*
 * __xchg_local - like __xchg(), but only atomic w.r.t. the current CPU.
 * @x:    new value to store
 * @ptr:  address of a 1-, 2- or 4-byte object
 * @size: sizeof(*ptr), supplied by the xchg_local() macro below
 *
 * Uses the plain (non-locked) load/store sequences for all sizes;
 * atomicity against local interrupt handlers is provided by the
 * local_irq_save/restore bracket.  Unsupported sizes fall through
 * to the __xchg_called_with_bad_pointer() link-error stub.
 */
static __always_inline unsigned long
__xchg_local(unsigned long x, volatile void *ptr, int size)
{
	unsigned long flags;
	unsigned long tmp = 0;

	local_irq_save(flags);

	switch (size) {
	case 1:
		__asm__ __volatile__ (
			"ldb %0, @%2 \n\t"
			"stb %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 2:
		__asm__ __volatile__ (
			"ldh %0, @%2 \n\t"
			"sth %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 4:
		__asm__ __volatile__ (
			"ld %0, @%2 \n\t"
			"st %1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	default:
		__xchg_called_with_bad_pointer();
	}

	local_irq_restore(flags);

	return (tmp);
}

#define xchg_local(ptr, x) \
	((__typeof__(*(ptr)))__xchg_local((unsigned long)(x), (ptr), \
			sizeof(*(ptr))))
#define __HAVE_ARCH_CMPXCHG 1

/*
 * __cmpxchg_u32 - 32-bit compare-and-swap.
 * @p:   address of the word
 * @old: value expected at @p
 * @new: value to store iff *@p == @old
 *
 * Returns the value read from @p; the swap took place iff that value
 * equals @old.  Runs with local interrupts disabled and uses the
 * M32R_LOCK/M32R_UNLOCK instruction macros; note that BOTH branches
 * issue an unlock (the failure path stores back the value just read)
 * so the locked line is always released.  Clobbers the condition bit
 * ("cbit", set by bne) and, on CONFIG_CHIP_M32700_TS1 parts, r4 via
 * DCACHE_CLEAR.
 */
static inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned int old, unsigned int new)
{
	unsigned long flags;
	unsigned int retval;

	local_irq_save(flags);
	__asm__ __volatile__ (
			DCACHE_CLEAR("%0", "r4", "%1")
			M32R_LOCK" %0, @%1; \n"
		" bne %0, %2, 1f; \n"
			M32R_UNLOCK" %3, @%1; \n"
		" bra 2f; \n"
		" .fillinsn \n"
		"1:"
			M32R_UNLOCK" %0, @%1; \n"
		" .fillinsn \n"
		"2:"
			: "=&r" (retval)
			: "r" (p), "r" (old), "r" (new)
			: "cbit", "memory"
#ifdef CONFIG_CHIP_M32700_TS1
			, "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
		);
	local_irq_restore(flags);

	return retval;
}
/*
 * __cmpxchg_local_u32 - 32-bit compare-and-swap, atomic only w.r.t.
 * the current CPU.
 * @p:   address of the word
 * @old: value expected at @p
 * @new: value to store iff *@p == @old
 *
 * Same structure as __cmpxchg_u32() but with plain ld/st instead of
 * the lock/unlock pair; the local_irq_save/restore bracket is what
 * makes it atomic against local interrupt handlers.  Returns the value
 * read from @p (swap happened iff it equals @old).  Clobbers "cbit"
 * and, on CONFIG_CHIP_M32700_TS1 parts, r4 via DCACHE_CLEAR.
 */
static inline unsigned long
__cmpxchg_local_u32(volatile unsigned int *p, unsigned int old,
			unsigned int new)
{
	unsigned long flags;
	unsigned int retval;

	local_irq_save(flags);
	__asm__ __volatile__ (
			DCACHE_CLEAR("%0", "r4", "%1")
			"ld %0, @%1; \n"
		" bne %0, %2, 1f; \n"
			"st %3, @%1; \n"
		" bra 2f; \n"
		" .fillinsn \n"
		"1:"
			"st %0, @%1; \n"
		" .fillinsn \n"
		"2:"
			: "=&r" (retval)
			: "r" (p), "r" (old), "r" (new)
			: "cbit", "memory"
#ifdef CONFIG_CHIP_M32700_TS1
			, "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
		);
	local_irq_restore(flags);

	return retval;
}
  150. /* This function doesn't exist, so you'll get a linker error
  151. if something tries to do an invalid cmpxchg(). */
  152. extern void __cmpxchg_called_with_bad_pointer(void);
  153. static inline unsigned long
  154. __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
  155. {
  156. switch (size) {
  157. case 4:
  158. return __cmpxchg_u32(ptr, old, new);
  159. #if 0 /* we don't have __cmpxchg_u64 */
  160. case 8:
  161. return __cmpxchg_u64(ptr, old, new);
  162. #endif /* 0 */
  163. }
  164. __cmpxchg_called_with_bad_pointer();
  165. return old;
  166. }
  167. #define cmpxchg(ptr, o, n) \
  168. ((__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)(o), \
  169. (unsigned long)(n), sizeof(*(ptr))))
  170. #include <asm-generic/cmpxchg-local.h>
  171. static inline unsigned long __cmpxchg_local(volatile void *ptr,
  172. unsigned long old,
  173. unsigned long new, int size)
  174. {
  175. switch (size) {
  176. case 4:
  177. return __cmpxchg_local_u32(ptr, old, new);
  178. default:
  179. return __cmpxchg_local_generic(ptr, old, new, size);
  180. }
  181. return old;
  182. }
  183. /*
  184. * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
  185. * them available.
  186. */
  187. #define cmpxchg_local(ptr, o, n) \
  188. ((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
  189. (unsigned long)(n), sizeof(*(ptr))))
  190. #define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
  191. #endif /* _ASM_M32R_CMPXCHG_H */