/* atomic.h: atomic operation emulation for FR-V
 *
 * For an explanation of how atomic ops work in this arch, see:
 *   Documentation/frv/atomic-ops.txt
 *
 * Copyright (C) 2004 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/types.h>
#include <asm/spr-regs.h>
#include <asm/system.h>

#ifdef CONFIG_SMP
#error not SMP safe
#endif

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * We do not have SMP systems, so we don't have to deal with that.
 */

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#define ATOMIC_INIT(i)		{ (i) }
#define atomic_read(v)		((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = (i))
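
/*
 * Note (added for clarity): atomic_read() and atomic_set() are plain word
 * accesses; a single aligned 32-bit load or store is taken to be atomic on
 * this CPU, so only the read-modify-write operations below need a special
 * instruction sequence.
 */
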
#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
static inline int atomic_add_return(int i, atomic_t *v)
{
	unsigned long val;

	asm("0:						\n"
	    "	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
	    "	ckeq		icc3,cc7		\n"
	    "	ld.p		%M0,%1			\n"	/* LD.P/ORCR must be atomic */
	    "	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
	    "	add%I2		%1,%2,%1		\n"
	    "	cst.p		%1,%M0		,cc3,#1	\n"
	    "	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* clear ICC3.Z if store happens */
	    "	beq		icc3,#0,0b		\n"
	    : "+U"(v->counter), "=&r"(val)
	    : "NPr"(i)
	    : "memory", "cc7", "cc3", "icc3"
	    );

	return val;
}
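
/*
 * A note (added for clarity) on the LD.P/ORCR/CST.P sequence used by
 * atomic_add_return() above and atomic_sub_return() below; see
 * Documentation/frv/atomic-ops.txt for the full description.  ICC3.Z is set,
 * the condition flags are armed around the load, the new value is computed
 * and then conditionally stored by CST.P, and CORCC clears ICC3.Z only if
 * that store actually happened.  If the sequence is disturbed (for instance
 * by an interrupt), the kernel's exception handling cancels the condition:
 * the store is suppressed, ICC3.Z stays set, and the BEQ loops back to 0: to
 * retry the whole read-modify-write.
 */
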
static inline int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long val;

	asm("0:						\n"
	    "	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
	    "	ckeq		icc3,cc7		\n"
	    "	ld.p		%M0,%1			\n"	/* LD.P/ORCR must be atomic */
	    "	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
	    "	sub%I2		%1,%2,%1		\n"
	    "	cst.p		%1,%M0		,cc3,#1	\n"
	    "	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* clear ICC3.Z if store happens */
	    "	beq		icc3,#0,0b		\n"
	    : "+U"(v->counter), "=&r"(val)
	    : "NPr"(i)
	    : "memory", "cc7", "cc3", "icc3"
	    );

	return val;
}

#else

extern int atomic_add_return(int i, atomic_t *v);
extern int atomic_sub_return(int i, atomic_t *v);

#endif

static inline int atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static inline void atomic_add(int i, atomic_t *v)
{
	atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	atomic_sub_return(i, v);
}

static inline void atomic_inc(atomic_t *v)
{
	atomic_add_return(1, v);
}

static inline void atomic_dec(atomic_t *v)
{
	atomic_sub_return(1, v);
}

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
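
/*
 * Illustrative sketch only (hypothetical helper, not part of this API): the
 * ops above are typically used for reference counting, as the header comment
 * notes.  atomic_dec_and_test() returns true only for the caller that takes
 * the count to zero, so exactly one path runs the release step.
 */
static inline void atomic_example_put(atomic_t *refcount,
				      void (*release)(void *data), void *data)
{
	if (atomic_dec_and_test(refcount))
		release(data);
}
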
/*
 * 64-bit atomic ops
 */
typedef struct {
	volatile long long counter;
} atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

static inline long long atomic64_read(atomic64_t *v)
{
	long long counter;

	asm("ldd%I1 %M1,%0"
	    : "=e"(counter)
	    : "m"(v->counter));
	return counter;
}

static inline void atomic64_set(atomic64_t *v, long long i)
{
	asm volatile("std%I0 %1,%M0"
		     : "=m"(v->counter)
		     : "e"(i));
}
extern long long atomic64_inc_return(atomic64_t *v);
extern long long atomic64_dec_return(atomic64_t *v);
extern long long atomic64_add_return(long long i, atomic64_t *v);
extern long long atomic64_sub_return(long long i, atomic64_t *v);

static inline long long atomic64_add_negative(long long i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}

static inline void atomic64_add(long long i, atomic64_t *v)
{
	atomic64_add_return(i, v);
}

static inline void atomic64_sub(long long i, atomic64_t *v)
{
	atomic64_sub_return(i, v);
}

static inline void atomic64_inc(atomic64_t *v)
{
	atomic64_inc_return(v);
}

static inline void atomic64_dec(atomic64_t *v)
{
	atomic64_dec_return(v);
}

#define atomic64_sub_and_test(i,v)	(atomic64_sub_return((i), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
#define atomic64_inc_and_test(v)	(atomic64_inc_return((v)) == 0)
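
/*
 * Illustrative sketch only (hypothetical helper, not part of this API): a
 * 64-bit event counter on this 32-bit CPU.  The counter can be bumped and a
 * recent snapshot taken without torn values, because both the out-of-line
 * RMW op and the LDD-based read are atomic.
 */
static inline long long atomic64_example_bump_and_sample(atomic64_t *events)
{
	atomic64_inc(events);
	return atomic64_read(events);
}
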

/*****************************************************************************/
/*
 * exchange value with memory
 */
extern uint64_t __xchg_64(uint64_t i, volatile void *v);

#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS

#define xchg(ptr, x)						\
({								\
	__typeof__(ptr) __xg_ptr = (ptr);			\
	__typeof__(*(ptr)) __xg_orig;				\
								\
	switch (sizeof(__xg_orig)) {				\
	case 4:							\
		asm volatile(					\
			"swap%I0 %M0,%1"			\
			: "+m"(*__xg_ptr), "=r"(__xg_orig)	\
			: "1"(x)				\
			: "memory"				\
			);					\
		break;						\
								\
	default:						\
		__xg_orig = (__typeof__(__xg_orig))0;		\
		asm volatile("break");				\
		break;						\
	}							\
								\
	__xg_orig;						\
})

#else

extern uint32_t __xchg_32(uint32_t i, volatile void *v);

#define xchg(ptr, x)						\
({								\
	__typeof__(ptr) __xg_ptr = (ptr);			\
	__typeof__(*(ptr)) __xg_orig;				\
								\
	switch (sizeof(__xg_orig)) {				\
	case 4:							\
		__xg_orig = (__typeof__(*(ptr)))		\
			__xchg_32((uint32_t) x, __xg_ptr);	\
		break;						\
	default:						\
		__xg_orig = (__typeof__(__xg_orig))0;		\
		asm volatile("break");				\
		break;						\
	}							\
								\
	__xg_orig;						\
})

#endif

#define tas(ptr) (xchg((ptr), 1))
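
/*
 * Illustrative sketch only (hypothetical helper, not part of this API):
 * claiming a one-shot flag with tas().  The SWAP-based xchg() returns the
 * previous value of the word, so only the first caller to set it sees 0
 * back and wins the claim.
 */
static inline int example_claim_once(volatile int *flag)
{
	return tas(flag) == 0;
}
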
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&(v)->counter, old, new))
#define atomic_xchg(v, new)		(xchg(&(v)->counter, new))
#define atomic64_cmpxchg(v, old, new)	(__cmpxchg_64(old, new, &(v)->counter))
#define atomic64_xchg(v, new)		(__xchg_64(new, &(v)->counter))

static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
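
/*
 * Illustrative sketch only (hypothetical helper, not part of this API):
 * atomic_inc_not_zero() is the usual way to take a reference on an object
 * whose last reference may concurrently be going away; the cmpxchg loop in
 * atomic_add_unless() only lets the increment succeed while the count is
 * still non-zero.
 */
static inline int example_try_get(atomic_t *refcount)
{
	return atomic_inc_not_zero(refcount);
}
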
#include <asm-generic/atomic-long.h>

#endif /* _ASM_ATOMIC_H */