/* atomic.h: atomic operation emulation for FR-V
 *
 * For an explanation of how atomic ops work in this arch, see:
 *   Documentation/frv/atomic-ops.txt
 *
 * Copyright (C) 2004 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/types.h>
#include <asm/spr-regs.h>
#include <asm/system.h>

#ifdef CONFIG_SMP
#error not SMP safe
#endif

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * We do not have SMP systems, so we don't have to deal with that.
 */

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

typedef struct {
	int counter;
} atomic_t;

#define ATOMIC_INIT(i)		{ (i) }
#define atomic_read(v)		((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = (i))

#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
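
/*
 * The sequences below emulate a load-locked/store-conditional style
 * retry loop; see Documentation/frv/atomic-ops.txt for the
 * authoritative description.  Roughly: ORCC sets ICC3.Z, CKEQ primes
 * CC7 from it, and the ORCR issued in the same packet as the LD.P sets
 * CC3 true; the CST.P then stores only while CC3 is still true, and
 * the CORCC clears ICC3.Z to signal success.  If an interrupt or
 * exception intervenes, the kernel entry code invalidates the
 * condition, the store is suppressed, ICC3.Z remains set, and the BEQ
 * retries from label 0.
 */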
static inline int atomic_add_return(int i, atomic_t *v)
{
	unsigned long val;

	asm("0:						\n"
	    "	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
	    "	ckeq		icc3,cc7		\n"
	    "	ld.p		%M0,%1			\n"	/* LD.P/ORCR must be atomic */
	    "	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
	    "	add%I2		%1,%2,%1		\n"
	    "	cst.p		%1,%M0		,cc3,#1	\n"
	    "	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* clear ICC3.Z if store happens */
	    "	beq		icc3,#0,0b		\n"
	    : "+U"(v->counter), "=&r"(val)
	    : "NPr"(i)
	    : "memory", "cc7", "cc3", "icc3"
	    );

	return val;
}
static inline int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long val;

	asm("0:						\n"
	    "	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
	    "	ckeq		icc3,cc7		\n"
	    "	ld.p		%M0,%1			\n"	/* LD.P/ORCR must be atomic */
	    "	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
	    "	sub%I2		%1,%2,%1		\n"
	    "	cst.p		%1,%M0		,cc3,#1	\n"
	    "	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* clear ICC3.Z if store happens */
	    "	beq		icc3,#0,0b		\n"
	    : "+U"(v->counter), "=&r"(val)
	    : "NPr"(i)
	    : "memory", "cc7", "cc3", "icc3"
	    );

	return val;
}
#else

extern int atomic_add_return(int i, atomic_t *v);
extern int atomic_sub_return(int i, atomic_t *v);

#endif
static inline int atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static inline void atomic_add(int i, atomic_t *v)
{
	atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	atomic_sub_return(i, v);
}

static inline void atomic_inc(atomic_t *v)
{
	atomic_add_return(1, v);
}

static inline void atomic_dec(atomic_t *v)
{
	atomic_sub_return(1, v);
}

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
/*****************************************************************************/
/*
 * exchange value with memory
 */
#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS

#define xchg(ptr, x)						\
({								\
	__typeof__(ptr) __xg_ptr = (ptr);			\
	__typeof__(*(ptr)) __xg_orig;				\
								\
	switch (sizeof(__xg_orig)) {				\
	case 4:							\
		asm volatile(					\
			"swap%I0 %M0,%1"			\
			: "+m"(*__xg_ptr), "=r"(__xg_orig)	\
			: "1"(x)				\
			: "memory"				\
			);					\
		break;						\
								\
	default:						\
		__xg_orig = (__typeof__(__xg_orig))0;		\
		asm volatile("break");				\
		break;						\
	}							\
								\
	__xg_orig;						\
})
#else

extern uint32_t __xchg_32(uint32_t i, volatile void *v);

#define xchg(ptr, x)						\
({								\
	__typeof__(ptr) __xg_ptr = (ptr);			\
	__typeof__(*(ptr)) __xg_orig;				\
								\
	switch (sizeof(__xg_orig)) {				\
	case 4:							\
		__xg_orig = (__typeof__(*(ptr)))		\
			__xchg_32((uint32_t) x, __xg_ptr);	\
		break;						\
	default:						\
		__xg_orig = (__typeof__(__xg_orig))0;		\
		asm volatile("break");				\
		break;						\
	}							\
								\
	__xg_orig;						\
})

#endif

#define tas(ptr) (xchg((ptr), 1))
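
/*
 * Illustrative use of xchg()/tas() (an example, not part of this
 * header): tas() returns the previous value, so a result of zero means
 * the flag was free and has now been claimed:
 *
 *	static int flag;
 *
 *	while (tas(&flag) != 0)
 *		;
 *	... critical section ...
 *	flag = 0;
 */
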
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new)		(xchg(&((v)->counter), new))
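
/*
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u, using a cmpxchg
 * retry loop that rereads the counter whenever another update wins the
 * race.  Returns non-zero if @v was not @u, and zero otherwise.
 */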
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)

#include <asm-generic/atomic.h>

#endif /* _ASM_ATOMIC_H */