#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <asm/atomic.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

typedef struct
{
        atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)   { ATOMIC_LONG_INIT(i) }

#define local_read(l)   atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))

#define local_add(i, l) atomic_long_add((i), (&(l)->a))
#define local_sub(i, l) atomic_long_sub((i), (&(l)->a))
#define local_inc(l)    atomic_long_inc(&(l)->a)
#define local_dec(l)    atomic_long_dec(&(l)->a)
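
/*
 * Usage sketch (illustrative only, not part of this header): a CPU-local
 * statistics counter.  The name "stat" is hypothetical.
 *
 *      local_t stat = LOCAL_INIT(0);
 *
 *      local_inc(&stat);                       // count one event
 *      local_add(16, &stat);                   // count a batch of 16
 *      printk("events: %ld\n", local_read(&stat));
 */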
/*
 * Same as above, but return the result value
 */
static __inline__ long local_add_return(long i, local_t * l)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_add_return      \n"
                "       addu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_add_return      \n"
                "       addu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
                "       beqz    %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = l->a.counter;
                result += i;
                l->a.counter = result;
                local_irq_restore(flags);
        }

        return result;
}
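
/*
 * Example (illustrative): local_add_return() yields the value *after*
 * the addition, so it can hand out unique increasing tags.  "seq" is a
 * hypothetical local_t initialised elsewhere.
 *
 *      long tag = local_add_return(1, &seq);   // tag == new counter value
 */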
static __inline__ long local_sub_return(long i, local_t * l)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_sub_return      \n"
                "       subu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_sub_return      \n"
                "       subu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
                "       beqz    %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = l->a.counter;
                result -= i;
                l->a.counter = result;
                local_irq_restore(flags);
        }

        return result;
}
/*
 * local_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically test @l and subtract @i if @l is greater than or equal to @i.
 * The function returns the old value of @l minus @i.
 */
static __inline__ long local_sub_if_positive(long i, local_t * l)
{
        long result;            /* must be signed for the result >= 0 test */

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_sub_if_positive \n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                        __SC    "%0, %2                                 \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_sub_if_positive \n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                        __SC    "%0, %2                                 \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = l->a.counter;
                result -= i;
                if (result >= 0)
                        l->a.counter = result;
                local_irq_restore(flags);
        }

        return result;
}
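
/*
 * Example (illustrative): the conditional subtract works as a credit
 * counter that never goes below zero.  "credits" is a hypothetical
 * local_t; a negative return means nothing was subtracted.
 *
 *      if (local_sub_if_positive(n, &credits) < 0)
 *              return -EBUSY;          // too few credits, counter unchanged
 */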
#define local_cmpxchg(l, o, n)                                  \
        ((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))
/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)                               \
({                                                              \
        long c, old;                                            \
        c = local_read(l);                                      \
        while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
                c = old;                                        \
        c != (u);                                               \
})
#define local_inc_not_zero(l)   local_add_unless((l), 1, 0)

#define local_dec_return(l)     local_sub_return(1, (l))
#define local_inc_return(l)     local_add_return(1, (l))
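
/*
 * Example (illustrative): local_inc_not_zero() implements the usual
 * "take a reference only while the count is still non-zero" pattern,
 * assuming the counter is only ever touched by its owning CPU.
 * "obj->refs" is a hypothetical local_t field.
 *
 *      if (!local_inc_not_zero(&obj->refs))
 *              return NULL;            // already dropped to zero
 */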
/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/*
 * local_dec_if_positive - decrement by 1 if old value positive
 * @l: pointer of type local_t
 */
#define local_dec_if_positive(l)        local_sub_if_positive(1, l)

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)
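
/*
 * Example (illustrative): pairing the inc/dec helpers for a CPU-private
 * "outstanding work" count.  "pending" and wake_consumer() are
 * hypothetical.
 *
 *      local_inc(&pending);                    // work submitted
 *      ...
 *      if (local_dec_and_test(&pending))       // last item just finished?
 *              wake_consumer();
 */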
/*
 * Non-atomic variants: use these only when nothing else can touch the
 * counter (e.g. preemption and interrupts are already excluded), where
 * they are much cheaper than the LL/SC loops above.  Unlike the
 * cpu_local_* wrappers below, these take a local_t pointer, not a
 * per-cpu variable name.
 */
#define __local_inc(l)          ((l)->a.counter++)
#define __local_dec(l)          ((l)->a.counter--)
#define __local_add(i, l)       ((l)->a.counter += (i))
#define __local_sub(i, l)       ((l)->a.counter -= (i))
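
/*
 * Example (illustrative): the __local_* forms compile to plain loads
 * and stores, so they are only safe when the counter cannot be updated
 * concurrently, e.g. with interrupts already disabled:
 *
 *      local_irq_save(flags);
 *      __local_add(3, &stat);          // "stat" as in the sketch above
 *      local_irq_restore(flags);
 */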
/*
 * We need to disable preemption around the cpu-local counters;
 * otherwise we could still end up accessing a previous CPU's variable
 * in a non-atomic way.
 */
#define cpu_local_wrap_v(l)                                     \
({                                                              \
        local_t res__;                                          \
        preempt_disable();                                      \
        res__ = (l);                                            \
        preempt_enable();                                       \
        res__;                                                  \
})
#define cpu_local_wrap(l)                                       \
({                                                              \
        preempt_disable();                                      \
        l;                                                      \
        preempt_enable();                                       \
})

#define cpu_local_read(l)       cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
#define cpu_local_set(l, i)     cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
#define cpu_local_inc(l)        cpu_local_wrap(local_inc(&__get_cpu_var(l)))
#define cpu_local_dec(l)        cpu_local_wrap(local_dec(&__get_cpu_var(l)))
#define cpu_local_add(i, l)     cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
#define cpu_local_sub(i, l)     cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))

#define __cpu_local_inc(l)      cpu_local_inc(l)
#define __cpu_local_dec(l)      cpu_local_dec(l)
#define __cpu_local_add(i, l)   cpu_local_add((i), (l))
#define __cpu_local_sub(i, l)   cpu_local_sub((i), (l))
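
/*
 * Example (illustrative): a per-CPU event counter driven through the
 * cpu_local_* wrappers, which handle preemption themselves.  The name
 * "my_events" is hypothetical.
 *
 *      static DEFINE_PER_CPU(local_t, my_events) = LOCAL_INIT(0);
 *
 *      static void count_event(void)
 *      {
 *              cpu_local_inc(my_events);       // variable name, not address
 *      }
 */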

#endif /* _ARCH_MIPS_LOCAL_H */