local.h
#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <asm/atomic.h>
#include <asm/war.h>

typedef struct
{
        atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)   { ATOMIC_LONG_INIT(i) }

#define local_read(l)   atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))

#define local_add(i, l) atomic_long_add((i), (&(l)->a))
#define local_sub(i, l) atomic_long_sub((i), (&(l)->a))
#define local_inc(l)    atomic_long_inc(&(l)->a)
#define local_dec(l)    atomic_long_dec(&(l)->a)
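
/*
 * Example (an illustrative sketch, not part of this header; the names
 * "nmi_count", "note_nmi" and "snapshot_nmi" are hypothetical): a counter
 * that is only ever modified from one CPU, e.g. from that CPU's interrupt
 * handler, but may be read from anywhere:
 *
 *      static local_t nmi_count = LOCAL_INIT(0);
 *
 *      void note_nmi(void)             (called on the owning CPU only)
 *      {
 *              local_inc(&nmi_count);
 *      }
 *
 *      long snapshot_nmi(void)         (safe from any CPU)
 *      {
 *              return local_read(&nmi_count);
 *      }
 */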
/*
 * Like the macros above, but return the resulting value.
 */
static __inline__ long local_add_return(long i, local_t * l)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_add_return      \n"
                "       addu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_add_return      \n"
                "       addu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
                "       beqz    %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = l->a.counter;
                result += i;
                l->a.counter = result;
                local_irq_restore(flags);
        }

        return result;
}
static __inline__ long local_sub_return(long i, local_t * l)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_sub_return      \n"
                "       subu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_sub_return      \n"
                "       subu    %0, %1, %3                              \n"
                        __SC    "%0, %2                                 \n"
                "       beqz    %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = l->a.counter;
                result -= i;
                l->a.counter = result;
                local_irq_restore(flags);
        }

        return result;
}
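
/*
 * The two functions above select among three implementations at run time:
 * CPUs needing the R10000_LLSC_WAR workaround retry the LL/SC sequence
 * with the branch-likely "beqzl" (the erratum workaround requires a
 * branch-likely back to the LL), other LL/SC-capable CPUs use a plain
 * "beqz" retry loop, and CPUs without LL/SC fall back to disabling
 * interrupts, which is sufficient because a local_t is only ever
 * modified by its owning CPU.
 */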
/*
 * local_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically test @l and subtract @i from it if @l is greater than or
 * equal to @i.  The function returns the old value of @l minus @i.
 */
static __inline__ long local_sub_if_positive(long i, local_t * l)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_sub_if_positive \n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                        __SC    "%0, %2                                 \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:"    __LL    "%1, %2         # local_sub_if_positive \n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                        __SC    "%0, %2                                 \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
                : "Ir" (i), "m" (l->a.counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = l->a.counter;
                result -= i;
                /* result is unsigned, so compare as signed */
                if ((long) result >= 0)
                        l->a.counter = result;
                local_irq_restore(flags);
        }

        return result;
}
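
/*
 * Example (an illustrative sketch; the "budget" counter is hypothetical):
 * consuming one unit of a CPU-local budget only while some remains, in the
 * style of a try-down on a semaphore count.  A non-negative return value
 * means the subtraction took effect:
 *
 *      static local_t budget = LOCAL_INIT(16);
 *
 *      if (local_sub_if_positive(1, &budget) >= 0) {
 *              (slot acquired)
 *      } else {
 *              (budget exhausted; the counter was left unchanged)
 *      }
 */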
#define local_cmpxchg(l, o, n) \
        ((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)                                       \
({                                                                      \
        long c, old;                                                    \
        c = local_read(l);                                              \
        while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
                c = old;                                                \
        c != (u);                                                       \
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
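
/*
 * Example (an illustrative sketch; "obj" and its "refs" field are
 * hypothetical): the classic lookup-side refcount idiom, taking a
 * reference only if the object is not already being torn down:
 *
 *      if (!local_inc_not_zero(&obj->refs))
 *              return NULL;    (refcount already hit zero, object dying)
 */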
#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/*
 * local_dec_if_positive - decrement by 1 if old value positive
 * @l: pointer of type local_t
 */
#define local_dec_if_positive(l) local_sub_if_positive(1, l)

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)
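
/*
 * Example (an illustrative sketch; "obj", "refs" and "release_object" are
 * hypothetical): the usual put-side pairing for the test macros above,
 * freeing the object when the last reference goes away:
 *
 *      if (local_dec_and_test(&obj->refs))
 *              release_object(obj);
 */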
/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note that the
 * cpu_local_* accessors below take a variable, not an address.
 */
#define __local_inc(l)          ((l)->a.counter++)
#define __local_dec(l)          ((l)->a.counter--)
#define __local_add(i, l)       ((l)->a.counter += (i))
#define __local_sub(i, l)       ((l)->a.counter -= (i))
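
/*
 * Example (an illustrative sketch; the per-cpu "stats" variable is
 * hypothetical): the __local_* forms are plain, non-atomic updates, so
 * they are only safe when nothing can interleave: preemption must be
 * off, and interrupts too if the counter is touched from irq context:
 *
 *      static DEFINE_PER_CPU(local_t, stats) = LOCAL_INIT(0);
 *
 *      preempt_disable();
 *      __local_add(3, &__get_cpu_var(stats));
 *      preempt_enable();
 */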
/* Need to disable preemption for the cpu local counters, otherwise we
   could still access a variable of a previous CPU in a non-atomic way. */
#define cpu_local_wrap_v(l)             \
({                                      \
        local_t res__;                  \
        preempt_disable();              \
        res__ = (l);                    \
        preempt_enable();               \
        res__;                          \
})
#define cpu_local_wrap(l)               \
({                                      \
        preempt_disable();              \
        l;                              \
        preempt_enable();               \
})

#define cpu_local_read(l)       cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
#define cpu_local_set(l, i)     cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
#define cpu_local_inc(l)        cpu_local_wrap(local_inc(&__get_cpu_var(l)))
#define cpu_local_dec(l)        cpu_local_wrap(local_dec(&__get_cpu_var(l)))
#define cpu_local_add(i, l)     cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
#define cpu_local_sub(i, l)     cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))

#define __cpu_local_inc(l)      cpu_local_inc(l)
#define __cpu_local_dec(l)      cpu_local_dec(l)
#define __cpu_local_add(i, l)   cpu_local_add((i), (l))
#define __cpu_local_sub(i, l)   cpu_local_sub((i), (l))
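
/*
 * Example (an illustrative sketch; "pkt_count" and "total_packets" are
 * hypothetical): a per-CPU packet counter bumped from the hot path with
 * cpu_local_inc(), which handles the preempt_disable()/preempt_enable()
 * pairing itself, and summed across CPUs at read time:
 *
 *      static DEFINE_PER_CPU(local_t, pkt_count) = LOCAL_INIT(0);
 *
 *      cpu_local_inc(pkt_count);
 *
 *      long total_packets(void)
 *      {
 *              long sum = 0;
 *              int cpu;
 *
 *              for_each_possible_cpu(cpu)
 *                      sum += local_read(&per_cpu(pkt_count, cpu));
 *              return sum;
 *      }
 */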
#endif /* _ARCH_MIPS_LOCAL_H */