local_32.h
#ifndef _ARCH_I386_LOCAL_H
#define _ARCH_I386_LOCAL_H

#include <linux/percpu.h>
#include <asm/system.h>
#include <asm/atomic.h>

typedef struct
{
        atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)   { ATOMIC_LONG_INIT(i) }

#define local_read(l)   atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))

static __inline__ void local_inc(local_t *l)
{
        __asm__ __volatile__(
                "incl %0"
                :"+m" (l->a.counter));
}

static __inline__ void local_dec(local_t *l)
{
        __asm__ __volatile__(
                "decl %0"
                :"+m" (l->a.counter));
}

static __inline__ void local_add(long i, local_t *l)
{
        __asm__ __volatile__(
                "addl %1,%0"
                :"+m" (l->a.counter)
                :"ir" (i));
}

static __inline__ void local_sub(long i, local_t *l)
{
        __asm__ __volatile__(
                "subl %1,%0"
                :"+m" (l->a.counter)
                :"ir" (i));
}
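
/*
 * Illustrative sketch (not part of the original header; the variable and
 * function names are hypothetical): basic use of the non-returning
 * operations above on a statically initialized counter.
 *
 *      static local_t hits = LOCAL_INIT(0);
 *
 *      static void record_hit(long extra)
 *      {
 *              local_inc(&hits);       // hits += 1
 *              local_add(extra, &hits);
 *      }
 *
 *      // elsewhere: long n = local_read(&hits);
 */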

/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int local_sub_and_test(long i, local_t *l)
{
        unsigned char c;

        __asm__ __volatile__(
                "subl %2,%0; sete %1"
                :"+m" (l->a.counter), "=qm" (c)
                :"ir" (i) : "memory");
        return c;
}
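
/*
 * Illustrative sketch (hypothetical names): draining a budget counter with
 * local_sub_and_test(), which reports when the value reaches exactly zero.
 *
 *      static local_t budget = LOCAL_INIT(16);
 *
 *      static void consume(long cost)
 *      {
 *              if (local_sub_and_test(cost, &budget))
 *                      refill_budget();        // hypothetical helper
 *      }
 */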

/**
 * local_dec_and_test - decrement and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int local_dec_and_test(local_t *l)
{
        unsigned char c;

        __asm__ __volatile__(
                "decl %0; sete %1"
                :"+m" (l->a.counter), "=qm" (c)
                : : "memory");
        return c != 0;
}

/**
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int local_inc_and_test(local_t *l)
{
        unsigned char c;

        __asm__ __volatile__(
                "incl %0; sete %1"
                :"+m" (l->a.counter), "=qm" (c)
                : : "memory");
        return c != 0;
}

/**
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int local_add_negative(long i, local_t *l)
{
        unsigned char c;

        __asm__ __volatile__(
                "addl %2,%0; sets %1"
                :"+m" (l->a.counter), "=qm" (c)
                :"ir" (i) : "memory");
        return c;
}

/**
 * local_add_return - add and return
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns @i + @l
 */
static __inline__ long local_add_return(long i, local_t *l)
{
        long __i;
#ifdef CONFIG_M386
        unsigned long flags;
        if (unlikely(boot_cpu_data.x86 <= 3))
                goto no_xadd;
#endif
        /* Modern 486+ processor */
        __i = i;
        __asm__ __volatile__(
                "xaddl %0, %1;"
                :"+r" (i), "+m" (l->a.counter)
                : : "memory");
        return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
        local_irq_save(flags);
        __i = local_read(l);
        local_set(l, i + __i);
        local_irq_restore(flags);
        return i + __i;
#endif
}
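
/*
 * Illustrative sketch (hypothetical names): local_add_return() yields the
 * value after the addition, e.g. to hand out ascending sequence numbers.
 *
 *      static local_t seq = LOCAL_INIT(0);
 *
 *      static long next_seq(void)
 *      {
 *              return local_add_return(1, &seq);       // returns 1, 2, 3, ...
 *      }
 */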

static __inline__ long local_sub_return(long i, local_t *l)
{
        return local_add_return(-i, l);
}

#define local_inc_return(l)     (local_add_return(1, l))
#define local_dec_return(l)     (local_sub_return(1, l))

#define local_cmpxchg(l, o, n) \
        (cmpxchg_local(&((l)->a.counter), (o), (n)))
/* Always has a lock prefix */
#define local_xchg(l, n) (xchg(&((l)->a.counter), (n)))
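
/*
 * Illustrative sketch (hypothetical names): local_xchg() atomically reads and
 * replaces the counter in one step, e.g. when periodically flushing
 * statistics. Per the comment above, xchg() always carries a lock prefix on
 * x86, unlike the other operations in this file.
 *
 *      static local_t bytes_since_flush = LOCAL_INIT(0);
 *
 *      static long flush_bytes(void)
 *      {
 *              return local_xchg(&bytes_since_flush, 0);
 *      }
 */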

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)                               \
({                                                              \
        long c, old;                                            \
        c = local_read(l);                                      \
        for (;;) {                                              \
                if (unlikely(c == (u)))                         \
                        break;                                  \
                old = local_cmpxchg((l), c, c + (a));           \
                if (likely(old == c))                           \
                        break;                                  \
                c = old;                                        \
        }                                                       \
        c != (u);                                               \
})
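
/*
 * Illustrative sketch (hypothetical names and limit): the cmpxchg loop above
 * retries until it either observes the forbidden value @u or wins the race.
 * A typical use is refusing to bump a counter past a ceiling:
 *
 *      #define IN_FLIGHT_MAX   128
 *      static local_t in_flight = LOCAL_INIT(0);
 *
 *      static int try_start(void)
 *      {
 *              // non-zero if the increment happened, zero at the ceiling
 *              return local_add_unless(&in_flight, 1, IN_FLIGHT_MAX);
 *      }
 */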

#define local_inc_not_zero(l) local_add_unless((l), 1, 0)

/* On x86, these are no better than the atomic variants. */
#define __local_inc(l)          local_inc(l)
#define __local_dec(l)          local_dec(l)
#define __local_add(i, l)       local_add((i), (l))
#define __local_sub(i, l)       local_sub((i), (l))

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations. Note they take
 * a variable, not an address.
 */

/* Need to disable preemption for the cpu local counters, otherwise we could
 * still access a variable of a previous CPU in a non-atomic way.
 */
#define cpu_local_wrap_v(l)             \
({      local_t res__;                  \
        preempt_disable();              \
        res__ = (l);                    \
        preempt_enable();               \
        res__; })
#define cpu_local_wrap(l)               \
({      preempt_disable();              \
        l;                              \
        preempt_enable(); })

#define cpu_local_read(l)       cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
#define cpu_local_set(l, i)     cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
#define cpu_local_inc(l)        cpu_local_wrap(local_inc(&__get_cpu_var(l)))
#define cpu_local_dec(l)        cpu_local_wrap(local_dec(&__get_cpu_var(l)))
#define cpu_local_add(i, l)     cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
#define cpu_local_sub(i, l)     cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))

#define __cpu_local_inc(l)      cpu_local_inc(l)
#define __cpu_local_dec(l)      cpu_local_dec(l)
#define __cpu_local_add(i, l)   cpu_local_add((i), (l))
#define __cpu_local_sub(i, l)   cpu_local_sub((i), (l))
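
/*
 * Illustrative sketch (hypothetical names): a per-cpu event counter driven
 * through the cpu_local_* wrappers above, which take the per-cpu variable
 * itself rather than its address and disable preemption around the access.
 *
 *      static DEFINE_PER_CPU(local_t, nr_events) = LOCAL_INIT(0);
 *
 *      static void note_event(void)
 *      {
 *              cpu_local_inc(nr_events);       // safe against preemption
 *      }
 *
 *      // A cross-CPU total would sum local_read(&per_cpu(nr_events, cpu))
 *      // over all online CPUs.
 */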

#endif /* _ARCH_I386_LOCAL_H */