atomic_32.h
#ifndef __ARCH_I386_ATOMIC__
#define __ARCH_I386_ATOMIC__

#include <linux/compiler.h>
#include <asm/processor.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc..
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)	(((v)->counter) = (i))
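
/*
 * Example (illustrative only, not part of this header): declaring and
 * initializing an atomic counter. ATOMIC_INIT() handles static
 * initialization; atomic_set() handles run-time (re)initialization.
 * The names below are hypothetical.
 *
 *	static atomic_t nr_requests = ATOMIC_INIT(0);
 *
 *	void reset_request_count(void)
 *	{
 *		atomic_set(&nr_requests, 0);
 *	}
 */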
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "addl %1,%0"
		: "+m" (v->counter)
		: "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "subl %1,%0"
		: "+m" (v->counter)
		: "ir" (i));
}
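
/*
 * Example (illustrative only, not part of this header): the "resource
 * counting" use mentioned at the top of this file, tracking an
 * in-flight byte total with atomic_add()/atomic_sub(). The counter
 * name is hypothetical; the first call runs on submission, the second
 * on completion.
 *
 *	static atomic_t bytes_in_flight = ATOMIC_INIT(0);
 *
 *	atomic_add(len, &bytes_in_flight);
 *	atomic_sub(len, &bytes_in_flight);
 */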
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "subl %2,%0; sete %1"
		: "+m" (v->counter), "=qm" (c)
		: "ir" (i) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incl %0"
		: "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "decl %0"
		: "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "decl %0; sete %1"
		: "+m" (v->counter), "=qm" (c)
		: : "memory");
	return c != 0;
}
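
/*
 * Example (illustrative only, not part of this header): the classic
 * reference-count pattern built on atomic_inc()/atomic_dec_and_test().
 * The object type and free routine are hypothetical; the point is that
 * exactly one caller observes the count reaching zero and frees the
 * object.
 *
 *	struct my_obj {
 *		atomic_t refcount;
 *	};
 *
 *	static void my_obj_get(struct my_obj *obj)
 *	{
 *		atomic_inc(&obj->refcount);
 *	}
 *
 *	static void my_obj_put(struct my_obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcount))
 *			kfree(obj);
 *	}
 */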
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "incl %0; sete %1"
		: "+m" (v->counter), "=qm" (c)
		: : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "addl %2,%0; sets %1"
		: "+m" (v->counter), "=qm" (c)
		: "ir" (i) : "memory");
	return c;
}

/**
 * atomic_add_return - add integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v.
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;

	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	__asm__ __volatile__(
		LOCK_PREFIX "xaddl %0, %1"
		: "+r" (i), "+m" (v->counter)
		: : "memory");
	return i + __i;

#ifdef CONFIG_M386
no_xadd:	/* Legacy 386 processor */
	local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}
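
/*
 * Example (illustrative only, not part of this header): handing out
 * unique, monotonically increasing sequence numbers. Because the add
 * and the read of the new value happen as one atomic step, concurrent
 * callers can never see the same number. The names are hypothetical.
 *
 *	static atomic_t next_seq = ATOMIC_INIT(0);
 *
 *	int get_seq(void)
 *	{
 *		return atomic_add_return(1, &next_seq);
 *	}
 */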
/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i.
 */
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
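
/*
 * Example (illustrative only, not part of this header): a
 * compare-and-swap loop built on atomic_cmpxchg(), here incrementing a
 * counter only up to a caller-supplied ceiling. atomic_add_unless()
 * below uses the same loop shape. The function name is hypothetical.
 *
 *	static int saturating_inc(atomic_t *v, int max)
 *	{
 *		int c, old;
 *
 *		c = atomic_read(v);
 *		for (;;) {
 *			if (c >= max)
 *				return 0;
 *			old = atomic_cmpxchg(v, c, c + 1);
 *			if (old == c)
 *				return 1;
 *			c = old;
 *		}
 *	}
 */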
/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
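
/*
 * Example (illustrative only, not part of this header): taking a
 * reference on an object found by a lockless lookup.
 * atomic_inc_not_zero() fails once the count has already dropped to
 * zero, so an object in the middle of teardown is never resurrected.
 * The lookup function and object type are hypothetical.
 *
 *	struct my_obj *obj = lookup_obj(key);
 *
 *	if (obj && !atomic_inc_not_zero(&obj->refcount))
 *		obj = NULL;
 */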
#define atomic_inc_return(v) (atomic_add_return(1, v))
#define atomic_dec_return(v) (atomic_sub_return(1, v))

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr) \
	__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
		: : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr) \
	__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
		: : "r" (mask), "m" (*(addr)) : "memory")
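
/*
 * Example (illustrative only, not part of this header): atomically
 * setting and clearing bits in a plain flag word. Note that these two
 * macros take a memory address rather than an atomic_t. The flag word
 * and bit value are hypothetical; the first call sets bit 0, the
 * second clears it again.
 *
 *	static unsigned long dev_flags;
 *
 *	atomic_set_mask(0x1, &dev_flags);
 *	atomic_clear_mask(0x1, &dev_flags);
 */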
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif