atomic.h

#ifndef __ARCH_I386_ATOMIC__
#define __ARCH_I386_ATOMIC__

#include <linux/compiler.h>
#include <asm/processor.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		(((v)->counter) = (i))
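
/*
 * Example (illustrative only, not part of the original header;
 * 'nr_active' is a hypothetical counter): declaring, initializing,
 * reading and setting an atomic_t.
 *
 *	static atomic_t nr_active = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_active, 0);
 *	if (atomic_read(&nr_active) > 0)
 *		do_something();
 */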

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "addl %1,%0"
		:"+m" (v->counter)
		:"ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "subl %1,%0"
		:"+m" (v->counter)
		:"ir" (i));
}
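
/*
 * Example (illustrative only; 'pages_in_flight' and 'nr_pages' are
 * hypothetical): atomic_add()/atomic_sub() keep a shared counter
 * correct under concurrent updates without any explicit lock.
 *
 *	static atomic_t pages_in_flight = ATOMIC_INIT(0);
 *
 *	atomic_add(nr_pages, &pages_in_flight);	// on submit
 *	atomic_sub(nr_pages, &pages_in_flight);	// on completion
 */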

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "subl %2,%0; sete %1"
		:"+m" (v->counter), "=qm" (c)
		:"ir" (i) : "memory");
	return c;
}
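
/*
 * Example (illustrative only; 'budget', 'cost' and budget_exhausted()
 * are hypothetical): consume several units in one atomic step and
 * detect the caller that brings the count to exactly zero.
 *
 *	if (atomic_sub_and_test(cost, &budget))
 *		budget_exhausted();
 */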

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incl %0"
		:"+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "decl %0"
		:"+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "decl %0; sete %1"
		:"+m" (v->counter), "=qm" (c)
		: : "memory");
	return c != 0;
}
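
/*
 * Example (illustrative only; 'struct foo', foo_get() and
 * foo_destroy() are hypothetical): the classic refcounting pattern.
 * Exactly one caller sees atomic_dec_and_test() return true, so
 * exactly one caller frees the object.
 *
 *	static void foo_get(struct foo *f)
 *	{
 *		atomic_inc(&f->refcnt);
 *	}
 *
 *	static void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcnt))
 *			foo_destroy(f);
 *	}
 */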

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "incl %0; sete %1"
		:"+m" (v->counter), "=qm" (c)
		: : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "addl %2,%0; sets %1"
		:"+m" (v->counter), "=qm" (c)
		:"ir" (i) : "memory");
	return c;
}
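
/*
 * Example (illustrative only; 'credits', 'cost' and throttle() are
 * hypothetical, and the counter may legitimately go negative):
 * detect the update that drops the count below zero.
 *
 *	if (atomic_add_negative(-cost, &credits))
 *		throttle();
 */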

/**
 * atomic_add_return - add and return
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 == 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	__asm__ __volatile__(
		LOCK_PREFIX "xaddl %0, %1;"
		:"=r" (i)
		:"m" (v->counter), "0" (i));
	/* xaddl leaves the pre-add value in %0, so the new value is i + __i */
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}
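
/*
 * Example (illustrative only; 'bytes_used', 'len' and 'limit' are
 * hypothetical): atomic_add_return() yields the value *after* the
 * addition, which makes lock-free limit checks straightforward.
 *
 *	if (atomic_add_return(len, &bytes_used) > limit)
 *		atomic_sub(len, &bytes_used);	// over limit, back out
 */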

/* Atomically subtracts @i from @v and returns the new value. */
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)			\
({							\
	int c, old;					\
	c = atomic_read(v);				\
	for (;;) {					\
		if (unlikely(c == (u)))			\
			break;				\
		old = atomic_cmpxchg((v), c, c + (a));	\
		if (likely(old == c))			\
			break;				\
		c = old;				\
	}						\
	c != (u);					\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
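
/*
 * Example (illustrative only; 'obj', 'key' and table_lookup() are
 * hypothetical): atomic_inc_not_zero() takes a reference only if the
 * refcount has not already hit zero, the standard pattern for a
 * lookup racing against the final put.
 *
 *	obj = table_lookup(key);
 *	if (obj && !atomic_inc_not_zero(&obj->refcnt))
 *		obj = NULL;	// lost the race; treat as gone
 */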

#define atomic_inc_return(v)	(atomic_add_return(1,v))
#define atomic_dec_return(v)	(atomic_sub_return(1,v))
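
/*
 * Example (illustrative only; 'next_id' is hypothetical): since
 * atomic_inc_return() returns the post-increment value, each caller
 * obtains a unique id with no lock.
 *
 *	static atomic_t next_id = ATOMIC_INIT(0);
 *
 *	int id = atomic_inc_return(&next_id);
 */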

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr) \
	__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
		: : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr) \
	__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
		: : "r" (mask), "m" (*(addr)) : "memory")
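
/*
 * Example (illustrative only; 'flags_word' and the bit value are
 * hypothetical): atomically set and clear bits in a plain 32-bit word.
 *
 *	static int flags_word;
 *
 *	atomic_set_mask(0x01, &flags_word);	// set bit 0
 *	atomic_clear_mask(0x01, &flags_word);	// clear bit 0
 */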

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif