atomic.h

#ifndef __ARCH_I386_ATOMIC__
#define __ARCH_I386_ATOMIC__

#include <linux/compiler.h>
#include <asm/processor.h>

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		(((v)->counter) = (i))
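
/*
 * Usage sketch (hypothetical names, not part of this header):
 * declaring and resetting a counter. atomic_read()/atomic_set()
 * compile to plain loads and stores; only the read-modify-write
 * operations below need LOCK_PREFIX.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	static void reset_events(void)
 *	{
 *		atomic_set(&nr_events, 0);
 *	}
 */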
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "addl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "subl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}
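
/*
 * Usage sketch (hypothetical names): adjusting a statistic from
 * several CPUs without a spinlock.
 *
 *	static atomic_t bytes_pending = ATOMIC_INIT(0);
 *
 *	static void account(int queued, int sent)
 *	{
 *		atomic_add(queued, &bytes_pending);
 *		atomic_sub(sent, &bytes_pending);
 *	}
 *
 * Neither call returns the new value; see atomic_add_return() below
 * when the result is needed.
 */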
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "subl %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}
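
/*
 * Usage sketch (hypothetical names): dropping a batch of references
 * in one locked operation instead of looping over atomic_dec().
 *
 *	if (atomic_sub_and_test(batch, &obj->refcnt))
 *		release_obj(obj);	// counter reached zero
 */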
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "decl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}
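
/*
 * Usage sketch (hypothetical names): tracking requests in flight.
 * atomic_inc()/atomic_dec() return nothing; use the *_and_test()
 * variants below when the resulting value matters.
 *
 *	static atomic_t in_flight = ATOMIC_INIT(0);
 *
 *	static void submit(void)   { atomic_inc(&in_flight); }
 *	static void complete(void) { atomic_dec(&in_flight); }
 */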
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "decl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}
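
/*
 * Usage sketch (hypothetical names): the classic reference-count
 * "put" pattern built on atomic_dec_and_test(). Assumes
 * <linux/slab.h> for kfree().
 *
 *	static void obj_put(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->refcnt))
 *			kfree(o);	// we dropped the last reference
 *	}
 */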
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "incl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "addl %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}
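
/*
 * Usage sketch (hypothetical names): treating the counter as a
 * credit balance, where the sign of the result signals exhaustion.
 *
 *	if (atomic_add_negative(-cost, &budget))
 *		throttle();	// balance dropped below zero
 */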
/**
 * atomic_add_return - add and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v.
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 == 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	__asm__ __volatile__(
		LOCK_PREFIX "xaddl %0, %1;"
		:"=r" (i)
		:"m" (v->counter), "0" (i));
	/* xaddl leaves the old value in %0; add __i back for the new one */
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor: no xadd, fall back to disabling IRQs */
	local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}
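
/*
 * Usage sketch (hypothetical names): handing out unique IDs.
 * atomic_add_return() yields the post-add value, so concurrent
 * callers each see a distinct result.
 *
 *	static atomic_t next_id = ATOMIC_INIT(0);
 *
 *	static int alloc_id(void)
 *	{
 *		return atomic_add_return(1, &next_id);
 *	}
 */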
#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
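
/*
 * Usage sketch (hypothetical LIMIT): an open-coded cmpxchg retry loop
 * that increments the counter but saturates at a limit. This is the
 * same retry shape that atomic_add_unless() below packages up.
 *
 *	static int inc_capped(atomic_t *v)
 *	{
 *		int cur = atomic_read(v), old, new;
 *		for (;;) {
 *			new = cur < LIMIT ? cur + 1 : cur;
 *			old = atomic_cmpxchg(v, cur, new);
 *			if (old == cur)
 *				return new;	// swap took effect
 *			cur = old;		// lost a race; retry
 *		}
 *	}
 */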
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u) \
({ \
	int c, old; \
	c = atomic_read(v); \
	for (;;) { \
		if (unlikely(c == (u))) \
			break; \
		old = atomic_cmpxchg((v), c, c + (a)); \
		if (likely(old == c)) \
			break; \
		c = old; \
	} \
	c != (u); \
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
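
/*
 * Usage sketch (hypothetical names): taking a reference to an object
 * found under a lock-free lookup, but only if it is not already dying.
 *
 *	obj = table_lookup(key);
 *	if (obj && !atomic_inc_not_zero(&obj->refcnt))
 *		obj = NULL;	// last ref already gone; leave it alone
 */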
#define atomic_inc_return(v) (atomic_add_return(1, v))
#define atomic_dec_return(v) (atomic_sub_return(1, v))

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr) \
	__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
	: : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr) \
	__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
	: : "r" (mask), "m" (*(addr)) : "memory")
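
/*
 * Usage sketch (hypothetical names): setting and clearing flag bits
 * in a shared word. Note these two macros operate on a plain 32-bit
 * lvalue through @addr, not on an atomic_t.
 *
 *	static unsigned long hw_flags;	// shared status word
 *
 *	static void mark_busy(int busy)
 *	{
 *		if (busy)
 *			atomic_set_mask(0x1, &hw_flags);
 *		else
 *			atomic_clear_mask(0x1, &hw_flags);
 *	}
 */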
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
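
/*
 * Usage sketch: on architectures whose atomics are not serializing,
 * these macros emit a real memory barrier, so portable code pairs
 * them with the atomic op; on x86 they cost only a compiler barrier.
 *
 *	smp_mb__before_atomic_dec();	// order earlier stores first
 *	atomic_dec(&obj->pending);	// obj is a hypothetical struct
 */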
#include <asm-generic/atomic.h>

#endif /* __ARCH_I386_ATOMIC__ */