atomic.h 5.9 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260
  1. #ifndef __ARCH_I386_ATOMIC__
  2. #define __ARCH_I386_ATOMIC__
  3. #include <linux/config.h>
  4. #include <linux/compiler.h>
  5. #include <asm/processor.h>
  6. /*
  7. * Atomic operations that C can't guarantee us. Useful for
  8. * resource counting etc..
  9. */
  10. /*
  11. * Make sure gcc doesn't try to be clever and move things around
  12. * on us. We need to use _exactly_ the address the user gave us,
  13. * not some alias that contains the same information.
  14. */
  15. typedef struct { volatile int counter; } atomic_t;
  16. #define ATOMIC_INIT(i) { (i) }
  17. /**
  18. * atomic_read - read atomic variable
  19. * @v: pointer of type atomic_t
  20. *
  21. * Atomically reads the value of @v.
  22. */
  23. #define atomic_read(v) ((v)->counter)
  24. /**
  25. * atomic_set - set atomic variable
  26. * @v: pointer of type atomic_t
  27. * @i: required value
  28. *
  29. * Atomically sets the value of @v to @i.
  30. */
  31. #define atomic_set(v,i) (((v)->counter) = (i))
  32. /**
  33. * atomic_add - add integer to atomic variable
  34. * @i: integer value to add
  35. * @v: pointer of type atomic_t
  36. *
  37. * Atomically adds @i to @v.
  38. */
  39. static __inline__ void atomic_add(int i, atomic_t *v)
  40. {
  41. __asm__ __volatile__(
  42. LOCK_PREFIX "addl %1,%0"
  43. :"=m" (v->counter)
  44. :"ir" (i), "m" (v->counter));
  45. }
  46. /**
  47. * atomic_sub - subtract the atomic variable
  48. * @i: integer value to subtract
  49. * @v: pointer of type atomic_t
  50. *
  51. * Atomically subtracts @i from @v.
  52. */
  53. static __inline__ void atomic_sub(int i, atomic_t *v)
  54. {
  55. __asm__ __volatile__(
  56. LOCK_PREFIX "subl %1,%0"
  57. :"=m" (v->counter)
  58. :"ir" (i), "m" (v->counter));
  59. }
  60. /**
  61. * atomic_sub_and_test - subtract value from variable and test result
  62. * @i: integer value to subtract
  63. * @v: pointer of type atomic_t
  64. *
  65. * Atomically subtracts @i from @v and returns
  66. * true if the result is zero, or false for all
  67. * other cases.
  68. */
  69. static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
  70. {
  71. unsigned char c;
  72. __asm__ __volatile__(
  73. LOCK_PREFIX "subl %2,%0; sete %1"
  74. :"=m" (v->counter), "=qm" (c)
  75. :"ir" (i), "m" (v->counter) : "memory");
  76. return c;
  77. }
  78. /**
  79. * atomic_inc - increment atomic variable
  80. * @v: pointer of type atomic_t
  81. *
  82. * Atomically increments @v by 1.
  83. */
  84. static __inline__ void atomic_inc(atomic_t *v)
  85. {
  86. __asm__ __volatile__(
  87. LOCK_PREFIX "incl %0"
  88. :"=m" (v->counter)
  89. :"m" (v->counter));
  90. }
  91. /**
  92. * atomic_dec - decrement atomic variable
  93. * @v: pointer of type atomic_t
  94. *
  95. * Atomically decrements @v by 1.
  96. */
  97. static __inline__ void atomic_dec(atomic_t *v)
  98. {
  99. __asm__ __volatile__(
  100. LOCK_PREFIX "decl %0"
  101. :"=m" (v->counter)
  102. :"m" (v->counter));
  103. }
  104. /**
  105. * atomic_dec_and_test - decrement and test
  106. * @v: pointer of type atomic_t
  107. *
  108. * Atomically decrements @v by 1 and
  109. * returns true if the result is 0, or false for all other
  110. * cases.
  111. */
  112. static __inline__ int atomic_dec_and_test(atomic_t *v)
  113. {
  114. unsigned char c;
  115. __asm__ __volatile__(
  116. LOCK_PREFIX "decl %0; sete %1"
  117. :"=m" (v->counter), "=qm" (c)
  118. :"m" (v->counter) : "memory");
  119. return c != 0;
  120. }
  121. /**
  122. * atomic_inc_and_test - increment and test
  123. * @v: pointer of type atomic_t
  124. *
  125. * Atomically increments @v by 1
  126. * and returns true if the result is zero, or false for all
  127. * other cases.
  128. */
  129. static __inline__ int atomic_inc_and_test(atomic_t *v)
  130. {
  131. unsigned char c;
  132. __asm__ __volatile__(
  133. LOCK_PREFIX "incl %0; sete %1"
  134. :"=m" (v->counter), "=qm" (c)
  135. :"m" (v->counter) : "memory");
  136. return c != 0;
  137. }
  138. /**
  139. * atomic_add_negative - add and test if negative
  140. * @v: pointer of type atomic_t
  141. * @i: integer value to add
  142. *
  143. * Atomically adds @i to @v and returns true
  144. * if the result is negative, or false when
  145. * result is greater than or equal to zero.
  146. */
  147. static __inline__ int atomic_add_negative(int i, atomic_t *v)
  148. {
  149. unsigned char c;
  150. __asm__ __volatile__(
  151. LOCK_PREFIX "addl %2,%0; sets %1"
  152. :"=m" (v->counter), "=qm" (c)
  153. :"ir" (i), "m" (v->counter) : "memory");
  154. return c;
  155. }
  156. /**
  157. * atomic_add_return - add and return
  158. * @v: pointer of type atomic_t
  159. * @i: integer value to add
  160. *
  161. * Atomically adds @i to @v and returns @i + @v
  162. */
  163. static __inline__ int atomic_add_return(int i, atomic_t *v)
  164. {
  165. int __i;
  166. #ifdef CONFIG_M386
  167. unsigned long flags;
  168. if(unlikely(boot_cpu_data.x86==3))
  169. goto no_xadd;
  170. #endif
  171. /* Modern 486+ processor */
  172. __i = i;
  173. __asm__ __volatile__(
  174. LOCK_PREFIX "xaddl %0, %1;"
  175. :"=r"(i)
  176. :"m"(v->counter), "0"(i));
  177. return i + __i;
  178. #ifdef CONFIG_M386
  179. no_xadd: /* Legacy 386 processor */
  180. local_irq_save(flags);
  181. __i = atomic_read(v);
  182. atomic_set(v, i + __i);
  183. local_irq_restore(flags);
  184. return i + __i;
  185. #endif
  186. }
  187. static __inline__ int atomic_sub_return(int i, atomic_t *v)
  188. {
  189. return atomic_add_return(-i,v);
  190. }
  191. #define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
  192. #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
  193. /**
  194. * atomic_add_unless - add unless the number is a given value
  195. * @v: pointer of type atomic_t
  196. * @a: the amount to add to v...
  197. * @u: ...unless v is equal to u.
  198. *
  199. * Atomically adds @a to @v, so long as it was not @u.
  200. * Returns non-zero if @v was not @u, and zero otherwise.
  201. */
  202. #define atomic_add_unless(v, a, u) \
  203. ({ \
  204. int c, old; \
  205. c = atomic_read(v); \
  206. for (;;) { \
  207. if (unlikely(c == (u))) \
  208. break; \
  209. old = atomic_cmpxchg((v), c, c + (a)); \
  210. if (likely(old == c)) \
  211. break; \
  212. c = old; \
  213. } \
  214. c != (u); \
  215. })
  216. #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
  217. #define atomic_inc_return(v) (atomic_add_return(1,v))
  218. #define atomic_dec_return(v) (atomic_sub_return(1,v))
  219. /* These are x86-specific, used by some header files */
  220. #define atomic_clear_mask(mask, addr) \
  221. __asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
  222. : : "r" (~(mask)),"m" (*addr) : "memory")
  223. #define atomic_set_mask(mask, addr) \
  224. __asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
  225. : : "r" (mask),"m" (*(addr)) : "memory")
  226. /* Atomic operations are already serializing on x86 */
  227. #define smp_mb__before_atomic_dec() barrier()
  228. #define smp_mb__after_atomic_dec() barrier()
  229. #define smp_mb__before_atomic_inc() barrier()
  230. #define smp_mb__after_atomic_inc() barrier()
  231. #include <asm-generic/atomic.h>
  232. #endif