atomic_32.h

#ifndef _ASM_X86_ATOMIC_32_H
#define _ASM_X86_ATOMIC_32_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))
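
/*
 * Example (editor's sketch, not part of the original header): declaring,
 * initializing, and accessing a counter.  The variable name is
 * hypothetical.  Note that atomic_read() and atomic_set() are plain
 * load/store; only the read-modify-write operations below need LOCK.
 */
static inline int example_read_set(void)
{
	atomic_t counter = ATOMIC_INIT(42);	/* initializer form */

	atomic_set(&counter, 7);		/* plain store */
	return atomic_read(&counter);		/* plain load; returns 7 */
}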

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}
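
/*
 * Example (editor's sketch, not part of the original header): the
 * fire-and-forget arithmetic ops used for simple resource counting.
 * The counter name is hypothetical.
 */
static inline void example_account_pages(atomic_t *nr_pages, int got, int put)
{
	atomic_add(got, nr_pages);	/* LOCK addl: safe against other CPUs */
	atomic_sub(put, nr_pages);	/* LOCK subl: likewise */
}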

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}
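
/*
 * Example (editor's sketch, not part of the original header): dropping
 * several references in one step.  The sete reads the flags of the same
 * locked subl, so the zero test cannot race with another CPU.
 */
static inline int example_put_many(atomic_t *refs, int n)
{
	return atomic_sub_and_test(n, refs);	/* true if refs hit zero */
}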

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}
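
/*
 * Example (editor's sketch, not part of the original header): the
 * canonical reference-count release pattern.  Exactly one CPU observes
 * the transition to zero, so exactly one caller runs the release
 * function.  The object type and callback are hypothetical.
 */
struct example_obj {
	atomic_t refcount;
};

static inline void example_obj_put(struct example_obj *obj,
				   void (*release)(struct example_obj *))
{
	if (atomic_dec_and_test(&obj->refcount))
		release(obj);	/* last reference gone: free the object */
}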

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}
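
/*
 * Example (editor's sketch, not part of the original header): using the
 * sign of the result to detect that a budget went into deficit.  The
 * counter name is hypothetical.
 */
static inline int example_charge(atomic_t *budget, int cost)
{
	/* true if the budget is now negative, i.e. overcommitted */
	return atomic_add_negative(-cost, budget);
}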

/**
 * atomic_add_return - add integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	asm volatile(LOCK_PREFIX "xaddl %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}
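
/*
 * Example (editor's sketch, not part of the original header): xaddl
 * writes the old value of the memory operand back into the register,
 * so after the asm above `i` holds the pre-add value and `i + __i` is
 * the new value.  A typical use is handing out unique, monotonically
 * increasing ids.  The counter name is hypothetical.
 */
static inline int example_next_id(atomic_t *next_id)
{
	return atomic_add_return(1, next_id);	/* new value, distinct per caller */
}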

/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
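
/*
 * Example (editor's sketch, not part of the original header): a classic
 * compare-and-swap loop that atomically doubles a counter.
 * atomic_cmpxchg() returns the value that was actually in memory; the
 * swap happened only if that equals the old value we passed in.
 */
static inline int example_double(atomic_t *v)
{
	int old, new;

	do {
		old = atomic_read(v);
		new = old * 2;
	} while (atomic_cmpxchg(v, old, new) != old);

	return new;
}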

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))
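
/*
 * Example (editor's sketch, not part of the original header): taking a
 * new reference during a lookup, but only if the object is not already
 * on its way to being freed (refcount zero).  Pairs with the
 * example_obj_put() sketch above.
 */
static inline int example_obj_tryget(struct example_obj *obj)
{
	return atomic_inc_not_zero(&obj->refcount);	/* 0 means "too late" */
}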

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "andl %0,%1"			\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "orl %0,%1"			\
		     : : "r" (mask), "m" (*(addr)) : "memory")
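
/*
 * Example (editor's sketch, not part of the original header): atomically
 * setting and clearing bits in a plain flag word.  Note these macros
 * take the address of the word itself, not an atomic_t.  The flag
 * values are hypothetical.
 */
#define EXAMPLE_FLAG_BUSY	0x1
#define EXAMPLE_FLAG_DIRTY	0x2

static inline void example_mark_busy(unsigned long *flags)
{
	atomic_set_mask(EXAMPLE_FLAG_BUSY, flags);	/* LOCK orl */
	atomic_clear_mask(EXAMPLE_FLAG_DIRTY, flags);	/* LOCK andl with ~mask */
}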

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
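
/*
 * Example (editor's sketch, not part of the original header): on x86 the
 * LOCK-prefixed ops are full memory barriers, so these macros only need
 * to stop compiler reordering.  Portable code still writes the pairing:
 */
static inline void example_publish_and_dec(atomic_t *pending, int *done)
{
	*done = 1;			/* must be visible before the dec */
	smp_mb__before_atomic_dec();	/* just a compiler barrier on x86 */
	atomic_dec(pending);
}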

#include <asm-generic/atomic.h>
#endif /* _ASM_X86_ATOMIC_32_H */