atomic.h

#ifndef __ARCH_X86_64_ATOMIC__
#define __ARCH_X86_64_ATOMIC__

#include <linux/config.h>

/* atomic_t should be 32 bit signed type */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#ifdef CONFIG_SMP
#define LOCK "lock ; "
#else
#define LOCK ""
#endif
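
/*
 * Illustration (added commentary, not in the original header): with
 * CONFIG_SMP the LOCK string pastes into the asm templates below, so
 * LOCK "addl %1,%0" becomes "lock ; addl %1,%0" and the
 * read-modify-write is atomic across CPUs.  On UP kernels LOCK is
 * empty; a single CPU cannot be interrupted in the middle of one
 * instruction, so the plain instruction is already atomic there.
 */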

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		(((v)->counter) = (i))

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK "addl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}
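
/*
 * Note (added commentary): the "=m" (v->counter) output together with
 * the dummy "m" (v->counter) input tells gcc that the location is both
 * read and written, so it cannot cache the counter in a register
 * across the asm; "ir" allows @i to be an immediate or a register.
 * The same constraint pattern is used throughout this file.
 */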

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK "subl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "subl %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK "incl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK "decl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "decl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}
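
/*
 * Example (illustrative, not part of the original header): the classic
 * reference-counting pattern built on atomic_dec_and_test().  The
 * "foo" structure, foo_put() and foo_release() are hypothetical.
 *
 *	struct foo {
 *		atomic_t refcount;
 *	};
 *
 *	void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcount))
 *			foo_release(f);		// last reference gone
 *	}
 *
 * Because the decrement and the zero test happen in one locked
 * instruction sequence, exactly one caller observes the count hitting
 * zero, so foo_release() runs exactly once.
 */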

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "incl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "addl %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/* A 64-bit atomic type */

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v)	((v)->counter)

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)	(((v)->counter) = (i))

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK "addq %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK "subq %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "subq %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic64_inc(atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK "incq %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic64_dec(atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK "decq %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "decq %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "incq %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @v: pointer to type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ long atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "addq %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_add_return - add and return
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i = i;

	__asm__ __volatile__(
		LOCK "xaddl %0, %1;"
		:"=r" (i)
		:"m" (v->counter), "0" (i));
	return i + __i;
}
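
/*
 * Note (added commentary): xaddl exchanges the register with the
 * memory operand while adding, so after the asm "i" holds the old
 * counter value and i + __i reconstructs the value after the addition.
 */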

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)	(atomic_add_return(1,v))
#define atomic_dec_return(v)	(atomic_sub_return(1,v))
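
/*
 * Example (illustrative, not part of the original header): handing out
 * unique, monotonically increasing ids with atomic_inc_return().  The
 * "next_id" counter and alloc_id() are hypothetical.
 *
 *	static atomic_t next_id = ATOMIC_INIT(0);
 *
 *	int alloc_id(void)
 *	{
 *		return atomic_inc_return(&next_id);	// 1, 2, 3, ...
 *	}
 *
 * Two racing callers are guaranteed distinct return values, because
 * the locked xadd makes each increment observe the previous one.
 */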

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr) \
	__asm__ __volatile__(LOCK "andl %0,%1" \
		: : "r" (~(mask)), "m" (*addr) : "memory")

#define atomic_set_mask(mask, addr) \
	__asm__ __volatile__(LOCK "orl %0,%1" \
		: : "r" ((unsigned)mask), "m" (*(addr)) : "memory")
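
/*
 * Example (illustrative, not part of the original header): setting and
 * clearing flag bits in a shared word.  FLAG_BUSY and "flags" are
 * hypothetical.
 *
 *	#define FLAG_BUSY 0x1
 *	static unsigned int flags;
 *
 *	atomic_set_mask(FLAG_BUSY, &flags);	// or the bit in
 *	atomic_clear_mask(FLAG_BUSY, &flags);	// and it back out
 */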

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
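
/*
 * Note (added commentary): on x86-64 a lock'ed read-modify-write is
 * itself a full memory barrier, so these reduce to barrier(), a pure
 * compiler barrier.  Portable code still writes, e.g.,
 *
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&v);
 *
 * so that architectures whose atomics are not serializing can expand
 * these macros to real fences.
 */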

#endif /* __ARCH_X86_64_ATOMIC__ */