atomic_64.h

#ifndef __ARCH_X86_64_ATOMIC__
#define __ARCH_X86_64_ATOMIC__

#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* atomic_t should be a 32-bit signed type */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#ifdef CONFIG_SMP
#define LOCK "lock ; "
#else
#define LOCK ""
#endif

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct {
	int counter;
} atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))
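/*
 * Usage sketch (editor's illustration, not part of the original header):
 * a counter is declared with ATOMIC_INIT and then read or overwritten in
 * place.  The identifiers below are hypothetical.
 *
 *	static atomic_t nr_requests = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_requests, 10);
 *	if (atomic_read(&nr_requests) > 0)
 *		handle_requests();
 *
 * Note that atomic_read()/atomic_set() compile to plain loads and stores;
 * they are "atomic" only in the sense that an aligned 32-bit access
 * cannot be torn on x86-64.
 */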
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "=m" (v->counter)
		     : "ir" (i), "m" (v->counter));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "=m" (v->counter)
		     : "ir" (i), "m" (v->counter));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}
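/*
 * Usage sketch (editor's illustration, not part of the original header):
 * the classic reference-counting idiom.  atomic_dec_and_test() is what
 * makes "last put frees the object" race-free: exactly one CPU observes
 * the counter reaching zero.  The struct and functions below are
 * hypothetical.
 *
 *	struct object {
 *		atomic_t refcount;
 *	};
 *
 *	void object_get(struct object *obj)
 *	{
 *		atomic_inc(&obj->refcount);
 *	}
 *
 *	void object_put(struct object *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcount))
 *			kfree(obj);
 *	}
 */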
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_add_return - add and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int __i = i;

	asm volatile(LOCK_PREFIX "xaddl %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;
}
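/*
 * Why "i + __i" works (editor's note, not in the original): XADD swaps
 * the register operand with the memory operand and then adds, so after
 * the asm, "i" holds the *old* value of the counter while "__i" still
 * holds the addend.  Their sum is the new counter value:
 *
 *	atomic_t v = ATOMIC_INIT(5);
 *	int n = atomic_add_return(3, &v);	// n == 8, v.counter == 8
 */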
static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)	(atomic_add_return(1, v))
#define atomic_dec_return(v)	(atomic_sub_return(1, v))

/* A 64-bit atomic type */

typedef struct {
	long counter;
} atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v)	((v)->counter)

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	(((v)->counter) = (i))

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic64_add(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline long atomic64_add_return(long i, atomic64_t *v)
{
	long __i = i;

	asm volatile(LOCK_PREFIX "xaddq %0, %1;"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v)	(atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)	(atomic64_sub_return(1, (v)))

#define atomic64_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
#define atomic64_xchg(v, new)		(xchg(&((v)->counter), new))

#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new)		(xchg(&((v)->counter), (new)))
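/*
 * Semantics sketch (editor's note, not in the original): atomic_cmpxchg()
 * returns the value that was in @v before the operation; the store
 * happens only if that value equals @old.  A caller therefore detects
 * success by comparing the return value with @old:
 *
 *	atomic_t v = ATOMIC_INIT(3);
 *
 *	if (atomic_cmpxchg(&v, 3, 7) == 3) {
 *		// the swap happened; v now holds 7
 *	}
 */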
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
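/*
 * Usage sketch (editor's illustration, not part of the original header):
 * atomic_inc_not_zero() is the standard way to take a reference to an
 * object that might be concurrently freed -- if its refcount has already
 * dropped to zero, the increment is refused and the lookup must fail.
 * The identifiers below are hypothetical.
 *
 *	struct object *object_lookup(struct table *t, int key)
 *	{
 *		struct object *obj = table_find(t, key);
 *
 *		if (obj && !atomic_inc_not_zero(&obj->refcount))
 *			obj = NULL;	// lost the race with the last put
 *		return obj;
 *	}
 */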
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)					\
	asm volatile(LOCK_PREFIX "andl %0,%1"				\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)					\
	asm volatile(LOCK_PREFIX "orl %0,%1"				\
		     : : "r" ((unsigned)(mask)), "m" (*(addr))		\
		     : "memory")
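/*
 * Usage sketch (editor's illustration, not part of the original header):
 * the mask helpers take the address of a plain 32-bit word rather than an
 * atomic_t, which makes them convenient for flag words.  All names below
 * are hypothetical.
 *
 *	#define FLAG_BUSY	0x01
 *	#define FLAG_DIRTY	0x02
 *
 *	static unsigned int dev_flags;
 *
 *	atomic_set_mask(FLAG_BUSY, &dev_flags);		// set bit 0
 *	atomic_clear_mask(FLAG_DIRTY, &dev_flags);	// clear bit 1
 */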
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif