atomic_64.h

#ifndef _ASM_X86_ATOMIC_64_H
#define _ASM_X86_ATOMIC_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "=m" (v->counter)
		     : "ir" (i), "m" (v->counter));
}
/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "=m" (v->counter)
		     : "ir" (i), "m" (v->counter));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "ir" (i), "m" (v->counter) : "memory");
	return c;
}
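/*
 * Illustrative usage (not part of the original header): the classic
 * "put" side of a reference count, built on atomic_sub_and_test().
 * The struct and release callback are hypothetical; only the atomic
 * primitive comes from this file.
 */
struct example_obj {
	atomic_t refcount;
};

static inline void example_obj_put(struct example_obj *o,
				   void (*release)(struct example_obj *))
{
	/* Drop one reference; only the CPU that hits zero frees the object. */
	if (atomic_sub_and_test(1, &o->refcount))
		release(o);
}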
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_add_return - add and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int __i = i;

	asm volatile(LOCK_PREFIX "xaddl %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)	(atomic_add_return(1, v))
#define atomic_dec_return(v)	(atomic_sub_return(1, v))
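/*
 * Illustrative usage (not part of the original header): xaddl leaves
 * the *old* value of the counter in the register operand, so
 * atomic_add_return() adds @i back in to yield the new value. That
 * makes it a natural fit for handing out unique, increasing ids:
 */
static inline int example_alloc_id(atomic_t *next_id)
{
	/* No two CPUs can observe the same return value for one counter. */
	return atomic_inc_return(next_id);
}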
/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v)	((v)->counter)

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	(((v)->counter) = (i))

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic64_add(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}
/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline long atomic64_add_return(long i, atomic64_t *v)
{
	long __i = i;

	asm volatile(LOCK_PREFIX "xaddq %0, %1;"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v)	(atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)	(atomic64_sub_return(1, (v)))

#define atomic64_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
#define atomic64_xchg(v, new)		(xchg(&((v)->counter), new))

#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new)		(xchg(&((v)->counter), (new)))
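/*
 * Illustrative usage (not part of the original header): atomic_xchg()
 * reads the old value and installs a new one in a single step, which
 * suits consume-and-reset flags. atomic_cmpxchg() is the building block
 * for compare-and-swap loops such as atomic_add_unless() below.
 */
static inline int example_test_and_clear(atomic_t *flag)
{
	/* Returns the value the flag held and leaves it at 0. */
	return atomic_xchg(flag, 0);
}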
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
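/*
 * Illustrative usage (not part of the original header): taking a
 * reference on an object that might be concurrently torn down. The
 * increment succeeds only if the count had not already reached zero.
 */
static inline int example_try_get(atomic_t *refcount)
{
	/* Non-zero: we now hold a reference. Zero: the object is dying. */
	return atomic_inc_not_zero(refcount);
}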
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer to type short int
 *
 * Atomically adds 1 to @v
 * Returns the new value of @v
 */
static inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}
/**
 * atomic_or_long - OR of two long integers
 * @v1: pointer to type unsigned long
 * @v2: value to OR into *@v1
 *
 * Atomically ORs @v2 into *@v1. Does not return a value.
 */
static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
{
	asm(LOCK_PREFIX "orq %1, %0" : "+m" (*v1) : "r" (v2));
}
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)					\
	asm volatile(LOCK_PREFIX "andl %0,%1"				\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)					\
	asm volatile(LOCK_PREFIX "orl %0,%1"				\
		     : : "r" ((unsigned)(mask)), "m" (*(addr))		\
		     : "memory")
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
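/*
 * Illustrative usage (not part of the original header): these macros
 * pair with atomic_inc()/atomic_dec() when callers need ordering against
 * surrounding memory accesses. On x86 a compiler barrier suffices,
 * because LOCK-prefixed instructions already act as full memory barriers.
 */
static inline void example_publish_then_dec(atomic_t *pending, int *done)
{
	*done = 1;			/* must not be reordered below the dec */
	smp_mb__before_atomic_dec();
	atomic_dec(pending);
}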
#include <asm-generic/atomic.h>
#endif /* _ASM_X86_ATOMIC_64_H */