
#ifndef _ASM_X86_ATOMIC_64_H
#define _ASM_X86_ATOMIC_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static inline int atomic_read(const atomic_t *v)
{
	return v->counter;
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "=m" (v->counter)
		     : "ir" (i), "m" (v->counter));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "=m" (v->counter)
		     : "ir" (i), "m" (v->counter));
}
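/*
 * Usage sketch (hypothetical names, not part of this header): a
 * statically initialized, lock-free event counter.
 *
 *	static atomic_t pkt_count = ATOMIC_INIT(0);
 *
 *	atomic_add(3, &pkt_count);		pkt_count is now 3
 *	atomic_sub(1, &pkt_count);		pkt_count is now 2
 *	pr_info("%d packets\n", atomic_read(&pkt_count));
 */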
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}
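/*
 * Usage sketch: the canonical reference-count drop.  The structure and
 * field names below are hypothetical, for illustration only:
 *
 *	static void example_put(struct example *e)
 *	{
 *		if (atomic_dec_and_test(&e->refcnt))
 *			kfree(e);
 *	}
 *
 * Exactly one caller observes the transition to zero, so exactly one
 * caller frees the object.
 */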
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_add_return - add and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int __i = i;

	/* xadd leaves the old value in %0, so old + @i is the new value */
	asm volatile(LOCK_PREFIX "xaddl %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))
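/*
 * Unlike atomic_add(), atomic_add_return() hands back the result, so a
 * caller can grab a unique token without a lock.  A minimal sketch,
 * with hypothetical names:
 *
 *	static atomic_t next_id = ATOMIC_INIT(0);
 *
 *	int id = atomic_inc_return(&next_id);	unique per caller: 1, 2, ...
 */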
/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long atomic64_read(const atomic64_t *v)
{
	return v->counter;
}

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
	v->counter = i;
}

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic64_add(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline long atomic64_add_return(long i, atomic64_t *v)
{
	long __i = i;

	/* xadd leaves the old value in %0, so old + @i is the new value */
	asm volatile(LOCK_PREFIX "xaddq %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))

static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline long atomic64_xchg(atomic64_t *v, long new)
{
	return xchg(&v->counter, new);
}

static inline long atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline long atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}
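/*
 * Usage sketches for the exchange primitives (hypothetical names and
 * constants): atomic_xchg() takes the current value while leaving a new
 * one behind, and atomic_cmpxchg() installs a value only if the old one
 * still holds:
 *
 *	pending = atomic_xchg(&work->pending, 0);	take all, leave 0
 *
 *	if (atomic_cmpxchg(&state, IDLE, BUSY) == IDLE)
 *		;	we won the IDLE -> BUSY transition
 */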
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
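/*
 * Note that atomic_add_unless() is built from the cmpxchg() primitive
 * above: reread the counter until the compare-and-swap succeeds, bailing
 * out if the forbidden value @u is seen first.  atomic_inc_not_zero() is
 * the classic use, e.g. taking a reference only while an object is still
 * live (hypothetical names):
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;	object is already being torn down
 */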
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer to type short int
 *
 * Atomically adds 1 to @v
 * Returns the new value of @v
 */
static inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}

/**
 * atomic_or_long - OR a value into a long integer
 * @v1: pointer to type unsigned long
 * @v2: value to OR into *@v1
 *
 * Atomically ORs @v2 into *@v1.
 */
static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
{
	asm(LOCK_PREFIX "orq %1, %0" : "+m" (*v1) : "r" (v2));
}

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)					\
	asm volatile(LOCK_PREFIX "andl %0,%1"				\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)					\
	asm volatile(LOCK_PREFIX "orl %0,%1"				\
		     : : "r" ((unsigned)(mask)), "m" (*(addr))		\
		     : "memory")
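/*
 * Usage sketch for the mask helpers (hypothetical names): unlike the
 * functions above, they operate on a plain word, not an atomic_t:
 *
 *	static unsigned int irq_flags;
 *
 *	atomic_set_mask(0x4, &irq_flags);	set bit 2
 *	atomic_clear_mask(0x4, &irq_flags);	clear bit 2
 */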
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic-long.h>
#endif /* _ASM_X86_ATOMIC_64_H */