atomic_64.h

#ifndef __ARCH_X86_64_ATOMIC__
#define __ARCH_X86_64_ATOMIC__

#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* atomic_t should be 32 bit signed type */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#ifdef CONFIG_SMP
#define LOCK "lock ; "
#else
#define LOCK ""
#endif

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { int counter; } atomic_t;

#define ATOMIC_INIT(i) { (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v) ((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i) (((v)->counter) = (i))
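/*
 * Illustrative sketch (not part of this header): declaring, initializing and
 * reading an atomic_t.  The identifiers are hypothetical.
 */
#if 0
static atomic_t example_counter = ATOMIC_INIT(0);

static void example_reset(void)
{
	atomic_set(&example_counter, 0);	/* plain store, no LOCK prefix needed */
}

static int example_is_zero(void)
{
	return atomic_read(&example_counter) == 0;	/* plain load */
}
#endif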
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "addl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}
/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "subl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "subl %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "decl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "decl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}
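/*
 * Illustrative sketch (not part of this header): the usual reference-count
 * "put" built on atomic_dec_and_test().  struct example_obj and
 * example_release() are hypothetical.
 */
#if 0
struct example_obj {
	atomic_t refcount;
	/* ... payload ... */
};

static void example_release(struct example_obj *obj);	/* hypothetical destructor */

static void example_put(struct example_obj *obj)
{
	/* Free the object only when the last reference goes away. */
	if (atomic_dec_and_test(&obj->refcount))
		example_release(obj);
}
#endif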
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "incl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "addl %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}
/**
 * atomic_add_return - add and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns the new value (@i + @v).
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i = i;

	__asm__ __volatile__(
		LOCK_PREFIX "xaddl %0, %1"
		:"+r" (i), "+m" (v->counter)
		: : "memory");
	/* xaddl leaves the old counter value in %0 (i.e. in i) */
	return i + __i;
}

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v) (atomic_add_return(1,v))
#define atomic_dec_return(v) (atomic_sub_return(1,v))
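/*
 * Illustrative sketch (not part of this header): atomic_add_return() hands
 * back the post-addition value, which makes it convenient for allocating
 * monotonically increasing ids.  The identifiers are hypothetical.
 */
#if 0
static atomic_t example_next_id = ATOMIC_INIT(0);

static int example_alloc_id(void)
{
	/* Equivalent to atomic_inc_return(&example_next_id). */
	return atomic_add_return(1, &example_next_id);
}
#endif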
/* A 64-bit atomic type */
typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i) { (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v) ((v)->counter)

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i) (((v)->counter) = (i))
/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "addq %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "subq %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}
/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "subq %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}
/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic64_inc(atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incq %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic64_dec(atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "decq %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}
/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "decq %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "incq %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}
/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "addq %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}
/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns the new value (@i + @v).
 */
static __inline__ long atomic64_add_return(long i, atomic64_t *v)
{
	long __i = i;

	__asm__ __volatile__(
		LOCK_PREFIX "xaddq %0, %1;"
		:"+r" (i), "+m" (v->counter)
		: : "memory");
	/* xaddq leaves the old counter value in %0 (i.e. in i) */
	return i + __i;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v) (atomic64_add_return(1,v))
#define atomic64_dec_return(v) (atomic64_sub_return(1,v))
#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
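/*
 * Illustrative sketch (not part of this header): the standard compare-and-swap
 * retry loop built on atomic_cmpxchg(), here a saturating increment capped at
 * a hypothetical EXAMPLE_MAX.  atomic_add_unless() below follows the same
 * pattern.
 */
#if 0
#define EXAMPLE_MAX 1000

static int example_inc_saturating(atomic_t *v)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (c >= EXAMPLE_MAX)
			return c;		/* already at the cap */
		old = atomic_cmpxchg(v, c, c + 1);
		if (old == c)
			return c + 1;		/* our update won */
		c = old;			/* raced; retry with the fresh value */
	}
}
#endif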
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
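/*
 * Illustrative sketch (not part of this header): atomic_inc_not_zero() is the
 * usual way to take a reference on an object whose count may concurrently hit
 * zero.  struct example_obj is the hypothetical type from the sketch above.
 */
#if 0
static struct example_obj *example_get(struct example_obj *obj)
{
	/* Take a reference only while the object is still live. */
	if (obj && !atomic_inc_not_zero(&obj->refcount))
		return NULL;
	return obj;
}
#endif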
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr) \
	__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
	: : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr) \
	__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
	: : "r" ((unsigned)(mask)), "m" (*(addr)) : "memory")
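/*
 * Illustrative sketch (not part of this header): atomic_set_mask() and
 * atomic_clear_mask() operate on a plain word of flag bits rather than an
 * atomic_t.  The flag name and variable are hypothetical.
 */
#if 0
#define EXAMPLE_FLAG_BUSY 0x01

static unsigned int example_flags;

static void example_mark_busy(void)
{
	atomic_set_mask(EXAMPLE_FLAG_BUSY, &example_flags);	/* lock orl */
}

static void example_clear_busy(void)
{
	atomic_clear_mask(EXAMPLE_FLAG_BUSY, &example_flags);	/* lock andl */
}
#endif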
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec() barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc() barrier()
#define smp_mb__after_atomic_inc() barrier()

#include <asm-generic/atomic.h>
#endif /* __ARCH_X86_64_ATOMIC__ */