atomic.h

#ifndef __ARCH_X86_64_ATOMIC__
#define __ARCH_X86_64_ATOMIC__

#include <asm/alternative.h>

/* atomic_t should be a 32-bit signed type */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#ifdef CONFIG_SMP
#define LOCK "lock ; "
#else
#define LOCK ""
#endif

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		(((v)->counter) = (i))
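
/*
 * Usage sketch (illustrative only, not part of this header): a counter
 * declared with ATOMIC_INIT() and accessed with atomic_read()/atomic_set().
 * The identifiers below are hypothetical.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	void reset_events(void)
 *	{
 *		atomic_set(&nr_events, 0);
 *	}
 *
 *	int current_events(void)
 *	{
 *		return atomic_read(&nr_events);
 *	}
 *
 * Note that atomic_read()/atomic_set() are plain accesses of the volatile
 * counter: the value is read or written in one piece, but no memory
 * barrier is implied.
 */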

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "addl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "subl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}
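
/*
 * Usage sketch (illustrative only): atomic_add()/atomic_sub() return
 * nothing and suit simple statistics counters. The variable and length
 * below are hypothetical.
 *
 *	static atomic_t rx_bytes = ATOMIC_INIT(0);
 *
 *	atomic_add(len, &rx_bytes);
 */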

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "subl %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "decl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "decl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}
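
/*
 * Usage sketch (illustrative only): atomic_dec_and_test() is the usual
 * building block for reference counting, where the final "put" frees the
 * object. The type and function names below are hypothetical.
 *
 *	struct my_obj {
 *		atomic_t refcnt;
 *	};
 *
 *	void my_obj_put(struct my_obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcnt))
 *			kfree(obj);
 *	}
 */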

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "incl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "addl %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_add_return - add and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns the result (@i + the old value of @v).
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i = i;

	__asm__ __volatile__(
		LOCK_PREFIX "xaddl %0, %1;"
		:"=r"(i)
		:"m"(v->counter), "0"(i));
	/* xaddl leaves the old value of v->counter in %0 (i), so the
	   old value plus the saved increment is the new value. */
	return i + __i;
}
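
/*
 * Usage sketch (illustrative only): because atomic_add_return() yields the
 * value after the addition, it is handy for handing out sequence numbers
 * or slot indices. The names below are hypothetical; atomic_inc_return()
 * (defined just below) is the common shorthand for the +1 case.
 *
 *	static atomic_t next_seq = ATOMIC_INIT(0);
 *
 *	int get_seq(void)
 *	{
 *		return atomic_add_return(1, &next_seq);
 *	}
 */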

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)	(atomic_add_return(1, v))
#define atomic_dec_return(v)	(atomic_sub_return(1, v))

/* A 64-bit atomic type */

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v)	((v)->counter)

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)	(((v)->counter) = (i))

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "addq %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "subq %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "subq %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic64_inc(atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incq %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic64_dec(atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "decq %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "decq %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "incq %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "addq %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns the result (@i + the old value of @v).
 */
static __inline__ long atomic64_add_return(long i, atomic64_t *v)
{
	long __i = i;

	__asm__ __volatile__(
		LOCK_PREFIX "xaddq %0, %1;"
		:"=r"(i)
		:"m"(v->counter), "0"(i));
	return i + __i;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v)	(atomic64_add_return(1, v))
#define atomic64_dec_return(v)	(atomic64_sub_return(1, v))

#define atomic_cmpxchg(v, old, new)	((int)cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new)		(xchg(&((v)->counter), new))
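
/*
 * Usage sketch (illustrative only): atomic_cmpxchg() stores @new only if
 * the counter still holds @old, and returns the value it found either way.
 * The usual pattern is a read-modify-cmpxchg retry loop; the function name
 * and saturation limit below are hypothetical.
 *
 *	int saturating_inc(atomic_t *v)
 *	{
 *		int c, old;
 *
 *		c = atomic_read(v);
 *		for (;;) {
 *			if (c == INT_MAX)
 *				return c;
 *			old = atomic_cmpxchg(v, c, c + 1);
 *			if (old == c)
 *				return c + 1;
 *			c = old;
 *		}
 *	}
 *
 * atomic_add_unless() below packages this same loop as a macro.
 */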

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)			\
({							\
	int c, old;					\
	c = atomic_read(v);				\
	for (;;) {					\
		if (unlikely(c == (u)))			\
			break;				\
		old = atomic_cmpxchg((v), c, c + (a));	\
		if (likely(old == c))			\
			break;				\
		c = old;				\
	}						\
	c != (u);					\
})

#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
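
/*
 * Usage sketch (illustrative only): atomic_inc_not_zero() takes a reference
 * only while the count is still non-zero, i.e. while the object has not yet
 * started dying. The names below are hypothetical.
 *
 *	struct my_obj *my_obj_get(struct my_obj *obj)
 *	{
 *		if (!atomic_inc_not_zero(&obj->refcnt))
 *			return NULL;
 *		return obj;
 *	}
 *
 * A NULL return means the refcount had already reached zero, so the caller
 * must not touch the object.
 */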

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
	__asm__ __volatile__(LOCK_PREFIX "andl %0,%1"		\
	: : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)				\
	__asm__ __volatile__(LOCK_PREFIX "orl %0,%1"		\
	: : "r" ((unsigned)(mask)), "m" (*(addr)) : "memory")
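
/*
 * Usage sketch (illustrative only): the mask helpers atomically clear or
 * set bits in a plain 32-bit integer rather than in an atomic_t. The flag
 * values and variable below are hypothetical.
 *
 *	#define FLAG_BUSY	0x01
 *	#define FLAG_DIRTY	0x02
 *
 *	static unsigned int dev_flags;
 *
 *	atomic_set_mask(FLAG_BUSY, &dev_flags);
 *	atomic_clear_mask(FLAG_DIRTY, &dev_flags);
 */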

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
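
/*
 * Usage sketch (illustrative only): on x86-64 a LOCKed read-modify-write is
 * already a full memory barrier, so these helpers only need to stop the
 * compiler from reordering. Portable code still calls them when it relies
 * on ordering around atomic_inc()/atomic_dec(); the field names below are
 * hypothetical.
 *
 *	obj->state = STATE_DONE;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 */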

#include <asm-generic/atomic.h>
#endif /* __ARCH_X86_64_ATOMIC__ */