atomic.h

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>

#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)    ((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)   ((v)->counter = (i))
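/*
 * Usage sketch (hypothetical caller code, for illustration only): declaring,
 * resetting and reading a counter with the primitives above.
 *
 *	static atomic_t nr_widgets = ATOMIC_INIT(0);	compile-time init
 *
 *	void widgets_reset(void)
 *	{
 *		atomic_set(&nr_widgets, 0);		runtime (re)initialisation
 *	}
 *
 *	int widgets_snapshot(void)
 *	{
 *		return atomic_read(&nr_widgets);	plain, unordered read
 *	}
 */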
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %0, %1 # atomic_add \n"
                " addu %0, %2 \n"
                " sc %0, %1 \n"
                " beqzl %0, 1b \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %0, %1 # atomic_add \n"
                " addu %0, %2 \n"
                " sc %0, %1 \n"
                " beqz %0, 2f \n"
                " .subsection 2 \n"
                "2: b 1b \n"
                " .previous \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %0, %1 # atomic_sub \n"
                " subu %0, %2 \n"
                " sc %0, %1 \n"
                " beqzl %0, 1b \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %0, %1 # atomic_sub \n"
                " subu %0, %2 \n"
                " sc %0, %1 \n"
                " beqz %0, 2f \n"
                " .subsection 2 \n"
                "2: b 1b \n"
                " .previous \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}
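/*
 * Conceptually, the ll/sc sequences above implement the following retry
 * loop (illustrative C only; store_conditional() is a hypothetical stand-in
 * for the sc instruction, which fails if another CPU wrote the location
 * since the matching ll):
 *
 *	do {
 *		old = v->counter;		ll:  load-linked
 *		new = old + i;			addu
 *	} while (!store_conditional(&v->counter, new));	sc + beqz: retry
 *
 * The uniprocessor fallback gets the same atomicity by disabling interrupts
 * around a plain read-modify-write.
 */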
/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        unsigned long result;

        smp_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %1, %2 # atomic_add_return \n"
                " addu %0, %1, %3 \n"
                " sc %0, %2 \n"
                " beqzl %0, 1b \n"
                " addu %0, %1, %3 \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %1, %2 # atomic_add_return \n"
                " addu %0, %1, %3 \n"
                " sc %0, %2 \n"
                " beqz %0, 2f \n"
                " addu %0, %1, %3 \n"
                " .subsection 2 \n"
                "2: b 1b \n"
                " .previous \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_mb();

        return result;
}
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        unsigned long result;

        smp_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %1, %2 # atomic_sub_return \n"
                " subu %0, %1, %3 \n"
                " sc %0, %2 \n"
                " beqzl %0, 1b \n"
                " subu %0, %1, %3 \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %1, %2 # atomic_sub_return \n"
                " subu %0, %1, %3 \n"
                " sc %0, %2 \n"
                " beqz %0, 2f \n"
                " subu %0, %1, %3 \n"
                " .subsection 2 \n"
                "2: b 1b \n"
                " .previous \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_mb();

        return result;
}
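/*
 * Usage sketch (hypothetical caller code): the *_return variants are the
 * ones to use when the updated value itself matters, e.g. tracking the
 * number of requests currently in flight:
 *
 *	static atomic_t in_flight = ATOMIC_INIT(0);
 *
 *	int request_start(void)
 *	{
 *		return atomic_add_return(1, &in_flight);   fully ordered
 *	}
 *
 *	void request_done(void)
 *	{
 *		atomic_sub(1, &in_flight);		no ordering implied
 *	}
 */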
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        unsigned long result;

        smp_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %1, %2 # atomic_sub_if_positive\n"
                " subu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " sc %0, %2 \n"
                " .set noreorder \n"
                " beqzl %0, 1b \n"
                " subu %0, %1, %3 \n"
                " .set reorder \n"
                "1: \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %1, %2 # atomic_sub_if_positive\n"
                " subu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " sc %0, %2 \n"
                " .set noreorder \n"
                " beqz %0, 2f \n"
                " subu %0, %1, %3 \n"
                " .set reorder \n"
                "1: \n"
                " .subsection 2 \n"
                "2: b 1b \n"
                " .previous \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_mb();

        return result;
}
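/*
 * Usage sketch (hypothetical caller code): a counting "credit" pool where a
 * consumer may only proceed if enough credits remain.  A negative return
 * value means the subtraction was refused and the counter is unchanged;
 * atomic_dec_if_positive() below is the same pattern with @i == 1:
 *
 *	static atomic_t credits = ATOMIC_INIT(16);
 *
 *	int try_take_credits(int n)
 *	{
 *		return atomic_sub_if_positive(n, &credits) >= 0;
 *	}
 */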
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
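/*
 * Usage sketch (hypothetical caller code): atomic_inc_not_zero() is the
 * classic "take a reference only if the object is still live" helper.  The
 * cmpxchg loop above retries until it either observes 0 (the last reference
 * is already gone) or successfully installs c + 1:
 *
 *	struct obj {
 *		atomic_t refcnt;
 *	};
 *
 *	struct obj *obj_get(struct obj *o)
 *	{
 *		if (!atomic_inc_not_zero(&o->refcnt))
 *			return NULL;	object already being torn down
 *		return o;
 *	}
 */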
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))
/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
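/*
 * Usage sketch (hypothetical caller code): atomic_dec_and_test() is the
 * release half of the reference-counting pattern; exactly one caller sees
 * the counter hit zero and may free the object.  Because it is built on
 * atomic_sub_return(), the decrement is fully ordered, so earlier stores to
 * *o are visible before the free:
 *
 *	void obj_put(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->refcnt))
 *			kfree(o);
 *	}
 */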
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
#ifdef CONFIG_64BIT

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v,i)	((v)->counter = (i))
/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %0, %1 # atomic64_add \n"
                " daddu %0, %2 \n"
                " scd %0, %1 \n"
                " beqzl %0, 1b \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %0, %1 # atomic64_add \n"
                " daddu %0, %2 \n"
                " scd %0, %1 \n"
                " beqz %0, 2f \n"
                " .subsection 2 \n"
                "2: b 1b \n"
                " .previous \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}
/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %0, %1 # atomic64_sub \n"
                " dsubu %0, %2 \n"
                " scd %0, %1 \n"
                " beqzl %0, 1b \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %0, %1 # atomic64_sub \n"
                " dsubu %0, %2 \n"
                " scd %0, %1 \n"
                " beqz %0, 2f \n"
                " .subsection 2 \n"
                "2: b 1b \n"
                " .previous \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}
/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        unsigned long result;

        smp_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %1, %2 # atomic64_add_return \n"
                " daddu %0, %1, %3 \n"
                " scd %0, %2 \n"
                " beqzl %0, 1b \n"
                " daddu %0, %1, %3 \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %1, %2 # atomic64_add_return \n"
                " daddu %0, %1, %3 \n"
                " scd %0, %2 \n"
                " beqz %0, 2f \n"
                " daddu %0, %1, %3 \n"
                " .subsection 2 \n"
                "2: b 1b \n"
                " .previous \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_mb();

        return result;
}
static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        unsigned long result;

        smp_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %1, %2 # atomic64_sub_return \n"
                " dsubu %0, %1, %3 \n"
                " scd %0, %2 \n"
                " beqzl %0, 1b \n"
                " dsubu %0, %1, %3 \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %1, %2 # atomic64_sub_return \n"
                " dsubu %0, %1, %3 \n"
                " scd %0, %2 \n"
                " beqz %0, 2f \n"
                " dsubu %0, %1, %3 \n"
                " .subsection 2 \n"
                "2: b 1b \n"
                " .previous \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_mb();

        return result;
}
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        unsigned long result;

        smp_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %1, %2 # atomic64_sub_if_positive\n"
                " dsubu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " scd %0, %2 \n"
                " .set noreorder \n"
                " beqzl %0, 1b \n"
                " dsubu %0, %1, %3 \n"
                " .set reorder \n"
                "1: \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %1, %2 # atomic64_sub_if_positive\n"
                " dsubu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " scd %0, %2 \n"
                " .set noreorder \n"
                " beqz %0, 2f \n"
                " dsubu %0, %1, %3 \n"
                " .set reorder \n"
                "1: \n"
                " .subsection 2 \n"
                "2: b 1b \n"
                " .previous \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_mb();

        return result;
}
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)
#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
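/*
 * Usage sketch (hypothetical caller code): the plain atomic_dec() above is
 * not a barrier on MIPS, so a caller that needs its earlier stores to be
 * visible before the decrement uses the helper macro:
 *
 *	obj->state = DONE;
 *	smp_mb__before_atomic_dec();	order the store before the dec
 *	atomic_dec(&obj->pending);
 *
 * Here it expands to a full smp_mb(); architectures whose atomic ops
 * already serialize may define these helpers as no-ops.
 */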
#include <asm-generic/atomic.h>

#endif /* _ASM_ATOMIC_H */