/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          (*(volatile int *)&(v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)        ((v)->counter = (i))
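
/*
 * Usage sketch (illustrative only, not part of this header): a
 * statically initialized counter, read and reset.  Note that
 * atomic_read() and atomic_set() compile to plain loads and stores;
 * they are atomic but imply no memory barrier.
 *
 *      static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *      int snapshot = atomic_read(&nr_users);
 *      atomic_set(&nr_users, 0);
 */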

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     ll      %0, %1          # atomic_add    \n"
                "       addu    %0, %2                          \n"
                "       sc      %0, %1                          \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else if (kernel_uses_llsc) {
                int temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                   \n"
                        "       ll      %0, %1  # atomic_add    \n"
                        "       addu    %0, %2                  \n"
                        "       sc      %0, %1                  \n"
                        "       .set    mips0                   \n"
                        : "=&r" (temp), "+m" (v->counter)
                        : "Ir" (i));
                } while (unlikely(!temp));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}
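
/*
 * A C-level sketch of what the ll/sc loops above achieve, expressed
 * with the atomic_cmpxchg() helper defined further down purely for
 * illustration (the real code must use ll/sc, on which cmpxchg()
 * itself is built, so the hardware detects intervening writes):
 *
 *      static inline void atomic_add_sketch(int i, atomic_t *v)
 *      {
 *              int old;
 *              do {
 *                      old = atomic_read(v);           // "ll"
 *              } while (atomic_cmpxchg(v, old, old + i) != old); // "sc" failed, retry
 *      }
 */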

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     ll      %0, %1          # atomic_sub    \n"
                "       subu    %0, %2                          \n"
                "       sc      %0, %1                          \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else if (kernel_uses_llsc) {
                int temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                   \n"
                        "       ll      %0, %1  # atomic_sub    \n"
                        "       subu    %0, %2                  \n"
                        "       sc      %0, %1                  \n"
                        "       .set    mips0                   \n"
                        : "=&r" (temp), "+m" (v->counter)
                        : "Ir" (i));
                } while (unlikely(!temp));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value.
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else if (kernel_uses_llsc) {
                int temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       ll      %1, %2  # atomic_add_return     \n"
                        "       addu    %0, %1, %3                      \n"
                        "       sc      %0, %2                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (result), "=&r" (temp), "+m" (v->counter)
                        : "Ir" (i));
                } while (unlikely(!result));

                result = temp + i;
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}
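
/*
 * Unlike atomic_add(), atomic_add_return() is bracketed by
 * smp_mb__before_llsc()/smp_llsc_mb() and therefore acts as a full
 * memory barrier, so it can be used to publish data.  Illustrative
 * sketch (the structure and helper names are hypothetical):
 *
 *      buf->data = payload;                    // plain store
 *      if (atomic_add_return(1, &buf->ready) == 1)
 *              wake_consumer(buf);             // hypothetical helper
 */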

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");

                result = temp - i;
        } else if (kernel_uses_llsc) {
                int temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       ll      %1, %2  # atomic_sub_return     \n"
                        "       subu    %0, %1, %3                      \n"
                        "       sc      %0, %2                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (result), "=&r" (temp), "+m" (v->counter)
                        : "Ir" (i));
                } while (unlikely(!result));

                result = temp - i;
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                /* The branch-likely delay slot above is nullified when
                 * the loop falls through, so recompute the result on
                 * exit; otherwise result would hold the sc success flag. */
                "       subu    %0, %1, %3                              \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "+m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}
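
/*
 * Illustrative use (hypothetical helper): a counting-semaphore style
 * trydown.  The return value is the old count minus @i, so it is
 * negative exactly when the subtraction was refused.
 *
 *      static inline int sem_trydown(atomic_t *count)
 *      {
 *              return atomic_sub_if_positive(1, count) >= 0;
 *      }
 */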

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
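
/*
 * atomic_cmpxchg() supports lock-free read-modify-write loops beyond
 * the operations provided here.  A sketch (hypothetical helper) that
 * atomically raises a stored maximum:
 *
 *      static inline void atomic_max(atomic_t *v, int new)
 *      {
 *              int old = atomic_read(v);
 *              while (old < new) {
 *                      int seen = atomic_cmpxchg(v, old, new);
 *                      if (seen == old)
 *                              break;          // exchange succeeded
 *                      old = seen;             // lost a race, retry
 *              }
 *      }
 */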

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c;
}
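
/*
 * Generic code typically builds higher-level helpers on top of
 * __atomic_add_unless(); for example, a get-reference that fails once
 * the count has dropped to zero (sketch, hypothetical name):
 *
 *      static inline int my_inc_not_zero(atomic_t *v)
 *      {
 *              return __atomic_add_unless(v, 1, 0) != 0;
 *      }
 */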

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
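
/*
 * Classic reference-count release pattern (sketch; the structure and
 * kfree() call are stand-ins):
 *
 *      if (atomic_dec_and_test(&obj->refcnt))
 *              kfree(obj);             // last reference dropped
 *
 * atomic_dec_and_test() expands to atomic_sub_return(), so it implies
 * the full barrier that this pattern relies on.
 */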

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        (*(volatile long *)&(v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)      ((v)->counter = (i))
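
/*
 * On 64-bit kernels the atomic64_* family mirrors the 32-bit API with
 * a long-sized counter, e.g. (illustrative):
 *
 *      static atomic64_t total_bytes = ATOMIC64_INIT(0);
 *
 *      atomic64_add(4096, &total_bytes);
 */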

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     lld     %0, %1          # atomic64_add  \n"
                "       daddu   %0, %2                          \n"
                "       scd     %0, %1                          \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else if (kernel_uses_llsc) {
                long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                   \n"
                        "       lld     %0, %1  # atomic64_add  \n"
                        "       daddu   %0, %2                  \n"
                        "       scd     %0, %1                  \n"
                        "       .set    mips0                   \n"
                        : "=&r" (temp), "+m" (v->counter)
                        : "Ir" (i));
                } while (unlikely(!temp));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     lld     %0, %1          # atomic64_sub  \n"
                "       dsubu   %0, %2                          \n"
                "       scd     %0, %1                          \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else if (kernel_uses_llsc) {
                long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                   \n"
                        "       lld     %0, %1  # atomic64_sub  \n"
                        "       dsubu   %0, %2                  \n"
                        "       scd     %0, %1                  \n"
                        "       .set    mips0                   \n"
                        : "=&r" (temp), "+m" (v->counter)
                        : "Ir" (i));
                } while (unlikely(!temp));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value.
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else if (kernel_uses_llsc) {
                long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       lld     %1, %2  # atomic64_add_return   \n"
                        "       daddu   %0, %1, %3                      \n"
                        "       scd     %0, %2                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                        : "Ir" (i), "m" (v->counter)
                        : "memory");
                } while (unlikely(!result));

                result = temp + i;
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       lld     %1, %2  # atomic64_sub_return   \n"
                        "       dsubu   %0, %1, %3                      \n"
                        "       scd     %0, %2                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                        : "Ir" (i), "m" (v->counter)
                        : "memory");
                } while (unlikely(!result));

                result = temp - i;
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                /* The branch-likely delay slot above is nullified when
                 * the loop falls through, so recompute the result on
                 * exit; otherwise result would hold the scd success flag. */
                "       dsubu   %0, %1, %3                              \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "+m" (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic64_cmpxchg(v, o, n) \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true if the add was performed (i.e. @v was not @u),
 * false otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
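
/*
 * Example (sketch; the object is a stand-in): take a reference only
 * while the 64-bit count is still non-zero.
 *
 *      if (!atomic64_inc_not_zero(&obj->refcnt))
 *              return -ENOENT;         // object already on its way out
 */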

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing, but the non-*_return
 * versions are not.
 */
#define smp_mb__before_atomic_dec()     smp_mb__before_llsc()
#define smp_mb__after_atomic_dec()      smp_llsc_mb()
#define smp_mb__before_atomic_inc()     smp_mb__before_llsc()
#define smp_mb__after_atomic_inc()      smp_llsc_mb()
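
/*
 * These allow callers to order the non-serializing atomic_inc() and
 * atomic_dec() only where it matters, e.g. (sketch, hypothetical
 * field names):
 *
 *      obj->state = TEARING_DOWN;      // must be visible before the dec
 *      smp_mb__before_atomic_dec();
 *      atomic_dec(&obj->refcnt);
 */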

#endif /* _ASM_ATOMIC_H */