atomic.h

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */

/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/config.h>
#include <linux/spinlock.h>

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <asm/cpu-features.h>
#include <asm/interrupt.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }
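
/*
 * A minimal usage sketch (illustrative only, not part of this header):
 * declaring and initializing an atomic counter.  The names nr_widgets
 * and widget_created are hypothetical.
 *
 *	static atomic_t nr_widgets = ATOMIC_INIT(0);
 *
 *	void widget_created(void)
 *	{
 *		atomic_inc(&nr_widgets);	// safe against concurrent callers
 *	}
 */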

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}
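
/*
 * How the ll/sc loop above works, as a hedged C-level sketch: ll loads
 * the counter and begins monitoring its cache line, sc stores only if
 * nothing has written that line in between, leaving 1 (success) or 0
 * (failure) in its source register, and a failed sc branches back to
 * retry.  load_linked() and store_conditional() below are hypothetical
 * helpers standing in for the instructions:
 *
 *	do {
 *		old = load_linked(&v->counter);
 *	} while (!store_conditional(&v->counter, old + i));
 */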

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value.
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}
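
/*
 * A hedged usage sketch: the *_return variants are for callers that
 * need the post-operation value, e.g. handing out sequence numbers.
 * next_seq and get_seq are hypothetical names.
 *
 *	static atomic_t next_seq = ATOMIC_INIT(0);
 *
 *	int get_seq(void)
 *	{
 *		return atomic_add_return(1, &next_seq);	// distinct per caller
 *	}
 */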

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;		/* must be signed for the result >= 0 test below */

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}
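
/*
 * A hedged usage sketch: atomic_sub_if_positive() can back a simple
 * counting-semaphore "trydown".  slots and try_claim_slot are
 * hypothetical names.
 *
 *	static atomic_t slots = ATOMIC_INIT(4);
 *
 *	int try_claim_slot(void)
 *	{
 *		// new value >= 0 means the decrement was applied
 *		return atomic_dec_if_positive(&slots) >= 0;
 *	}
 */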

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)					\
({									\
	int c, old;							\
	c = atomic_read(v);						\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;						\
	c != (u);							\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
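
/*
 * A hedged usage sketch: atomic_inc_not_zero() is the classic way to
 * take a reference only while an object is still live.  struct obj and
 * obj_get are hypothetical.
 *
 *	struct obj { atomic_t refcnt; };
 *
 *	int obj_get(struct obj *o)
 *	{
 *		return atomic_inc_not_zero(&o->refcnt);	// 0: already dying
 *	}
 */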

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
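
/*
 * A hedged usage sketch of the matching "put" side for obj_get() above;
 * obj_destroy is a hypothetical destructor.  The last caller to drop
 * its reference sees the counter hit zero and frees the object.
 *
 *	void obj_put(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->refcnt))
 *			obj_destroy(o);
 *	}
 */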

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value.
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;		/* must be signed for the result >= 0 test below */

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)
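
/*
 * A hedged usage sketch for the 64-bit variants: counters that can
 * overflow 32 bits, such as byte totals.  bytes_rx and account_rx are
 * hypothetical names.
 *
 *	static atomic64_t bytes_rx = ATOMIC64_INIT(0);
 *
 *	void account_rx(long len)
 *	{
 *		atomic64_add(len, &bytes_rx);
 *	}
 */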

#endif /* CONFIG_64BIT */

/*
 * The atomic*_return operations are serializing; the plain (non-*_return)
 * versions are not.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
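
/*
 * A hedged usage sketch: these hooks order a plain atomic op against
 * surrounding memory accesses (on MIPS they are full barriers).  The
 * names obj, ready and nr_ready are hypothetical.
 *
 *	obj->ready = 1;
 *	smp_mb__before_atomic_inc();	// publish the store before the inc
 *	atomic_inc(&nr_ready);
 */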

#include <asm-generic/atomic.h>

#endif /* _ASM_ATOMIC_H */