/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */

/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/spinlock.h>

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/cpu-features.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i) { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v) ((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i) ((v)->counter = (i))

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
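/*
 * Implementation note: each of the arithmetic helpers below picks one of
 * three strategies at run time.  CPUs needing the R10000 ll/sc errata
 * workaround use a branch-likely (beqzl) retry loop, other ll/sc-capable
 * CPUs use a plain beqz retry loop, and CPUs without ll/sc fall back to
 * disabling local interrupts around an ordinary read-modify-write, which
 * is sufficient only on uniprocessor systems.
 */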
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %0, %1 # atomic_add \n"
                " addu %0, %2 \n"
                " sc %0, %1 \n"
                " beqzl %0, 1b \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %0, %1 # atomic_add \n"
                " addu %0, %2 \n"
                " sc %0, %1 \n"
                " beqz %0, 1b \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter += i;
                local_irq_restore(flags);
        }
}
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %0, %1 # atomic_sub \n"
                " subu %0, %2 \n"
                " sc %0, %1 \n"
                " beqzl %0, 1b \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %0, %1 # atomic_sub \n"
                " subu %0, %2 \n"
                " sc %0, %1 \n"
                " beqz %0, 1b \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter -= i;
                local_irq_restore(flags);
        }
}
/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %1, %2 # atomic_add_return \n"
                " addu %0, %1, %3 \n"
                " sc %0, %2 \n"
                " beqzl %0, 1b \n"
                " addu %0, %1, %3 \n"
                " sync \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %1, %2 # atomic_add_return \n"
                " addu %0, %1, %3 \n"
                " sc %0, %2 \n"
                " beqz %0, 1b \n"
                " addu %0, %1, %3 \n"
                " sync \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %1, %2 # atomic_sub_return \n"
                " subu %0, %1, %3 \n"
                " sc %0, %2 \n"
                " beqzl %0, 1b \n"
                " subu %0, %1, %3 \n"
                " sync \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %1, %2 # atomic_sub_return \n"
                " subu %0, %1, %3 \n"
                " sc %0, %2 \n"
                " beqz %0, 1b \n"
                " subu %0, %1, %3 \n"
                " sync \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %1, %2 # atomic_sub_if_positive\n"
                " subu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " sc %0, %2 \n"
                " .set noreorder \n"
                " beqzl %0, 1b \n"
                " subu %0, %1, %3 \n"
                " .set reorder \n"
                " sync \n"
                "1: \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: ll %1, %2 # atomic_sub_if_positive\n"
                " subu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " sc %0, %2 \n"
                " .set noreorder \n"
                " beqz %0, 1b \n"
                " subu %0, %1, %3 \n"
                " .set reorder \n"
                " sync \n"
                "1: \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}
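/*
 * atomic_cmpxchg - compare and exchange an atomic variable
 * @v: pointer of type atomic_t
 * @o: expected old value
 * @n: new value
 *
 * atomic_xchg - unconditionally exchange an atomic variable
 * @v: pointer of type atomic_t
 * @new: new value
 *
 * Both return the value @v held before the operation; atomic_cmpxchg only
 * stores @n if that previous value was @o.
 */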
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u) \
({ \
        int c, old; \
        c = atomic_read(v); \
        while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
                c = old; \
        c != (u); \
})
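/*
 * Example (illustrative only): atomic_add_unless() is the building block
 * for "take a reference only if the object is still live" patterns.  For a
 * hypothetical object "obj" with an atomic_t reference count:
 *
 *	if (atomic_add_unless(&obj->refcount, 1, 0))
 *		... the caller now owns a reference ...
 */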
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))
/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))
/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i) { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v) ((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i) ((v)->counter = (i))
/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %0, %1 # atomic64_add \n"
                " daddu %0, %2 \n"
                " scd %0, %1 \n"
                " beqzl %0, 1b \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %0, %1 # atomic64_add \n"
                " daddu %0, %2 \n"
                " scd %0, %1 \n"
                " beqz %0, 1b \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter += i;
                local_irq_restore(flags);
        }
}
/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %0, %1 # atomic64_sub \n"
                " dsubu %0, %2 \n"
                " scd %0, %1 \n"
                " beqzl %0, 1b \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %0, %1 # atomic64_sub \n"
                " dsubu %0, %2 \n"
                " scd %0, %1 \n"
                " beqz %0, 1b \n"
                " .set mips0 \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter -= i;
                local_irq_restore(flags);
        }
}
/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %1, %2 # atomic64_add_return \n"
                " daddu %0, %1, %3 \n"
                " scd %0, %2 \n"
                " beqzl %0, 1b \n"
                " daddu %0, %1, %3 \n"
                " sync \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %1, %2 # atomic64_add_return \n"
                " daddu %0, %1, %3 \n"
                " scd %0, %2 \n"
                " beqz %0, 1b \n"
                " daddu %0, %1, %3 \n"
                " sync \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}
static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %1, %2 # atomic64_sub_return \n"
                " dsubu %0, %1, %3 \n"
                " scd %0, %2 \n"
                " beqzl %0, 1b \n"
                " dsubu %0, %1, %3 \n"
                " sync \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %1, %2 # atomic64_sub_return \n"
                " dsubu %0, %1, %3 \n"
                " scd %0, %2 \n"
                " beqz %0, 1b \n"
                " dsubu %0, %1, %3 \n"
                " sync \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %1, %2 # atomic64_sub_if_positive\n"
                " dsubu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " scd %0, %2 \n"
                " .set noreorder \n"
                " beqzl %0, 1b \n"
                " dsubu %0, %1, %3 \n"
                " .set reorder \n"
                " sync \n"
                "1: \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                " .set mips3 \n"
                "1: lld %1, %2 # atomic64_sub_if_positive\n"
                " dsubu %0, %1, %3 \n"
                " bltz %0, 1f \n"
                " scd %0, %2 \n"
                " .set noreorder \n"
                " beqz %0, 1b \n"
                " dsubu %0, %1, %3 \n"
                " .set reorder \n"
                " sync \n"
                "1: \n"
                " .set mips0 \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))
/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))
/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */
/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec() smp_mb()
#define smp_mb__after_atomic_dec() smp_mb()
#define smp_mb__before_atomic_inc() smp_mb()
#define smp_mb__after_atomic_inc() smp_mb()
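/*
 * Example (illustrative only): because the non-*_return operations are not
 * serializing, a caller that needs its earlier stores to be visible before
 * a reference drop uses the barrier hooks above, e.g. for a hypothetical
 * object "obj":
 *
 *	obj->done = 1;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->count);
 */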
#include <asm-generic/atomic.h>

#endif /* _ASM_ATOMIC_H */