atomic.h
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>

#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))
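/*
 * Illustrative sketch (not part of the original header): declaring,
 * initialising, reading and setting an atomic_t.  The `nr_widgets`
 * counter and widget_reset() are hypothetical.  Note atomic_read() and
 * atomic_set() compile to plain loads/stores; only the arithmetic
 * helpers below use LL/SC or irq-disabled sections.
 */
#if 0
static atomic_t nr_widgets = ATOMIC_INIT(0);

static void widget_reset(void)
{
	atomic_set(&nr_widgets, 0);		/* plain store */
	printk("widgets: %d\n", atomic_read(&nr_widgets));
}
#endif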
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: ll %0, %1 # atomic_add \n"
		" addu %0, %2 \n"
		" sc %0, %1 \n"
		" beqzl %0, 1b \n"
		" .set mips0 \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: ll %0, %1 # atomic_add \n"
		" addu %0, %2 \n"
		" sc %0, %1 \n"
		" beqz %0, 1b \n"
		" .set mips0 \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}
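/*
 * C-level sketch of the LL/SC retry loop above (illustrative only; the
 * load_linked()/store_conditional() helpers are hypothetical stand-ins
 * for the ll/sc instruction pair).  sc writes back only if no other CPU
 * has touched the cache line since the ll, so the read-modify-write is
 * retried until it completes atomically.
 */
#if 0
static void atomic_add_sketch(int i, atomic_t *v)
{
	int old, new;

	do {
		old = load_linked(&v->counter);		/* hypothetical ll */
		new = old + i;
	} while (!store_conditional(&v->counter, new));	/* hypothetical sc */
}
#endif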
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: ll %0, %1 # atomic_sub \n"
		" subu %0, %2 \n"
		" sc %0, %1 \n"
		" beqzl %0, 1b \n"
		" .set mips0 \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: ll %0, %1 # atomic_sub \n"
		" subu %0, %2 \n"
		" sc %0, %1 \n"
		" beqz %0, 1b \n"
		" .set mips0 \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: ll %1, %2 # atomic_add_return \n"
		" addu %0, %1, %3 \n"
		" sc %0, %2 \n"
		" beqzl %0, 1b \n"
		" addu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: ll %1, %2 # atomic_add_return \n"
		" addu %0, %1, %3 \n"
		" sc %0, %2 \n"
		" beqz %0, 1b \n"
		" addu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}
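/*
 * Illustrative only: unlike atomic_add(), the *_return variants act as
 * full memory barriers and hand back the new value, so they can drive
 * e.g. fair ticket numbering.  `next_ticket` and take_ticket() are
 * hypothetical.
 */
#if 0
static atomic_t next_ticket = ATOMIC_INIT(0);

static int take_ticket(void)
{
	return atomic_add_return(1, &next_ticket);	/* my ticket number */
}
#endif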
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: ll %1, %2 # atomic_sub_return \n"
		" subu %0, %1, %3 \n"
		" sc %0, %2 \n"
		" beqzl %0, 1b \n"
		" subu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: ll %1, %2 # atomic_sub_return \n"
		" subu %0, %1, %3 \n"
		" sc %0, %2 \n"
		" beqz %0, 1b \n"
		" subu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: ll %1, %2 # atomic_sub_if_positive\n"
		" subu %0, %1, %3 \n"
		" bltz %0, 1f \n"
		" sc %0, %2 \n"
		" .set noreorder \n"
		" beqzl %0, 1b \n"
		" subu %0, %1, %3 \n"
		" .set reorder \n"
		"1: \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: ll %1, %2 # atomic_sub_if_positive\n"
		" subu %0, %1, %3 \n"
		" bltz %0, 1f \n"
		" sc %0, %2 \n"
		" .set noreorder \n"
		" beqz %0, 1b \n"
		" subu %0, %1, %3 \n"
		" .set reorder \n"
		"1: \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}
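/*
 * Illustrative only: atomic_sub_if_positive() is the primitive behind a
 * counting-semaphore style "take one unit if available".  The return
 * value is negative exactly when the subtraction was refused.  The
 * `units` counter and try_take_unit() are hypothetical.
 */
#if 0
static atomic_t units = ATOMIC_INIT(4);

static int try_take_unit(void)
{
	return atomic_sub_if_positive(1, &units) >= 0;	/* 1 on success */
}
#endif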
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
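/*
 * Illustrative only: the usual compare-and-swap retry loop built on
 * atomic_cmpxchg().  atomic_set_max() is a hypothetical helper that
 * raises the counter to at least `new` without ever lowering it.
 */
#if 0
static void atomic_set_max(atomic_t *v, int new)
{
	int old = atomic_read(v);

	while (old < new) {
		int seen = atomic_cmpxchg(v, old, new);
		if (seen == old)
			break;		/* our swap won */
		old = seen;		/* lost the race; re-evaluate */
	}
}
#endif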
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)					\
({									\
	int c, old;							\
	c = atomic_read(v);						\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;						\
	c != (u);							\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
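/*
 * Illustrative only: the classic refcounting pair built from the helpers
 * above.  `struct obj` and its users are hypothetical.  Using
 * atomic_inc_not_zero() on the lookup side makes "get" safe against a
 * concurrent final "put".
 */
#if 0
struct obj {
	atomic_t refs;
};

static struct obj *obj_get(struct obj *o)
{
	return atomic_inc_not_zero(&o->refs) ? o : NULL;
}

static void obj_put(struct obj *o)
{
	if (atomic_dec_and_test(&o->refs))
		kfree(o);		/* last reference dropped */
}
#endif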
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))
/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))
/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }
/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	((v)->counter = (i))
/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.  (The 64-bit daddu is used here; a 32-bit
 * addu would truncate and sign-extend the upper half of the counter.)
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: lld %0, %1 # atomic64_add \n"
		" daddu %0, %2 \n"
		" scd %0, %1 \n"
		" beqzl %0, 1b \n"
		" .set mips0 \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: lld %0, %1 # atomic64_add \n"
		" daddu %0, %2 \n"
		" scd %0, %1 \n"
		" beqz %0, 1b \n"
		" .set mips0 \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}
/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: lld %0, %1 # atomic64_sub \n"
		" dsubu %0, %2 \n"
		" scd %0, %1 \n"
		" beqzl %0, 1b \n"
		" .set mips0 \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: lld %0, %1 # atomic64_sub \n"
		" dsubu %0, %2 \n"
		" scd %0, %1 \n"
		" beqz %0, 1b \n"
		" .set mips0 \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}
/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: lld %1, %2 # atomic64_add_return \n"
		" daddu %0, %1, %3 \n"
		" scd %0, %2 \n"
		" beqzl %0, 1b \n"
		" daddu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: lld %1, %2 # atomic64_add_return \n"
		" daddu %0, %1, %3 \n"
		" scd %0, %2 \n"
		" beqz %0, 1b \n"
		" daddu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}
static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: lld %1, %2 # atomic64_sub_return \n"
		" dsubu %0, %1, %3 \n"
		" scd %0, %2 \n"
		" beqzl %0, 1b \n"
		" dsubu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: lld %1, %2 # atomic64_sub_return \n"
		" dsubu %0, %1, %3 \n"
		" scd %0, %2 \n"
		" beqz %0, 1b \n"
		" dsubu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: lld %1, %2 # atomic64_sub_if_positive\n"
		" dsubu %0, %1, %3 \n"
		" bltz %0, 1f \n"
		" scd %0, %2 \n"
		" .set noreorder \n"
		" beqzl %0, 1b \n"
		" dsubu %0, %1, %3 \n"
		" .set reorder \n"
		"1: \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1: lld %1, %2 # atomic64_sub_if_positive\n"
		" dsubu %0, %1, %3 \n"
		" bltz %0, 1f \n"
		" scd %0, %2 \n"
		" .set noreorder \n"
		" beqz %0, 1b \n"
		" dsubu %0, %1, %3 \n"
		" .set reorder \n"
		"1: \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))
/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))
/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */
/*
 * atomic*_return operations are serializing; the plain (non-*_return)
 * versions are not.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
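/*
 * Illustrative only: these barrier hooks order surrounding memory
 * accesses against a non-serializing atomic op.  A hypothetical example
 * (struct request and its fields are invented): publish a completion
 * flag before dropping a pending count with plain atomic_dec().
 */
#if 0
static void retire_request(struct request *rq)
{
	rq->done = 1;				/* publish completion... */
	smp_mb__before_atomic_dec();		/* ...before the count drops */
	atomic_dec(&rq->queue_pending);
}
#endif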
#include <asm-generic/atomic.h>

#endif /* _ASM_ATOMIC_H */