/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/war.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		{ (i) }
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))
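/*
 * Illustrative sketch (not part of this header): declaring and using an
 * atomic counter with the macros above.  The name "pkt_count" and the
 * surrounding code are hypothetical.
 *
 *	static atomic_t pkt_count = ATOMIC_INIT(0);
 *
 *	atomic_set(&pkt_count, 0);		reset the counter
 *	if (atomic_read(&pkt_count) > 1000)	plain load, no ordering implied
 *		printk("threshold reached\n");
 */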
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	ll	%0, %1		# atomic_add	\n"
		"	addu	%0, %2				\n"
		"	sc	%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	ll	%0, %1		# atomic_add	\n"
		"	addu	%0, %2				\n"
		"	sc	%0, %1				\n"
		"	beqz	%0, 2f				\n"
		"	.subsection 2				\n"
		"2:	b	1b				\n"
		"	.previous				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}
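/*
 * Illustrative sketch (not from this file): atomic_add() suits counters
 * updated concurrently from several CPUs when no return value or ordering
 * is needed.  The names below are hypothetical:
 *
 *	static atomic_t rx_bytes = ATOMIC_INIT(0);
 *
 *	atomic_add(len, &rx_bytes);	one LL/SC retry loop, no barrier implied
 */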
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	ll	%0, %1		# atomic_sub	\n"
		"	subu	%0, %2				\n"
		"	sc	%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	ll	%0, %1		# atomic_sub	\n"
		"	subu	%0, %2				\n"
		"	sc	%0, %1				\n"
		"	beqz	%0, 2f				\n"
		"	.subsection 2				\n"
		"2:	b	1b				\n"
		"	.previous				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}
/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	int result;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 2f					\n"
		"	addu	%0, %1, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	int result;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 2f					\n"
		"	subu	%0, %1, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
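/*
 * Illustrative sketch (not from this file): the *_return variants are
 * serializing, so they can hand out unique, ordered values.  A hypothetical
 * sequence-number allocator:
 *
 *	static atomic_t next_seq = ATOMIC_INIT(0);
 *
 *	int seq = atomic_add_return(1, &next_seq);
 *
 * Each caller gets a distinct value of seq, with full barriers around the
 * update (atomic_inc_return() below is the same thing for an increment of 1).
 */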
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 2f					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
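/*
 * Illustrative sketch (not from this file): atomic_sub_if_positive() only
 * performs the subtraction when the result would stay non-negative, so it
 * can implement a simple credit counter.  Names are hypothetical:
 *
 *	static atomic_t credits = ATOMIC_INIT(16);
 *
 *	if (atomic_sub_if_positive(1, &credits) < 0)
 *		return -EBUSY;		no credit was consumed
 *	do the work, then atomic_add(1, &credits) to return the credit
 */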
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
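/*
 * Illustrative sketch (not from this file): atomic_cmpxchg() is the building
 * block for open-coded read-modify-write loops, for example a hypothetical
 * "record the maximum" update:
 *
 *	static atomic_t max_seen = ATOMIC_INIT(0);
 *
 *	int old = atomic_read(&max_seen);
 *	while (old < new_val) {
 *		int prev = atomic_cmpxchg(&max_seen, old, new_val);
 *		if (prev == old)
 *			break;		we installed new_val
 *		old = prev;		someone else won, retry with their value
 *	}
 */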
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
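/*
 * Illustrative sketch (not from this file): the classic use of this pattern
 * is atomic_inc_not_zero() (defined just below), which takes a reference
 * only while an object is still live.  The structure and names are
 * hypothetical:
 *
 *	struct obj {
 *		atomic_t refcnt;
 *	};
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;	object is already on its way to being freed
 */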
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
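/*
 * Illustrative sketch (not from this file): atomic_dec_and_test() is the
 * usual way to drop a reference count and free the object on the final put.
 * The structure and the kfree() call are hypothetical context:
 *
 *	void obj_put(struct obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcnt))
 *			kfree(obj);
 *	}
 */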
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	((v)->counter = (i))
/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	lld	%0, %1		# atomic64_add	\n"
		"	daddu	%0, %2				\n"
		"	scd	%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	lld	%0, %1		# atomic64_add	\n"
		"	daddu	%0, %2				\n"
		"	scd	%0, %1				\n"
		"	beqz	%0, 2f				\n"
		"	.subsection 2				\n"
		"2:	b	1b				\n"
		"	.previous				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}
/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	lld	%0, %1		# atomic64_sub	\n"
		"	dsubu	%0, %2				\n"
		"	scd	%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	lld	%0, %1		# atomic64_sub	\n"
		"	dsubu	%0, %2				\n"
		"	scd	%0, %1				\n"
		"	beqz	%0, 2f				\n"
		"	.subsection 2				\n"
		"2:	b	1b				\n"
		"	.previous				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}
/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	long result;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 2f					\n"
		"	daddu	%0, %1, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	long result;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 2f					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 2f					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */
/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()	smp_llsc_mb()
#define smp_mb__after_atomic_dec()	smp_llsc_mb()
#define smp_mb__before_atomic_inc()	smp_llsc_mb()
#define smp_mb__after_atomic_inc()	smp_llsc_mb()
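/*
 * Illustrative sketch (not from this file): because atomic_dec() itself is
 * not serializing, code that needs ordering around it uses these helpers.
 * The "done" flag and "pending" counter below are hypothetical:
 *
 *	done = 1;
 *	smp_mb__before_atomic_dec();	order the store before the decrement
 *	atomic_dec(&pending);
 */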
#include <asm-generic/atomic-long.h>

#endif /* _ASM_ATOMIC_H */