/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>

#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/war.h>
#include <asm/system.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)		{ (i) }
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))
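
/*
 * Illustrative sketch (not part of this header): typical declaration,
 * initialisation and non-atomic-free access to a counter.  The names
 * foo_count and foo_reset are hypothetical.
 *
 *	static atomic_t foo_count = ATOMIC_INIT(0);
 *
 *	static void foo_reset(void)
 *	{
 *		atomic_set(&foo_count, 0);
 *		BUG_ON(atomic_read(&foo_count) != 0);
 *	}
 */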
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	ll	%0, %1		# atomic_add	\n"
		"	addu	%0, %2				\n"
		"	sc	%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	ll	%0, %1		# atomic_add	\n"
		"	addu	%0, %2				\n"
		"	sc	%0, %1				\n"
		"	beqz	%0, 2f				\n"
		"	.subsection 2				\n"
		"2:	b	1b				\n"
		"	.previous				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	ll	%0, %1		# atomic_sub	\n"
		"	subu	%0, %2				\n"
		"	sc	%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	ll	%0, %1		# atomic_sub	\n"
		"	subu	%0, %2				\n"
		"	sc	%0, %1				\n"
		"	beqz	%0, 2f				\n"
		"	.subsection 2				\n"
		"2:	b	1b				\n"
		"	.previous				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}
/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 2f					\n"
		"	addu	%0, %1, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 2f					\n"
		"	subu	%0, %1, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
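
/*
 * Illustrative sketch (not part of this header): the *_return variants are
 * useful when the caller needs the post-operation value, e.g. to detect a
 * counter crossing a limit.  foo_active, foo_start and FOO_MAX are
 * hypothetical names.
 *
 *	static atomic_t foo_active = ATOMIC_INIT(0);
 *
 *	static int foo_start(void)
 *	{
 *		if (atomic_add_return(1, &foo_active) > FOO_MAX) {
 *			atomic_sub(1, &foo_active);
 *			return -EBUSY;
 *		}
 *		return 0;
 *	}
 */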
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	unsigned long result;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 2f					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
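
/*
 * Illustrative sketch (not part of this header): atomic_sub_if_positive()
 * can implement a "take one token if any are left" operation; a negative
 * return value means the counter was left untouched.  foo_tokens and
 * foo_try_take are hypothetical names.
 *
 *	static atomic_t foo_tokens = ATOMIC_INIT(4);
 *
 *	static int foo_try_take(void)
 *	{
 *		return atomic_sub_if_positive(1, &foo_tokens) >= 0;
 *	}
 */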
#define atomic_cmpxchg(v, o, n)	(cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new)	(xchg(&((v)->counter), (new)))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
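
/*
 * Illustrative sketch (not part of this header): the classic use of
 * atomic_add_unless() is taking a reference only while the object is still
 * live, i.e. while its count has not already dropped to zero (see the
 * atomic_inc_not_zero() wrapper below).  struct foo, foo->refs and
 * foo_tryget are hypothetical names.
 *
 *	static int foo_tryget(struct foo *foo)
 *	{
 *		return atomic_add_unless(&foo->refs, 1, 0);
 *	}
 */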
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
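
/*
 * Illustrative sketch (not part of this header): the usual reference-count
 * idiom built from the helpers above; whoever observes the count reaching
 * zero frees the object.  struct foo, foo_get and foo_put are hypothetical.
 *
 *	struct foo {
 *		atomic_t refs;
 *	};
 *
 *	static void foo_get(struct foo *foo)
 *	{
 *		atomic_inc(&foo->refs);
 *	}
 *
 *	static void foo_put(struct foo *foo)
 *	{
 *		if (atomic_dec_and_test(&foo->refs))
 *			kfree(foo);
 *	}
 */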
#ifdef CONFIG_64BIT

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	((v)->counter = (i))
/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	lld	%0, %1		# atomic64_add	\n"
		"	daddu	%0, %2				\n"
		"	scd	%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	lld	%0, %1		# atomic64_add	\n"
		"	daddu	%0, %2				\n"
		"	scd	%0, %1				\n"
		"	beqz	%0, 2f				\n"
		"	.subsection 2				\n"
		"2:	b	1b				\n"
		"	.previous				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}
/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	lld	%0, %1		# atomic64_sub	\n"
		"	dsubu	%0, %2				\n"
		"	scd	%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	lld	%0, %1		# atomic64_sub	\n"
		"	dsubu	%0, %2				\n"
		"	scd	%0, %1				\n"
		"	beqz	%0, 2f				\n"
		"	.subsection 2				\n"
		"2:	b	1b				\n"
		"	.previous				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}
/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	unsigned long result;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 2f					\n"
		"	daddu	%0, %1, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	unsigned long result;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 2f					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	unsigned long result;

	smp_llsc_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 2f					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()	smp_llsc_mb()
#define smp_mb__after_atomic_dec()	smp_llsc_mb()
#define smp_mb__before_atomic_inc()	smp_llsc_mb()
#define smp_mb__after_atomic_inc()	smp_llsc_mb()
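
/*
 * Illustrative sketch (not part of this header): because plain atomic_dec()
 * is not serializing, callers that rely on ordering around it pair it with
 * the barriers above.  foo_stop, foo->shutdown and foo->pending are
 * hypothetical names.
 *
 *	static void foo_stop(struct foo *foo)
 *	{
 *		foo->shutdown = 1;
 *		smp_mb__before_atomic_dec();
 *		atomic_dec(&foo->pending);
 *	}
 */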
#include <asm-generic/atomic.h>

#endif /* _ASM_ATOMIC_H */