- /*
- * Atomic operations that C can't guarantee us. Useful for
- * resource counting etc.
- *
- * But use these as seldom as possible since they are much slower
- * than regular operations.
- *
- * This file is subject to the terms and conditions of the GNU General Public
- * License. See the file "COPYING" in the main directory of this archive
- * for more details.
- *
- * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
- */
- #ifndef _ASM_ATOMIC_H
- #define _ASM_ATOMIC_H
- #include <linux/irqflags.h>
- #include <linux/types.h>
- #include <asm/barrier.h>
- #include <asm/cpu-features.h>
- #include <asm/war.h>
- #include <asm/system.h>
- #define ATOMIC_INIT(i) { (i) }
- /*
- * atomic_read - read atomic variable
- * @v: pointer of type atomic_t
- *
- * Atomically reads the value of @v.
- */
- #define atomic_read(v) ((v)->counter)
- /*
- * atomic_set - set atomic variable
- * @v: pointer of type atomic_t
- * @i: required value
- *
- * Atomically sets the value of @v to @i.
- */
- #define atomic_set(v, i) ((v)->counter = (i))
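- /*
- * Illustrative sketch, not part of the original header: a tiny counter
- * wrapper built only from the helpers above.  The example_* names are
- * hypothetical; a real counter would typically live in a .c file as
- * "static atomic_t nr_pkts = ATOMIC_INIT(0);".
- */
- static __inline__ void example_counter_reset(atomic_t *ctr)
- {
- 	atomic_set(ctr, 0);		/* plain store, no ordering implied */
- }
- static __inline__ int example_counter_peek(atomic_t *ctr)
- {
- 	return atomic_read(ctr);	/* plain load, no ordering implied */
- }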
- /*
- * atomic_add - add integer to atomic variable
- * @i: integer value to add
- * @v: pointer of type atomic_t
- *
- * Atomically adds @i to @v.
- */
- static __inline__ void atomic_add(int i, atomic_t * v)
- {
- if (cpu_has_llsc && R10000_LLSC_WAR) {
- int temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: ll %0, %1 # atomic_add \n"
- " addu %0, %2 \n"
- " sc %0, %1 \n"
- " beqzl %0, 1b \n"
- " .set mips0 \n"
- : "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter));
- } else if (cpu_has_llsc) {
- int temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: ll %0, %1 # atomic_add \n"
- " addu %0, %2 \n"
- " sc %0, %1 \n"
- " beqz %0, 2f \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- " .set mips0 \n"
- : "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter));
- } else {
- unsigned long flags;
- raw_local_irq_save(flags);
- v->counter += i;
- raw_local_irq_restore(flags);
- }
- }
- /*
- * atomic_sub - subtract integer from atomic variable
- * @i: integer value to subtract
- * @v: pointer of type atomic_t
- *
- * Atomically subtracts @i from @v.
- */
- static __inline__ void atomic_sub(int i, atomic_t * v)
- {
- if (cpu_has_llsc && R10000_LLSC_WAR) {
- int temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: ll %0, %1 # atomic_sub \n"
- " subu %0, %2 \n"
- " sc %0, %1 \n"
- " beqzl %0, 1b \n"
- " .set mips0 \n"
- : "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter));
- } else if (cpu_has_llsc) {
- int temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: ll %0, %1 # atomic_sub \n"
- " subu %0, %2 \n"
- " sc %0, %1 \n"
- " beqz %0, 2f \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- " .set mips0 \n"
- : "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter));
- } else {
- unsigned long flags;
- raw_local_irq_save(flags);
- v->counter -= i;
- raw_local_irq_restore(flags);
- }
- }
- /*
- * Same as above, but return the new value.
- */
- static __inline__ int atomic_add_return(int i, atomic_t * v)
- {
- int result;
- smp_llsc_mb();
- if (cpu_has_llsc && R10000_LLSC_WAR) {
- int temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: ll %1, %2 # atomic_add_return \n"
- " addu %0, %1, %3 \n"
- " sc %0, %2 \n"
- " beqzl %0, 1b \n"
- " addu %0, %1, %3 \n"
- " .set mips0 \n"
- : "=&r" (result), "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter)
- : "memory");
- } else if (cpu_has_llsc) {
- int temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: ll %1, %2 # atomic_add_return \n"
- " addu %0, %1, %3 \n"
- " sc %0, %2 \n"
- " beqz %0, 2f \n"
- " addu %0, %1, %3 \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- " .set mips0 \n"
- : "=&r" (result), "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter)
- : "memory");
- } else {
- unsigned long flags;
- raw_local_irq_save(flags);
- result = v->counter;
- result += i;
- v->counter = result;
- raw_local_irq_restore(flags);
- }
- smp_llsc_mb();
- return result;
- }
- static __inline__ int atomic_sub_return(int i, atomic_t * v)
- {
- int result;
- smp_llsc_mb();
- if (cpu_has_llsc && R10000_LLSC_WAR) {
- int temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: ll %1, %2 # atomic_sub_return \n"
- " subu %0, %1, %3 \n"
- " sc %0, %2 \n"
- " beqzl %0, 1b \n"
- " subu %0, %1, %3 \n"
- " .set mips0 \n"
- : "=&r" (result), "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter)
- : "memory");
- } else if (cpu_has_llsc) {
- int temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: ll %1, %2 # atomic_sub_return \n"
- " subu %0, %1, %3 \n"
- " sc %0, %2 \n"
- " beqz %0, 2f \n"
- " subu %0, %1, %3 \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- " .set mips0 \n"
- : "=&r" (result), "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter)
- : "memory");
- } else {
- unsigned long flags;
- raw_local_irq_save(flags);
- result = v->counter;
- result -= i;
- v->counter = result;
- raw_local_irq_restore(flags);
- }
- smp_llsc_mb();
- return result;
- }
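- /*
- * Illustrative sketch, not part of the original header: handing out
- * unique, monotonically increasing ids.  atomic_add_return() folds the
- * increment and the read of the new value into one atomic step, so two
- * CPUs can never receive the same id.  example_next_id is hypothetical.
- */
- static __inline__ int example_next_id(atomic_t *last_id)
- {
- 	return atomic_add_return(1, last_id);
- }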
- /*
- * atomic_sub_if_positive - conditionally subtract integer from atomic variable
- * @i: integer value to subtract
- * @v: pointer of type atomic_t
- *
- * Atomically test @v and subtract @i if @v is greater than or equal to @i.
- * The function returns the old value of @v minus @i.
- */
- static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
- {
- int result;
- smp_llsc_mb();
- if (cpu_has_llsc && R10000_LLSC_WAR) {
- int temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: ll %1, %2 # atomic_sub_if_positive\n"
- " subu %0, %1, %3 \n"
- " bltz %0, 1f \n"
- " sc %0, %2 \n"
- " .set noreorder \n"
- " beqzl %0, 1b \n"
- " subu %0, %1, %3 \n"
- " .set reorder \n"
- "1: \n"
- " .set mips0 \n"
- : "=&r" (result), "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter)
- : "memory");
- } else if (cpu_has_llsc) {
- int temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: ll %1, %2 # atomic_sub_if_positive\n"
- " subu %0, %1, %3 \n"
- " bltz %0, 1f \n"
- " sc %0, %2 \n"
- " .set noreorder \n"
- " beqz %0, 2f \n"
- " subu %0, %1, %3 \n"
- " .set reorder \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- "1: \n"
- " .set mips0 \n"
- : "=&r" (result), "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter)
- : "memory");
- } else {
- unsigned long flags;
- raw_local_irq_save(flags);
- result = v->counter;
- result -= i;
- if (result >= 0)
- v->counter = result;
- raw_local_irq_restore(flags);
- }
- smp_llsc_mb();
- return result;
- }
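- /*
- * Illustrative sketch, not part of the original header: consuming
- * "credits" from a budget only when enough remain.  Because
- * atomic_sub_if_positive() returns the old value minus @i and only
- * stores a non-negative result, a negative return means the budget was
- * left untouched.  example_take_credits is hypothetical.
- */
- static __inline__ int example_take_credits(atomic_t *budget, int n)
- {
- 	return atomic_sub_if_positive(n, budget) >= 0;	/* 1 on success */
- }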
- #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
- #define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
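- /*
- * Illustrative sketch, not part of the original header: a saturating
- * increment built as the usual cmpxchg loop.  The counter is bumped
- * only while it is below @limit, and the loop retries whenever another
- * CPU changed the value between the read and the cmpxchg.
- * example_inc_below is hypothetical.
- */
- static __inline__ int example_inc_below(atomic_t *v, int limit)
- {
- 	int old, seen;
- 	old = atomic_read(v);
- 	for (;;) {
- 		if (old >= limit)
- 			return 0;	/* already at the limit */
- 		seen = atomic_cmpxchg(v, old, old + 1);
- 		if (seen == old)
- 			return 1;	/* our increment was stored */
- 		old = seen;	/* lost the race, retry with current value */
- 	}
- }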
- /**
- * atomic_add_unless - add unless the number is a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
- */
- static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
- {
- int c, old;
- c = atomic_read(v);
- for (;;) {
- if (unlikely(c == (u)))
- break;
- old = atomic_cmpxchg((v), c, c + (a));
- if (likely(old == c))
- break;
- c = old;
- }
- return c != (u);
- }
- #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
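- /*
- * Illustrative sketch, not part of the original header: the classic
- * "take a reference only if the object is still live" idiom.  A zero
- * refcount means the object is already being torn down, so the lookup
- * must fail rather than resurrect it.  struct example_obj is
- * hypothetical.
- */
- struct example_obj {
- 	atomic_t refs;
- };
- static __inline__ int example_obj_tryget(struct example_obj *obj)
- {
- 	return atomic_inc_not_zero(&obj->refs);	/* 0 if already dying */
- }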
- #define atomic_dec_return(v) atomic_sub_return(1, (v))
- #define atomic_inc_return(v) atomic_add_return(1, (v))
- /*
- * atomic_sub_and_test - subtract value from variable and test result
- * @i: integer value to subtract
- * @v: pointer of type atomic_t
- *
- * Atomically subtracts @i from @v and returns
- * true if the result is zero, or false for all
- * other cases.
- */
- #define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
- /*
- * atomic_inc_and_test - increment and test
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
- #define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
- /*
- * atomic_dec_and_test - decrement by 1 and test
- * @v: pointer of type atomic_t
- *
- * Atomically decrements @v by 1 and
- * returns true if the result is 0, or false for all other
- * cases.
- */
- #define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
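- /*
- * Illustrative sketch, not part of the original header: the release
- * side of a reference count.  atomic_dec_and_test() is true only for
- * the CPU that drops the final reference, so exactly one caller takes
- * the "free it" path.  struct example_ref is hypothetical.
- */
- struct example_ref {
- 	atomic_t count;
- };
- static __inline__ int example_ref_put(struct example_ref *ref)
- {
- 	if (atomic_dec_and_test(&ref->count)) {
- 		/* last reference gone: the caller may now free the object */
- 		return 1;
- 	}
- 	return 0;
- }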
- /*
- * atomic_dec_if_positive - decrement by 1 if old value positive
- * @v: pointer of type atomic_t
- */
- #define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)
- /*
- * atomic_inc - increment atomic variable
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1.
- */
- #define atomic_inc(v) atomic_add(1, (v))
- /*
- * atomic_dec - decrement atomic variable
- * @v: pointer of type atomic_t
- *
- * Atomically decrements @v by 1.
- */
- #define atomic_dec(v) atomic_sub(1, (v))
- /*
- * atomic_add_negative - add and test if negative
- * @v: pointer of type atomic_t
- * @i: integer value to add
- *
- * Atomically adds @i to @v and returns true
- * if the result is negative, or false when
- * result is greater than or equal to zero.
- */
- #define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
- #ifdef CONFIG_64BIT
- #define ATOMIC64_INIT(i) { (i) }
- /*
- * atomic64_read - read atomic variable
- * @v: pointer of type atomic64_t
- *
- * Atomically reads the value of @v.
- */
- #define atomic64_read(v) ((v)->counter)
- /*
- * atomic64_set - set atomic variable
- * @v: pointer of type atomic64_t
- * @i: required value
- *
- * Atomically sets the value of @v to @i.
- */
- #define atomic64_set(v, i) ((v)->counter = (i))
- /*
- * atomic64_add - add integer to atomic variable
- * @i: integer value to add
- * @v: pointer of type atomic64_t
- *
- * Atomically adds @i to @v.
- */
- static __inline__ void atomic64_add(long i, atomic64_t * v)
- {
- if (cpu_has_llsc && R10000_LLSC_WAR) {
- long temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: lld %0, %1 # atomic64_add \n"
- " addu %0, %2 \n"
- " scd %0, %1 \n"
- " beqzl %0, 1b \n"
- " .set mips0 \n"
- : "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter));
- } else if (cpu_has_llsc) {
- long temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: lld %0, %1 # atomic64_add \n"
- " addu %0, %2 \n"
- " scd %0, %1 \n"
- " beqz %0, 2f \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- " .set mips0 \n"
- : "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter));
- } else {
- unsigned long flags;
- raw_local_irq_save(flags);
- v->counter += i;
- raw_local_irq_restore(flags);
- }
- }
- /*
- * atomic64_sub - subtract integer from atomic variable
- * @i: integer value to subtract
- * @v: pointer of type atomic64_t
- *
- * Atomically subtracts @i from @v.
- */
- static __inline__ void atomic64_sub(long i, atomic64_t * v)
- {
- if (cpu_has_llsc && R10000_LLSC_WAR) {
- long temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: lld %0, %1 # atomic64_sub \n"
- " subu %0, %2 \n"
- " scd %0, %1 \n"
- " beqzl %0, 1b \n"
- " .set mips0 \n"
- : "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter));
- } else if (cpu_has_llsc) {
- long temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: lld %0, %1 # atomic64_sub \n"
- " subu %0, %2 \n"
- " scd %0, %1 \n"
- " beqz %0, 2f \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- " .set mips0 \n"
- : "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter));
- } else {
- unsigned long flags;
- raw_local_irq_save(flags);
- v->counter -= i;
- raw_local_irq_restore(flags);
- }
- }
- /*
- * Same as above, but return the new value.
- */
- static __inline__ long atomic64_add_return(long i, atomic64_t * v)
- {
- long result;
- smp_llsc_mb();
- if (cpu_has_llsc && R10000_LLSC_WAR) {
- long temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: lld %1, %2 # atomic64_add_return \n"
- " addu %0, %1, %3 \n"
- " scd %0, %2 \n"
- " beqzl %0, 1b \n"
- " addu %0, %1, %3 \n"
- " .set mips0 \n"
- : "=&r" (result), "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter)
- : "memory");
- } else if (cpu_has_llsc) {
- long temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: lld %1, %2 # atomic64_add_return \n"
- " addu %0, %1, %3 \n"
- " scd %0, %2 \n"
- " beqz %0, 2f \n"
- " addu %0, %1, %3 \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- " .set mips0 \n"
- : "=&r" (result), "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter)
- : "memory");
- } else {
- unsigned long flags;
- raw_local_irq_save(flags);
- result = v->counter;
- result += i;
- v->counter = result;
- raw_local_irq_restore(flags);
- }
- smp_llsc_mb();
- return result;
- }
- static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
- {
- long result;
- smp_llsc_mb();
- if (cpu_has_llsc && R10000_LLSC_WAR) {
- long temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: lld %1, %2 # atomic64_sub_return \n"
- " subu %0, %1, %3 \n"
- " scd %0, %2 \n"
- " beqzl %0, 1b \n"
- " subu %0, %1, %3 \n"
- " .set mips0 \n"
- : "=&r" (result), "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter)
- : "memory");
- } else if (cpu_has_llsc) {
- long temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: lld %1, %2 # atomic64_sub_return \n"
- " subu %0, %1, %3 \n"
- " scd %0, %2 \n"
- " beqz %0, 2f \n"
- " subu %0, %1, %3 \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- " .set mips0 \n"
- : "=&r" (result), "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter)
- : "memory");
- } else {
- unsigned long flags;
- raw_local_irq_save(flags);
- result = v->counter;
- result -= i;
- v->counter = result;
- raw_local_irq_restore(flags);
- }
- smp_llsc_mb();
- return result;
- }
- /*
- * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
- * @i: integer value to subtract
- * @v: pointer of type atomic64_t
- *
- * Atomically test @v and subtract @i if @v is greater than or equal to @i.
- * The function returns the old value of @v minus @i.
- */
- static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
- {
- long result;
- smp_llsc_mb();
- if (cpu_has_llsc && R10000_LLSC_WAR) {
- long temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: lld %1, %2 # atomic64_sub_if_positive\n"
- " dsubu %0, %1, %3 \n"
- " bltz %0, 1f \n"
- " scd %0, %2 \n"
- " .set noreorder \n"
- " beqzl %0, 1b \n"
- " dsubu %0, %1, %3 \n"
- " .set reorder \n"
- "1: \n"
- " .set mips0 \n"
- : "=&r" (result), "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter)
- : "memory");
- } else if (cpu_has_llsc) {
- long temp;
- __asm__ __volatile__(
- " .set mips3 \n"
- "1: lld %1, %2 # atomic64_sub_if_positive\n"
- " dsubu %0, %1, %3 \n"
- " bltz %0, 1f \n"
- " scd %0, %2 \n"
- " .set noreorder \n"
- " beqz %0, 2f \n"
- " dsubu %0, %1, %3 \n"
- " .set reorder \n"
- " .subsection 2 \n"
- "2: b 1b \n"
- " .previous \n"
- "1: \n"
- " .set mips0 \n"
- : "=&r" (result), "=&r" (temp), "=m" (v->counter)
- : "Ir" (i), "m" (v->counter)
- : "memory");
- } else {
- unsigned long flags;
- raw_local_irq_save(flags);
- result = v->counter;
- result -= i;
- if (result >= 0)
- v->counter = result;
- raw_local_irq_restore(flags);
- }
- smp_llsc_mb();
- return result;
- }
- #define atomic64_cmpxchg(v, o, n) \
- ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
- #define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
- /**
- * atomic64_add_unless - add unless the number is a given value
- * @v: pointer of type atomic64_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
- */
- static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
- {
- long c, old;
- c = atomic64_read(v);
- for (;;) {
- if (unlikely(c == (u)))
- break;
- old = atomic64_cmpxchg((v), c, c + (a));
- if (likely(old == c))
- break;
- c = old;
- }
- return c != (u);
- }
- #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
- #define atomic64_dec_return(v) atomic64_sub_return(1, (v))
- #define atomic64_inc_return(v) atomic64_add_return(1, (v))
- /*
- * atomic64_sub_and_test - subtract value from variable and test result
- * @i: integer value to subtract
- * @v: pointer of type atomic64_t
- *
- * Atomically subtracts @i from @v and returns
- * true if the result is zero, or false for all
- * other cases.
- */
- #define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)
- /*
- * atomic64_inc_and_test - increment and test
- * @v: pointer of type atomic64_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
- #define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
- /*
- * atomic64_dec_and_test - decrement by 1 and test
- * @v: pointer of type atomic64_t
- *
- * Atomically decrements @v by 1 and
- * returns true if the result is 0, or false for all other
- * cases.
- */
- #define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
- /*
- * atomic64_dec_if_positive - decrement by 1 if old value positive
- * @v: pointer of type atomic64_t
- */
- #define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)
- /*
- * atomic64_inc - increment atomic variable
- * @v: pointer of type atomic64_t
- *
- * Atomically increments @v by 1.
- */
- #define atomic64_inc(v) atomic64_add(1, (v))
- /*
- * atomic64_dec - decrement atomic variable
- * @v: pointer of type atomic64_t
- *
- * Atomically decrements @v by 1.
- */
- #define atomic64_dec(v) atomic64_sub(1, (v))
- /*
- * atomic64_add_negative - add and test if negative
- * @v: pointer of type atomic64_t
- * @i: integer value to add
- *
- * Atomically adds @i to @v and returns true
- * if the result is negative, or false when
- * result is greater than or equal to zero.
- */
- #define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
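- /*
- * Illustrative sketch, not part of the original header: a 64-bit byte
- * counter, which would wrap far too quickly as a 32-bit atomic_t.
- * Only valid here, under CONFIG_64BIT, where atomic64_t exists.  The
- * example_* names are hypothetical.
- */
- static __inline__ void example_account_bytes(atomic64_t *total, long bytes)
- {
- 	atomic64_add(bytes, total);
- }
- static __inline__ long example_bytes_so_far(atomic64_t *total)
- {
- 	return atomic64_read(total);
- }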
- #endif /* CONFIG_64BIT */
- /*
- * atomic*_return operations are serializing; the non-*_return versions
- * are not.
- */
- #define smp_mb__before_atomic_dec() smp_llsc_mb()
- #define smp_mb__after_atomic_dec() smp_llsc_mb()
- #define smp_mb__before_atomic_inc() smp_llsc_mb()
- #define smp_mb__after_atomic_inc() smp_llsc_mb()
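- /*
- * Illustrative sketch, not part of the original header: since plain
- * atomic_dec() is not serializing, a store that must be visible to
- * other CPUs before the decrement needs an explicit barrier.  The
- * example_* names are hypothetical.
- */
- static __inline__ void example_publish_then_dec(int *done, atomic_t *pending)
- {
- 	*done = 1;			/* publish the result first ... */
- 	smp_mb__before_atomic_dec();	/* ... and order it before the dec */
- 	atomic_dec(pending);		/* pairs with a barrier on the reader side */
- }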
- #include <asm-generic/atomic-long.h>
- #endif /* _ASM_ATOMIC_H */