- #ifndef _ASM_POWERPC_ATOMIC_H_
- #define _ASM_POWERPC_ATOMIC_H_
- /*
- * PowerPC atomic operations
- */
- typedef struct { volatile int counter; } atomic_t;
- #ifdef __KERNEL__
- #include <asm/synch.h>
- #include <asm/asm-compat.h>
- #define ATOMIC_INIT(i) { (i) }
- #define atomic_read(v) ((v)->counter)
- #define atomic_set(v,i) (((v)->counter) = (i))
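As a quick illustration of the basic API above, here is a minimal usage sketch. The variable and function names are hypothetical, not part of this header; note that atomic_read()/atomic_set() are plain accesses of the volatile counter and provide no ordering or read-modify-write atomicity on their own.

```c
/* Hypothetical usage sketch for ATOMIC_INIT/atomic_read/atomic_set. */
static atomic_t nr_events = ATOMIC_INIT(0);	/* static initialisation */

static void reset_events(void)
{
	atomic_set(&nr_events, 0);		/* plain (non-RMW) store */
}

static int current_events(void)
{
	return atomic_read(&nr_events);		/* plain (non-RMW) load  */
}
```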
- static __inline__ void atomic_add(int a, atomic_t *v)
- {
- int t;
- __asm__ __volatile__(
- "1: lwarx %0,0,%3 # atomic_add\n\
- add %0,%2,%0\n"
- PPC405_ERR77(0,%3)
- " stwcx. %0,0,%3 \n\
- bne- 1b"
- : "=&r" (t), "=m" (v->counter)
- : "r" (a), "r" (&v->counter), "m" (v->counter)
- : "cc");
- }
- static __inline__ int atomic_add_return(int a, atomic_t *v)
- {
- int t;
- __asm__ __volatile__(
- LWSYNC_ON_SMP
- "1: lwarx %0,0,%2 # atomic_add_return\n\
- add %0,%1,%0\n"
- PPC405_ERR77(0,%2)
- " stwcx. %0,0,%2 \n\
- bne- 1b"
- ISYNC_ON_SMP
- : "=&r" (t)
- : "r" (a), "r" (&v->counter)
- : "cc", "memory");
- return t;
- }
- #define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
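The pattern above — lwarx to load-and-reserve, stwcx. to store only if the reservation still holds, and bne- back to the label when it was lost — is the PowerPC load-linked/store-conditional retry loop. LWSYNC_ON_SMP and ISYNC_ON_SMP add the barriers that make the value-returning variants behave as full memory barriers on SMP, while the plain atomic_add() has no ordering. As a rough, purely illustrative analogue (not how the kernel builds this), the same semantics can be sketched in portable C11:

```c
#include <stdatomic.h>

/* Illustrative C11 sketch of atomic_add_return() semantics: a CAS retry
 * loop standing in for the lwarx/stwcx. reservation loop.  The weak CAS
 * may fail spuriously, much like stwcx., so the loop simply retries. */
static int add_return_sketch(atomic_int *v, int a)
{
	int old = atomic_load_explicit(v, memory_order_relaxed);
	int new;

	do {
		new = old + a;
	} while (!atomic_compare_exchange_weak_explicit(v, &old, new,
				memory_order_seq_cst,	/* ~ lwsync ... isync */
				memory_order_relaxed));	/* retry path */
	return new;
}
```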
- static __inline__ void atomic_sub(int a, atomic_t *v)
- {
- int t;
- __asm__ __volatile__(
- "1: lwarx %0,0,%3 # atomic_sub\n\
- subf %0,%2,%0\n"
- PPC405_ERR77(0,%3)
- " stwcx. %0,0,%3 \n\
- bne- 1b"
- : "=&r" (t), "=m" (v->counter)
- : "r" (a), "r" (&v->counter), "m" (v->counter)
- : "cc");
- }
- static __inline__ int atomic_sub_return(int a, atomic_t *v)
- {
- int t;
- __asm__ __volatile__(
- LWSYNC_ON_SMP
- "1: lwarx %0,0,%2 # atomic_sub_return\n\
- subf %0,%1,%0\n"
- PPC405_ERR77(0,%2)
- " stwcx. %0,0,%2 \n\
- bne- 1b"
- ISYNC_ON_SMP
- : "=&r" (t)
- : "r" (a), "r" (&v->counter)
- : "cc", "memory");
- return t;
- }
- static __inline__ void atomic_inc(atomic_t *v)
- {
- int t;
- __asm__ __volatile__(
- "1: lwarx %0,0,%2 # atomic_inc\n\
- addic %0,%0,1\n"
- PPC405_ERR77(0,%2)
- " stwcx. %0,0,%2 \n\
- bne- 1b"
- : "=&r" (t), "=m" (v->counter)
- : "r" (&v->counter), "m" (v->counter)
- : "cc");
- }
- static __inline__ int atomic_inc_return(atomic_t *v)
- {
- int t;
- __asm__ __volatile__(
- LWSYNC_ON_SMP
- "1: lwarx %0,0,%1 # atomic_inc_return\n\
- addic %0,%0,1\n"
- PPC405_ERR77(0,%1)
- " stwcx. %0,0,%1 \n\
- bne- 1b"
- ISYNC_ON_SMP
- : "=&r" (t)
- : "r" (&v->counter)
- : "cc", "memory");
- return t;
- }
- /*
- * atomic_inc_and_test - increment and test
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
- #define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
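One way atomic_inc_and_test() gets used is with a counter biased to -1, so that exactly one caller observes the increment landing on zero. A hedged sketch; the names below are hypothetical:

```c
/* Counter starts at -1, so only the first arrival sees it become 0. */
static atomic_t arrivals = ATOMIC_INIT(-1);

static void do_one_time_setup(void) { /* hypothetical one-time work */ }

static void on_arrival(void)
{
	/* The first caller (and only the first) wins the setup. */
	if (atomic_inc_and_test(&arrivals))
		do_one_time_setup();
}
```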
- static __inline__ void atomic_dec(atomic_t *v)
- {
- int t;
- __asm__ __volatile__(
- "1: lwarx %0,0,%2 # atomic_dec\n\
- addic %0,%0,-1\n"
- PPC405_ERR77(0,%2)
- " stwcx. %0,0,%2\n\
- bne- 1b"
- : "=&r" (t), "=m" (v->counter)
- : "r" (&v->counter), "m" (v->counter)
- : "cc");
- }
- static __inline__ int atomic_dec_return(atomic_t *v)
- {
- int t;
- __asm__ __volatile__(
- LWSYNC_ON_SMP
- "1: lwarx %0,0,%1 # atomic_dec_return\n\
- addic %0,%0,-1\n"
- PPC405_ERR77(0,%1)
- " stwcx. %0,0,%1\n\
- bne- 1b"
- ISYNC_ON_SMP
- : "=&r" (t)
- : "r" (&v->counter)
- : "cc", "memory");
- return t;
- }
- #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
- #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
- /**
- * atomic_add_unless - add unless the number is a given value
- * @v: pointer of type atomic_t
- * @a: the amount to add to v...
- * @u: ...unless v is equal to u.
- *
- * Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
- */
- #define atomic_add_unless(v, a, u) \
- ({ \
- int c, old; \
- c = atomic_read(v); \
- for (;;) { \
- if (unlikely(c == (u))) \
- break; \
- old = atomic_cmpxchg((v), c, c + (a)); \
- if (likely(old == c)) \
- break; \
- c = old; \
- } \
- c != (u); \
- })
- #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
- #define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
- #define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)
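A common use of the two groups of helpers above is reference counting: atomic_inc_not_zero() takes a reference only if the count has not already dropped to zero, and atomic_dec_and_test() tells the caller when the last reference went away. A minimal sketch; the struct and functions are hypothetical, not part of this header:

```c
/* Hypothetical object with an atomic reference count. */
struct obj {
	atomic_t refcnt;
	/* ... payload ... */
};

static void free_obj(struct obj *o) { /* hypothetical destructor */ }

static int obj_get(struct obj *o)
{
	/* Take a reference only if the object is not already being
	 * torn down (i.e. its count has not reached zero). */
	return atomic_inc_not_zero(&o->refcnt);
}

static void obj_put(struct obj *o)
{
	/* Whoever drops the last reference frees the object. */
	if (atomic_dec_and_test(&o->refcnt))
		free_obj(o);
}
```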
- /*
- * Atomically test *v and decrement if it is greater than 0.
- * The function returns the old value of *v minus 1.
- */
- static __inline__ int atomic_dec_if_positive(atomic_t *v)
- {
- int t;
- __asm__ __volatile__(
- LWSYNC_ON_SMP
- "1: lwarx %0,0,%1 # atomic_dec_if_positive\n\
- addic. %0,%0,-1\n\
- blt- 2f\n"
- PPC405_ERR77(0,%1)
- " stwcx. %0,0,%1\n\
- bne- 1b"
- ISYNC_ON_SMP
- "\n\
- 2:" : "=&r" (t)
- : "r" (&v->counter)
- : "cc", "memory");
- return t;
- }
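atomic_dec_if_positive() is useful for "take one if any are left" situations, since it refuses to drive the counter below zero: when the counter is already 0 it returns -1 without storing. A minimal sketch, assuming a hypothetical pool counter:

```c
/* Hypothetical pool of available slots; starts at the pool size. */
static atomic_t free_slots = ATOMIC_INIT(8);

/* Returns 0 on success, -1 if no slot was available. */
static int take_slot(void)
{
	return (atomic_dec_if_positive(&free_slots) < 0) ? -1 : 0;
}

static void release_slot(void)
{
	atomic_inc(&free_slots);
}
```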
- #define smp_mb__before_atomic_dec() smp_mb()
- #define smp_mb__after_atomic_dec() smp_mb()
- #define smp_mb__before_atomic_inc() smp_mb()
- #define smp_mb__after_atomic_inc() smp_mb()
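These hooks exist because a bare atomic_inc()/atomic_dec() (unlike the *_return forms) carries no memory barrier; on powerpc they simply expand to smp_mb(). A hedged, in-kernel-style sketch of why the "before" barrier matters — the payload and counter names are hypothetical:

```c
/* Producer side: publish the payload, then signal by decrementing. */
static void publish_and_signal(int *payload, atomic_t *pending)
{
	*payload = 42;			/* plain store of the data           */
	smp_mb__before_atomic_dec();	/* order the store before the dec    */
	atomic_dec(pending);		/* atomic_dec() alone is unordered   */
}
```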
- #ifdef __powerpc64__
- typedef struct { volatile long counter; } atomic64_t;
- #define ATOMIC64_INIT(i) { (i) }
- #define atomic64_read(v) ((v)->counter)
- #define atomic64_set(v,i) (((v)->counter) = (i))
- static __inline__ void atomic64_add(long a, atomic64_t *v)
- {
- long t;
- __asm__ __volatile__(
- "1: ldarx %0,0,%3 # atomic64_add\n\
- add %0,%2,%0\n\
- stdcx. %0,0,%3 \n\
- bne- 1b"
- : "=&r" (t), "=m" (v->counter)
- : "r" (a), "r" (&v->counter), "m" (v->counter)
- : "cc");
- }
- static __inline__ long atomic64_add_return(long a, atomic64_t *v)
- {
- long t;
- __asm__ __volatile__(
- LWSYNC_ON_SMP
- "1: ldarx %0,0,%2 # atomic64_add_return\n\
- add %0,%1,%0\n\
- stdcx. %0,0,%2 \n\
- bne- 1b"
- ISYNC_ON_SMP
- : "=&r" (t)
- : "r" (a), "r" (&v->counter)
- : "cc", "memory");
- return t;
- }
- #define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
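The atomic64_* family mirrors the 32-bit one, substituting ldarx/stdcx. for lwarx/stwcx. on a long counter; it exists only on 64-bit PowerPC, so callers must guard its use (or reach for atomic_long_t from asm-generic/atomic.h, included at the end of this header). A hypothetical 64-bit statistics counter:

```c
#ifdef __powerpc64__
/* Hypothetical byte counter that would overflow a 32-bit atomic_t. */
static atomic64_t bytes_rx = ATOMIC64_INIT(0);

static long account_rx(long len)
{
	return atomic64_add_return(len, &bytes_rx);	/* new running total */
}
#endif /* __powerpc64__ */
```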
- static __inline__ void atomic64_sub(long a, atomic64_t *v)
- {
- long t;
- __asm__ __volatile__(
- "1: ldarx %0,0,%3 # atomic64_sub\n\
- subf %0,%2,%0\n\
- stdcx. %0,0,%3 \n\
- bne- 1b"
- : "=&r" (t), "=m" (v->counter)
- : "r" (a), "r" (&v->counter), "m" (v->counter)
- : "cc");
- }
- static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
- {
- long t;
- __asm__ __volatile__(
- LWSYNC_ON_SMP
- "1: ldarx %0,0,%2 # atomic64_sub_return\n\
- subf %0,%1,%0\n\
- stdcx. %0,0,%2 \n\
- bne- 1b"
- ISYNC_ON_SMP
- : "=&r" (t)
- : "r" (a), "r" (&v->counter)
- : "cc", "memory");
- return t;
- }
- static __inline__ void atomic64_inc(atomic64_t *v)
- {
- long t;
- __asm__ __volatile__(
- "1: ldarx %0,0,%2 # atomic64_inc\n\
- addic %0,%0,1\n\
- stdcx. %0,0,%2 \n\
- bne- 1b"
- : "=&r" (t), "=m" (v->counter)
- : "r" (&v->counter), "m" (v->counter)
- : "cc");
- }
- static __inline__ long atomic64_inc_return(atomic64_t *v)
- {
- long t;
- __asm__ __volatile__(
- LWSYNC_ON_SMP
- "1: ldarx %0,0,%1 # atomic64_inc_return\n\
- addic %0,%0,1\n\
- stdcx. %0,0,%1 \n\
- bne- 1b"
- ISYNC_ON_SMP
- : "=&r" (t)
- : "r" (&v->counter)
- : "cc", "memory");
- return t;
- }
- /*
- * atomic64_inc_and_test - increment and test
- * @v: pointer of type atomic64_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
- #define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
- static __inline__ void atomic64_dec(atomic64_t *v)
- {
- long t;
- __asm__ __volatile__(
- "1: ldarx %0,0,%2 # atomic64_dec\n\
- addic %0,%0,-1\n\
- stdcx. %0,0,%2\n\
- bne- 1b"
- : "=&r" (t), "=m" (v->counter)
- : "r" (&v->counter), "m" (v->counter)
- : "cc");
- }
- static __inline__ long atomic64_dec_return(atomic64_t *v)
- {
- long t;
- __asm__ __volatile__(
- LWSYNC_ON_SMP
- "1: ldarx %0,0,%1 # atomic64_dec_return\n\
- addic %0,%0,-1\n\
- stdcx. %0,0,%1\n\
- bne- 1b"
- ISYNC_ON_SMP
- : "=&r" (t)
- : "r" (&v->counter)
- : "cc", "memory");
- return t;
- }
- #define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
- #define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
- /*
- * Atomically test *v and decrement if it is greater than 0.
- * The function returns the old value of *v minus 1.
- */
- static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
- {
- long t;
- __asm__ __volatile__(
- LWSYNC_ON_SMP
- "1: ldarx %0,0,%1 # atomic64_dec_if_positive\n\
- addic. %0,%0,-1\n\
- blt- 2f\n\
- stdcx. %0,0,%1\n\
- bne- 1b"
- ISYNC_ON_SMP
- "\n\
- 2:" : "=&r" (t)
- : "r" (&v->counter)
- : "cc", "memory");
- return t;
- }
- #endif /* __powerpc64__ */
- #include <asm-generic/atomic.h>
- #endif /* __KERNEL__ */
- #endif /* _ASM_POWERPC_ATOMIC_H_ */