@@ -165,7 +165,8 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 	return t;
 }
 
-#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_cmpxchg(v, o, n) \
+	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
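The point of the new definition is that the result of cmpxchg() is now cast to __typeof__((v)->counter) rather than forced to int, so atomic_cmpxchg() returns whatever type the counter field actually has, and the identical expression can be reused for atomic64_t, whose counter is a long, without truncating the old value to 32 bits. A minimal user-space sketch of the type-handling idea (plain GCC C on an LP64 machine; my_cmpxchg, my_atomic64 and my_atomic64_cmpxchg are made-up stand-ins for illustration, not the kernel primitives, and the stand-in is not actually atomic):

#include <stdio.h>

/* Hypothetical stand-in for the kernel's cmpxchg(): compare *p with old,
 * store new if they match, and return the previous value.  Not atomic,
 * it only illustrates the type handling. */
static long my_cmpxchg(long *p, long old, long new)
{
	long prev = *p;

	if (prev == old)
		*p = new;
	return prev;
}

struct my_atomic64 { long counter; };

/* Same shape as the patch: the cast follows the counter's own type. */
#define my_atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))my_cmpxchg(&((v)->counter), (o), (n)))

int main(void)
{
	/* Assumes an LP64 machine so that long is 64 bits wide. */
	struct my_atomic64 v = { .counter = 0x100000000L };
	long old = my_atomic64_cmpxchg(&v, 0x100000000L, 1L);

	printf("old=%ld new=%ld\n", old, v.counter);
	return 0;
}

With the old (int)-style cast, old would have come back as 0 in this sketch, which is exactly the truncation the __typeof__ form avoids.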
@@ -413,6 +414,43 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 	return t;
 }
 
+#define atomic64_cmpxchg(v, o, n) \
+	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+
+/**
+ * atomic64_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long t;
+
+	__asm__ __volatile__ (
+	LWSYNC_ON_SMP
+"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
+	cmpd	0,%0,%3 \n\
+	beq-	2f \n\
+	add	%0,%2,%0 \n"
+"	stdcx.	%0,0,%1 \n\
+	bne-	1b \n"
+	ISYNC_ON_SMP
+"	subf	%0,%2,%0 \n\
+2:"
+	: "=&r" (t)
+	: "r" (&v->counter), "r" (a), "r" (u)
+	: "cc", "memory");
+
+	return t != u;
+}
+
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
 #endif /* __powerpc64__ */
 
 #include <asm-generic/atomic.h>
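For readers who do not speak PowerPC assembly: ldarx/stdcx. is a load-reserved/store-conditional pair, so atomic64_add_unless() above is the usual "add @a unless the value is @u" retry loop, with LWSYNC_ON_SMP and ISYNC_ON_SMP supplying the SMP memory barriers around it. A rough portable sketch of the same control flow, written with GCC's __atomic builtins purely as an illustration (this is not how the kernel implements it, and the memory orderings are chosen conservatively rather than to match lwsync/isync exactly):

#include <stdbool.h>
#include <stdio.h>

/* Illustrative only: the kernel version uses ldarx/stdcx.; this sketch
 * just mirrors the control flow and return convention. */
static int sketch_add_unless(long *counter, long a, long u)
{
	long old = __atomic_load_n(counter, __ATOMIC_RELAXED);

	while (old != u) {
		/* Try to swap old -> old + a; on failure 'old' is refreshed
		 * with the current value and we go around again. */
		if (__atomic_compare_exchange_n(counter, &old, old + a,
						false, __ATOMIC_SEQ_CST,
						__ATOMIC_RELAXED))
			return 1;	/* added: the value was not u */
	}
	return 0;			/* the value was u: nothing done */
}

int main(void)
{
	long count = 1;

	printf("%d\n", sketch_add_unless(&count, 1, 0));	/* 1: count becomes 2 */
	count = 0;
	printf("%d\n", sketch_add_unless(&count, 1, 0));	/* 0: count stays 0 */
	return 0;
}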
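atomic64_inc_not_zero() exists for the classic "take a reference only if the object is still live" lookup pattern. A hypothetical kernel-style usage sketch (my_object and my_object_get are invented for illustration):

#include <asm/atomic.h>

/* Invented example of the pattern this helper is for: bump a 64-bit
 * refcount during a lookup, but only if it has not already hit zero,
 * i.e. the object is not in the middle of being freed. */
struct my_object {
	atomic64_t	refcount;
	/* ... */
};

static struct my_object *my_object_get(struct my_object *obj)
{
	if (obj && !atomic64_inc_not_zero(&obj->refcount))
		obj = NULL;	/* refcount was 0: do not resurrect it */
	return obj;
}

The release side would typically pair this with an atomic64_dec_and_test()-style drop before freeing the object.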