@@ -70,12 +70,12 @@ extern int atomic64_sub_ret(int, atomic64_t *);
 #define atomic_add_negative(i, v) (atomic_add_ret(i, v) < 0)
 #define atomic64_add_negative(i, v) (atomic64_add_ret(i, v) < 0)
 
-#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 #define atomic_add_unless(v, a, u) \
 ({ \
-	int c, old; \
+	__typeof__((v)->counter) c, old; \
 	c = atomic_read(v); \
 	for (;;) { \
 		if (unlikely(c == (u))) \
@@ -89,6 +89,26 @@ extern int atomic64_sub_ret(int, atomic64_t *);
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
+#define atomic64_cmpxchg(v, o, n) \
+	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+
+#define atomic64_add_unless(v, a, u) \
+({ \
+	__typeof__((v)->counter) c, old; \
+	c = atomic64_read(v); \
+	for (;;) { \
+		if (unlikely(c == (u))) \
+			break; \
+		old = atomic64_cmpxchg((v), c, c + (a)); \
+		if (likely(old == c)) \
+			break; \
+		c = old; \
+	} \
+	likely(c != (u)); \
+})
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
 /* Atomic operations are already serializing */
 #ifdef CONFIG_SMP
 #define smp_mb__before_atomic_dec() membar_storeload_loadload();
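The new atomic64_add_unless() above follows the usual cmpxchg retry pattern: read the
counter, bail out if it already holds the forbidden value, otherwise try to swap in the
incremented value and retry with whatever value the cmpxchg observed. For illustration
only, here is a minimal userspace sketch of that same pattern using the GCC/Clang
__atomic builtins; it is not kernel code, and the function name add_unless64 is made up
for this example.

/*
 * Userspace sketch of the add-unless retry loop (hypothetical helper,
 * not part of the patch).  Build with: gcc -O2 example.c
 */
#include <stdint.h>
#include <stdio.h>

/* Add @a to *@p unless *@p == @u; return 1 if the add happened, 0 otherwise. */
static int add_unless64(int64_t *p, int64_t a, int64_t u)
{
	int64_t c = __atomic_load_n(p, __ATOMIC_RELAXED);

	for (;;) {
		if (c == u)			/* hit the "unless" value: give up */
			return 0;
		/*
		 * Try to swap c -> c + a.  On failure the builtin stores the
		 * currently observed value back into c, so we just retry.
		 */
		if (__atomic_compare_exchange_n(p, &c, c + a, 0,
						__ATOMIC_SEQ_CST,
						__ATOMIC_SEQ_CST))
			return 1;
	}
}

int main(void)
{
	int64_t v = 0;

	printf("%d\n", add_unless64(&v, 1, 0));			/* 0: v held the "unless" value */
	v = 5;
	printf("%d %lld\n", add_unless64(&v, 1, 0), (long long)v); /* 1 6 */
	return 0;
}

As in the patch, the return value is true only when the loop exited because the
compare-and-swap succeeded, which is exactly what the final likely(c != (u)) expresses.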