@@ -119,14 +119,34 @@ static inline void atomic_dec(atomic_t *v)
 #define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
 #define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
 
-#define atomic_add_unless(v, a, u)				\
-({								\
-	int c, old;						\
-	c = atomic_read(v);					\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
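+/* Exchange primitives on the counter word of an atomic_t. */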
+#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
+#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
+
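+/* Local (this-CPU-only) variants, provided by the generic fallbacks. */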
+#define cmpxchg_local(ptr, o, n)				       \
+	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
+			(unsigned long)(n), sizeof(*(ptr))))
+
+#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+
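+/**
+ * atomic_add_unless - add unless the number is already a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to @v...
+ * @u: ...unless @v is equal to @u
+ *
+ * Atomically adds @a to @v, so long as @v was not already @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */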
+static inline int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
+		c = old;
+	return c != u;
+}
 
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
@@ -140,15 +160,6 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 	raw_local_irq_restore(flags);
 }
 
-#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
-#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
-
-#define cmpxchg_local(ptr, o, n)				       \
-	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
-			(unsigned long)(n), sizeof(*(ptr))))
-
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
-
 /* Assume that atomic operations are already serializing */
 #define smp_mb__before_atomic_dec()	barrier()
 #define smp_mb__after_atomic_dec()	barrier()