@@ -141,7 +141,7 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 
 #define atomic_sub_and_test(i,v) (atomic_sub_return((i),(v)) == 0)
 
-#define ATOMIC_INIT(i) ((atomic_t) { (i) })
+#define ATOMIC_INIT(i) { (i) }
 
 #define smp_mb__before_atomic_dec() smp_mb()
 #define smp_mb__after_atomic_dec() smp_mb()
@@ -150,7 +150,7 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 
 #ifdef CONFIG_64BIT
 
-#define ATOMIC64_INIT(i) ((atomic64_t) { (i) })
+#define ATOMIC64_INIT(i) { (i) }
 
 static __inline__ s64
 __atomic64_add_return(s64 i, atomic64_t *v)
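
A note on the change (not part of the original patch): the old form, ((atomic_t) { (i) }), is a C99 compound literal, and a compound literal is not a constant expression in ISO C. The plain brace form { (i) } is an ordinary initializer list, so it remains usable in static initializers, which is presumably the motivation for this change. Below is a minimal sketch of the difference; atomic_t, ATOMIC_INIT_OLD, ATOMIC_INIT_NEW, counter_set, and stats are illustrative stand-ins, not taken from the patch:

#include <stdio.h>

/* Illustrative stand-in for the kernel's atomic_t; not the real definition. */
typedef struct { int counter; } atomic_t;

/* Old form from the diff: a C99 compound literal. A compound literal is not
 * a constant expression in ISO C, so a strict compiler may reject it in a
 * static initializer (GCC accepts it only as an extension). */
#define ATOMIC_INIT_OLD(i) ((atomic_t) { (i) })

/* New form from the diff: a plain brace-enclosed initializer list. */
#define ATOMIC_INIT_NEW(i) { (i) }

/* Valid ISO C: expands to "static atomic_t ok = { (0) };". */
static atomic_t ok = ATOMIC_INIT_NEW(0);

/* The old form here would draw "initializer element is not constant" from,
 * e.g., gcc -std=c99 -pedantic-errors:
 *
 *     static atomic_t bad = ATOMIC_INIT_OLD(0);
 */

/* The brace form also nests inside larger static initializers. */
struct counter_set { atomic_t hits; };
static struct counter_set stats = { .hits = ATOMIC_INIT_NEW(0) };

int main(void)
{
	printf("%d %d\n", ok.counter, stats.hits.counter);
	return 0;
}

The brace-only form matches what most other architectures' atomic.h headers defined at the time. The trade-off is that { (i) } is only usable where an initializer is expected; unlike the compound literal, it is not an expression you can assign from after definition.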