|
@@ -72,12 +72,7 @@ static inline void atomic64_sub(long i, atomic64_t *v)
  */
 static inline int atomic64_sub_and_test(long i, atomic64_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "er" (i), "m" (v->counter) : "memory");
-	return c;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, i, "%0", "e");
 }
 
 /**
@@ -116,12 +111,7 @@ static inline void atomic64_dec(atomic64_t *v)
  */
 static inline int atomic64_dec_and_test(atomic64_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "decq %0; sete %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "m" (v->counter) : "memory");
-	return c != 0;
+	GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", "e");
 }
 
 /**
@@ -134,12 +124,7 @@ static inline int atomic64_dec_and_test(atomic64_t *v)
  */
 static inline int atomic64_inc_and_test(atomic64_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "incq %0; sete %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "m" (v->counter) : "memory");
-	return c != 0;
+	GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", "e");
 }
 
 /**
@@ -153,12 +138,7 @@ static inline int atomic64_inc_and_test(atomic64_t *v)
  */
 static inline int atomic64_add_negative(long i, atomic64_t *v)
 {
-	unsigned char c;
-
-	asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
-		     : "=m" (v->counter), "=qm" (c)
-		     : "er" (i), "m" (v->counter) : "memory");
-	return c;
+	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, i, "%0", "s");
 }
 
 /**