@@ -14,13 +14,52 @@ typedef struct {
 
 #define ATOMIC64_INIT(val)	{ (val) }
 
+#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
+#ifndef ATOMIC64_EXPORT
+#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
+#else
+#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
+	ATOMIC64_EXPORT(atomic64_##sym)
+#endif
+
 #ifdef CONFIG_X86_CMPXCHG64
-#define ATOMIC64_ALTERNATIVE_(f, g) "call atomic64_" #g "_cx8"
+#define __alternative_atomic64(f, g, out, in...) \
+	asm volatile("call %P[func]" \
+		     : out : [func] "i" (atomic64_##g##_cx8), ## in)
+
+#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
 #else
-#define ATOMIC64_ALTERNATIVE_(f, g) ALTERNATIVE("call atomic64_" #f "_386", "call atomic64_" #g "_cx8", X86_FEATURE_CX8)
+#define __alternative_atomic64(f, g, out, in...) \
+	alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
+			 X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in)
+
+#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
+	ATOMIC64_DECL_ONE(sym##_386)
+
+ATOMIC64_DECL_ONE(add_386);
+ATOMIC64_DECL_ONE(sub_386);
+ATOMIC64_DECL_ONE(inc_386);
+ATOMIC64_DECL_ONE(dec_386);
 #endif
 
-#define ATOMIC64_ALTERNATIVE(f) ATOMIC64_ALTERNATIVE_(f, f)
+#define alternative_atomic64(f, out, in...) \
+	__alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in)
+
+ATOMIC64_DECL(read);
+ATOMIC64_DECL(set);
+ATOMIC64_DECL(xchg);
+ATOMIC64_DECL(add_return);
+ATOMIC64_DECL(sub_return);
+ATOMIC64_DECL(inc_return);
+ATOMIC64_DECL(dec_return);
+ATOMIC64_DECL(dec_if_positive);
+ATOMIC64_DECL(inc_not_zero);
+ATOMIC64_DECL(add_unless);
+
+#undef ATOMIC64_DECL
+#undef ATOMIC64_DECL_ONE
+#undef __ATOMIC64_DECL
+#undef ATOMIC64_EXPORT
 
 /**
  * atomic64_cmpxchg - cmpxchg atomic64 variable
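
The new declaration layer is worth unpacking. __ATOMIC64_DECL() prototypes the out-of-line helpers as "void atomic64_##sym(atomic64_t *, ...)" because they follow a register-based calling convention of their own rather than the C ABI, and ATOMIC64_DECL_ONE() optionally appends an ATOMIC64_EXPORT() so a wrapping file can emit an export for each helper. An expansion sketch derived from the macros above, assuming CONFIG_X86_CMPXCHG64=y and ATOMIC64_EXPORT unset (illustrative only, not preprocessor output from the tree):

	/* ATOMIC64_DECL(read); becomes: */
	void atomic64_read_cx8(atomic64_t *, ...);

	/* alternative_atomic64(read, "=&A" (r), "c" (v) : "memory") becomes: */
	asm volatile("call %P[func]"
		     : "=&A" (r)
		     : [func] "i" (atomic64_read_cx8), "c" (v)
		     : "memory");

On kernels that cannot assume CMPXCHG8B, the same call site instead expands to alternative_call(), which patches between atomic64_read_386 and atomic64_read_cx8 at boot based on X86_FEATURE_CX8.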
@@ -50,11 +89,9 @@ static inline long long atomic64_xchg(atomic64_t *v, long long n)
 	long long o;
 	unsigned high = (unsigned)(n >> 32);
 	unsigned low = (unsigned)n;
-	asm volatile(ATOMIC64_ALTERNATIVE(xchg)
-		     : "=A" (o), "+b" (low), "+c" (high)
-		     : "S" (v)
-		     : "memory"
-		     );
+	alternative_atomic64(xchg, "=&A" (o),
+			     "S" (v), "b" (low), "c" (high)
+			     : "memory");
 	return o;
 }
 
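
Two constraint fixes ride along with the conversion: the result gains an early-clobber ("=&A" instead of "=A"), telling gcc that edx:eax is written before the asm is done with its inputs, and low/high are demoted from "+b"/"+c" read-write operands to plain "b"/"c" inputs, since neither helper implementation modifies them. A hypothetical caller, to show the interface only (event_count and events_drain are not from this patch):

	/* Snapshot-and-reset of a 64-bit counter, atomic on 32-bit x86. */
	static atomic64_t event_count = ATOMIC64_INIT(0);

	static long long events_drain(void)
	{
		return atomic64_xchg(&event_count, 0);	/* old value */
	}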
@@ -69,11 +106,9 @@ static inline void atomic64_set(atomic64_t *v, long long i)
 {
 	unsigned high = (unsigned)(i >> 32);
 	unsigned low = (unsigned)i;
-	asm volatile(ATOMIC64_ALTERNATIVE(set)
-		     : "+b" (low), "+c" (high)
-		     : "S" (v)
-		     : "eax", "edx", "memory"
-		     );
+	alternative_atomic64(set, /* no output */,
+			     "S" (v), "b" (low), "c" (high)
+			     : "eax", "edx", "memory");
 }
 
 /**
@@ -85,10 +120,7 @@ static inline void atomic64_set(atomic64_t *v, long long i)
 static inline long long atomic64_read(const atomic64_t *v)
 {
 	long long r;
-	asm volatile(ATOMIC64_ALTERNATIVE(read)
-		     : "=A" (r), "+c" (v)
-		     : : "memory"
-		     );
+	alternative_atomic64(read, "=&A" (r), "c" (v) : "memory");
 	return r;
 }
 
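
A reminder of why the wrapper exists at all: a plain 64-bit load or store on 32-bit x86 compiles to two 32-bit accesses and can tear against a concurrent writer, so atomic64_read()/atomic64_set() route through the out-of-line helpers instead. A hypothetical caller (deadline_ns and friends are illustrative, not from this patch):

	static atomic64_t deadline_ns = ATOMIC64_INIT(0);

	static void deadline_update(long long t)
	{
		atomic64_set(&deadline_ns, t);		/* no store tearing */
	}

	static long long deadline_peek(void)
	{
		return atomic64_read(&deadline_ns);	/* no load tearing */
	}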
@@ -101,10 +133,9 @@ static inline long long atomic64_read(const atomic64_t *v)
  */
 static inline long long atomic64_add_return(long long i, atomic64_t *v)
 {
-	asm volatile(ATOMIC64_ALTERNATIVE(add_return)
-		     : "+A" (i), "+c" (v)
-		     : : "memory"
-		     );
+	alternative_atomic64(add_return,
+			     ASM_OUTPUT2("+A" (i), "+c" (v)),
+			     ASM_NO_INPUT_CLOBBER("memory"));
 	return i;
 }
 
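
ASM_OUTPUT2() and ASM_NO_INPUT_CLOBBER() exist only to carry comma-containing lists through a single macro parameter: "out" is one argument of alternative_atomic64(), so a two-operand output list such as "+A" (i), "+c" (v) must be wrapped, and a clobber list with no input operands needs a helper to keep the operand syntax valid. In sketch form (assumed <asm/asm.h> definitions, paraphrased rather than quoted from this patch):

	#define ASM_OUTPUT2(a...)		a
	#define ASM_NO_INPUT_CLOBBER(clbr...)	"i" (0) : clbr

so ASM_NO_INPUT_CLOBBER("memory") supplies a dummy "i" (0) input ahead of the clobber list, which keeps the expansion well-formed after the ", ## in" paste in __alternative_atomic64().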
@@ -113,32 +144,25 @@ static inline long long atomic64_add_return(long long i, atomic64_t *v)
  */
 static inline long long atomic64_sub_return(long long i, atomic64_t *v)
 {
-	asm volatile(ATOMIC64_ALTERNATIVE(sub_return)
-		     : "+A" (i), "+c" (v)
-		     : : "memory"
-		     );
+	alternative_atomic64(sub_return,
+			     ASM_OUTPUT2("+A" (i), "+c" (v)),
+			     ASM_NO_INPUT_CLOBBER("memory"));
 	return i;
 }
 
 static inline long long atomic64_inc_return(atomic64_t *v)
 {
 	long long a;
-	asm volatile(ATOMIC64_ALTERNATIVE(inc_return)
-		     : "=A" (a)
-		     : "S" (v)
-		     : "memory", "ecx"
-		     );
+	alternative_atomic64(inc_return, "=&A" (a),
+			     "S" (v) : "memory", "ecx");
 	return a;
 }
 
 static inline long long atomic64_dec_return(atomic64_t *v)
 {
 	long long a;
-	asm volatile(ATOMIC64_ALTERNATIVE(dec_return)
-		     : "=A" (a)
-		     : "S" (v)
-		     : "memory", "ecx"
-		     );
+	alternative_atomic64(dec_return, "=&A" (a),
+			     "S" (v) : "memory", "ecx");
 	return a;
 }
 
@@ -151,10 +175,9 @@ static inline long long atomic64_dec_return(atomic64_t *v)
  */
 static inline long long atomic64_add(long long i, atomic64_t *v)
 {
-	asm volatile(ATOMIC64_ALTERNATIVE_(add, add_return)
-		     : "+A" (i), "+c" (v)
-		     : : "memory"
-		     );
+	__alternative_atomic64(add, add_return,
+			       ASM_OUTPUT2("+A" (i), "+c" (v)),
+			       ASM_NO_INPUT_CLOBBER("memory"));
 	return i;
 }
 
@@ -167,10 +190,9 @@ static inline long long atomic64_add(long long i, atomic64_t *v)
  */
 static inline long long atomic64_sub(long long i, atomic64_t *v)
 {
-	asm volatile(ATOMIC64_ALTERNATIVE_(sub, sub_return)
-		     : "+A" (i), "+c" (v)
-		     : : "memory"
-		     );
+	__alternative_atomic64(sub, sub_return,
+			       ASM_OUTPUT2("+A" (i), "+c" (v)),
+			       ASM_NO_INPUT_CLOBBER("memory"));
 	return i;
 }
 
@@ -196,10 +218,8 @@ static inline int atomic64_sub_and_test(long long i, atomic64_t *v)
  */
 static inline void atomic64_inc(atomic64_t *v)
 {
-	asm volatile(ATOMIC64_ALTERNATIVE_(inc, inc_return)
-		     : : "S" (v)
-		     : "memory", "eax", "ecx", "edx"
-		     );
+	__alternative_atomic64(inc, inc_return, /* no output */,
+			       "S" (v) : "memory", "eax", "ecx", "edx");
 }
 
 /**
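
The two-name form matters here: on !CONFIG_X86_CMPXCHG64 kernels a dedicated atomic64_inc_386 helper exists, while the CMPXCHG8B path has no separate void-returning variant and simply calls atomic64_inc_return_cx8 and discards the result, which is also why eax/ecx/edx sit in the clobber list. The /* no output */ comment documents an intentionally empty macro argument. Expansion sketch for the CMPXCHG8B branch (illustrative, derived from the macros above):

	asm volatile("call %P[func]"
		     : /* no output */
		     : [func] "i" (atomic64_inc_return_cx8), "S" (v)
		     : "memory", "eax", "ecx", "edx");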
@@ -210,10 +230,8 @@ static inline void atomic64_inc(atomic64_t *v)
  */
 static inline void atomic64_dec(atomic64_t *v)
 {
-	asm volatile(ATOMIC64_ALTERNATIVE_(dec, dec_return)
-		     : : "S" (v)
-		     : "memory", "eax", "ecx", "edx"
-		     );
+	__alternative_atomic64(dec, dec_return, /* no output */,
+			       "S" (v) : "memory", "eax", "ecx", "edx");
 }
 
 /**
@@ -263,15 +281,15 @@ static inline int atomic64_add_negative(long long i, atomic64_t *v)
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v.
+ * Returns non-zero if the add was done, zero otherwise.
  */
 static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
 {
 	unsigned low = (unsigned)u;
 	unsigned high = (unsigned)(u >> 32);
-	asm volatile(ATOMIC64_ALTERNATIVE(add_unless) "\n\t"
-		     : "+A" (a), "+c" (v), "+S" (low), "+D" (high)
-		     : : "memory");
+	alternative_atomic64(add_unless,
+			     ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)),
+			     "S" (v) : "memory");
 	return (int)a;
 }
 
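
Note the kernel-doc fix folded into this hunk: atomic64_add_unless() never returned "the old value of @v"; it reports whether the add happened. A hypothetical caller relying on that contract (get_ref_if_live is illustrative, not from this patch):

	/* Take a reference only if the object is not already dead. */
	static inline int get_ref_if_live(atomic64_t *refcount)
	{
		return atomic64_add_unless(refcount, 1, 0);	/* non-zero: got it */
	}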
@@ -279,26 +297,20 @@ static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
 static inline int atomic64_inc_not_zero(atomic64_t *v)
 {
 	int r;
-	asm volatile(ATOMIC64_ALTERNATIVE(inc_not_zero)
-		     : "=a" (r)
-		     : "S" (v)
-		     : "ecx", "edx", "memory"
-		     );
+	alternative_atomic64(inc_not_zero, "=&a" (r),
+			     "S" (v) : "ecx", "edx", "memory");
 	return r;
 }
 
 static inline long long atomic64_dec_if_positive(atomic64_t *v)
 {
 	long long r;
-	asm volatile(ATOMIC64_ALTERNATIVE(dec_if_positive)
-		     : "=A" (r)
-		     : "S" (v)
-		     : "ecx", "memory"
-		     );
+	alternative_atomic64(dec_if_positive, "=&A" (r),
+			     "S" (v) : "ecx", "memory");
 	return r;
 }
 
-#undef ATOMIC64_ALTERNATIVE
-#undef ATOMIC64_ALTERNATIVE_
+#undef alternative_atomic64
+#undef __alternative_atomic64
 
 #endif /* _ASM_X86_ATOMIC64_32_H */