@@ -105,15 +105,24 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
 
 #ifdef CONFIG_X86_CMPXCHG
 #define __HAVE_ARCH_CMPXCHG 1
-#define cmpxchg(ptr,o,n)\
-	((__typeof__(*(ptr)))__cmpxchg((ptr),(unsigned long)(o),\
-					(unsigned long)(n),sizeof(*(ptr))))
-#define sync_cmpxchg(ptr,o,n)\
-	((__typeof__(*(ptr)))__sync_cmpxchg((ptr),(unsigned long)(o),\
-					(unsigned long)(n),sizeof(*(ptr))))
-#define cmpxchg_local(ptr,o,n)\
-	((__typeof__(*(ptr)))__cmpxchg_local((ptr),(unsigned long)(o),\
-					(unsigned long)(n),sizeof(*(ptr))))
+#define cmpxchg(ptr, o, n)						\
+	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
+					(unsigned long)(n), sizeof(*(ptr))))
+#define sync_cmpxchg(ptr, o, n)						\
+	((__typeof__(*(ptr)))__sync_cmpxchg((ptr), (unsigned long)(o),	\
+					(unsigned long)(n), sizeof(*(ptr))))
+#define cmpxchg_local(ptr, o, n)					\
+	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
+					(unsigned long)(n), sizeof(*(ptr))))
+#endif
+
+#ifdef CONFIG_X86_CMPXCHG64
+#define cmpxchg64(ptr, o, n)						\
+	((__typeof__(*(ptr)))__cmpxchg64((ptr), (unsigned long long)(o), \
+					(unsigned long long)(n)))
+#define cmpxchg64_local(ptr, o, n)					\
+	((__typeof__(*(ptr)))__cmpxchg64_local((ptr), (unsigned long long)(o),\
+					(unsigned long long)(n)))
 #endif
 
 static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
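/*
 * Editor's note, not part of the patch: a sketch of how these macros
 * are typically used. cmpxchg() returns the value that was in *ptr at
 * the time of the access, so an update loop retries until no other
 * CPU raced with it. The helper name and the bare counter below are
 * hypothetical, not kernel APIs.
 */
static inline void counter_add_sketch(volatile unsigned long *counter,
				      unsigned long delta)
{
	unsigned long old, new;

	do {
		old = *counter;		/* snapshot the current value */
		new = old + delta;	/* value we would like to install */
	} while (cmpxchg(counter, old, new) != old);	/* retry on race */
}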
@@ -203,6 +212,34 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
 	return old;
 }
 
+static inline unsigned long long __cmpxchg64(volatile void *ptr,
+			unsigned long long old, unsigned long long new)
+{
+	unsigned long long prev;
+	__asm__ __volatile__(LOCK_PREFIX "cmpxchg8b %3"
+			     : "=A"(prev)
+			     : "b"((unsigned long)new),
+			       "c"((unsigned long)(new >> 32)),
+			       "m"(*__xg(ptr)),
+			       "0"(old)
+			     : "memory");
+	return prev;
+}
+
+static inline unsigned long long __cmpxchg64_local(volatile void *ptr,
+			unsigned long long old, unsigned long long new)
+{
+	unsigned long long prev;
+	__asm__ __volatile__("cmpxchg8b %3"
+			     : "=A"(prev)
+			     : "b"((unsigned long)new),
+			       "c"((unsigned long)(new >> 32)),
+			       "m"(*__xg(ptr)),
+			       "0"(old)
+			     : "memory");
+	return prev;
+}
+
 #ifndef CONFIG_X86_CMPXCHG
 /*
  * Building a kernel capable running on 80386. It may be necessary to
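/*
 * Editor's note, not part of the patch: what the cmpxchg8b asm above
 * computes, written as plain (non-atomic) C. The "=A" constraint binds
 * prev to the EDX:EAX register pair and "0" preloads that same pair
 * with old; "b" and "c" carry the low and high halves of new in
 * EBX:ECX. The hardware performs the whole sequence atomically; the
 * LOCK_PREFIX variant is atomic across CPUs, while __cmpxchg64_local
 * omits the lock and is only atomic on the local CPU. The function
 * name here is hypothetical.
 */
static inline unsigned long long cmpxchg8b_semantics_sketch(
			unsigned long long *p,
			unsigned long long old, unsigned long long new)
{
	unsigned long long prev = *p;	/* loaded into EDX:EAX */

	if (prev == old)		/* compared against the old pair */
		*p = new;		/* stored from ECX:EBX on match */
	return prev;			/* returned in EDX:EAX */
}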
@@ -228,7 +265,7 @@ static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old,
 	return old;
 }
 
-#define cmpxchg(ptr,o,n)						\
+#define cmpxchg(ptr, o, n)						\
 ({									\
 	__typeof__(*(ptr)) __ret;					\
 	if (likely(boot_cpu_data.x86 > 3))				\
@@ -239,7 +276,7 @@ static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old,
 				(unsigned long)(n), sizeof(*(ptr)));	\
 	__ret;								\
 })
-#define cmpxchg_local(ptr,o,n)						\
+#define cmpxchg_local(ptr, o, n)					\
 ({									\
 	__typeof__(*(ptr)) __ret;					\
 	if (likely(boot_cpu_data.x86 > 3))				\
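/*
 * Editor's note, not part of the patch: boot_cpu_data.x86 holds the
 * CPU family, so "> 3" selects the real cmpxchg instruction on 486
 * and later, and the cmpxchg_386() fallback runs only on a genuine
 * 80386. A minimal sketch of such an emulation, assuming a
 * uniprocessor 386 where disabling interrupts is sufficient (the
 * helper name is hypothetical):
 */
static inline unsigned long cmpxchg_emulated_sketch(volatile unsigned long *p,
			unsigned long old, unsigned long new)
{
	unsigned long prev, flags;

	local_irq_save(flags);		/* no SMP on a 386 */
	prev = *p;
	if (prev == old)
		*p = new;
	local_irq_restore(flags);
	return prev;
}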
@@ -252,38 +289,37 @@ static inline unsigned long cmpxchg_386(volatile void *ptr, unsigned long old,
 })
 #endif
 
-static inline unsigned long long __cmpxchg64(volatile void *ptr, unsigned long long old,
-				      unsigned long long new)
-{
-	unsigned long long prev;
-	__asm__ __volatile__(LOCK_PREFIX "cmpxchg8b %3"
-			     : "=A"(prev)
-			     : "b"((unsigned long)new),
-			       "c"((unsigned long)(new >> 32)),
-			       "m"(*__xg(ptr)),
-			       "0"(old)
-			     : "memory");
-	return prev;
-}
+#ifndef CONFIG_X86_CMPXCHG64
+/*
+ * Building a kernel capable of running on 80386 and 80486. It may be
+ * necessary to simulate the cmpxchg8b instruction on these CPUs.
+ */
 
-static inline unsigned long long __cmpxchg64_local(volatile void *ptr,
-				      unsigned long long old, unsigned long long new)
-{
-	unsigned long long prev;
-	__asm__ __volatile__("cmpxchg8b %3"
-			     : "=A"(prev)
-			     : "b"((unsigned long)new),
-			       "c"((unsigned long)(new >> 32)),
-			       "m"(*__xg(ptr)),
-			       "0"(old)
-			     : "memory");
-	return prev;
-}
+extern unsigned long long cmpxchg_486_u64(volatile void *, u64, u64);
+
+#define cmpxchg64(ptr, o, n)						\
+({									\
+	__typeof__(*(ptr)) __ret;					\
+	if (likely(boot_cpu_data.x86 > 4))				\
+		__ret = __cmpxchg64((ptr), (unsigned long long)(o),	\
+				(unsigned long long)(n));		\
+	else								\
+		__ret = cmpxchg_486_u64((ptr), (unsigned long long)(o),	\
+				(unsigned long long)(n));		\
+	__ret;								\
+})
+#define cmpxchg64_local(ptr, o, n)					\
+({									\
+	__typeof__(*(ptr)) __ret;					\
+	if (likely(boot_cpu_data.x86 > 4))				\
+		__ret = __cmpxchg64_local((ptr), (unsigned long long)(o), \
+				(unsigned long long)(n));		\
+	else								\
+		__ret = cmpxchg_486_u64((ptr), (unsigned long long)(o),	\
+				(unsigned long long)(n));		\
+	__ret;								\
+})
+
+#endif
 
-#define cmpxchg64(ptr,o,n)\
-	((__typeof__(*(ptr)))__cmpxchg64((ptr),(unsigned long long)(o),\
-					(unsigned long long)(n)))
-#define cmpxchg64_local(ptr,o,n)\
-	((__typeof__(*(ptr)))__cmpxchg64_local((ptr),(unsigned long long)(o),\
-					(unsigned long long)(n)))
 #endif
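/*
 * Editor's note, not part of the patch: with the fallback above,
 * callers may use cmpxchg64() unconditionally on 32-bit x86. A
 * hypothetical lock-free 64-bit accumulator follows; the initial read
 * may be torn on 32-bit, but cmpxchg64() validates all 64 bits, so a
 * torn snapshot simply fails the compare and the loop retries.
 */
static inline void acc64_add_sketch(volatile unsigned long long *acc,
				    unsigned long long delta)
{
	unsigned long long old, new;

	do {
		old = *acc;
		new = old + delta;
	} while (cmpxchg64(acc, old, new) != old);
}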