|
@@ -223,6 +223,42 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
|
|
|
return ret;
|
|
|
}
|
|
|
|
|
|
/*
 * Atomically compare-and-exchange the 64-bit value at *ptr using a
 * LDREXD/STREXD loop: if *ptr equals @old, store @new; in all cases
 * return the value that was read from *ptr.  No memory barriers are
 * implied here — see __cmpxchg64_mb() for the barriered variant.
 */
static inline unsigned long long __cmpxchg64(unsigned long long *ptr,
					     unsigned long long old,
					     unsigned long long new)
{
	unsigned long long oldval;	/* 64-bit value loaded from *ptr */
	unsigned long res;		/* STREXD status: 0 = store succeeded */

	__asm__ __volatile__(
	/* %H1/%H4/%H5 select the high register of the 64-bit pair */
"1:	ldrexd		%1, %H1, [%3]\n"	/* exclusive-load current value */
"	teq		%1, %4\n"		/* low words equal? */
"	teqeq		%H1, %H4\n"		/* ...and high words (only if low matched) */
"	bne		2f\n"			/* mismatch: exit without storing */
"	strexd		%0, %5, %H5, [%3]\n"	/* attempt exclusive 64-bit store */
"	teq		%0, #0\n"
"	bne		1b\n"			/* exclusivity lost: reload and retry */
"2:"
	/* "+Qo" (*ptr) tells the compiler the pointed-to memory is read/written */
	: "=&r" (res), "=&r" (oldval), "+Qo" (*ptr)
	: "r" (ptr), "r" (old), "r" (new)
	: "cc");				/* teq updates the condition flags */

	return oldval;
}
|
|
|
+
|
|
|
/*
 * Fully-ordered 64-bit compare-and-exchange: same contract as
 * __cmpxchg64() (returns the value previously held at *ptr), but
 * bracketed by smp_mb() so the operation is ordered against
 * surrounding memory accesses.
 */
static inline unsigned long long __cmpxchg64_mb(unsigned long long *ptr,
						unsigned long long old,
						unsigned long long new)
{
	unsigned long long oldval;

	smp_mb();
	oldval = __cmpxchg64(ptr, old, new);
	smp_mb();

	return oldval;
}
|
|
|
+
|
|
|
#define cmpxchg_local(ptr,o,n) \
|
|
|
((__typeof__(*(ptr)))__cmpxchg_local((ptr), \
|
|
|
(unsigned long)(o), \
|
|
@@ -230,18 +266,14 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
|
|
|
sizeof(*(ptr))))
|
|
|
|
|
|
#define cmpxchg64(ptr, o, n) \
|
|
|
- ((__typeof__(*(ptr)))atomic64_cmpxchg(container_of((ptr), \
|
|
|
- atomic64_t, \
|
|
|
- counter), \
|
|
|
- (unsigned long long)(o), \
|
|
|
- (unsigned long long)(n)))
|
|
|
	((__typeof__(*(ptr)))__cmpxchg64_mb((ptr),			\
					(unsigned long long)(o),	\
					(unsigned long long)(n)))
/* cmpxchg64() uses __cmpxchg64_mb(), i.e. it is bracketed by smp_mb() */
|
|
|
|
|
|
#define cmpxchg64_local(ptr, o, n) \
|
|
|
- ((__typeof__(*(ptr)))local64_cmpxchg(container_of((ptr), \
|
|
|
- local64_t, \
|
|
|
- a), \
|
|
|
- (unsigned long long)(o), \
|
|
|
- (unsigned long long)(n)))
|
|
|
	((__typeof__(*(ptr)))__cmpxchg64((ptr),				\
					(unsigned long long)(o),	\
					(unsigned long long)(n)))
/* cmpxchg64_local() uses the unbarriered __cmpxchg64() */
|
|
|
|
|
|
#endif /* __LINUX_ARM_ARCH__ >= 6 */
|
|
|
|