@@ -154,6 +154,10 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
 }
 #endif
 
+#include <asm-generic/cmpxchg-local.h>
+
+#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+
 /*
  * Atomic compare and exchange. Compare OLD with MEM, if identical,
  * store NEW in MEM. Return the initial value in MEM. Success is
@@ -185,9 +189,26 @@ static inline unsigned long __cmpxchg(volatile void *p, unsigned long old,
 	return old;
 }
 
-#define cmpxchg(ptr,o,n)\
-	((__typeof__(*(ptr)))__cmpxchg((ptr),(unsigned long)(o),\
-			(unsigned long)(n),sizeof(*(ptr))))
+#define cmpxchg(ptr, o, n)						\
+	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
+			(unsigned long)(n), sizeof(*(ptr))))
+#define cmpxchg_local(ptr, o, n)					\
+	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
+			(unsigned long)(n), sizeof(*(ptr))))
+#else
+
+/*
+ * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
+ * them available.
+ */
+#define cmpxchg_local(ptr, o, n)					\
+	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
+			(unsigned long)(n), sizeof(*(ptr))))
+
+#ifndef CONFIG_SMP
+#include <asm-generic/cmpxchg.h>
+#endif
+
 #endif
 
 #define arch_align_stack(x) (x)
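
The comments above state the contract: compare OLD with MEM, store NEW only if
they were identical, and return the initial value of MEM; cmpxchg_local() and
cmpxchg64_local() guarantee this only against code running on the same CPU.
As a rough illustration of how a caller uses that return value (this sketch is
not part of the patch; the helper name and counter are hypothetical), an update
typically loops until the exchange succeeds:

	/*
	 * Hypothetical sketch: increment a value that is only modified
	 * from the local CPU. cmpxchg_local() stores "new" only when
	 * *ptr still holds "old" and returns the value it observed, so
	 * the loop retries until the update takes effect.
	 */
	static inline void local_counter_inc(unsigned long *ptr)
	{
		unsigned long old, new;

		do {
			old = *ptr;
			new = old + 1;
		} while (cmpxchg_local(ptr, old, new) != old);
	}

On configurations that fall back to the generic helpers pulled in above, the
local variants still honour the "atomic wrt current CPU" comment, which is why
they can always be made available even when a full SMP-safe cmpxchg is not.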