@@ -201,9 +201,9 @@ static inline unsigned long __xchg(unsigned long x, void * ptr, int size)
 
 #define __HAVE_ARCH_CMPXCHG	1
 
-#define cmpxchg(ptr,o,n)\
-	((__typeof__(*(ptr)))__cmpxchg((ptr),(unsigned long)(o),\
-					(unsigned long)(n),sizeof(*(ptr))))
+#define cmpxchg(ptr, o, n) \
+	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o), \
+				       (unsigned long)(n), sizeof(*(ptr))))
 
 extern void __cmpxchg_called_with_bad_pointer(void);
 
@@ -355,6 +355,44 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
 
 #include <linux/irqflags.h>
 
+#include <asm-generic/cmpxchg-local.h>
+
+static inline unsigned long __cmpxchg_local(volatile void *ptr,
+				unsigned long old,
+				unsigned long new, int size)
+{
+	switch (size) {
+	case 1:
+	case 2:
+	case 4:
+#ifdef __s390x__
+	case 8:
+#endif
+		return __cmpxchg(ptr, old, new, size);
+	default:
+		return __cmpxchg_local_generic(ptr, old, new, size);
+	}
+
+	return old;
+}
+
+/*
+ * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
+ * them available.
+ */
+#define cmpxchg_local(ptr, o, n) \
+	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
+			(unsigned long)(n), sizeof(*(ptr))))
+#ifdef __s390x__
+#define cmpxchg64_local(ptr, o, n) \
+	({ \
+		BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
+		cmpxchg_local((ptr), (o), (n)); \
+	})
+#else
+#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#endif
+
 /*
  * Use to set psw mask except for the first byte which
  * won't be changed by this function.
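
For illustration only, and not part of the patch above: a minimal sketch of how a
caller might use the new cmpxchg_local() primitive. Since cmpxchg_local() is only
guaranteed atomic with respect to the current CPU, the caller works on its own
per-CPU data with preemption disabled; the per-CPU variable and the helper
function below are hypothetical names invented for this sketch.

#include <linux/percpu.h>

/* hypothetical per-CPU counter, not part of the patch */
static DEFINE_PER_CPU(unsigned long, example_event_count);

static void example_count_event(void)
{
	unsigned long *cnt, old, seen;

	/* get_cpu_var() disables preemption, pinning us to this CPU */
	cnt = &get_cpu_var(example_event_count);
	do {
		old = *cnt;
		/* store old + 1 only if *cnt still equals old; returns the prior value */
		seen = cmpxchg_local(cnt, old, old + 1);
	} while (seen != old);
	put_cpu_var(example_event_count);
}

On s390 this resolves to __cmpxchg() for the 1-, 2-, 4- (and, on __s390x__, 8-) byte
cases, and falls back to __cmpxchg_local_generic() from asm-generic/cmpxchg-local.h
for any other operand size.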