|
@@ -209,12 +209,34 @@ __cmpxchg_u32(volatile unsigned int *p, unsigned int old, unsigned int new)
|
|
|
return prev;
|
|
|
}
|
|
|
|
|
|
+static inline unsigned long
|
|
|
+__cmpxchg_u32_local(volatile unsigned int *p, unsigned int old,
|
|
|
+ unsigned int new)
|
|
|
+{ /* 32-bit cmpxchg atomic only wrt the local CPU: the asm has no SMP barriers (cf. __cmpxchg_u32) */
|
|
|
+ unsigned int prev; /* value observed at *p before the conditional store */
|
|
|
+
|
|
|
+ __asm__ __volatile__ ("\n\
|
|
|
+1: lwarx %0,0,%2 \n\
|
|
|
+ cmpw 0,%0,%3 \n\
|
|
|
+ bne 2f \n"
|
|
|
+ PPC405_ERR77(0,%2)
|
|
|
+" stwcx. %4,0,%2 \n\
|
|
|
+ bne- 1b\n"
|
|
|
+"2:"
|
|
|
+ : "=&r" (prev), "=m" (*p)
|
|
|
+ : "r" (p), "r" (old), "r" (new), "m" (*p)
|
|
|
+ : "cc", "memory");
|
|
|
+
|
|
|
+ return prev; /* equals 'old' iff the store was attempted and succeeded */
|
|
|
+}
|
|
|
+
|
|
|
/* This function doesn't exist, so you'll get a linker error
|
|
|
if something tries to do an invalid cmpxchg(). */
|
|
|
extern void __cmpxchg_called_with_bad_pointer(void);
|
|
|
|
|
|
static __inline__ unsigned long
|
|
|
-__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
|
|
|
+__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
|
|
|
+ unsigned int size)
|
|
|
{
|
|
|
switch (size) {
|
|
|
case 4:
|
|
@@ -228,7 +250,7 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
|
|
|
return old;
|
|
|
}
|
|
|
|
|
|
-#define cmpxchg(ptr,o,n) \
|
|
|
+#define cmpxchg(ptr, o, n) \
|
|
|
({ \
|
|
|
__typeof__(*(ptr)) _o_ = (o); \
|
|
|
__typeof__(*(ptr)) _n_ = (n); \
|
|
@@ -236,6 +258,31 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
|
|
|
(unsigned long)_n_, sizeof(*(ptr))); \
|
|
|
})
|
|
|
|
|
|
+#include <asm-generic/cmpxchg-local.h>
|
|
|
+
|
|
|
+static inline unsigned long __cmpxchg_local(volatile void *ptr,
|
|
|
+ unsigned long old,
|
|
|
+ unsigned long new, unsigned int size)
|
|
|
+{ /* size-dispatching cmpxchg, atomic only wrt the current CPU */
|
|
|
+ switch (size) {
|
|
|
+ case 4:
|
|
|
+ return __cmpxchg_u32_local(ptr, old, new); /* native lwarx/stwcx. path */
|
|
|
+ default:
|
|
|
+ return __cmpxchg_local_generic(ptr, old, new, size); /* asm-generic fallback for other sizes */
|
|
|
+ }
|
|
|
+
|
|
|
+ return old; /* not reached: every switch arm returns */
|
|
|
+}
|
|
|
+
|
|
|
+/*
|
|
|
+ * cmpxchg_local and cmpxchg64_local are atomic only with respect to the
|
|
|
+ * current CPU (they provide no cross-CPU ordering). They are made
|
|
|
+ * available unconditionally.
|
|
|
+ */
|
|
|
+#define cmpxchg_local(ptr, o, n) \
|
|
|
+ ((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
|
|
|
+ (unsigned long)(n), sizeof(*(ptr))))
|
|
|
+#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n)) /* 64-bit: generic fallback (no native 8-byte path here -- see asm-generic/cmpxchg-local.h) */
|
|
|
+
|
|
|
#define arch_align_stack(x) (x)
|
|
|
|
|
|
#endif /* __KERNEL__ */
|