|
@@ -7,7 +7,6 @@
|
|
|
#include <linux/kernel.h>
|
|
|
|
|
|
#include <asm/hw_irq.h>
|
|
|
-#include <asm/atomic.h>
|
|
|
|
|
|
/*
|
|
|
* Memory barrier.
|
|
@@ -227,6 +226,29 @@ __xchg_u32(volatile void *p, unsigned long val)
|
|
|
return prev;
|
|
|
}
|
|
|
|
|
|
+/*
|
|
|
+ * Atomic exchange, without memory barriers (local variant of __xchg_u32)
|
|
|
+ *
|
|
|
+ * Changes the memory location '*p' to be val and returns
|
|
|
+ * the previous value stored there.
|
|
|
+ */
|
|
|
+static __inline__ unsigned long
|
|
|
+__xchg_u32_local(volatile void *p, unsigned long val)
|
|
|
+{
|
|
|
+ unsigned long prev;
|
|
|
+
|
|
|
+ __asm__ __volatile__(
|
|
|
+"1: lwarx %0,0,%2 \n"
|
|
|
+ PPC405_ERR77(0,%2)
|
|
|
+" stwcx. %3,0,%2 \n\
|
|
|
+ bne- 1b"
|
|
|
+ : "=&r" (prev), "+m" (*(volatile unsigned int *)p)
|
|
|
+ : "r" (p), "r" (val)
|
|
|
+ : "cc", "memory");
|
|
|
+
|
|
|
+ return prev;
|
|
|
+}
|
|
|
+
|
|
|
#ifdef CONFIG_PPC64
|
|
|
static __inline__ unsigned long
|
|
|
__xchg_u64(volatile void *p, unsigned long val)
|
|
@@ -246,6 +268,23 @@ __xchg_u64(volatile void *p, unsigned long val)
|
|
|
|
|
|
return prev;
|
|
|
}
|
|
|
+
|
|
|
+static __inline__ unsigned long
|
|
|
+__xchg_u64_local(volatile void *p, unsigned long val)
|
|
|
+{
|
|
|
+ unsigned long prev;
|
|
|
+
|
|
|
+ __asm__ __volatile__(
|
|
|
+"1: ldarx %0,0,%2 \n"
|
|
|
+ PPC405_ERR77(0,%2)
|
|
|
+" stdcx. %3,0,%2 \n\
|
|
|
+ bne- 1b"
|
|
|
+ : "=&r" (prev), "+m" (*(volatile unsigned long *)p)
|
|
|
+ : "r" (p), "r" (val)
|
|
|
+ : "cc", "memory");
|
|
|
+
|
|
|
+ return prev;
|
|
|
+}
|
|
|
#endif
|
|
|
|
|
|
/*
|
|
@@ -269,12 +308,33 @@ __xchg(volatile void *ptr, unsigned long x, unsigned int size)
|
|
|
return x;
|
|
|
}
|
|
|
|
|
|
+static __inline__ unsigned long
|
|
|
+__xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
|
|
|
+{
|
|
|
+ switch (size) {
|
|
|
+ case 4:
|
|
|
+ return __xchg_u32_local(ptr, x);
|
|
|
+#ifdef CONFIG_PPC64
|
|
|
+ case 8:
|
|
|
+ return __xchg_u64_local(ptr, x);
|
|
|
+#endif
|
|
|
+ }
|
|
|
+ __xchg_called_with_bad_pointer();
|
|
|
+ return x;
|
|
|
+}
|
|
|
#define xchg(ptr,x) \
|
|
|
({ \
|
|
|
__typeof__(*(ptr)) _x_ = (x); \
|
|
|
(__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
|
|
|
})
|
|
|
|
|
|
+#define xchg_local(ptr,x) \
|
|
|
+ ({ \
|
|
|
+ __typeof__(*(ptr)) _x_ = (x); \
|
|
|
+ (__typeof__(*(ptr))) __xchg_local((ptr), \
|
|
|
+ (unsigned long)_x_, sizeof(*(ptr))); \
|
|
|
+ })
|
|
|
+
|
|
|
#define tas(ptr) (xchg((ptr),1))
|
|
|
|
|
|
/*
|
|
@@ -306,6 +366,28 @@ __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
|
|
|
return prev;
|
|
|
}
|
|
|
|
|
|
+static __inline__ unsigned long
|
|
|
+__cmpxchg_u32_local(volatile unsigned int *p, unsigned long old,
|
|
|
+ unsigned long new)
|
|
|
+{
|
|
|
+ unsigned int prev;
|
|
|
+
|
|
|
+ __asm__ __volatile__ (
|
|
|
+"1: lwarx %0,0,%2 # __cmpxchg_u32\n\
|
|
|
+ cmpw 0,%0,%3\n\
|
|
|
+ bne- 2f\n"
|
|
|
+ PPC405_ERR77(0,%2)
|
|
|
+" stwcx. %4,0,%2\n\
|
|
|
+ bne- 1b"
|
|
|
+ "\n\
|
|
|
+2:"
|
|
|
+ : "=&r" (prev), "+m" (*p)
|
|
|
+ : "r" (p), "r" (old), "r" (new)
|
|
|
+ : "cc", "memory");
|
|
|
+
|
|
|
+ return prev;
|
|
|
+}
|
|
|
+
|
|
|
#ifdef CONFIG_PPC64
|
|
|
static __inline__ unsigned long
|
|
|
__cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
|
|
@@ -328,6 +410,27 @@ __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
|
|
|
|
|
|
return prev;
|
|
|
}
|
|
|
+
|
|
|
+static __inline__ unsigned long
|
|
|
+__cmpxchg_u64_local(volatile unsigned long *p, unsigned long old,
|
|
|
+ unsigned long new)
|
|
|
+{
|
|
|
+ unsigned long prev;
|
|
|
+
|
|
|
+ __asm__ __volatile__ (
|
|
|
+"1: ldarx %0,0,%2 # __cmpxchg_u64\n\
|
|
|
+ cmpd 0,%0,%3\n\
|
|
|
+ bne- 2f\n\
|
|
|
+ stdcx. %4,0,%2\n\
|
|
|
+ bne- 1b"
|
|
|
+ "\n\
|
|
|
+2:"
|
|
|
+ : "=&r" (prev), "+m" (*p)
|
|
|
+ : "r" (p), "r" (old), "r" (new)
|
|
|
+ : "cc", "memory");
|
|
|
+
|
|
|
+ return prev;
|
|
|
+}
|
|
|
#endif
|
|
|
|
|
|
/* This function doesn't exist, so you'll get a linker error
|
|
@@ -350,6 +453,22 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
|
|
|
return old;
|
|
|
}
|
|
|
|
|
|
+static __inline__ unsigned long
|
|
|
+__cmpxchg_local(volatile void *ptr, unsigned long old, unsigned long new,
|
|
|
+ unsigned int size)
|
|
|
+{
|
|
|
+ switch (size) {
|
|
|
+ case 4:
|
|
|
+ return __cmpxchg_u32_local(ptr, old, new);
|
|
|
+#ifdef CONFIG_PPC64
|
|
|
+ case 8:
|
|
|
+ return __cmpxchg_u64_local(ptr, old, new);
|
|
|
+#endif
|
|
|
+ }
|
|
|
+ __cmpxchg_called_with_bad_pointer();
|
|
|
+ return old;
|
|
|
+}
|
|
|
+
|
|
|
#define cmpxchg(ptr,o,n) \
|
|
|
({ \
|
|
|
__typeof__(*(ptr)) _o_ = (o); \
|
|
@@ -358,6 +477,15 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
|
|
|
(unsigned long)_n_, sizeof(*(ptr))); \
|
|
|
})
|
|
|
|
|
|
+
|
|
|
+#define cmpxchg_local(ptr,o,n) \
|
|
|
+ ({ \
|
|
|
+ __typeof__(*(ptr)) _o_ = (o); \
|
|
|
+ __typeof__(*(ptr)) _n_ = (n); \
|
|
|
+ (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_, \
|
|
|
+ (unsigned long)_n_, sizeof(*(ptr))); \
|
|
|
+ })
|
|
|
+
|
|
|
#ifdef CONFIG_PPC64
|
|
|
/*
|
|
|
* We handle most unaligned accesses in hardware. On the other hand
|