|
@@ -121,8 +121,11 @@ static inline void local_irq_disable(void)
|
|
|
|
|
|
/* Emit a single hardware no-op instruction (no operands, no clobbers). */
#define nop() __asm__ __volatile__ ("nop" : : )
|
|
|
|
|
|
/*
 * xchg(): exchange *(ptr) with x and return the previous value,
 * cast back to the pointed-to type.  Size dispatch (1/2/4 bytes)
 * happens in __xchg().
 */
#define xchg(ptr, x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))
/*
 * xchg_local(): like xchg(), but only atomic with respect to the
 * current CPU — __xchg_local() disables local interrupts around a plain
 * load/store pair instead of using locked accesses.
 */
#define xchg_local(ptr, x) \
	((__typeof__(*(ptr)))__xchg_local((unsigned long)(x), (ptr), \
			sizeof(*(ptr))))
|
|
|
|
|
|
#ifdef CONFIG_SMP
|
|
|
extern void __xchg_called_with_bad_pointer(void);
|
|
@@ -146,7 +149,7 @@ extern void __xchg_called_with_bad_pointer(void);
|
|
|
#endif /* CONFIG_CHIP_M32700_TS1 */
|
|
|
|
|
|
static __always_inline unsigned long
|
|
|
-__xchg(unsigned long x, volatile void * ptr, int size)
|
|
|
+__xchg(unsigned long x, volatile void *ptr, int size)
|
|
|
{
|
|
|
unsigned long flags;
|
|
|
unsigned long tmp = 0;
|
|
@@ -196,6 +199,42 @@ __xchg(unsigned long x, volatile void * ptr, int size)
|
|
|
return (tmp);
|
|
|
}
|
|
|
|
|
|
+static __always_inline unsigned long
|
|
|
+__xchg_local(unsigned long x, volatile void *ptr, int size)
|
|
|
+{
|
|
|
+ unsigned long flags;
|
|
|
+ unsigned long tmp = 0;
|
|
|
+
|
|
|
+ local_irq_save(flags);
|
|
|
+
|
|
|
+ switch (size) {
|
|
|
+ case 1:
|
|
|
+ __asm__ __volatile__ (
|
|
|
+ "ldb %0, @%2 \n\t"
|
|
|
+ "stb %1, @%2 \n\t"
|
|
|
+ : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
|
|
|
+ break;
|
|
|
+ case 2:
|
|
|
+ __asm__ __volatile__ (
|
|
|
+ "ldh %0, @%2 \n\t"
|
|
|
+ "sth %1, @%2 \n\t"
|
|
|
+ : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
|
|
|
+ break;
|
|
|
+ case 4:
|
|
|
+ __asm__ __volatile__ (
|
|
|
+ "ld %0, @%2 \n\t"
|
|
|
+ "st %1, @%2 \n\t"
|
|
|
+ : "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
|
|
|
+ break;
|
|
|
+ default:
|
|
|
+ __xchg_called_with_bad_pointer();
|
|
|
+ }
|
|
|
+
|
|
|
+ local_irq_restore(flags);
|
|
|
+
|
|
|
+ return (tmp);
|
|
|
+}
|
|
|
+
|
|
|
/* Tell generic code that this architecture supplies its own cmpxchg(). */
#define __HAVE_ARCH_CMPXCHG 1
|
|
|
|
|
|
static inline unsigned long
|
|
@@ -228,6 +267,37 @@ __cmpxchg_u32(volatile unsigned int *p, unsigned int old, unsigned int new)
|
|
|
return retval;
|
|
|
}
|
|
|
|
|
|
/*
 * __cmpxchg_local_u32 - 32-bit compare-and-exchange, atomic only with
 * respect to the current CPU.
 *
 * If *p == old, store new into *p; in either case return the value that
 * was read from *p.  Atomicity comes from disabling local interrupts
 * around a plain load/compare/store sequence (no locked bus accesses).
 */
static inline unsigned long
__cmpxchg_local_u32(volatile unsigned int *p, unsigned int old,
			unsigned int new)
{
	unsigned long flags;
	unsigned int retval;

	local_irq_save(flags);
	__asm__ __volatile__ (
			/*
			 * NOTE(review): DCACHE_CLEAR and the conditional "r4"
			 * clobber below are presumably an M32700 TS1 chip
			 * workaround — confirm against the macro's definition
			 * earlier in this file.
			 */
			DCACHE_CLEAR("%0", "r4", "%1")
			"ld %0, @%1; \n"
		" bne %0, %2, 1f; \n"
			"st %3, @%1; \n"
		" bra 2f; \n"
		" .fillinsn \n"
		"1:"
			/*
			 * Miss path: write back the value just read.
			 * NOTE(review): looks like this mirrors the unlock
			 * store of the SMP __cmpxchg_u32 variant — confirm
			 * whether it is needed after DCACHE_CLEAR.
			 */
			"st %0, @%1; \n"
		" .fillinsn \n"
		"2:"
			: "=&r" (retval)
			: "r" (p), "r" (old), "r" (new)
			: "cbit", "memory"
#ifdef CONFIG_CHIP_M32700_TS1
			, "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
		);
	local_irq_restore(flags);

	return retval;
}
|
|
|
+
|
|
|
/* This function doesn't exist, so you'll get a linker error
|
|
|
if something tries to do an invalid cmpxchg(). */
|
|
|
extern void __cmpxchg_called_with_bad_pointer(void);
|
|
@@ -247,13 +317,34 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
|
|
|
return old;
|
|
|
}
|
|
|
|
|
|
/*
 * cmpxchg(): if *(ptr) == o, set *(ptr) to n.  Returns the value read
 * from *(ptr), cast back to the pointed-to type; callers compare it to o
 * to learn whether the swap happened.  Size dispatch is in __cmpxchg().
 */
#define cmpxchg(ptr, o, n) \
	((__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)(o), \
			(unsigned long)(n), sizeof(*(ptr))))
|
|
|
+
|
|
|
+#include <asm-generic/cmpxchg-local.h>
|
|
|
+
|
|
|
/*
 * __cmpxchg_local - size-dispatching backend for cmpxchg_local().
 * Atomic only with respect to the current CPU.
 *
 * @ptr:  location to update
 * @old:  expected current value
 * @new:  value to store if *ptr == @old
 * @size: sizeof(*ptr)
 *
 * Returns the value read from @ptr.  Only the 4-byte case has a native
 * implementation; every other size falls back to the generic
 * IRQ-disabling helper from <asm-generic/cmpxchg-local.h>.
 *
 * Fix: dropped the unreachable "return old;" that followed the switch —
 * both the case and the default already return, so it was dead code.
 */
static inline unsigned long __cmpxchg_local(volatile void *ptr,
				      unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_local_u32(ptr, old, new);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}
}
|
|
|
+
|
|
|
/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
			(unsigned long)(n), sizeof(*(ptr))))
/* No native 64-bit cmpxchg here: always use the generic helper. */
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
|
|
|
|
|
|
#endif /* __KERNEL__ */
|
|
|
|