@@ -1,16 +1,87 @@
+/*
+ * Generic UP xchg and cmpxchg using interrupt disablement. Does not
+ * support SMP.
+ */
+
 #ifndef __ASM_GENERIC_CMPXCHG_H
 #define __ASM_GENERIC_CMPXCHG_H
 
-/*
- * Generic cmpxchg
- *
- * Uses the local cmpxchg. Does not support SMP.
- */
 #ifdef CONFIG_SMP
 #error "Cannot use generic cmpxchg on SMP"
 #endif
 
-#include <asm-generic/cmpxchg-local.h>
+#include <linux/irqflags.h>
+
+#ifndef xchg
+
+/*
+ * This function doesn't exist, so you'll get a linker error if
+ * something tries to do an invalidly-sized xchg().
+ */
+extern void __xchg_called_with_bad_pointer(void);
+
+static inline
+unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
+{
+	unsigned long ret, flags;
+
+	switch (size) {
+	case 1:
+#ifdef __xchg_u8
+		return __xchg_u8(x, ptr);
+#else
+		local_irq_save(flags);
+		ret = *(volatile u8 *)ptr;
+		*(volatile u8 *)ptr = x;
+		local_irq_restore(flags);
+		return ret;
+#endif /* __xchg_u8 */
+
+	case 2:
+#ifdef __xchg_u16
+		return __xchg_u16(x, ptr);
+#else
+		local_irq_save(flags);
+		ret = *(volatile u16 *)ptr;
+		*(volatile u16 *)ptr = x;
+		local_irq_restore(flags);
+		return ret;
+#endif /* __xchg_u16 */
+
+	case 4:
+#ifdef __xchg_u32
+		return __xchg_u32(x, ptr);
+#else
+		local_irq_save(flags);
+		ret = *(volatile u32 *)ptr;
+		*(volatile u32 *)ptr = x;
+		local_irq_restore(flags);
+		return ret;
+#endif /* __xchg_u32 */
+
+#ifdef CONFIG_64BIT
+	case 8:
+#ifdef __xchg_u64
+		return __xchg_u64(x, ptr);
+#else
+		local_irq_save(flags);
+		ret = *(volatile u64 *)ptr;
+		*(volatile u64 *)ptr = x;
+		local_irq_restore(flags);
+		return ret;
+#endif /* __xchg_u64 */
+#endif /* CONFIG_64BIT */
+
+	default:
+		__xchg_called_with_bad_pointer();
+		return x;
+	}
+}
+
+#define xchg(ptr, x) \
+	((__typeof__(*(ptr))) __xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))
+
+#endif /* xchg */
 
 /*
  * Atomic compare and exchange.
@@ -18,7 +89,9 @@
  * Do not define __HAVE_ARCH_CMPXCHG because we want to use it to check whether
  * a cmpxchg primitive faster than repeated local irq save/restore exists.
  */
+#include <asm-generic/cmpxchg-local.h>
+
 #define cmpxchg(ptr, o, n) cmpxchg_local((ptr), (o), (n))
 #define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
 
-#endif
+#endif /* __ASM_GENERIC_CMPXCHG_H */
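
For context, a minimal caller sketch of the xchg() added above: on a UP kernel the store of the new value and the read of the old one form a single step with respect to local interrupts, so a one-shot flag can be claimed without a lock. The names below are hypothetical and not part of the patch.

/*
 * Illustrative sketch only: init_claimed and maybe_init() are made-up names.
 * Assumes xchg() is already in scope (e.g. via <linux/atomic.h>).
 */
static unsigned long init_claimed;

static void maybe_init(void)
{
	/* Store 1 and observe the previous value in one atomic step. */
	if (xchg(&init_claimed, 1) == 0) {
		/* The flag was 0: this caller claimed the one-shot init. */
	}
}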
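Likewise, cmpxchg() and cmpxchg64() here simply alias the cmpxchg_local()/cmpxchg64_local() primitives from asm-generic/cmpxchg-local.h, which is sufficient when there is no other CPU to race with. A hypothetical retry-loop sketch of how a caller typically uses it:

/* Illustrative sketch only: event_count and count_event() are made-up names. */
static unsigned long event_count;

static void count_event(void)
{
	unsigned long old, new;

	do {
		old = event_count;
		new = old + 1;
		/*
		 * cmpxchg() returns the value it actually found at
		 * &event_count; if that is not old, an interrupt handler
		 * updated the counter meanwhile and we retry.
		 */
	} while (cmpxchg(&event_count, old, new) != old);
}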