|
@@ -16,8 +16,6 @@
|
|
|
|
|
|
#include <linux/compiler.h>
|
|
#include <linux/compiler.h>
|
|
#include <asm/byteorder.h>
|
|
#include <asm/byteorder.h>
|
|
-#include <asm/system.h>
|
|
|
|
-#include <asm/atomic.h>
|
|
|
|
|
|
|
|
#ifdef __KERNEL__
|
|
#ifdef __KERNEL__
|
|
|
|
|
|
@@ -33,6 +31,87 @@
|
|
#define smp_mb__before_clear_bit() barrier()
|
|
#define smp_mb__before_clear_bit() barrier()
|
|
#define smp_mb__after_clear_bit() barrier()
|
|
#define smp_mb__after_clear_bit() barrier()
|
|
|
|
|
|
|
|
+#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
|
|
|
|
/*
 * Atomically AND the complement of @mask into *@v (i.e. clear the @mask
 * bits) and return the value that was in *@v before the update.
 *
 * The load-locked/store-conditional style loop works as follows:
 * ICC3.Z is set before the load; the conditional store and the
 * conditional "clear ICC3.Z" only execute if the LD.P reservation still
 * holds, so if the store did not happen ICC3.Z remains set and the
 * `beq icc3,#0,0b` retries the whole sequence.
 * NOTE(review): per-instruction semantics inferred from the asm template
 * and its original comments — confirm against the FR-V ISA manual.
 */
static inline
unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v)
{
	unsigned long old, tmp;

	asm volatile(
		"0: \n"
		" orcc gr0,gr0,gr0,icc3 \n" /* set ICC3.Z */
		" ckeq icc3,cc7 \n"
		" ld.p %M0,%1 \n" /* LD.P/ORCR are atomic */
		" orcr cc7,cc7,cc3 \n" /* set CC3 to true */
		" and%I3 %1,%3,%2 \n" /* tmp = old & ~mask */
		" cst.p %2,%M0 ,cc3,#1 \n" /* if store happens... */
		" corcc gr29,gr29,gr0 ,cc3,#1 \n" /* ... clear ICC3.Z */
		" beq icc3,#0,0b \n" /* retry if the store did not land */
		: "+U"(*v), "=&r"(old), "=r"(tmp)
		: "NPr"(~mask)
		: "memory", "cc7", "cc3", "icc3"
		);

	return old;
}
|
|
|
|
+
|
|
|
|
/*
 * Atomically OR @mask into *@v (i.e. set the @mask bits) and return the
 * value that was in *@v before the update.
 *
 * Same retry scheme as the other mask ops: ICC3.Z is set before the
 * load, the conditional store (and the conditional clearing of ICC3.Z)
 * only execute if the LD.P reservation still holds, and
 * `beq icc3,#0,0b` loops until the store succeeds.
 * NOTE(review): per-instruction semantics inferred from the asm template
 * and its original comments — confirm against the FR-V ISA manual.
 */
static inline
unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v)
{
	unsigned long old, tmp;

	asm volatile(
		"0: \n"
		" orcc gr0,gr0,gr0,icc3 \n" /* set ICC3.Z */
		" ckeq icc3,cc7 \n"
		" ld.p %M0,%1 \n" /* LD.P/ORCR are atomic */
		" orcr cc7,cc7,cc3 \n" /* set CC3 to true */
		" or%I3 %1,%3,%2 \n" /* tmp = old | mask */
		" cst.p %2,%M0 ,cc3,#1 \n" /* if store happens... */
		" corcc gr29,gr29,gr0 ,cc3,#1 \n" /* ... clear ICC3.Z */
		" beq icc3,#0,0b \n" /* retry if the store did not land */
		: "+U"(*v), "=&r"(old), "=r"(tmp)
		: "NPr"(mask)
		: "memory", "cc7", "cc3", "icc3"
		);

	return old;
}
|
|
|
|
+
|
|
|
|
/*
 * Atomically XOR @mask into *@v (i.e. toggle the @mask bits) and return
 * the value that was in *@v before the update.
 *
 * Same retry scheme as the other mask ops: ICC3.Z is set before the
 * load, the conditional store (and the conditional clearing of ICC3.Z)
 * only execute if the LD.P reservation still holds, and
 * `beq icc3,#0,0b` loops until the store succeeds.
 * NOTE(review): per-instruction semantics inferred from the asm template
 * and its original comments — confirm against the FR-V ISA manual.
 */
static inline
unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v)
{
	unsigned long old, tmp;

	asm volatile(
		"0: \n"
		" orcc gr0,gr0,gr0,icc3 \n" /* set ICC3.Z */
		" ckeq icc3,cc7 \n"
		" ld.p %M0,%1 \n" /* LD.P/ORCR are atomic */
		" orcr cc7,cc7,cc3 \n" /* set CC3 to true */
		" xor%I3 %1,%3,%2 \n" /* tmp = old ^ mask */
		" cst.p %2,%M0 ,cc3,#1 \n" /* if store happens... */
		" corcc gr29,gr29,gr0 ,cc3,#1 \n" /* ... clear ICC3.Z */
		" beq icc3,#0,0b \n" /* retry if the store did not land */
		: "+U"(*v), "=&r"(old), "=r"(tmp)
		: "NPr"(mask)
		: "memory", "cc7", "cc3", "icc3"
		);

	return old;
}
|
|
|
|
+
|
|
|
|
#else

/*
 * CONFIG_FRV_OUTOFLINE_ATOMIC_OPS: the mask operations are provided
 * out of line (defined elsewhere in the arch code) with the same
 * contracts — atomically apply the mask to *@v and return the old value.
 */
extern unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v);
extern unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v);
extern unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v);

#endif

/*
 * Convenience wrappers: atomically clear/set the @mask bits in *@v.
 * The returned old value from the underlying helper is discarded.
 */
#define atomic_clear_mask(mask, v) atomic_test_and_ANDNOT_mask((mask), (v))
#define atomic_set_mask(mask, v) atomic_test_and_OR_mask((mask), (v))
|
|
|
|
+
|
|
static inline int test_and_clear_bit(int nr, volatile void *addr)
|
|
static inline int test_and_clear_bit(int nr, volatile void *addr)
|
|
{
|
|
{
|
|
volatile unsigned long *ptr = addr;
|
|
volatile unsigned long *ptr = addr;
|