@@ -2,6 +2,7 @@
 #define _ALPHA_BITOPS_H
 
 #include <asm/compiler.h>
+#include <asm/barrier.h>
 
 /*
  * Copyright 1994, Linus Torvalds.
@@ -69,6 +70,13 @@ clear_bit(unsigned long nr, volatile void * addr)
 	:"Ir" (1UL << (nr & 31)), "m" (*m));
 }
 
+static inline void
+clear_bit_unlock(unsigned long nr, volatile void * addr)
+{
+	smp_mb();
+	clear_bit(nr, addr);
+}
+
 /*
  * WARNING: non atomic version.
  */
@@ -80,6 +88,13 @@ __clear_bit(unsigned long nr, volatile void * addr)
 	*m &= ~(1 << (nr & 31));
 }
 
+static inline void
+__clear_bit_unlock(unsigned long nr, volatile void * addr)
+{
+	smp_mb();
+	__clear_bit(nr, addr);
+}
+
 static inline void
 change_bit(unsigned long nr, volatile void * addr)
 {
@@ -139,6 +154,33 @@ test_and_set_bit(unsigned long nr, volatile void *addr)
 	return oldbit != 0;
 }
 
+static inline int
+test_and_set_bit_lock(unsigned long nr, volatile void *addr)
+{
+	unsigned long oldbit;
+	unsigned long temp;
+	int *m = ((int *) addr) + (nr >> 5);
+
+	__asm__ __volatile__(
+	"1:	ldl_l %0,%4\n"
+	"	and %0,%3,%2\n"
+	"	bne %2,2f\n"
+	"	xor %0,%3,%0\n"
+	"	stl_c %0,%1\n"
+	"	beq %0,3f\n"
+	"2:\n"
+#ifdef CONFIG_SMP
+	"	mb\n"
+#endif
+	".subsection 2\n"
+	"3:	br 1b\n"
+	".previous"
+	:"=&r" (temp), "=m" (*m), "=&r" (oldbit)
+	:"Ir" (1UL << (nr & 31)), "m" (*m) : "memory");
+
+	return oldbit != 0;
+}
+
 /*
  * WARNING: non atomic version.
  */
@@ -376,7 +418,6 @@ static inline unsigned int hweight8(unsigned int w)
 #else
 #include <asm-generic/bitops/hweight.h>
 #endif
-#include <asm-generic/bitops/lock.h>
 
 #endif /* __KERNEL__ */
 