@@ -14,7 +14,6 @@
 #endif
 
 #include <linux/compiler.h>
-#include <linux/irqflags.h>
 #include <linux/types.h>
 #include <asm/barrier.h>
 #include <asm/byteorder.h>		/* sigh ... */
@@ -44,6 +43,24 @@
 #define smp_mb__before_clear_bit()	smp_mb__before_llsc()
 #define smp_mb__after_clear_bit()	smp_llsc_mb()
 
+
+/*
+ * These are the "slower" versions of the functions and are in bitops.c.
+ * These functions call raw_local_irq_{save,restore}().
+ */
+void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
+void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
+void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
+int __mips_test_and_set_bit(unsigned long nr,
+			    volatile unsigned long *addr);
+int __mips_test_and_set_bit_lock(unsigned long nr,
+				 volatile unsigned long *addr);
+int __mips_test_and_clear_bit(unsigned long nr,
+			      volatile unsigned long *addr);
+int __mips_test_and_change_bit(unsigned long nr,
+			       volatile unsigned long *addr);
+
+
 /*
  * set_bit - Atomically set a bit in memory
  * @nr: the bit to set
@@ -57,7 +74,7 @@
 static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long temp;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
@@ -92,17 +109,8 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 			: "=&r" (temp), "+m" (*m)
 			: "ir" (1UL << bit));
 		} while (unlikely(!temp));
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		*a |= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		__mips_set_bit(nr, addr);
 }
 
 /*
@@ -118,7 +126,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long temp;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
@@ -153,17 +161,8 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 			: "=&r" (temp), "+m" (*m)
 			: "ir" (~(1UL << bit)));
 		} while (unlikely(!temp));
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		*a &= ~mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		__mips_clear_bit(nr, addr);
 }
 
 /*
@@ -191,7 +190,7 @@ static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
  */
 static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -220,17 +219,8 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 			: "=&r" (temp), "+m" (*m)
 			: "ir" (1UL << bit));
 		} while (unlikely(!temp));
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		*a ^= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		__mips_change_bit(nr, addr);
 }
 
 /*
@@ -244,7 +234,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 static inline int test_and_set_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	smp_mb__before_llsc();
@@ -281,18 +271,8 @@ static inline int test_and_set_bit(unsigned long nr,
 		} while (unlikely(!res));
 
 		res = temp & (1UL << bit);
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		res = (mask & *a);
-		*a |= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		res = __mips_test_and_set_bit(nr, addr);
 
 	smp_llsc_mb();
 
@@ -310,7 +290,7 @@ static inline int test_and_set_bit(unsigned long nr,
 static inline int test_and_set_bit_lock(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
@@ -345,18 +325,8 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 		} while (unlikely(!res));
 
 		res = temp & (1UL << bit);
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		res = (mask & *a);
-		*a |= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		res = __mips_test_and_set_bit_lock(nr, addr);
 
 	smp_llsc_mb();
 
@@ -373,7 +343,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 static inline int test_and_clear_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	smp_mb__before_llsc();
@@ -428,18 +398,8 @@ static inline int test_and_clear_bit(unsigned long nr,
 		} while (unlikely(!res));
 
 		res = temp & (1UL << bit);
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		res = (mask & *a);
-		*a &= ~mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		res = __mips_test_and_clear_bit(nr, addr);
 
 	smp_llsc_mb();
 
@@ -457,7 +417,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 static inline int test_and_change_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	unsigned short bit = nr & SZLONG_MASK;
+	int bit = nr & SZLONG_MASK;
 	unsigned long res;
 
 	smp_mb__before_llsc();
@@ -494,18 +454,8 @@ static inline int test_and_change_bit(unsigned long nr,
 		} while (unlikely(!res));
 
 		res = temp & (1UL << bit);
-	} else {
-		volatile unsigned long *a = addr;
-		unsigned long mask;
-		unsigned long flags;
-
-		a += nr >> SZLONG_LOG;
-		mask = 1UL << bit;
-		raw_local_irq_save(flags);
-		res = (mask & *a);
-		*a ^= mask;
-		raw_local_irq_restore(flags);
-	}
+	} else
+		res = __mips_test_and_change_bit(nr, addr);
 
 	smp_llsc_mb();
 
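
Note: the prototypes added above are implemented out of line in bitops.c (arch/mips/lib/), which is where the raw_local_irq_{save,restore}() calls, and with them the irqflags.h dependency, now live. The bodies mirror the else branches deleted from the inlines above, compiled once instead of at every call site. Below is a minimal sketch of two of them; the include set, the EXPORT_SYMBOL lines, and the "!= 0" normalization of the return value are assumptions of this sketch, not necessarily the exact code merged:

#include <linux/bitops.h>	/* SZLONG_LOG, SZLONG_MASK via asm/bitops.h */
#include <linux/irqflags.h>
#include <linux/export.h>

/*
 * Slow path for set_bit(): same logic as the removed inline else branch.
 * Only non-LL/SC configurations reach this, so masking interrupts is how
 * the read-modify-write is kept atomic on the local CPU.
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *a = addr;
	unsigned int bit = nr & SZLONG_MASK;
	unsigned long mask;
	unsigned long flags;

	a += nr >> SZLONG_LOG;		/* word containing the bit */
	mask = 1UL << bit;
	raw_local_irq_save(flags);	/* the irqflags.h user now lives here */
	*a |= mask;
	raw_local_irq_restore(flags);
}
EXPORT_SYMBOL(__mips_set_bit);		/* assumed, since the inlines are used by modules */

/*
 * Slow path for test_and_set_bit(): sample the old bit inside the same
 * interrupt-disabled window that performs the update.
 */
int __mips_test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *a = addr;
	unsigned int bit = nr & SZLONG_MASK;
	unsigned long mask;
	unsigned long flags;
	int res;

	a += nr >> SZLONG_LOG;
	mask = 1UL << bit;
	raw_local_irq_save(flags);
	res = (mask & *a) != 0;		/* read the old value before setting it */
	*a |= mask;
	raw_local_irq_restore(flags);
	return res;
}
EXPORT_SYMBOL(__mips_test_and_set_bit);

The payoff is visible in the first hunk: bitops.h no longer needs to include <linux/irqflags.h>, and the fast LL/SC paths in the inlines are unaffected; the function call is taken only on kernels that would have used the interrupt-masking fallback anyway.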