@@ -60,31 +60,37 @@ static __inline__ void change_bit(int nr, volatile unsigned long * addr)
 static __inline__ int test_and_set_bit(int nr, volatile unsigned long * addr)
 {
 	unsigned long mask = 1UL << CHOP_SHIFTCOUNT(nr);
-	unsigned long oldbit;
+	unsigned long old;
 	unsigned long flags;
+	int set;
 
 	addr += (nr >> SHIFT_PER_LONG);
 	_atomic_spin_lock_irqsave(addr, flags);
-	oldbit = *addr;
-	*addr = oldbit | mask;
+	old = *addr;
+	set = (old & mask) ? 1 : 0;
+	if (!set)
+		*addr = old | mask;
 	_atomic_spin_unlock_irqrestore(addr, flags);
 
-	return (oldbit & mask) ? 1 : 0;
+	return set;
 }
 
 static __inline__ int test_and_clear_bit(int nr, volatile unsigned long * addr)
 {
 	unsigned long mask = 1UL << CHOP_SHIFTCOUNT(nr);
-	unsigned long oldbit;
+	unsigned long old;
 	unsigned long flags;
+	int set;
 
 	addr += (nr >> SHIFT_PER_LONG);
 	_atomic_spin_lock_irqsave(addr, flags);
-	oldbit = *addr;
-	*addr = oldbit & ~mask;
+	old = *addr;
+	set = (old & mask) ? 1 : 0;
+	if (set)
+		*addr = old & ~mask;
 	_atomic_spin_unlock_irqrestore(addr, flags);
 
-	return (oldbit & mask) ? 1 : 0;
+	return set;
 }
 
 static __inline__ int test_and_change_bit(int nr, volatile unsigned long * addr)