@@ -97,19 +97,22 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
 
 static inline int arch_spin_trylock(arch_spinlock_t *lock)
 {
-	unsigned long tmp;
+	unsigned long contended, res;
 	u32 slock;
 
-	__asm__ __volatile__(
-"	ldrex	%0, [%2]\n"
-"	subs	%1, %0, %0, ror #16\n"
-"	addeq	%0, %0, %3\n"
-"	strexeq	%1, %0, [%2]"
-	: "=&r" (slock), "=&r" (tmp)
-	: "r" (&lock->slock), "I" (1 << TICKET_SHIFT)
-	: "cc");
-
-	if (tmp == 0) {
+	do {
+		__asm__ __volatile__(
+		"	ldrex	%0, [%3]\n"
+		"	mov	%2, #0\n"
+		"	subs	%1, %0, %0, ror #16\n"
+		"	addeq	%0, %0, %4\n"
+		"	strexeq	%2, %0, [%3]"
+		: "=&r" (slock), "=&r" (contended), "=r" (res)
+		: "r" (&lock->slock), "I" (1 << TICKET_SHIFT)
+		: "cc");
+	} while (res);
+
+	if (!contended) {
 		smp_mb();
 		return 1;
 	} else {
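
The old sequence used a single temporary, tmp, both for the free-lock test and
for the strex status, so a strex that failed for incidental reasons (the
exclusive monitor can be cleared even when no other CPU holds the lock) was
indistinguishable from contention, and arch_spin_trylock() could return
failure on a free lock. The new version separates the two: contended records
whether the owner and next-ticket halves of slock differed, res records the
strex result, and the ldrex/strex sequence is retried while res is non-zero.

For illustration only, here is a minimal C11 sketch of the same policy, using
a weak compare-and-swap as a stand-in for strex (both may fail spuriously on a
free lock). The type and function names are invented for this sketch; only
TICKET_SHIFT = 16 and the halves-equal test are taken from the patch:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

#define TICKET_SHIFT	16	/* from the patch: "I" (1 << TICKET_SHIFT), ror #16 */

struct ticket_lock_sketch {		/* invented name, not the kernel type */
	_Atomic uint32_t slock;		/* one half: next ticket; other half: owner */
};

static bool trylock_sketch(struct ticket_lock_sketch *lock)
{
	uint32_t old = atomic_load_explicit(&lock->slock, memory_order_relaxed);

	for (;;) {
		/* Lock is free iff the two 16-bit halves are equal
		 * (the C analogue of "subs %1, %0, %0, ror #16"). */
		if ((old >> TICKET_SHIFT) != (old & 0xffff))
			return false;	/* genuinely contended: give up */

		/* Take the next ticket, as "addeq %0, %0, %4" does.  A weak
		 * CAS, like strex, may fail even though the lock is free; on
		 * failure 'old' is refreshed and we simply retry, which is
		 * what the new do/while loop adds. */
		if (atomic_compare_exchange_weak_explicit(&lock->slock, &old,
				old + (1u << TICKET_SHIFT),
				memory_order_acquire,	/* models smp_mb() on success */
				memory_order_relaxed))
			return true;
	}
}

The design point is the same as in the patch: a spurious store failure and
observed contention are distinct outcomes, and only the latter should make
trylock report failure.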