|
@@ -53,12 +53,12 @@ _raw_compare_and_swap(volatile unsigned int *lock,
|
|
|
*/
|
|
|
|
|
|
#define __raw_spin_is_locked(x) ((x)->owner_cpu != 0) /* nonzero owner_cpu == lock held */
|
|
|
-#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
|
|
|
#define __raw_spin_unlock_wait(lock) /* busy-wait until lock is released */ \
|
|
|
	do { while (__raw_spin_is_locked(lock)) \
|
|
|
		       _raw_spin_relax(lock); } while (0) /* _raw_spin_relax: backoff between polls */
|
|
|
|
|
|
extern void _raw_spin_lock_wait(raw_spinlock_t *);
|
|
|
+extern void _raw_spin_lock_wait_flags(raw_spinlock_t *, unsigned long flags);
|
|
|
extern int _raw_spin_trylock_retry(raw_spinlock_t *);
|
|
|
extern void _raw_spin_relax(raw_spinlock_t *lock);
|
|
|
|
|
@@ -72,6 +72,17 @@ static inline void __raw_spin_lock(raw_spinlock_t *lp)
|
|
|
_raw_spin_lock_wait(lp);
|
|
|
}
|
|
|
|
|
|
+static inline void __raw_spin_lock_flags(raw_spinlock_t *lp,
|
|
|
+				       unsigned long flags)
|
|
|
+{ /* acquire lp; 'flags' is the caller's saved interrupt state for the contended path */
|
|
|
+	int old; /* previous owner_cpu value observed by the compare-and-swap */
|
|
|
+
|
|
|
+	old = _raw_compare_and_swap(&lp->owner_cpu, 0, ~smp_processor_id()); /* fast path: 0 -> our CPU tag (nonzero, see __raw_spin_is_locked) */
|
|
|
+	if (likely(old == 0)) /* lock was free and is now ours */
|
|
|
+		return;
|
|
|
+	_raw_spin_lock_wait_flags(lp, flags); /* slow path; presumably toggles interrupts per 'flags' while spinning — confirm in lock.c */
|
|
|
+}
|
|
|
+
|
|
|
static inline int __raw_spin_trylock(raw_spinlock_t *lp)
|
|
|
{
|
|
|
int old;
|