
x86/paravirt: add spin_lock_flags lock op

It is useful for a pv_lock_ops backend to know whether interrupts are
enabled or not in the context in which a spin_lock is being called.  This
allows it to enable interrupts while spinning, which could be
particularly helpful when spinning becomes blocking.

The default implementation just calls the normal spin_lock op,
ignoring the flags.

Signed-off-by: Jeremy Fitzhardinge <jeremy.fitzhardinge@citrix.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
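
A sketch of what a backend might do with the flags (illustrative only, not
part of this commit; demo_spin_lock_flags is a made-up name, and building it
on the __ticket_* primitives and raw_local_irq_enable()/disable() is this
sketch's assumption).  If the caller's saved flags show interrupts were on
(X86_EFLAGS_IF), the backend can let them back in while it waits:

	static void demo_spin_lock_flags(struct raw_spinlock *lock, unsigned long flags)
	{
		/* Fast path: try to take the lock without touching IRQ state. */
		while (!__ticket_spin_trylock(lock)) {
			/* Caller had interrupts on, so let them in while we wait. */
			if (flags & X86_EFLAGS_IF)
				raw_local_irq_enable();

			while (__ticket_spin_is_locked(lock))
				cpu_relax();

			/* Re-disable before retrying, restoring the caller's state. */
			if (flags & X86_EFLAGS_IF)
				raw_local_irq_disable();
		}
	}

Whether enabling interrupts mid-acquire is safe is the backend's call; the
default added by this patch simply ignores the flags, so behaviour is
unchanged until a backend overrides the op.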
Jeremy Fitzhardinge 16 years ago
parent
commit
63d3a75d6f
3 changed files with 20 additions and 2 deletions
  1. arch/x86/kernel/paravirt-spinlocks.c (+6, -0)
  2. include/asm-x86/paravirt.h (+7, -0)
  3. include/asm-x86/spinlock.h (+7, -2)

+ 6 - 0
arch/x86/kernel/paravirt-spinlocks.c

@@ -7,12 +7,18 @@
 
 #include <asm/paravirt.h>
 
+static void default_spin_lock_flags(struct raw_spinlock *lock, unsigned long flags)
+{
+	__raw_spin_lock(lock);
+}
+
 struct pv_lock_ops pv_lock_ops = {
 #ifdef CONFIG_SMP
 	.spin_is_locked = __ticket_spin_is_locked,
 	.spin_is_contended = __ticket_spin_is_contended,
 
 	.spin_lock = __ticket_spin_lock,
+	.spin_lock_flags = default_spin_lock_flags,
 	.spin_trylock = __ticket_spin_trylock,
 	.spin_unlock = __ticket_spin_unlock,
 #endif
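
A backend would then install its own op at init time in the usual pv_ops
fashion, replacing the default assigned above (demo_spin_lock_flags being
the hypothetical function sketched earlier):

	pv_lock_ops.spin_lock_flags = demo_spin_lock_flags;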

+ 7 - 0
include/asm-x86/paravirt.h

@@ -333,6 +333,7 @@ struct pv_lock_ops {
 	int (*spin_is_locked)(struct raw_spinlock *lock);
 	int (*spin_is_contended)(struct raw_spinlock *lock);
 	void (*spin_lock)(struct raw_spinlock *lock);
+	void (*spin_lock_flags)(struct raw_spinlock *lock, unsigned long flags);
 	int (*spin_trylock)(struct raw_spinlock *lock);
 	void (*spin_unlock)(struct raw_spinlock *lock);
 };
@@ -1414,6 +1415,12 @@ static __always_inline void __raw_spin_lock(struct raw_spinlock *lock)
 	PVOP_VCALL1(pv_lock_ops.spin_lock, lock);
 }
 
+static __always_inline void __raw_spin_lock_flags(struct raw_spinlock *lock,
+						  unsigned long flags)
+{
+	PVOP_VCALL2(pv_lock_ops.spin_lock_flags, lock, flags);
+}
+
 static __always_inline int __raw_spin_trylock(struct raw_spinlock *lock)
 {
 	return PVOP_CALL1(int, pv_lock_ops.spin_trylock, lock);

+ 7 - 2
include/asm-x86/spinlock.h

@@ -182,8 +182,6 @@ static __always_inline void __ticket_spin_unlock(raw_spinlock_t *lock)
 }
 #endif
 
-#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
-
 #ifdef CONFIG_PARAVIRT
 /*
  * Define virtualization-friendly old-style lock byte lock, for use in
@@ -272,6 +270,13 @@ static __always_inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
 	__ticket_spin_unlock(lock);
 }
+
+static __always_inline void __raw_spin_lock_flags(raw_spinlock_t *lock,
+						  unsigned long flags)
+{
+	__raw_spin_lock(lock);
+}
+
 #endif	/* CONFIG_PARAVIRT */
 
 static inline void __raw_spin_unlock_wait(raw_spinlock_t *lock)
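
For context, the flags reach this op via the spin_lock_irqsave() path; from
memory of kernel/spinlock.c in this era (lockdep variations omitted), the
shape is roughly:

	unsigned long __lockfunc _spin_lock_irqsave(spinlock_t *lock)
	{
		unsigned long flags;

		local_irq_save(flags);	/* record the caller's IRQ state, then disable */
		preempt_disable();
		_raw_spin_lock_flags(lock, &flags);	/* reaches __raw_spin_lock_flags() */
		return flags;
	}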