@@ -23,6 +23,7 @@
 #include <asm/hvcall.h>
 #include <asm/iseries/hv_call.h>
 #include <asm/smp.h>
+#include <asm/firmware.h>

 void __spin_yield(raw_spinlock_t *lock)
 {
@@ -39,13 +40,12 @@ void __spin_yield(raw_spinlock_t *lock)
 	rmb();
 	if (lock->slock != lock_value)
 		return;		/* something has changed */
-#ifdef CONFIG_PPC_ISERIES
-	HvCall2(HvCallBaseYieldProcessor, HvCall_YieldToProc,
-		((u64)holder_cpu << 32) | yield_count);
-#else
-	plpar_hcall_norets(H_CONFER, get_hard_smp_processor_id(holder_cpu),
-			   yield_count);
-#endif
+	if (firmware_has_feature(FW_FEATURE_ISERIES))
+		HvCall2(HvCallBaseYieldProcessor, HvCall_YieldToProc,
+			((u64)holder_cpu << 32) | yield_count);
+	else
+		plpar_hcall_norets(H_CONFER,
+			get_hard_smp_processor_id(holder_cpu), yield_count);
 }

 /*
@@ -69,13 +69,12 @@ void __rw_yield(raw_rwlock_t *rw)
 	rmb();
 	if (rw->lock != lock_value)
 		return;		/* something has changed */
-#ifdef CONFIG_PPC_ISERIES
-	HvCall2(HvCallBaseYieldProcessor, HvCall_YieldToProc,
-		((u64)holder_cpu << 32) | yield_count);
-#else
-	plpar_hcall_norets(H_CONFER, get_hard_smp_processor_id(holder_cpu),
-			   yield_count);
-#endif
+	if (firmware_has_feature(FW_FEATURE_ISERIES))
+		HvCall2(HvCallBaseYieldProcessor, HvCall_YieldToProc,
+			((u64)holder_cpu << 32) | yield_count);
+	else
+		plpar_hcall_norets(H_CONFER,
+			get_hard_smp_processor_id(holder_cpu), yield_count);
 }
 #endif