@@ -150,7 +150,7 @@ EXPORT_SYMBOL(local_flush_tlb_page);
  */
 #ifdef CONFIG_SMP
 
-static DEFINE_SPINLOCK(tlbivax_lock);
+static DEFINE_RAW_SPINLOCK(tlbivax_lock);
 
 static int mm_is_core_local(struct mm_struct *mm)
 {
@@ -232,10 +232,10 @@ void __flush_tlb_page(struct mm_struct *mm, unsigned long vmaddr,
 	if (mmu_has_feature(MMU_FTR_USE_TLBIVAX_BCAST)) {
 		int lock = mmu_has_feature(MMU_FTR_LOCK_BCAST_INVAL);
 		if (lock)
-			spin_lock(&tlbivax_lock);
+			raw_spin_lock(&tlbivax_lock);
 		_tlbivax_bcast(vmaddr, pid, tsize, ind);
 		if (lock)
-			raw_spin_unlock(&tlbivax_lock);
+			raw_spin_unlock(&tlbivax_lock);
 		goto bail;
 	} else {
 		struct tlb_flush_param p = {