@@ -958,11 +958,12 @@ void vmalloc_sync_all(void)
 	for (address = start; address <= VMALLOC_END; address += PGDIR_SIZE) {
 		if (!test_bit(pgd_index(address), insync)) {
 			const pgd_t *pgd_ref = pgd_offset_k(address);
+			unsigned long flags;
 			struct page *page;
 
 			if (pgd_none(*pgd_ref))
 				continue;
-			spin_lock(&pgd_lock);
+			spin_lock_irqsave(&pgd_lock, flags);
 			list_for_each_entry(page, &pgd_list, lru) {
 				pgd_t *pgd;
 				pgd = (pgd_t *)page_address(page) + pgd_index(address);
@@ -971,7 +972,7 @@ void vmalloc_sync_all(void)
 				else
 					BUG_ON(pgd_page_vaddr(*pgd) != pgd_page_vaddr(*pgd_ref));
 			}
-			spin_unlock(&pgd_lock);
+			spin_unlock_irqrestore(&pgd_lock, flags);
 			set_bit(pgd_index(address), insync);
 		}
 		if (address == start)
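
[Aside, not part of the patch: a minimal sketch of the spin_lock_irqsave()/spin_unlock_irqrestore() pattern the hunks above switch to. The identifiers example_lock and example_critical_section are hypothetical, used only to illustrate the API; they do not appear in the patched code.]

	#include <linux/spinlock.h>

	/* hypothetical lock, for illustration only */
	static DEFINE_SPINLOCK(example_lock);

	static void example_critical_section(void)
	{
		unsigned long flags;

		/*
		 * spin_lock_irqsave() disables local interrupts and saves the
		 * previous interrupt state in 'flags', so the lock can also be
		 * taken safely by code running in interrupt context.
		 */
		spin_lock_irqsave(&example_lock, flags);

		/* ... touch data that interrupt-context users may also access ... */

		/* spin_unlock_irqrestore() puts the interrupt state back as it was. */
		spin_unlock_irqrestore(&example_lock, flags);
	}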