@@ -217,7 +217,7 @@ void kvm_mmu_set_mask_ptes(u64 user_mask, u64 accessed_mask,
 }
 EXPORT_SYMBOL_GPL(kvm_mmu_set_mask_ptes);
 
-static int is_write_protection(struct kvm_vcpu *vcpu)
+static bool is_write_protection(struct kvm_vcpu *vcpu)
 {
 	return kvm_read_cr0_bits(vcpu, X86_CR0_WP);
 }
@@ -2432,6 +2432,7 @@ static int init_kvm_softmmu(struct kvm_vcpu *vcpu)
 		r = paging32_init_context(vcpu);
 
 	vcpu->arch.mmu.base_role.cr4_pae = !!is_pae(vcpu);
+	vcpu->arch.mmu.base_role.cr0_wp = is_write_protection(vcpu);
 
 	return r;
 }
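
A minimal standalone sketch of why the int -> bool change in is_write_protection() matters, assuming cr0_wp is a 1-bit bitfield in kvm_mmu_page_role (the struct role and read_cr0_wp_bits() below are hypothetical stand-ins, not kernel code): kvm_read_cr0_bits(vcpu, X86_CR0_WP) returns the raw masked CR0 bit (bit 16), so routing it through an int and assigning it to a 1-bit field truncates the value to 0, while a bool collapses any non-zero value to 1.

#include <stdbool.h>
#include <stdio.h>

#define X86_CR0_WP (1UL << 16)	/* CR0.WP, bit 16, as in the x86 headers */

/* Hypothetical stand-in for the 1-bit cr0_wp field in base_role. */
struct role {
	unsigned cr0_wp:1;
};

/* Stand-in for kvm_read_cr0_bits(vcpu, X86_CR0_WP): returns the masked bit. */
static unsigned long read_cr0_wp_bits(void)
{
	return X86_CR0_WP;	/* pretend the guest has CR0.WP set */
}

int main(void)
{
	struct role r = { 0 };

	/* Old signature (int): only the low bit of 0x10000 survives -> 0. */
	int as_int = read_cr0_wp_bits();
	r.cr0_wp = as_int;
	printf("via int:  cr0_wp = %u\n", r.cr0_wp);	/* prints 0 */

	/* New signature (bool): any non-zero value normalizes to 1. */
	bool as_bool = read_cr0_wp_bits();
	r.cr0_wp = as_bool;
	printf("via bool: cr0_wp = %u\n", r.cr0_wp);	/* prints 1 */

	return 0;
}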