@@ -338,6 +338,9 @@ EXPORT_SYMBOL_GPL(kvm_lmsw);
 
 void kvm_set_cr4(struct kvm_vcpu *vcpu, unsigned long cr4)
 {
+	unsigned long old_cr4 = vcpu->arch.cr4;
+	unsigned long pdptr_bits = X86_CR4_PGE | X86_CR4_PSE | X86_CR4_PAE;
+
 	if (cr4 & CR4_RESERVED_BITS) {
 		printk(KERN_DEBUG "set_cr4: #GP, reserved bits\n");
 		kvm_inject_gp(vcpu, 0);
@@ -351,7 +354,8 @@ void kvm_set_cr4(struct kvm_vcpu *vcpu, unsigned long cr4)
 			kvm_inject_gp(vcpu, 0);
 			return;
 		}
-	} else if (is_paging(vcpu) && !is_pae(vcpu) && (cr4 & X86_CR4_PAE)
+	} else if (is_paging(vcpu) && (cr4 & X86_CR4_PAE)
+		   && ((cr4 ^ old_cr4) & pdptr_bits)
 		   && !load_pdptrs(vcpu, vcpu->arch.cr3)) {
 		printk(KERN_DEBUG "set_cr4: #GP, pdptrs reserved bits\n");
 		kvm_inject_gp(vcpu, 0);