@@ -510,7 +510,6 @@ EXPORT_SYMBOL_GPL(set_cr0);
 
 void lmsw(struct kvm_vcpu *vcpu, unsigned long msw)
 {
-	kvm_arch_ops->decache_cr0_cr4_guest_bits(vcpu);
 	set_cr0(vcpu, (vcpu->cr0 & ~0x0ful) | (msw & 0x0f));
 }
 EXPORT_SYMBOL_GPL(lmsw);
@@ -1117,7 +1116,6 @@ int emulate_clts(struct kvm_vcpu *vcpu)
 {
 	unsigned long cr0;
 
-	kvm_arch_ops->decache_cr0_cr4_guest_bits(vcpu);
 	cr0 = vcpu->cr0 & ~CR0_TS_MASK;
 	kvm_arch_ops->set_cr0(vcpu, cr0);
 	return X86EMUL_CONTINUE;
@@ -1318,7 +1316,7 @@ void realmode_lmsw(struct kvm_vcpu *vcpu, unsigned long msw,
 
 unsigned long realmode_get_cr(struct kvm_vcpu *vcpu, int cr)
 {
-	kvm_arch_ops->decache_cr0_cr4_guest_bits(vcpu);
+	kvm_arch_ops->decache_cr4_guest_bits(vcpu);
 	switch (cr) {
 	case 0:
 		return vcpu->cr0;
@@ -1934,7 +1932,7 @@ static int kvm_vcpu_ioctl_get_sregs(struct kvm_vcpu *vcpu,
 	sregs->gdt.limit = dt.limit;
 	sregs->gdt.base = dt.base;
 
-	kvm_arch_ops->decache_cr0_cr4_guest_bits(vcpu);
+	kvm_arch_ops->decache_cr4_guest_bits(vcpu);
 	sregs->cr0 = vcpu->cr0;
 	sregs->cr2 = vcpu->cr2;
 	sregs->cr3 = vcpu->cr3;
@@ -1985,7 +1983,7 @@ static int kvm_vcpu_ioctl_set_sregs(struct kvm_vcpu *vcpu,
 #endif
 	vcpu->apic_base = sregs->apic_base;
 
-	kvm_arch_ops->decache_cr0_cr4_guest_bits(vcpu);
+	kvm_arch_ops->decache_cr4_guest_bits(vcpu);
 
 	mmu_reset_needed |= vcpu->cr0 != sregs->cr0;
 	kvm_arch_ops->set_cr0(vcpu, sregs->cr0);