|
@@ -691,7 +691,8 @@ static int kvmppc_handle_ext(struct kvm_vcpu *vcpu, unsigned int exit_nr,
|
|
#endif
|
|
#endif
|
|
t->fp_state.fpscr = vcpu->arch.fpscr;
|
|
t->fp_state.fpscr = vcpu->arch.fpscr;
|
|
t->fpexc_mode = 0;
|
|
t->fpexc_mode = 0;
|
|
- kvmppc_load_up_fpu();
|
|
|
|
|
|
+ enable_kernel_fp();
|
|
|
|
+ load_fp_state(&t->fp_state);
|
|
}
|
|
}
|
|
|
|
|
|
if (msr & MSR_VEC) {
|
|
if (msr & MSR_VEC) {
|
|
@@ -699,7 +700,8 @@ static int kvmppc_handle_ext(struct kvm_vcpu *vcpu, unsigned int exit_nr,
|
|
memcpy(t->vr_state.vr, vcpu->arch.vr, sizeof(vcpu->arch.vr));
|
|
memcpy(t->vr_state.vr, vcpu->arch.vr, sizeof(vcpu->arch.vr));
|
|
t->vr_state.vscr = vcpu->arch.vscr;
|
|
t->vr_state.vscr = vcpu->arch.vscr;
|
|
t->vrsave = -1;
|
|
t->vrsave = -1;
|
|
- kvmppc_load_up_altivec();
|
|
|
|
|
|
+ enable_kernel_altivec();
|
|
|
|
+ load_vr_state(&t->vr_state);
|
|
#endif
|
|
#endif
|
|
}
|
|
}
|
|
|
|
|
|
@@ -722,11 +724,15 @@ static void kvmppc_handle_lost_ext(struct kvm_vcpu *vcpu)
|
|
if (!lost_ext)
|
|
if (!lost_ext)
|
|
return;
|
|
return;
|
|
|
|
|
|
- if (lost_ext & MSR_FP)
|
|
|
|
- kvmppc_load_up_fpu();
|
|
|
|
|
|
+ if (lost_ext & MSR_FP) {
|
|
|
|
+ enable_kernel_fp();
|
|
|
|
+ load_fp_state(&current->thread.fp_state);
|
|
|
|
+ }
|
|
#ifdef CONFIG_ALTIVEC
|
|
#ifdef CONFIG_ALTIVEC
|
|
- if (lost_ext & MSR_VEC)
|
|
|
|
- kvmppc_load_up_altivec();
|
|
|
|
|
|
+ if (lost_ext & MSR_VEC) {
|
|
|
|
+ enable_kernel_altivec();
|
|
|
|
+ load_vr_state(&current->thread.vr_state);
|
|
|
|
+ }
|
|
#endif
|
|
#endif
|
|
current->thread.regs->msr |= lost_ext;
|
|
current->thread.regs->msr |= lost_ext;
|
|
}
|
|
}
|