@@ -2348,7 +2348,7 @@ static int em_sysenter(struct x86_emulate_ctxt *ctxt)
 	 * Not recognized on AMD in compat mode (but is recognized in legacy
 	 * mode).
 	 */
-	if ((ctxt->mode == X86EMUL_MODE_PROT32) && (efer & EFER_LMA)
+	if ((ctxt->mode != X86EMUL_MODE_PROT64) && (efer & EFER_LMA)
 	    && !vendor_intel(ctxt))
 		return emulate_ud(ctxt);
 
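The hunk above widens the #UD check. With EFER.LMA set, a guest that is not
in 64-bit mode is in a compatibility mode, and that includes 16-bit protected
mode, so testing for X86EMUL_MODE_PROT32 alone missed a case. A minimal
sketch of the resulting predicate; MODE_* and the helper are hypothetical
stand-ins for the emulator's X86EMUL_MODE_* constants and the inline check:

	enum mode { MODE_PROT16, MODE_PROT32, MODE_PROT64 };

	/* On AMD, SYSENTER #UDs in any compat mode (LMA=1 but not
	 * 64-bit mode), not only the 32-bit one. */
	static int sysenter_is_ud(enum mode m, int lma, int is_intel)
	{
		return m != MODE_PROT64 && lma && !is_intel;
	}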
@@ -2359,25 +2359,13 @@ static int em_sysenter(struct x86_emulate_ctxt *ctxt)
 	setup_syscalls_segments(ctxt, &cs, &ss);
 
 	ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
-	switch (ctxt->mode) {
-	case X86EMUL_MODE_PROT32:
-		if ((msr_data & 0xfffc) == 0x0)
-			return emulate_gp(ctxt, 0);
-		break;
-	case X86EMUL_MODE_PROT64:
-		if (msr_data == 0x0)
-			return emulate_gp(ctxt, 0);
-		break;
-	default:
-		break;
-	}
+	if ((msr_data & 0xfffc) == 0x0)
+		return emulate_gp(ctxt, 0);
 
 	ctxt->eflags &= ~(EFLG_VM | EFLG_IF);
-	cs_sel = (u16)msr_data;
-	cs_sel &= ~SELECTOR_RPL_MASK;
+	cs_sel = (u16)msr_data & ~SELECTOR_RPL_MASK;
 	ss_sel = cs_sel + 8;
-	ss_sel &= ~SELECTOR_RPL_MASK;
-	if (ctxt->mode == X86EMUL_MODE_PROT64 || (efer & EFER_LMA)) {
+	if (efer & EFER_LMA) {
 		cs.d = 0;
 		cs.l = 1;
 	}
 
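The second hunk replaces the per-mode switch with the single architectural
check: #GP(0) is raised when bits 15:2 of IA32_SYSENTER_CS are zero, in every
mode, so (msr_data & 0xfffc) == 0 is the complete test, and the separate
64-bit branch was both redundant and too weak (a selector such as 0x3 must
fault in 64-bit mode as well). Clearing RPL once when cs_sel is computed also
makes the second mask on ss_sel unnecessary, since adding 8 to a selector
whose low two bits are clear cannot set them; and the cs.l decision can key
on EFER.LMA alone because PROT64 already implies LMA. A standalone sketch of
the selector math, with SELECTOR_RPL_MASK redefined locally for illustration:

	#include <assert.h>
	#include <stdint.h>

	#define SELECTOR_RPL_MASK 0x3	/* low two bits of a selector */

	int main(void)
	{
		uint64_t msr_data = 0x13;	/* selector 0x10, RPL bits set */
		uint16_t cs_sel = (uint16_t)msr_data & ~SELECTOR_RPL_MASK;
		uint16_t ss_sel = cs_sel + 8;

		assert(cs_sel == 0x10);
		/* +8 cannot touch bits 1:0, so no second mask is needed. */
		assert((ss_sel & SELECTOR_RPL_MASK) == 0);
		return 0;
	}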
@@ -2386,10 +2374,11 @@ static int em_sysenter(struct x86_emulate_ctxt *ctxt)
 	ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
 
 	ops->get_msr(ctxt, MSR_IA32_SYSENTER_EIP, &msr_data);
-	ctxt->_eip = msr_data;
+	ctxt->_eip = (efer & EFER_LMA) ? msr_data : (u32)msr_data;
 
 	ops->get_msr(ctxt, MSR_IA32_SYSENTER_ESP, &msr_data);
-	*reg_write(ctxt, VCPU_REGS_RSP) = msr_data;
+	*reg_write(ctxt, VCPU_REGS_RSP) = (efer & EFER_LMA) ? msr_data :
+					  (u32)msr_data;
 
 	return X86EMUL_CONTINUE;
 }
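The final hunk truncates the SYSENTER_EIP/ESP MSR values outside long mode:
legacy-mode SYSENTER loads the 32-bit EIP and ESP rather than RIP and RSP, so
only the low 32 bits of each MSR may reach the guest, while with EFER.LMA set
the full 64-bit values are used. A runnable sketch of the same expression
(function and variable names are illustrative, not from the patch):

	#include <stdint.h>
	#include <stdio.h>

	/* Mirrors the patched assignments: full 64-bit value when LMA=1,
	 * low 32 bits otherwise. */
	static uint64_t sysenter_dest(uint64_t msr_data, int lma)
	{
		return lma ? msr_data : (uint32_t)msr_data;
	}

	int main(void)
	{
		uint64_t msr = 0xffffffff81000000ULL;
		printf("LMA=1: %#llx\n",
		       (unsigned long long)sysenter_dest(msr, 1));
		printf("LMA=0: %#llx\n",
		       (unsigned long long)sysenter_dest(msr, 0));
		return 0;
	}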