@@ -387,11 +387,13 @@ static bool __hyp_text fixup_guest_exit(struct kvm_vcpu *vcpu, u64 *exit_code)
	 * same PC once the SError has been injected, and replay the
	 * trapping instruction.
	 */
-	if (*exit_code == ARM_EXCEPTION_TRAP && !__populate_fault_info(vcpu))
+	if (*exit_code != ARM_EXCEPTION_TRAP)
+		goto exit;
+
+	if (!__populate_fault_info(vcpu))
		return true;

-	if (static_branch_unlikely(&vgic_v2_cpuif_trap) &&
-	    *exit_code == ARM_EXCEPTION_TRAP) {
+	if (static_branch_unlikely(&vgic_v2_cpuif_trap)) {
		bool valid;

		valid = kvm_vcpu_trap_get_class(vcpu) == ESR_ELx_EC_DABT_LOW &&
@@ -417,11 +419,12 @@ static bool __hyp_text fixup_guest_exit(struct kvm_vcpu *vcpu, u64 *exit_code)
			*vcpu_cpsr(vcpu) &= ~DBG_SPSR_SS;
			*exit_code = ARM_EXCEPTION_EL1_SERROR;
		}
+
+		goto exit;
	}

	if (static_branch_unlikely(&vgic_v3_cpuif_trap) &&
-	    *exit_code == ARM_EXCEPTION_TRAP &&
	    (kvm_vcpu_trap_get_class(vcpu) == ESR_ELx_EC_SYS64 ||
	     kvm_vcpu_trap_get_class(vcpu) == ESR_ELx_EC_CP15_32)) {
		int ret = __vgic_v3_perform_cpuif_access(vcpu);
@@ -430,6 +433,7 @@ static bool __hyp_text fixup_guest_exit(struct kvm_vcpu *vcpu, u64 *exit_code)
		return true;
	}

+exit:
	/* Return to the host kernel and handle the exit */
	return false;
 }