@@ -144,6 +144,25 @@ int handle_exit(struct kvm_vcpu *vcpu, struct kvm_run *run,
 {
 	exit_handle_fn exit_handler;
 
+	if (ARM_ABORT_PENDING(exception_index)) {
+		u8 hsr_ec = kvm_vcpu_trap_get_class(vcpu);
+
+		/*
+		 * HVC/SMC already have an adjusted PC, which we need
+		 * to correct in order to return to after having
+		 * injected the abort.
+		 */
+		if (hsr_ec == HSR_EC_HVC || hsr_ec == HSR_EC_SMC) {
+			u32 adj = kvm_vcpu_trap_il_is32bit(vcpu) ? 4 : 2;
+			*vcpu_pc(vcpu) -= adj;
+		}
+
+		kvm_inject_vabt(vcpu);
+		return 1;
+	}
+
+	exception_index = ARM_EXCEPTION_CODE(exception_index);
+
 	switch (exception_index) {
 	case ARM_EXCEPTION_IRQ:
 		return 1;
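
The ARM_ABORT_PENDING()/ARM_EXCEPTION_CODE() pair used above is defined outside this hunk. As a rough sketch of the tagging scheme the hunk implies, assuming the pending-abort flag is carried in a spare high bit of the exit code (the bit position and the ARM_EXIT_WITH_ABORT() helper below are illustrative assumptions, not taken from this patch):

/*
 * Illustrative sketch only: the bit position and the
 * ARM_EXIT_WITH_ABORT() helper name are assumptions.
 */
#define ARM_EXIT_WITH_ABORT_BIT	31

/* Tag an exit code so that handle_exit() injects a Virtual Abort. */
#define ARM_EXIT_WITH_ABORT(x)	((x) | (1U << ARM_EXIT_WITH_ABORT_BIT))

/* Is an abort pending on this exit code? */
#define ARM_ABORT_PENDING(x)	(!!((x) & (1U << ARM_EXIT_WITH_ABORT_BIT)))

/* Strip the tag to recover the plain exit code for the switch(). */
#define ARM_EXCEPTION_CODE(x)	((x) & ~(1U << ARM_EXIT_WITH_ABORT_BIT))

Under that assumption, a low-level exit path can return ARM_EXIT_WITH_ABORT(code): handle_exit() then injects the abort into the guest and returns 1 to resume it, first rewinding the PC for HVC/SMC exits, whose PC has already been advanced past the trapping instruction (4 bytes for a 32-bit encoding, 2 for Thumb). Untagged exit codes pass through ARM_EXCEPTION_CODE() unchanged and are handled by the usual switch.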