@@ -131,6 +131,35 @@ static void __hyp_text __gic_v3_set_lr(u64 val, int lr)
 	}
 }
 
+static void __hyp_text save_maint_int_state(struct kvm_vcpu *vcpu, int nr_lr)
+{
+	struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
+	int i;
+	bool expect_mi;
+
+	expect_mi = !!(cpu_if->vgic_hcr & ICH_HCR_UIE);
+
+	for (i = 0; i < nr_lr; i++) {
+		if (!(vcpu->arch.vgic_cpu.live_lrs & (1UL << i)))
+			continue;
+
+		expect_mi |= (!(cpu_if->vgic_lr[i] & ICH_LR_HW) &&
+			      (cpu_if->vgic_lr[i] & ICH_LR_EOI));
+	}
+
+	if (expect_mi) {
+		cpu_if->vgic_misr = read_gicreg(ICH_MISR_EL2);
+
+		if (cpu_if->vgic_misr & ICH_MISR_EOI)
+			cpu_if->vgic_eisr = read_gicreg(ICH_EISR_EL2);
+		else
+			cpu_if->vgic_eisr = 0;
+	} else {
+		cpu_if->vgic_misr = 0;
+		cpu_if->vgic_eisr = 0;
+	}
+}
+
 void __hyp_text __vgic_v3_save_state(struct kvm_vcpu *vcpu)
 {
 	struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
@@ -148,8 +177,6 @@ void __hyp_text __vgic_v3_save_state(struct kvm_vcpu *vcpu)
 	int i;
 	u32 max_lr_idx, nr_pri_bits;
 
-	cpu_if->vgic_misr = read_gicreg(ICH_MISR_EL2);
-	cpu_if->vgic_eisr = read_gicreg(ICH_EISR_EL2);
 	cpu_if->vgic_elrsr = read_gicreg(ICH_ELSR_EL2);
 
 	write_gicreg(0, ICH_HCR_EL2);
@@ -157,6 +184,8 @@ void __hyp_text __vgic_v3_save_state(struct kvm_vcpu *vcpu)
 	max_lr_idx = vtr_to_max_lr_idx(val);
 	nr_pri_bits = vtr_to_nr_pri_bits(val);
 
+	save_maint_int_state(vcpu, max_lr_idx + 1);
+
 	for (i = 0; i <= max_lr_idx; i++) {
 		if (vcpu->arch.vgic_cpu.live_lrs & (1UL << i))
 			cpu_if->vgic_lr[i] = __gic_v3_get_lr(i);