@@ -36,28 +36,41 @@ void __hyp_text __vgic_v2_save_state(struct kvm_vcpu *vcpu)
 
 	nr_lr = vcpu->arch.vgic_cpu.nr_lr;
 	cpu_if->vgic_vmcr = readl_relaxed(base + GICH_VMCR);
-	cpu_if->vgic_misr = readl_relaxed(base + GICH_MISR);
-	eisr0 = readl_relaxed(base + GICH_EISR0);
-	elrsr0 = readl_relaxed(base + GICH_ELRSR0);
-	if (unlikely(nr_lr > 32)) {
-		eisr1 = readl_relaxed(base + GICH_EISR1);
-		elrsr1 = readl_relaxed(base + GICH_ELRSR1);
-	} else {
-		eisr1 = elrsr1 = 0;
-	}
+
+	if (vcpu->arch.vgic_cpu.live_lrs) {
+		eisr0 = readl_relaxed(base + GICH_EISR0);
+		elrsr0 = readl_relaxed(base + GICH_ELRSR0);
+		cpu_if->vgic_misr = readl_relaxed(base + GICH_MISR);
+		cpu_if->vgic_apr = readl_relaxed(base + GICH_APR);
+
+		if (unlikely(nr_lr > 32)) {
+			eisr1 = readl_relaxed(base + GICH_EISR1);
+			elrsr1 = readl_relaxed(base + GICH_ELRSR1);
+		} else {
+			eisr1 = elrsr1 = 0;
+		}
+
 #ifdef CONFIG_CPU_BIG_ENDIAN
-	cpu_if->vgic_eisr = ((u64)eisr0 << 32) | eisr1;
-	cpu_if->vgic_elrsr = ((u64)elrsr0 << 32) | elrsr1;
+		cpu_if->vgic_eisr = ((u64)eisr0 << 32) | eisr1;
+		cpu_if->vgic_elrsr = ((u64)elrsr0 << 32) | elrsr1;
 #else
-	cpu_if->vgic_eisr = ((u64)eisr1 << 32) | eisr0;
-	cpu_if->vgic_elrsr = ((u64)elrsr1 << 32) | elrsr0;
+		cpu_if->vgic_eisr = ((u64)eisr1 << 32) | eisr0;
+		cpu_if->vgic_elrsr = ((u64)elrsr1 << 32) | elrsr0;
 #endif
-	cpu_if->vgic_apr = readl_relaxed(base + GICH_APR);
 
-	writel_relaxed(0, base + GICH_HCR);
+		for (i = 0; i < nr_lr; i++)
+			if (vcpu->arch.vgic_cpu.live_lrs & (1UL << i))
+				cpu_if->vgic_lr[i] = readl_relaxed(base + GICH_LR0 + (i * 4));
 
-	for (i = 0; i < nr_lr; i++)
-		cpu_if->vgic_lr[i] = readl_relaxed(base + GICH_LR0 + (i * 4));
+		writel_relaxed(0, base + GICH_HCR);
+
+		vcpu->arch.vgic_cpu.live_lrs = 0;
+	} else {
+		cpu_if->vgic_eisr = 0;
+		cpu_if->vgic_elrsr = ~0UL;
+		cpu_if->vgic_misr = 0;
+		cpu_if->vgic_apr = 0;
+	}
 }
 
 /* vcpu is already in the HYP VA space */
@@ -68,15 +81,30 @@ void __hyp_text __vgic_v2_restore_state(struct kvm_vcpu *vcpu)
 	struct vgic_dist *vgic = &kvm->arch.vgic;
 	void __iomem *base = kern_hyp_va(vgic->vctrl_base);
 	int i, nr_lr;
+	u64 live_lrs = 0;
 
 	if (!base)
 		return;
 
-	writel_relaxed(cpu_if->vgic_hcr, base + GICH_HCR);
-	writel_relaxed(cpu_if->vgic_vmcr, base + GICH_VMCR);
-	writel_relaxed(cpu_if->vgic_apr, base + GICH_APR);
-
 	nr_lr = vcpu->arch.vgic_cpu.nr_lr;
+
 	for (i = 0; i < nr_lr; i++)
-		writel_relaxed(cpu_if->vgic_lr[i], base + GICH_LR0 + (i * 4));
+		if (cpu_if->vgic_lr[i] & GICH_LR_STATE)
+			live_lrs |= 1UL << i;
+
+	if (live_lrs) {
+		writel_relaxed(cpu_if->vgic_hcr, base + GICH_HCR);
+		writel_relaxed(cpu_if->vgic_apr, base + GICH_APR);
+		for (i = 0; i < nr_lr; i++) {
+			u32 val = 0;
+
+			if (live_lrs & (1UL << i))
+				val = cpu_if->vgic_lr[i];
+
+			writel_relaxed(val, base + GICH_LR0 + (i * 4));
+		}
+	}
+
+	writel_relaxed(cpu_if->vgic_vmcr, base + GICH_VMCR);
+	vcpu->arch.vgic_cpu.live_lrs = live_lrs;
 }