@@ -706,11 +706,33 @@ static void __hyp_text __vgic_v3_write_igrpen1(struct kvm_vcpu *vcpu, u32 vmcr,
 	__vgic_v3_write_vmcr(vmcr);
 }
 
+static void __hyp_text __vgic_v3_read_bpr0(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
+{
+	vcpu_set_reg(vcpu, rt, __vgic_v3_get_bpr0(vmcr));
+}
+
 static void __hyp_text __vgic_v3_read_bpr1(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
 {
 	vcpu_set_reg(vcpu, rt, __vgic_v3_get_bpr1(vmcr));
 }
 
+static void __hyp_text __vgic_v3_write_bpr0(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
+{
+	u64 val = vcpu_get_reg(vcpu, rt);
+	u8 bpr_min = __vgic_v3_bpr_min() - 1;
+
+	/* Enforce BPR limiting */
+	if (val < bpr_min)
+		val = bpr_min;
+
+	val <<= ICH_VMCR_BPR0_SHIFT;
+	val &= ICH_VMCR_BPR0_MASK;
+	vmcr &= ~ICH_VMCR_BPR0_MASK;
+	vmcr |= val;
+
+	__vgic_v3_write_vmcr(vmcr);
+}
+
 static void __hyp_text __vgic_v3_write_bpr1(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
 {
 	u64 val = vcpu_get_reg(vcpu, rt);
@@ -888,6 +910,12 @@ int __hyp_text __vgic_v3_perform_cpuif_access(struct kvm_vcpu *vcpu)
 	case SYS_ICC_HPPIR1_EL1:
 		fn = __vgic_v3_read_hppir;
 		break;
+	case SYS_ICC_BPR0_EL1:
+		if (is_read)
+			fn = __vgic_v3_read_bpr0;
+		else
+			fn = __vgic_v3_write_bpr0;
+		break;
 	default:
 		return 0;
 	}
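
For reference, the BPR0 write path added above is a plain read-modify-write of the 3-bit VBPR0 field (bits [23:21]) in ICH_VMCR_EL2. The standalone sketch below replays the same clamp/shift/mask sequence in user space: ICH_VMCR_BPR0_SHIFT and ICH_VMCR_BPR0_MASK are mirrored from include/linux/irqchip/arm-gic-v3.h, while BPR0_MIN is a hypothetical stand-in for __vgic_v3_bpr_min() - 1, whose real value depends on the preemption bits advertised in ICH_VTR_EL2. Treat it as an illustration of the bit manipulation, not the kernel code itself.

#include <stdint.h>
#include <stdio.h>

/* Mirrored from include/linux/irqchip/arm-gic-v3.h */
#define ICH_VMCR_BPR0_SHIFT	21
#define ICH_VMCR_BPR0_MASK	(7UL << ICH_VMCR_BPR0_SHIFT)

/*
 * Hypothetical stand-in for __vgic_v3_bpr_min() - 1; the real value
 * depends on how many preemption bits ICH_VTR_EL2 advertises.
 */
#define BPR0_MIN	2

static uint32_t update_bpr0(uint32_t vmcr, uint64_t val)
{
	/* Enforce BPR limiting, as the trap handler does */
	if (val < BPR0_MIN)
		val = BPR0_MIN;

	/* Move the value into bits [23:21], dropping anything wider */
	val <<= ICH_VMCR_BPR0_SHIFT;
	val &= ICH_VMCR_BPR0_MASK;

	/* Clear the old field, then merge in the new one */
	vmcr &= ~ICH_VMCR_BPR0_MASK;
	vmcr |= val;

	return vmcr;
}

int main(void)
{
	uint32_t vmcr = 0x00e00000;	/* VBPR0 field currently 7 */

	printf("%#010x\n", update_bpr0(vmcr, 1));	/* clamped to BPR0_MIN */
	printf("%#010x\n", update_bpr0(vmcr, 5));	/* installed as-is */
	return 0;
}

Compiled and run, this prints 0x00400000 (the write of 1 clamped up to 2) and 0x00a00000 (5 installed unmodified), i.e. the values a guest would then read back through the emulated ICC_BPR0_EL1.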