@@ -460,35 +460,32 @@ static void reset_pmcr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *r)
 	vcpu_sys_reg(vcpu, PMCR_EL0) = val;
 }
 
-static bool pmu_access_el0_disabled(struct kvm_vcpu *vcpu)
+static bool check_pmu_access_disabled(struct kvm_vcpu *vcpu, u64 flags)
 {
 	u64 reg = vcpu_sys_reg(vcpu, PMUSERENR_EL0);
+	bool enabled = (reg & flags) || vcpu_mode_priv(vcpu);
 
-	return !((reg & ARMV8_PMU_USERENR_EN) || vcpu_mode_priv(vcpu));
+	return !enabled;
 }
 
-static bool pmu_write_swinc_el0_disabled(struct kvm_vcpu *vcpu)
+static bool pmu_access_el0_disabled(struct kvm_vcpu *vcpu)
 {
-	u64 reg = vcpu_sys_reg(vcpu, PMUSERENR_EL0);
+	return check_pmu_access_disabled(vcpu, ARMV8_PMU_USERENR_EN);
+}
 
-	return !((reg & (ARMV8_PMU_USERENR_SW | ARMV8_PMU_USERENR_EN))
-		 || vcpu_mode_priv(vcpu));
+static bool pmu_write_swinc_el0_disabled(struct kvm_vcpu *vcpu)
+{
+	return check_pmu_access_disabled(vcpu, ARMV8_PMU_USERENR_SW | ARMV8_PMU_USERENR_EN);
 }
 
 static bool pmu_access_cycle_counter_el0_disabled(struct kvm_vcpu *vcpu)
 {
-	u64 reg = vcpu_sys_reg(vcpu, PMUSERENR_EL0);
-
-	return !((reg & (ARMV8_PMU_USERENR_CR | ARMV8_PMU_USERENR_EN))
-		 || vcpu_mode_priv(vcpu));
+	return check_pmu_access_disabled(vcpu, ARMV8_PMU_USERENR_CR | ARMV8_PMU_USERENR_EN);
 }
 
 static bool pmu_access_event_counter_el0_disabled(struct kvm_vcpu *vcpu)
 {
-	u64 reg = vcpu_sys_reg(vcpu, PMUSERENR_EL0);
-
-	return !((reg & (ARMV8_PMU_USERENR_ER | ARMV8_PMU_USERENR_EN))
-		 || vcpu_mode_priv(vcpu));
+	return check_pmu_access_disabled(vcpu, ARMV8_PMU_USERENR_ER | ARMV8_PMU_USERENR_EN);
 }
 
 static bool access_pmcr(struct kvm_vcpu *vcpu, struct sys_reg_params *p,