@@ -90,6 +90,12 @@ static void xen_pmu_arch_init(void)
 			k7_counters_mirrored = 0;
 			break;
 		}
+	} else if (boot_cpu_data.x86_vendor == X86_VENDOR_HYGON) {
+		amd_num_counters = F10H_NUM_COUNTERS;
+		amd_counters_base = MSR_K7_PERFCTR0;
+		amd_ctrls_base = MSR_K7_EVNTSEL0;
+		amd_msr_step = 1;
+		k7_counters_mirrored = 0;
 	} else {
 		uint32_t eax, ebx, ecx, edx;
 
@@ -285,7 +291,7 @@ static bool xen_amd_pmu_emulate(unsigned int msr, u64 *val, bool is_read)
 
 bool pmu_msr_read(unsigned int msr, uint64_t *val, int *err)
 {
-	if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD) {
+	if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL) {
 		if (is_amd_pmu_msr(msr)) {
 			if (!xen_amd_pmu_emulate(msr, val, 1))
 				*val = native_read_msr_safe(msr, err);
@@ -308,7 +314,7 @@ bool pmu_msr_write(unsigned int msr, uint32_t low, uint32_t high, int *err)
 {
 	uint64_t val = ((uint64_t)high << 32) | low;
 
-	if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD) {
+	if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL) {
 		if (is_amd_pmu_msr(msr)) {
 			if (!xen_amd_pmu_emulate(msr, &val, 0))
 				*err = native_write_msr_safe(msr, low, high);
@@ -379,7 +385,7 @@ static unsigned long long xen_intel_read_pmc(int counter)
 
 unsigned long long xen_read_pmc(int counter)
 {
-	if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD)
+	if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)
 		return xen_amd_read_pmc(counter);
 	else
 		return xen_intel_read_pmc(counter);