Lines matching refs: msr — Xen PMU MSR handling (Linux, arch/x86/xen/pmu.c)

is_amd_pmu_msr()  [msr: argument]
  132  static inline bool is_amd_pmu_msr(unsigned int msr)
  138          if ((msr >= MSR_F15H_PERF_CTL &&
  139               msr < MSR_F15H_PERF_CTR + (amd_num_counters * 2)) ||
  140              (msr >= MSR_K7_EVNTSEL0 &&
  141               msr < MSR_K7_PERFCTR0 + amd_num_counters))
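
The check classifies an MSR as an AMD perf MSR if it falls in either the Family-15h PERF_CTL/PERF_CTR block (control and counter registers interleave, hence the * 2) or the legacy K7 EVNTSEL/PERFCTR block. A stand-alone sketch of that test: the MSR_* addresses are the architectural values, while amd_num_counters is a file-local variable in pmu.c (6 assumed here, the Family-15h counter count).

    #include <stdbool.h>
    #include <stdio.h>

    #define MSR_F15H_PERF_CTL 0xc0010200    /* CTL0,CTR0,CTL1,CTR1,... */
    #define MSR_F15H_PERF_CTR 0xc0010201
    #define MSR_K7_EVNTSEL0   0xc0010000
    #define MSR_K7_PERFCTR0   0xc0010004

    static unsigned int amd_num_counters = 6;    /* assumed Fam15h value */

    static inline bool is_amd_pmu_msr(unsigned int msr)
    {
        /* Two MSRs per counter in the Fam15h block, one in the K7 block. */
        if ((msr >= MSR_F15H_PERF_CTL &&
             msr < MSR_F15H_PERF_CTR + (amd_num_counters * 2)) ||
            (msr >= MSR_K7_EVNTSEL0 &&
             msr < MSR_K7_PERFCTR0 + amd_num_counters))
            return true;
        return false;
    }

    int main(void)
    {
        printf("%d\n", is_amd_pmu_msr(0xc0010203));    /* F15H CTR1 -> 1 */
        printf("%d\n", is_amd_pmu_msr(0xc0011000));    /* unrelated -> 0 */
        return 0;
    }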

xen_intel_pmu_emulate()  [msr: argument]
  198  static bool xen_intel_pmu_emulate(unsigned int msr, u64 *val, int type,
  214          switch (msr) {
  252          if (msr == MSR_CORE_PERF_GLOBAL_OVF_CTRL)
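
xen_intel_pmu_emulate() switches on the MSR to find the matching slot in the Xen-shared Intel PMU context and satisfies the access from there. The match on MSR_CORE_PERF_GLOBAL_OVF_CTRL at line 252 suggests a special case: architecturally that register is a command register (writing it clears overflow bits in GLOBAL_STATUS; it holds no state of its own). A minimal sketch of the pattern — the context layout and the zero-after-write behaviour are assumptions, not the file's verbatim logic:

    #include <stdbool.h>
    #include <stdint.h>

    #define MSR_CORE_PERF_GLOBAL_STATUS   0x38e
    #define MSR_CORE_PERF_GLOBAL_CTRL     0x38f
    #define MSR_CORE_PERF_GLOBAL_OVF_CTRL 0x390

    /* Hypothetical stand-in for the Xen-shared Intel PMU context. */
    static struct {
        uint64_t global_status, global_ctrl, global_ovf_ctrl;
    } ctxt;

    static bool intel_emulate(unsigned int msr, uint64_t *val, bool is_read)
    {
        uint64_t *reg;

        switch (msr) {    /* map the MSR to its saved-context slot */
        case MSR_CORE_PERF_GLOBAL_STATUS:   reg = &ctxt.global_status;   break;
        case MSR_CORE_PERF_GLOBAL_CTRL:     reg = &ctxt.global_ctrl;     break;
        case MSR_CORE_PERF_GLOBAL_OVF_CTRL: reg = &ctxt.global_ovf_ctrl; break;
        default:
            return false;    /* not one of the emulated registers */
        }

        if (is_read) {
            *val = *reg;
        } else {
            *reg = *val;
            /* OVF_CTRL only clears status bits; it keeps no state,
             * so the saved copy is dropped again after the write. */
            if (msr == MSR_CORE_PERF_GLOBAL_OVF_CTRL)
                ctxt.global_ovf_ctrl = 0;
        }
        return true;
    }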

xen_amd_pmu_emulate()  [msr: argument]
  261  static bool xen_amd_pmu_emulate(unsigned int msr, u64 *val, bool is_read)
  274              ((msr >= MSR_K7_EVNTSEL0) && (msr <= MSR_K7_PERFCTR3)))
  275                  msr = get_fam15h_addr(msr);
  279                  if (msr == amd_ctrls_base + off) {
  283                  } else if (msr == amd_counters_base + off) {
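
Two things are visible here. First, on Family 15h the legacy K7 MSR addresses mirror the newer PERF_CTL/PERF_CTR pairs, so when k7_counters_mirrored is set a K7 address is first translated by get_fam15h_addr(). Second, the function walks the counters, comparing the (possibly translated) address against amd_ctrls_base + off and amd_counters_base + off, with off advancing by amd_msr_step (2 on Fam15h, where control and counter registers interleave; 1 for the flat K7 layout). A sketch of that loop under those assumptions, with plain arrays standing in for the Xen-shared context:

    #include <stdbool.h>
    #include <stdint.h>

    static uint32_t amd_ctrls_base    = 0xc0010200;  /* MSR_F15H_PERF_CTL */
    static uint32_t amd_counters_base = 0xc0010201;  /* MSR_F15H_PERF_CTR */
    static int amd_msr_step = 2, amd_num_counters = 6;  /* Fam15h values */

    static uint64_t saved_ctrl[6], saved_cntr[6];    /* hypothetical storage */

    static bool amd_emulate(unsigned int msr, uint64_t *val, bool is_read)
    {
        uint32_t off = 0;

        for (int i = 0; i < amd_num_counters; i++, off += amd_msr_step) {
            uint64_t *reg = NULL;

            if (msr == amd_ctrls_base + off)
                reg = &saved_ctrl[i];
            else if (msr == amd_counters_base + off)
                reg = &saved_cntr[i];

            if (reg) {
                if (is_read)
                    *val = *reg;
                else
                    *reg = *val;
                return true;
            }
        }
        return false;    /* unknown counter MSR */
    }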

pmu_msr_chk_emulated()  [msr: argument]
  302  static bool pmu_msr_chk_emulated(unsigned int msr, uint64_t *val, bool is_read,
  307          if (is_amd_pmu_msr(msr))
  308                  *emul = xen_amd_pmu_emulate(msr, val, is_read);
  309          else if (is_intel_pmu_msr(msr, &type, &index))
  310                  *emul = xen_intel_pmu_emulate(msr, val, type, index, is_read);
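
pmu_msr_chk_emulated() is the dispatcher the read/write wrappers below share: it returns false when the MSR is not a PMU register at all, and otherwise reports through *emul whether the vendor emulator satisfied the access from the Xen-shared context. Reassembled from the matched lines (the final else/return is inferred, so treat it as a sketch):

    #include <stdbool.h>
    #include <stdint.h>

    /* Vendor checks/emulators as declared elsewhere in pmu.c. */
    static bool is_amd_pmu_msr(unsigned int msr);
    static bool is_intel_pmu_msr(unsigned int msr, int *type, int *index);
    static bool xen_amd_pmu_emulate(unsigned int msr, uint64_t *val, bool is_read);
    static bool xen_intel_pmu_emulate(unsigned int msr, uint64_t *val, int type,
                                      int index, bool is_read);

    static bool pmu_msr_chk_emulated(unsigned int msr, uint64_t *val, bool is_read,
                                     bool *emul)
    {
        int type, index;

        if (is_amd_pmu_msr(msr))
            *emul = xen_amd_pmu_emulate(msr, val, is_read);
        else if (is_intel_pmu_msr(msr, &type, &index))
            *emul = xen_intel_pmu_emulate(msr, val, type, index, is_read);
        else
            return false;    /* inferred: not a PMU MSR */

        return true;
    }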

pmu_msr_read()  [msr: argument]
  317  bool pmu_msr_read(unsigned int msr, uint64_t *val, int *err)
  321          if (!pmu_msr_chk_emulated(msr, val, true, &emulated))
  325                  *val = err ? native_read_msr_safe(msr, err)
  326                             : native_read_msr(msr);
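
The read wrapper composes straightforwardly: a false return means "not a PMU MSR, fall through to the normal rdmsr path"; on true the value is either already emulated or fetched from hardware, with err selecting the fault-tolerant variant. Filled in from the matched lines — the !emulated guard and the final return are inferred:

    bool pmu_msr_read(unsigned int msr, uint64_t *val, int *err)
    {
        bool emulated;

        if (!pmu_msr_chk_emulated(msr, val, true, &emulated))
            return false;    /* not a PMU MSR: caller uses plain rdmsr */

        if (!emulated)
            *val = err ? native_read_msr_safe(msr, err)  /* sets *err on fault */
                       : native_read_msr(msr);           /* a #GP is fatal */

        return true;
    }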

pmu_msr_write()  [msr: argument]
  332  bool pmu_msr_write(unsigned int msr, uint32_t low, uint32_t high, int *err)
  337          if (!pmu_msr_chk_emulated(msr, &val, false, &emulated))
  342                  *err = native_write_msr_safe(msr, low, high);
  344                  native_write_msr(msr, low, high);
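
The write side mirrors it, except that the two 32-bit wrmsr halves must be joined into the 64-bit value the emulators work on before the check; on the pass-through path the halves go to the native writer unchanged. Again the lines between the matches are inferred:

    bool pmu_msr_write(unsigned int msr, uint32_t low, uint32_t high, int *err)
    {
        uint64_t val = ((uint64_t)high << 32) | low;    /* inferred join */
        bool emulated;

        if (!pmu_msr_chk_emulated(msr, &val, false, &emulated))
            return false;

        if (!emulated) {
            if (err)
                *err = native_write_msr_safe(msr, low, high);
            else
                native_write_msr(msr, low, high);
        }

        return true;
    }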

xen_amd_read_pmc()  [msr: local]
  358          uint32_t msr;
  361                  msr = amd_counters_base + (counter * amd_msr_step);
  362                  return native_read_msr_safe(msr, &err);
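
This is the pass-through path of xen_amd_read_pmc(), taken when no Xen PMU data is available: the rdpmc counter index is converted to an MSR address with base-plus-step arithmetic. A worked example with the Fam15h values assumed above:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t amd_counters_base = 0xc0010201;    /* MSR_F15H_PERF_CTR */
        int amd_msr_step = 2;                       /* CTL/CTR interleave */

        for (int counter = 0; counter < 4; counter++)
            printf("counter %d -> MSR 0x%x\n", counter,
                   amd_counters_base + counter * amd_msr_step);
        /* counter 3 -> MSR 0xc0010207, i.e. MSR_F15H_PERF_CTR3 */
        return 0;
    }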

xen_intel_read_pmc()  [msr: local]
  379          uint32_t msr;
  383                  msr = MSR_CORE_PERF_FIXED_CTR0 + (counter & 0xffff);
  385                  msr = MSR_IA32_PERFCTR0 + counter;
  387                  return native_read_msr_safe(msr, &err);
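
The Intel pass-through path decodes the rdpmc convention instead: a set type bit (bit 30, per the architectural rdpmc encoding) selects the fixed-function bank, with the low bits indexing into it; otherwise the index is a general-purpose counter offset from IA32_PMC0. The guard on the elided line between the two assignments is not part of the match, so the selector test below is an assumption:

    #include <stdint.h>
    #include <stdio.h>

    #define FIXED_BANK_BIT            (1u << 30)  /* rdpmc fixed-counter flag */
    #define MSR_CORE_PERF_FIXED_CTR0  0x309
    #define MSR_IA32_PERFCTR0         0x0c1

    static uint32_t pmc_to_msr(uint32_t counter)
    {
        if (counter & FIXED_BANK_BIT)    /* fixed-function counters */
            return MSR_CORE_PERF_FIXED_CTR0 + (counter & 0xffff);
        return MSR_IA32_PERFCTR0 + counter;    /* general-purpose */
    }

    int main(void)
    {
        printf("0x%x\n", pmc_to_msr(1));                  /* 0xc2: IA32_PMC1 */
        printf("0x%x\n", pmc_to_msr(FIXED_BANK_BIT | 2)); /* 0x30b: FIXED_CTR2 */
        return 0;
    }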