
Searched refs:vcpu_to_pmu (Results 1 – 10 of 10) sorted by relevance

/openbmc/linux/arch/x86/kvm/svm/
pmu.c
83 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_is_valid_rdpmc_ecx()
94 return amd_pmc_idx_to_pmc(vcpu_to_pmu(vcpu), idx & ~(3u << 30)); in amd_rdpmc_ecx_to_pmc()
99 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_msr_idx_to_pmc()
110 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_is_valid_msr()
133 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_pmu_get_msr()
155 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_pmu_set_msr()
183 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_pmu_refresh()
222 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in amd_pmu_init()
/openbmc/linux/arch/x86/kvm/vmx/
pmu_intel.c
130 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_is_valid_rdpmc_ecx()
142 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_rdpmc_ecx_to_pmc()
202 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_is_valid_msr()
233 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_msr_idx_to_pmc()
250 vcpu_to_pmu(vcpu)->event_count--; in intel_pmu_release_guest_lbr_event()
257 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_pmu_create_guest_lbr_event()
335 __set_bit(INTEL_PMC_IDX_FIXED_VLBR, vcpu_to_pmu(vcpu)->pmc_in_use); in intel_pmu_handle_lbr_msrs_access()
339 clear_bit(INTEL_PMC_IDX_FIXED_VLBR, vcpu_to_pmu(vcpu)->pmc_in_use); in intel_pmu_handle_lbr_msrs_access()
350 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_pmu_get_msr()
393 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in intel_pmu_set_msr()
[all …]
nested.c
2666 kvm_pmu_has_perf_global_ctrl(vcpu_to_pmu(vcpu)) && in prepare_vmcs02()
2942 CC(!kvm_valid_perf_global_ctrl(vcpu_to_pmu(vcpu), in nested_vmx_check_host_state()
3061 CC(!kvm_valid_perf_global_ctrl(vcpu_to_pmu(vcpu), in nested_vmx_check_guest_state()
4573 kvm_pmu_has_perf_global_ctrl(vcpu_to_pmu(vcpu))) in load_vmcs12_host_state()
vmx.c
2437 if (data && !vcpu_to_pmu(vcpu)->version) in vmx_set_msr()
7150 struct kvm_pmu *pmu = vcpu_to_pmu(&vmx->vcpu); in atomic_switch_perf_msrs()
/openbmc/linux/arch/riscv/kvm/
vcpu_pmu.c
202 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in pmu_ctr_read()
265 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_incr_fw()
282 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_read_hpm()
316 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_num_ctrs()
326 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_ctr_info()
342 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_ctr_start()
398 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_ctr_stop()
465 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_ctr_cfg_match()
548 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_init()
613 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_riscv_vcpu_pmu_deinit()
vcpu_sbi_pmu.c
21 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_sbi_ext_pmu_handler()
76 struct kvm_pmu *kvpmu = vcpu_to_pmu(vcpu); in kvm_sbi_ext_pmu_probe()
/openbmc/linux/arch/x86/kvm/
pmu.c
456 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_handle_event()
522 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_rdpmc()
559 return kvm_pmu_has_perf_global_ctrl(vcpu_to_pmu(vcpu)); in kvm_pmu_is_valid_msr()
569 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_mark_pmc_in_use()
578 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_get_msr()
603 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_set_msr()
662 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_reset()
694 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_refresh()
737 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_init()
749 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_pmu_cleanup()
[all …]
pmu.h
7 #define vcpu_to_pmu(vcpu) (&(vcpu)->arch.pmu) macro
x86.c
12310 struct kvm_pmu *pmu = vcpu_to_pmu(vcpu); in kvm_arch_sched_in()
/openbmc/linux/arch/riscv/include/asm/
kvm_vcpu_pmu.h
56 #define vcpu_to_pmu(vcpu) (&(vcpu)->arch.pmu_context) macro
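
The pmu.h and kvm_vcpu_pmu.h hits are the two definition sites: vcpu_to_pmu() is a per-architecture accessor macro that returns a pointer to the PMU state embedded in the vcpu (vcpu->arch.pmu on x86, vcpu->arch.pmu_context on riscv), which is why every call site above binds the result to a local struct kvm_pmu * and operates on it in place. Below is a minimal standalone sketch of that pattern; it uses simplified stand-in structs rather than the kernel's real kvm_vcpu layout, and any field not shown in the two macro definitions above is illustrative only.

#include <stdio.h>

/* Simplified stand-in for the kernel's PMU state; the real struct kvm_pmu
 * carries counters, global control bits, and more. */
struct kvm_pmu {
	unsigned int version;
};

/* x86-style layout: the PMU state lives embedded inside the per-vcpu
 * architecture block, mirroring the pmu.h definition above. */
struct kvm_vcpu_arch {
	struct kvm_pmu pmu;
};

struct kvm_vcpu {
	int vcpu_id;
	struct kvm_vcpu_arch arch;
};

/* The accessor hands back a pointer into the vcpu itself, so callers
 * read and write per-vcpu PMU state with no copies and no lookups. */
#define vcpu_to_pmu(vcpu) (&(vcpu)->arch.pmu)

int main(void)
{
	struct kvm_vcpu vcpu = { .vcpu_id = 0 };
	struct kvm_pmu *pmu = vcpu_to_pmu(&vcpu);

	pmu->version = 2; /* mutates vcpu.arch.pmu directly */
	printf("vcpu %d: pmu version %u\n", vcpu.vcpu_id, vcpu.arch.pmu.version);
	return 0;
}

The riscv variant differs only in the field it dereferences (arch.pmu_context) and in the local naming convention (kvpmu), as the vcpu_pmu.c and vcpu_sbi_pmu.c hits above show.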