/*
 * Lines matching "pmc" -- excerpts from KVM's x86 vPMU header
 * (arch/x86/kvm/pmu.h): the helpers that operate on struct kvm_pmc,
 * a single virtualized performance monitoring counter.
 */

#define pmc_to_pmu(pmc) (&(pmc)->vcpu->arch.pmu)

/* Vendor (Intel/AMD) callback, from struct kvm_pmu_ops. */
bool (*hw_event_available)(struct kvm_pmc *pmc);

static inline u64 pmc_bitmask(struct kvm_pmc *pmc)
{
	struct kvm_pmu *pmu = pmc_to_pmu(pmc);

	return pmu->counter_bitmask[pmc->type];
}

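/*
 * Illustrative example (values not from the header): if the guest's
 * CPUID advertises 48-bit GP counters, counter_bitmask[KVM_PMC_GP] is
 * (1ULL << 48) - 1, so every value handed back to the guest below is
 * truncated to the architecturally visible width.
 */
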
static inline u64 pmc_read_counter(struct kvm_pmc *pmc)
{
	u64 counter, enabled, running;

	counter = pmc->counter;
	if (pmc->perf_event && !pmc->is_paused)
		counter += perf_event_read_value(pmc->perf_event,
						 &enabled, &running);
	return counter & pmc_bitmask(pmc);
}

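/*
 * pmc->counter is only a snapshot: while a perf event is live and not
 * paused, the count accumulated by perf since the event was programmed
 * is folded in at read time, then masked to the counter's width.
 */
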
static inline void pmc_write_counter(struct kvm_pmc *pmc, u64 val)
{
	pmc->counter += val - pmc_read_counter(pmc);
	pmc->counter &= pmc_bitmask(pmc);
}

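/*
 * A guest write deliberately leaves the backing perf event untouched;
 * the snapshot is biased so that the next read returns exactly 'val'.
 * Worked example (illustrative numbers): if pmc_read_counter() would
 * currently return 100 and the guest writes 5, the snapshot is adjusted
 * by 5 - 100 = -95, and snapshot + live delta == 5 on the next read.
 */
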
static inline bool pmc_is_gp(struct kvm_pmc *pmc)
{
	return pmc->type == KVM_PMC_GP;
}

static inline bool pmc_is_fixed(struct kvm_pmc *pmc)
{
	return pmc->type == KVM_PMC_FIXED;
}

/* returns the general purpose PMC with the specified MSR. Note that it can be
 * addressed through both the full-width register and its low 32-bit alias.
 */
/* returns the fixed PMC with the specified MSR */
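/*
 * The lookup bodies themselves did not match the search. A minimal
 * sketch of the GP lookup, assuming struct kvm_pmu carries
 * gp_counters[] and nr_arch_gp_counters (the fixed-counter variant is
 * analogous); the sketch name and the omission of array_index_nospec()
 * index sanitization are simplifications, not the in-tree code:
 */
static inline struct kvm_pmc *gp_pmc_lookup_sketch(struct kvm_pmu *pmu,
						   u32 msr, u32 base)
{
	if (msr >= base && msr < base + pmu->nr_arch_gp_counters)
		return &pmu->gp_counters[msr - base];

	return NULL;
}
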
static inline u64 get_sample_period(struct kvm_pmc *pmc, u64 counter_value)
{
	u64 sample_period = (-counter_value) & pmc_bitmask(pmc);

	if (!sample_period)
		sample_period = pmc_bitmask(pmc) + 1;

	return sample_period;
}

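/*
 * Worked example (illustrative): with a 48-bit counter, a guest value
 * of 0xFFFFFFFFFFF0 gives (-0xFFFFFFFFFFF0) & ((1ULL << 48) - 1) = 0x10,
 * i.e. the counter overflows -- and the sample fires -- after 16 more
 * events. A value of 0 would yield a period of 0, hence the fixup to a
 * full 2^48 wrap.
 */
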
static inline void pmc_update_sample_period(struct kvm_pmc *pmc)
{
	if (!pmc->perf_event || pmc->is_paused ||
	    !is_sampling_event(pmc->perf_event))
		return;

	perf_event_period(pmc->perf_event,
			  get_sample_period(pmc, pmc->counter));
}

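/*
 * Invoked after the guest rewrites a counter so that the perf sample
 * period again matches the distance to the next overflow; counting
 * (non-sampling) events have no period to update, hence the early out.
 */
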
static inline bool pmc_speculative_in_use(struct kvm_pmc *pmc)
{
	struct kvm_pmu *pmu = pmc_to_pmu(pmc);

	if (pmc_is_fixed(pmc))
		return fixed_ctrl_field(pmu->fixed_ctr_ctrl,
					pmc->idx - INTEL_PMC_IDX_FIXED) & 0x3;

	return pmc->eventsel & ARCH_PERFMON_EVENTSEL_ENABLE;
}

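/*
 * Each fixed counter owns a 4-bit field in IA32_FIXED_CTR_CTRL; bit 0
 * of the field enables counting at CPL 0 and bit 1 at CPL > 0, so a
 * nonzero (field & 0x3) means the guest has armed the counter. A sketch
 * of the extraction, assuming the usual 4-bits-per-counter layout:
 *
 *   #define fixed_ctrl_field(ctrl_reg, idx) (((ctrl_reg) >> ((idx) * 4)) & 0xf)
 */
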
static inline void kvm_pmu_request_counter_reprogram(struct kvm_pmc *pmc)
{
	set_bit(pmc->idx, pmc_to_pmu(pmc)->reprogram_pmi);
	kvm_make_request(KVM_REQ_PMU, pmc->vcpu);
}

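/*
 * Reprogramming is deferred: the counter's bit is latched in the
 * reprogram_pmi bitmap and KVM_REQ_PMU forces the vCPU to process the
 * bitmap before its next guest entry, coalescing back-to-back MSR
 * writes into a single perf event rebuild.
 */
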
/*
 * Check if a PMC is enabled by comparing it against global_ctrl bits.
 *
 * If the vPMU doesn't have a global_ctrl MSR, all counters are enabled.
 */
static inline bool pmc_is_globally_enabled(struct kvm_pmc *pmc)
{
	struct kvm_pmu *pmu = pmc_to_pmu(pmc);

	if (!kvm_pmu_has_perf_global_ctrl(pmu))
		return true;

	return test_bit(pmc->idx, (unsigned long *)&pmu->global_ctrl);
}

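/*
 * global_ctrl mirrors IA32_PERF_GLOBAL_CTRL: bit i gates GP counter i,
 * and fixed counter j is gated by bit INTEL_PMC_IDX_FIXED + j (bit
 * 32 + j), which is why pmc->idx can be tested directly against the
 * register image.
 */
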
/* Emulates the guest's RDPMC instruction: reads counter 'pmc' into *data. */
int kvm_pmu_rdpmc(struct kvm_vcpu *vcpu, unsigned pmc, u64 *data);