Searched refs:num_counters_fixed (Results 1 – 10 of 10) sorted by relevance
/openbmc/linux/arch/x86/events/intel/

core.c
  2879  int num_counters_fixed = hybrid(cpuc->pmu, num_counters_fixed);  in intel_pmu_reset() local
  2895  for (idx = 0; idx < num_counters_fixed; idx++) {  in intel_pmu_reset()
  2946  INTEL_PMC_IDX_FIXED + x86_pmu.num_counters_fixed) {  in x86_pmu_handle_guest_pebs()
  4726  int *num_counters_fixed,
  4741  pmu->num_counters_fixed = fls(fixed_cntr);  in update_pmu_cap()
  4742  intel_pmu_check_num_counters(&pmu->num_counters, &pmu->num_counters_fixed,  in update_pmu_cap()
  4775  if (!check_hw_exists(&pmu->pmu, pmu->num_counters, pmu->num_counters_fixed))  in init_hybrid_pmu()
  4785  x86_pmu_show_pmu_cap(pmu->num_counters, pmu->num_counters_fixed,  in init_hybrid_pmu()
  5943  int *num_counters_fixed,  in intel_pmu_check_num_counters() argument
  5953  if (*num_counters_fixed > INTEL_PMC_MAX_FIXED) {  in intel_pmu_check_num_counters()
  [all …]

ds.c
  1140  int num_counters_fixed = hybrid(cpuc->pmu, num_counters_fixed);  in pebs_update_threshold() local
  1148  reserved = max_pebs_events + num_counters_fixed;  in pebs_update_threshold()
  2177  mask |= ((1ULL << x86_pmu.num_counters_fixed) - 1) << INTEL_PMC_IDX_FIXED;  in intel_pmu_drain_pebs_nhm()
  2178  size = INTEL_PMC_IDX_FIXED + x86_pmu.num_counters_fixed;  in intel_pmu_drain_pebs_nhm()
  2274  int num_counters_fixed = hybrid(cpuc->pmu, num_counters_fixed);  in intel_pmu_drain_pebs_icl() local
  2290  (((1ULL << num_counters_fixed) - 1) << INTEL_PMC_IDX_FIXED);  in intel_pmu_drain_pebs_icl()
  2291  size = INTEL_PMC_IDX_FIXED + num_counters_fixed;  in intel_pmu_drain_pebs_icl()
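A note on the update_pmu_cap()/intel_pmu_check_num_counters() hits above: on hybrid parts the fixed-counter count is derived from a CPUID-provided bitmap with fls() and then clamped to what the driver supports. Below is a minimal, self-contained sketch of that arithmetic; the INTEL_PMC_MAX_FIXED value, the example bitmap, and the fls32() helper are illustrative assumptions, not the kernel's definitions.

    #include <stdio.h>
    #include <stdint.h>

    #define INTEL_PMC_MAX_FIXED 16              /* assumed driver-side upper bound */

    /* Userspace stand-in for the kernel's fls(): position of the highest set bit, 1-based. */
    static int fls32(uint32_t x)
    {
        return x ? 32 - __builtin_clz(x) : 0;
    }

    int main(void)
    {
        uint32_t fixed_cntr = 0x7;               /* example bitmap: fixed counters 0-2 present */
        int num_counters_fixed = fls32(fixed_cntr);

        /* The clamp seen in intel_pmu_check_num_counters(). */
        if (num_counters_fixed > INTEL_PMC_MAX_FIXED)
            num_counters_fixed = INTEL_PMC_MAX_FIXED;

        printf("num_counters_fixed = %d\n", num_counters_fixed);   /* prints 3 */
        return 0;
    }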
/openbmc/linux/arch/x86/events/

core.c
   253  bool check_hw_exists(struct pmu *pmu, int num_counters, int num_counters_fixed)  in check_hw_exists() argument
   278  if (num_counters_fixed) {  in check_hw_exists()
   283  for (i = 0; i < num_counters_fixed; i++) {  in check_hw_exists()
  1163  int num_counters_fixed = hybrid(cpuc->pmu, num_counters_fixed);  in collect_events() local
  1167  max_count = num_counters + num_counters_fixed;  in collect_events()
  1528  int num_counters_fixed = hybrid(cpuc->pmu, num_counters_fixed);  in perf_event_print_debug() local
  1573  for (idx = 0; idx < num_counters_fixed; idx++) {  in perf_event_print_debug()
  2043  void x86_pmu_show_pmu_cap(int num_counters, int num_counters_fixed,  in x86_pmu_show_pmu_cap() argument
  2052  hweight64((((1ULL << num_counters_fixed) - 1)  in x86_pmu_show_pmu_cap()
  2091  if (!check_hw_exists(&pmu, x86_pmu.num_counters, x86_pmu.num_counters_fixed))  in init_hw_perf_events()
  [all …]

perf_event.h
   664  int num_counters_fixed;  member
   765  int num_counters_fixed;  member
  1114  int num_counters_fixed);
  1185  void x86_pmu_show_pmu_cap(int num_counters, int num_counters_fixed,
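The collect_events() hits above show how the total number of events schedulable on one core is bounded: general-purpose counters plus fixed counters. A trivial sketch of that check, using made-up example counts rather than real hardware values:

    #include <stdio.h>

    int main(void)
    {
        int num_counters       = 8;   /* example: general-purpose counters */
        int num_counters_fixed = 3;   /* example: fixed-function counters */
        int n                  = 10;  /* events already collected for this CPU */

        /* Mirrors the bound used in collect_events(): no more events than counters. */
        int max_count = num_counters + num_counters_fixed;

        if (n >= max_count)
            printf("no room: %d events already, only %d counters\n", n, max_count);
        else
            printf("room for %d more event(s)\n", max_count - n);
        return 0;
    }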
/openbmc/linux/arch/x86/kvm/

pmu.h
   201  kvm_pmu_cap.num_counters_fixed = min(kvm_pmu_cap.num_counters_fixed,  in kvm_init_pmu_capability()

cpuid.c
  1026  edx.split.num_counters_fixed = kvm_pmu_cap.num_counters_fixed;  in __do_cpuid_func()

x86.c
  7189  kvm_pmu_cap.num_counters_fixed)  in kvm_probe_msr_to_save()
/openbmc/linux/arch/x86/events/zhaoxin/

core.c
   539  x86_pmu.num_counters_fixed = edx.split.num_counters_fixed;  in zhaoxin_pmu_init()
   608  x86_pmu.intel_ctrl |= ((1LL << x86_pmu.num_counters_fixed)-1) << INTEL_PMC_IDX_FIXED;  in zhaoxin_pmu_init()
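The zhaoxin_pmu_init() hit at line 608, like the PEBS drain hits in intel/ds.c, builds a contiguous bit mask for the fixed counters starting at INTEL_PMC_IDX_FIXED (the index of the first fixed counter in the global control/status bitmaps). A self-contained sketch of that mask arithmetic; the constant value and the counter count are assumptions for illustration:

    #include <stdio.h>
    #include <stdint.h>
    #include <inttypes.h>

    #define INTEL_PMC_IDX_FIXED 32        /* assumed: first fixed-counter bit position */

    int main(void)
    {
        int num_counters_fixed = 3;       /* example value reported by CPUID */

        /* num_counters_fixed ones, shifted up to the fixed-counter region. */
        uint64_t fixed_bits = ((1ULL << num_counters_fixed) - 1) << INTEL_PMC_IDX_FIXED;

        printf("fixed-counter bits: 0x%016" PRIx64 "\n", fixed_bits);   /* 0x0000000700000000 */
        return 0;
    }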
/openbmc/linux/arch/x86/include/asm/

perf_event.h
   166  unsigned int num_counters_fixed:5;  member
   266  int num_counters_fixed;  member
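The 5-bit member at line 166 sits in the union the kernel uses to decode EDX from CPUID leaf 0xA, where the architectural PMU reports its fixed-function counter count in the low bits. A hedged, self-contained sketch of that decode for x86 with GCC/Clang; the union name and the collapsed upper bits are illustrative, not the kernel's exact layout:

    #include <stdio.h>
    #include <cpuid.h>                     /* __get_cpuid(); GCC/Clang, x86 only */

    union cpuid10_edx_like {
        struct {
            unsigned int num_counters_fixed:5;   /* EDX[4:0] */
            unsigned int bit_width_fixed:8;      /* EDX[12:5] */
            unsigned int reserved:19;            /* remaining bits collapsed here */
        } split;
        unsigned int full;
    };

    int main(void)
    {
        unsigned int eax, ebx, ecx, edx;

        if (!__get_cpuid(0xA, &eax, &ebx, &ecx, &edx)) {
            fprintf(stderr, "CPUID leaf 0xA not available\n");
            return 1;
        }

        union cpuid10_edx_like e = { .full = edx };
        printf("num_counters_fixed = %u\n", e.split.num_counters_fixed);
        return 0;
    }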
/openbmc/linux/arch/x86/kvm/vmx/

pmu_intel.c
   530  pmu->nr_arch_fixed_counters = min_t(int, edx.split.num_counters_fixed,  in intel_pmu_refresh()
   531  kvm_pmu_cap.num_counters_fixed);  in intel_pmu_refresh()
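The intel_pmu_refresh() hit shows the guest-facing side: the number of fixed counters a vCPU gets is whatever the guest's CPUID claims, capped by what the host PMU driver reports (the same min() idea as the kvm/pmu.h hit above). A trivial sketch with example numbers:

    #include <stdio.h>

    static int min_int(int a, int b) { return a < b ? a : b; }

    int main(void)
    {
        int guest_cpuid_fixed = 7;   /* example: what the guest's CPUID 0xA advertises */
        int host_cap_fixed    = 3;   /* example: what the host PMU supports */

        int nr_arch_fixed_counters = min_int(guest_cpuid_fixed, host_cap_fixed);
        printf("guest gets %d fixed counter(s)\n", nr_arch_fixed_counters);
        return 0;
    }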