/openbmc/linux/arch/x86/events/amd/

  brs.c
    205  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_brs_enable()
    221  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_brs_enable_all()
    228  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_brs_disable()
    257  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_brs_disable_all()
    283  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_brs_drain()
    386  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_pmu_brs_sched_task()
    407  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in perf_amd_brs_lopwr_cb()

  lbr.c
    99   struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_pmu_lbr_filter()
    162  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_pmu_lbr_read()
    326  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_pmu_lbr_reset()
    345  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_pmu_lbr_add()
    365  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_pmu_lbr_del()
    380  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_pmu_lbr_sched_task()
    393  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_pmu_lbr_enable_all()
    416  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_pmu_lbr_disable_all()

  core.c
    378  static inline int amd_has_nb(struct cpu_hw_events *cpuc)  in amd_has_nb()
    406  static void __amd_put_nb_event_constraints(struct cpu_hw_events *cpuc,  in __amd_put_nb_event_constraints()
    463  __amd_get_nb_event_constraints(struct cpu_hw_events *cpuc, struct perf_event *event,  in __amd_get_nb_event_constraints()
    561  struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu);  in amd_pmu_cpu_prepare()
    585  struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu);  in amd_pmu_cpu_starting()
    600  nb = per_cpu(cpu_hw_events, i).amd_nb;  in amd_pmu_cpu_starting()
    617  struct cpu_hw_events *cpuhw = &per_cpu(cpu_hw_events, cpu);  in amd_pmu_cpu_dead()
    706  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_pmu_check_overflow()
    738  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_pmu_enable_all()
    868  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_pmu_handle_irq()
    [all …]

/openbmc/linux/arch/sh/kernel/

  perf_event.c
    28   struct cpu_hw_events {
    34   DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);
    201  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sh_pmu_stop()
    219  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sh_pmu_start()
    236  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sh_pmu_del()
    246  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sh_pmu_add()
    336  struct cpu_hw_events *cpuhw = &per_cpu(cpu_hw_events, cpu);  in sh_pmu_prepare_cpu()
    338  memset(cpuhw, 0, sizeof(struct cpu_hw_events));  in sh_pmu_prepare_cpu()

/openbmc/linux/arch/x86/events/intel/

  lbr.c
    105  static void intel_pmu_lbr_filter(struct cpu_hw_events *cpuc);
    122  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_lbr_enable()
    191  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_lbr_reset()
    361  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_lbr_restore()
    426  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_lbr_restore()
    453  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_lbr_save()
    503  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_lbr_save()
    543  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_lbr_sched_task()
    580  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_lbr_add()
    622  struct cpu_hw_events *cpuc;  in release_lbr_buffers()
    [all …]

  ds.c
    440  struct debug_store *ds = per_cpu(cpu_hw_events, cpu).ds;  in init_debug_store_on_cpu()
    452  if (!per_cpu(cpu_hw_events, cpu).ds)  in fini_debug_store_on_cpu()
    511  struct cpu_hw_events *hwev = per_cpu_ptr(&cpu_hw_events, cpu);  in alloc_pebs_buffer()
    549  struct cpu_hw_events *hwev = per_cpu_ptr(&cpu_hw_events, cpu);  in release_pebs_buffer()
    567  struct cpu_hw_events *hwev = per_cpu_ptr(&cpu_hw_events, cpu);  in alloc_bts_buffer()
    596  struct cpu_hw_events *hwev = per_cpu_ptr(&cpu_hw_events, cpu);  in release_bts_buffer()
    614  per_cpu(cpu_hw_events, cpu).ds = ds;  in alloc_ds_buffer()
    620  per_cpu(cpu_hw_events, cpu).ds = NULL;  in release_ds_buffer()
    740  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_disable_bts()
    757  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_drain_bts_buffer()
    [all …]

  core.c
    2212  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_disable_all()
    2229  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_enable_all()
    2263  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_snapshot_branch_stack()
    2316  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_nhm_workaround()
    2381  static void intel_set_tfa(struct cpu_hw_events *cpuc, bool on)  in intel_set_tfa()
    2391  static void intel_tfa_commit_scheduling(struct cpu_hw_events *cpuc, int idx, int cntr)  in intel_tfa_commit_scheduling()
    2402  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_tfa_pmu_enable_all()
    2435  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_set_masks()
    2447  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_clear_masks()
    2456  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_disable_fixed()
    [all …]

  bts.c
    147  struct debug_store *ds = per_cpu(cpu_hw_events, cpu).ds;  in bts_config_buffer()
    186  struct debug_store *ds = per_cpu(cpu_hw_events, cpu).ds;  in bts_update()
    262  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in bts_event_start()
    307  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in bts_event_stop()
    452  struct debug_store *ds = this_cpu_ptr(&cpu_hw_events)->ds;  in intel_bts_interrupt()
    522  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in bts_event_add()

  knc.c
    216  struct cpu_hw_events *cpuc;  in knc_pmu_handle_irq()
    221  cpuc = this_cpu_ptr(&cpu_hw_events);  in knc_pmu_handle_irq()

  p4.c
    919   struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in p4_pmu_disable_all()
    998   struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in p4_pmu_enable_all()
    1035  struct cpu_hw_events *cpuc;  in p4_pmu_handle_irq()
    1041  cpuc = this_cpu_ptr(&cpu_hw_events);  in p4_pmu_handle_irq()
    1240  static int p4_pmu_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign)  in p4_pmu_schedule_events()

/openbmc/linux/arch/powerpc/perf/

  core-fsl-emb.c
    19   struct cpu_hw_events {
    25   static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);
    192  struct cpu_hw_events *cpuhw;  in fsl_emb_pmu_disable()
    196  cpuhw = this_cpu_ptr(&cpu_hw_events);  in fsl_emb_pmu_disable()
    231  struct cpu_hw_events *cpuhw;  in fsl_emb_pmu_enable()
    235  cpuhw = this_cpu_ptr(&cpu_hw_events);  in fsl_emb_pmu_enable()
    278  struct cpu_hw_events *cpuhw;  in fsl_emb_pmu_add()
    285  cpuhw = &get_cpu_var(cpu_hw_events);  in fsl_emb_pmu_add()
    331  put_cpu_var(cpu_hw_events);  in fsl_emb_pmu_add()
    339  struct cpu_hw_events *cpuhw;  in fsl_emb_pmu_del()
    [all …]

  core-book3s.c
    32   struct cpu_hw_events {
    64   static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);
    128  static unsigned long ebb_switch_in(bool ebb, struct cpu_hw_events *cpuhw)  in ebb_switch_in()
    136  static inline void power_pmu_bhrb_read(struct perf_event *event, struct cpu_hw_events *cpuhw) {}  in power_pmu_bhrb_read()
    157  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in get_pmcs_ext_regs()
    416  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in power_pmu_bhrb_enable()
    432  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in power_pmu_bhrb_disable()
    490  static void power_pmu_bhrb_read(struct perf_event *event, struct cpu_hw_events *cpuhw)  in power_pmu_bhrb_read()
    647  static unsigned long ebb_switch_in(bool ebb, struct cpu_hw_events *cpuhw)  in ebb_switch_in()
    795  struct cpu_hw_events *cpuhw;  in power_pmu_wants_prompt_pmi()
    [all …]

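The add path in core-fsl-emb.c (lines 285 and 331 above) brackets its update with get_cpu_var()/put_cpu_var() rather than a bare this_cpu_ptr(): the pair disables preemption on entry and re-enables it on exit, so the task cannot migrate to another CPU while it modifies that CPU's cpu_hw_events. Below is a minimal sketch of just that bracket; the struct layout and the example_pmu_add() helper are hypothetical, only the get_cpu_var()/put_cpu_var() usage mirrors the listing.

#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/percpu.h>
#include <linux/perf_event.h>

/* Hypothetical, trimmed-down layout; the real one lives in core-fsl-emb.c. */
struct cpu_hw_events {
        int                     n_events;
        struct perf_event       *event[8];
};

static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);

static int example_pmu_add(struct perf_event *event)
{
        /* get_cpu_var() disables preemption and hands back this CPU's slot. */
        struct cpu_hw_events *cpuhw = &get_cpu_var(cpu_hw_events);
        int ret = 0;

        if (cpuhw->n_events >= ARRAY_SIZE(cpuhw->event)) {
                ret = -EAGAIN;
                goto out;
        }
        cpuhw->event[cpuhw->n_events++] = event;
out:
        /* put_cpu_var() re-enables preemption once the update is complete. */
        put_cpu_var(cpu_hw_events);
        return ret;
}
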
/openbmc/linux/arch/alpha/kernel/

  perf_event.c
    34   struct cpu_hw_events {
    53   DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);
    391  static void maybe_change_configuration(struct cpu_hw_events *cpuc)  in maybe_change_configuration()
    435  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in alpha_pmu_add()
    487  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in alpha_pmu_del()
    535  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in alpha_pmu_stop()
    555  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in alpha_pmu_start()
    718  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in alpha_pmu_enable()
    744  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in alpha_pmu_disable()
    803  struct cpu_hw_events *cpuc;  in alpha_perf_event_irq_handler()
    [all …]

/openbmc/linux/arch/loongarch/kernel/

  perf_event.c
    89   struct cpu_hw_events {
    104  static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = {
    253  static int loongarch_pmu_alloc_counter(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc)  in loongarch_pmu_alloc_counter()
    269  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in loongarch_pmu_enable_event()
    288  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in loongarch_pmu_disable_event()
    388  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in loongarch_pmu_add()
    422  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in loongarch_pmu_del()
    481  static void handle_associated_event(struct cpu_hw_events *cpuc, int idx,  in handle_associated_event()
    503  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in pmu_handle_irq()
    642  struct cpu_hw_events fake_cpuc;  in validate_group()
    [all …]

/openbmc/linux/arch/x86/events/

  core.c
    52    DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = {
    681   struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in x86_pmu_disable_all()
    721   struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in x86_pmu_disable()
    738   struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in x86_pmu_enable_all()
    768   struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu);  in x86_get_pmu()
    978   int x86_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign)  in x86_schedule_events()
    1116  static int add_nr_metric_event(struct cpu_hw_events *cpuc,  in add_nr_metric_event()
    1129  static void del_nr_metric_event(struct cpu_hw_events *cpuc,  in del_nr_metric_event()
    1136  static int collect_event(struct cpu_hw_events *cpuc, struct perf_event *event,  in collect_event()
    1160  static int collect_events(struct cpu_hw_events *cpuc, struct perf_event *leader, bool dogrp)  in collect_events()
    [all …]

  perf_event.h
    231   struct cpu_hw_events {
    757   int (*schedule_events)(struct cpu_hw_events *cpuc, int n, int *assign);
    776   (*get_event_constraints)(struct cpu_hw_events *cpuc,
    780   void (*put_event_constraints)(struct cpu_hw_events *cpuc,
    783   void (*start_scheduling)(struct cpu_hw_events *cpuc);
    785   void (*commit_scheduling)(struct cpu_hw_events *cpuc, int idx, int cntr);
    787   void (*stop_scheduling)(struct cpu_hw_events *cpuc);
    886   void (*lbr_read)(struct cpu_hw_events *cpuc);
    1070  DECLARE_PER_CPU(struct cpu_hw_events, cpu_hw_events);
    1147  u64 disable_mask = __this_cpu_read(cpu_hw_events.perf_ctr_virt_mask);  in __x86_pmu_enable_event()
    [all …]

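The x86 core.c and perf_event.h hits show the shape that most architectures in this list share: cpu_hw_events is a per-architecture struct, instantiated once per CPU with DEFINE_PER_CPU() (and declared for other files with DECLARE_PER_CPU()), fetched with this_cpu_ptr() from PMU callbacks that already run on the affected CPU, and with per_cpu() from CPU-hotplug paths that act on another CPU. A minimal sketch of that pattern follows, with a deliberately simplified, hypothetical field layout; the real x86 struct in perf_event.h carries far more state.

#include <linux/percpu.h>
#include <linux/perf_event.h>
#include <linux/string.h>

/* Hypothetical, trimmed-down layout for illustration only. */
struct cpu_hw_events {
        struct perf_event       *events[8];     /* counter index -> owning event */
        unsigned long           used_mask;      /* counters currently claimed */
        int                     n_events;
        int                     enabled;
};

static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = {
        .enabled = 1,
};

/* PMU callbacks run on the CPU they operate on, so this_cpu_ptr() suffices. */
static void example_pmu_disable_all(void)
{
        struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);

        cpuc->enabled = 0;
        /* ... program the hardware to stop counting ... */
}

/* Hotplug callbacks act on behalf of another CPU, hence per_cpu(). */
static int example_pmu_cpu_prepare(unsigned int cpu)
{
        struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu);

        memset(cpuc, 0, sizeof(*cpuc));
        cpuc->enabled = 1;
        return 0;
}
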
/openbmc/linux/arch/sparc/kernel/

  perf_event.c
    78    struct cpu_hw_events {
    115   static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = { .enabled = 1, };
    827   static inline void sparc_pmu_enable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, in…  in sparc_pmu_enable_event()
    845   static inline void sparc_pmu_disable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, i…  in sparc_pmu_disable_event()
    923   static void read_in_all_counters(struct cpu_hw_events *cpuc)  in read_in_all_counters()
    947   static void calculate_single_pcr(struct cpu_hw_events *cpuc)  in calculate_single_pcr()
    983   static void calculate_multiple_pcrs(struct cpu_hw_events *cpuc)  in calculate_multiple_pcrs()
    1017  static void update_pcrs_for_enable(struct cpu_hw_events *cpuc)  in update_pcrs_for_enable()
    1031  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sparc_pmu_enable()
    1049  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sparc_pmu_disable()
    [all …]

/openbmc/linux/arch/mips/kernel/

  perf_event_mipsxx.c
    31    struct cpu_hw_events {
    48    DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = {
    314   static int mipsxx_pmu_alloc_counter(struct cpu_hw_events *cpuc,  in mipsxx_pmu_alloc_counter()
    351   struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in mipsxx_pmu_enable_event()
    397   struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in mipsxx_pmu_disable_event()
    501   struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in mipspmu_add()
    537   struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in mipspmu_del()
    763   struct cpu_hw_events fake_cpuc;  in validate_group()
    782   static void handle_associated_event(struct cpu_hw_events *cpuc,  in handle_associated_event()
    1548  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in pause_local_counters()
    [all …]

/openbmc/linux/include/linux/perf/

  riscv_pmu.h
    31  struct cpu_hw_events {
    63  struct cpu_hw_events __percpu *hw_events;

/openbmc/linux/drivers/perf/

  riscv_pmu.c
    264  struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events);  in riscv_pmu_add()
    288  struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events);  in riscv_pmu_del()
    394  struct cpu_hw_events *cpuc;  in riscv_pmu_alloc()
    400  pmu->hw_events = alloc_percpu_gfp(struct cpu_hw_events, GFP_KERNEL);  in riscv_pmu_alloc()

  riscv_pmu_sbi.c
    345  struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events);  in pmu_sbi_ctr_get_idx()
    404  struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events);  in pmu_sbi_ctr_clear_idx()
    619  struct cpu_hw_events *cpu_hw_evt = this_cpu_ptr(pmu->hw_events);  in pmu_sbi_stop_hw_ctrs()
    636  struct cpu_hw_events *cpu_hw_evt = this_cpu_ptr(pmu->hw_events);  in pmu_sbi_start_overflow_mask()
    682  struct cpu_hw_events *cpu_hw_evt = dev;  in pmu_sbi_ovf_handler()
    767  struct cpu_hw_events *cpu_hw_evt = this_cpu_ptr(pmu->hw_events);  in pmu_sbi_starting_cpu()
    807  struct cpu_hw_events __percpu *hw_events = pmu->hw_events;  in pmu_sbi_setup_irqs()
    851  struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events);  in riscv_pm_pmu_notify()

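The RISC-V entries illustrate the other arrangement in this list: riscv_pmu.h (line 63 above) embeds a __percpu pointer in the PMU descriptor instead of using a file-scope per-CPU variable, riscv_pmu.c allocates the storage at probe time with alloc_percpu_gfp() (line 400), and the callbacks reach the current CPU's slot through this_cpu_ptr(rvpmu->hw_events). A rough sketch under that assumption follows; everything except the hw_events pointer and the percpu allocation/free calls is illustrative.

#include <linux/errno.h>
#include <linux/percpu.h>
#include <linux/perf_event.h>
#include <linux/slab.h>

struct cpu_hw_events {
        int                     n_events;
        struct perf_event       *events[8];     /* illustrative size */
};

/* Simplified stand-in for the driver's PMU descriptor (e.g. struct riscv_pmu). */
struct example_pmu {
        struct pmu                      pmu;
        struct cpu_hw_events __percpu   *hw_events;
};

static int example_pmu_alloc(struct example_pmu *epmu)
{
        /* One cpu_hw_events instance per possible CPU, owned by this PMU. */
        epmu->hw_events = alloc_percpu_gfp(struct cpu_hw_events, GFP_KERNEL);
        if (!epmu->hw_events)
                return -ENOMEM;
        return 0;
}

static void example_pmu_add_event(struct example_pmu *epmu,
                                  struct perf_event *event)
{
        /* The add callback runs on the target CPU, so this_cpu_ptr() is safe. */
        struct cpu_hw_events *cpuc = this_cpu_ptr(epmu->hw_events);

        cpuc->events[cpuc->n_events++] = event;
}

static void example_pmu_free(struct example_pmu *epmu)
{
        free_percpu(epmu->hw_events);
}
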
/openbmc/linux/arch/x86/events/zhaoxin/

  core.c
    357  struct cpu_hw_events *cpuc;  in zhaoxin_pmu_handle_irq()
    362  cpuc = this_cpu_ptr(&cpu_hw_events);  in zhaoxin_pmu_handle_irq()
    422  zhaoxin_get_event_constraints(struct cpu_hw_events *cpuc, int idx,  in zhaoxin_get_event_constraints()