Lines Matching refs:pmu_dev

108 void (*write_evttype)(struct xgene_pmu_dev *pmu_dev, int idx, u32 val);
109 void (*write_agentmsk)(struct xgene_pmu_dev *pmu_dev, u32 val);
110 void (*write_agent1msk)(struct xgene_pmu_dev *pmu_dev, u32 val);
111 void (*enable_counter)(struct xgene_pmu_dev *pmu_dev, int idx);
112 void (*disable_counter)(struct xgene_pmu_dev *pmu_dev, int idx);
113 void (*enable_counter_int)(struct xgene_pmu_dev *pmu_dev, int idx);
114 void (*disable_counter_int)(struct xgene_pmu_dev *pmu_dev, int idx);
115 void (*reset_counters)(struct xgene_pmu_dev *pmu_dev);
116 void (*start_counters)(struct xgene_pmu_dev *pmu_dev);
117 void (*stop_counters)(struct xgene_pmu_dev *pmu_dev);
141 struct xgene_pmu_dev *pmu_dev; member
607 struct xgene_pmu_dev *pmu_dev = to_pmu_dev(dev_get_drvdata(dev)); in cpumask_show() local
609 return cpumap_print_to_pagebuf(true, buf, &pmu_dev->parent->cpu); in cpumask_show()
692 static int get_next_avail_cntr(struct xgene_pmu_dev *pmu_dev) in get_next_avail_cntr() argument
696 cntr = find_first_zero_bit(pmu_dev->cntr_assign_mask, in get_next_avail_cntr()
697 pmu_dev->max_counters); in get_next_avail_cntr()
698 if (cntr == pmu_dev->max_counters) in get_next_avail_cntr()
700 set_bit(cntr, pmu_dev->cntr_assign_mask); in get_next_avail_cntr()
705 static void clear_avail_cntr(struct xgene_pmu_dev *pmu_dev, int cntr) in clear_avail_cntr() argument
707 clear_bit(cntr, pmu_dev->cntr_assign_mask); in clear_avail_cntr()
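The get_next_avail_cntr()/clear_avail_cntr() matches above show counter allocation done with a small bitmap. Below is a minimal sketch of that pattern; the sketch_ names, the counter count, and the error code are assumptions, not the driver's definitions.

#include <linux/bitmap.h>
#include <linux/bitops.h>
#include <linux/errno.h>

#define SKETCH_MAX_COUNTERS	4	/* stand-in for PMU_MAX_COUNTERS (value assumed) */

struct sketch_cntr_pool {
	DECLARE_BITMAP(cntr_assign_mask, SKETCH_MAX_COUNTERS);
	unsigned int max_counters;
};

/* Claim the lowest free counter index, or report that all are in use. */
static int sketch_get_next_avail_cntr(struct sketch_cntr_pool *pool)
{
	unsigned int cntr;

	cntr = find_first_zero_bit(pool->cntr_assign_mask, pool->max_counters);
	if (cntr == pool->max_counters)
		return -ENOSPC;		/* error code assumed */
	set_bit(cntr, pool->cntr_assign_mask);

	return cntr;
}

/* Return a counter to the pool when its event is deleted. */
static void sketch_clear_avail_cntr(struct sketch_cntr_pool *pool, int cntr)
{
	clear_bit(cntr, pool->cntr_assign_mask);
}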
731 static inline u64 xgene_pmu_read_counter32(struct xgene_pmu_dev *pmu_dev, in xgene_pmu_read_counter32() argument
734 return readl(pmu_dev->inf->csr + PMU_PMEVCNTR0 + (4 * idx)); in xgene_pmu_read_counter32()
737 static inline u64 xgene_pmu_read_counter64(struct xgene_pmu_dev *pmu_dev, in xgene_pmu_read_counter64() argument
749 hi = xgene_pmu_read_counter32(pmu_dev, 2 * idx + 1); in xgene_pmu_read_counter64()
750 lo = xgene_pmu_read_counter32(pmu_dev, 2 * idx); in xgene_pmu_read_counter64()
751 } while (hi != xgene_pmu_read_counter32(pmu_dev, 2 * idx + 1)); in xgene_pmu_read_counter64()
757 xgene_pmu_write_counter32(struct xgene_pmu_dev *pmu_dev, int idx, u64 val) in xgene_pmu_write_counter32() argument
759 writel(val, pmu_dev->inf->csr + PMU_PMEVCNTR0 + (4 * idx)); in xgene_pmu_write_counter32()
763 xgene_pmu_write_counter64(struct xgene_pmu_dev *pmu_dev, int idx, u64 val) in xgene_pmu_write_counter64() argument
771 xgene_pmu_write_counter32(pmu_dev, 2 * idx, cnt_lo); in xgene_pmu_write_counter64()
772 xgene_pmu_write_counter32(pmu_dev, 2 * idx + 1, cnt_hi); in xgene_pmu_write_counter64()
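The read_counter64/write_counter64 matches access one logical 64-bit counter through two adjacent 32-bit registers, retrying the read until the high word is stable. A hedged reconstruction of that access pattern follows; the register offset and the sketch_ names are assumptions.

#include <linux/io.h>
#include <linux/types.h>

#define SKETCH_PMEVCNTR0	0x000	/* stand-in for PMU_PMEVCNTR0 (offset assumed) */

static u32 sketch_read_counter32(void __iomem *csr, int idx)
{
	return readl(csr + SKETCH_PMEVCNTR0 + (4 * idx));
}

static void sketch_write_counter32(void __iomem *csr, int idx, u32 val)
{
	writel(val, csr + SKETCH_PMEVCNTR0 + (4 * idx));
}

/*
 * Counter 'idx' occupies 32-bit registers 2*idx (low half) and 2*idx+1
 * (high half).  If the high word changes between the two reads, the low
 * word carried mid-sequence, so read again until the high word is stable.
 */
static u64 sketch_read_counter64(void __iomem *csr, int idx)
{
	u32 lo, hi;

	do {
		hi = sketch_read_counter32(csr, 2 * idx + 1);
		lo = sketch_read_counter32(csr, 2 * idx);
	} while (hi != sketch_read_counter32(csr, 2 * idx + 1));

	return ((u64)hi << 32) | lo;
}

/* Writes simply split the value: low half first, then the high half. */
static void sketch_write_counter64(void __iomem *csr, int idx, u64 val)
{
	sketch_write_counter32(csr, 2 * idx, (u32)val);
	sketch_write_counter32(csr, 2 * idx + 1, (u32)(val >> 32));
}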
776 xgene_pmu_write_evttype(struct xgene_pmu_dev *pmu_dev, int idx, u32 val) in xgene_pmu_write_evttype() argument
778 writel(val, pmu_dev->inf->csr + PMU_PMEVTYPER0 + (4 * idx)); in xgene_pmu_write_evttype()
782 xgene_pmu_write_agentmsk(struct xgene_pmu_dev *pmu_dev, u32 val) in xgene_pmu_write_agentmsk() argument
784 writel(val, pmu_dev->inf->csr + PMU_PMAMR0); in xgene_pmu_write_agentmsk()
788 xgene_pmu_v3_write_agentmsk(struct xgene_pmu_dev *pmu_dev, u32 val) { } in xgene_pmu_v3_write_agentmsk() argument
791 xgene_pmu_write_agent1msk(struct xgene_pmu_dev *pmu_dev, u32 val) in xgene_pmu_write_agent1msk() argument
793 writel(val, pmu_dev->inf->csr + PMU_PMAMR1); in xgene_pmu_write_agent1msk()
797 xgene_pmu_v3_write_agent1msk(struct xgene_pmu_dev *pmu_dev, u32 val) { } in xgene_pmu_v3_write_agent1msk() argument
800 xgene_pmu_enable_counter(struct xgene_pmu_dev *pmu_dev, int idx) in xgene_pmu_enable_counter() argument
804 val = readl(pmu_dev->inf->csr + PMU_PMCNTENSET); in xgene_pmu_enable_counter()
806 writel(val, pmu_dev->inf->csr + PMU_PMCNTENSET); in xgene_pmu_enable_counter()
810 xgene_pmu_disable_counter(struct xgene_pmu_dev *pmu_dev, int idx) in xgene_pmu_disable_counter() argument
814 val = readl(pmu_dev->inf->csr + PMU_PMCNTENCLR); in xgene_pmu_disable_counter()
816 writel(val, pmu_dev->inf->csr + PMU_PMCNTENCLR); in xgene_pmu_disable_counter()
820 xgene_pmu_enable_counter_int(struct xgene_pmu_dev *pmu_dev, int idx) in xgene_pmu_enable_counter_int() argument
824 val = readl(pmu_dev->inf->csr + PMU_PMINTENSET); in xgene_pmu_enable_counter_int()
826 writel(val, pmu_dev->inf->csr + PMU_PMINTENSET); in xgene_pmu_enable_counter_int()
830 xgene_pmu_disable_counter_int(struct xgene_pmu_dev *pmu_dev, int idx) in xgene_pmu_disable_counter_int() argument
834 val = readl(pmu_dev->inf->csr + PMU_PMINTENCLR); in xgene_pmu_disable_counter_int()
836 writel(val, pmu_dev->inf->csr + PMU_PMINTENCLR); in xgene_pmu_disable_counter_int()
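The enable/disable helpers above all share one read-modify-write shape against paired SET/CLR registers, ORing in the bit for the chosen counter. A short sketch, with the register offsets assumed:

#include <linux/bits.h>
#include <linux/io.h>

#define SKETCH_PMCNTENSET	0xC00	/* offsets are placeholders */
#define SKETCH_PMCNTENCLR	0xC20

/* Setting bit 'idx' in the SET register starts that counter... */
static void sketch_enable_counter(void __iomem *csr, int idx)
{
	u32 val;

	val = readl(csr + SKETCH_PMCNTENSET);
	val |= BIT(idx);
	writel(val, csr + SKETCH_PMCNTENSET);
}

/* ...and writing the same bit to the CLR register stops it, leaving the
 * other counters' enable bits untouched. */
static void sketch_disable_counter(void __iomem *csr, int idx)
{
	u32 val;

	val = readl(csr + SKETCH_PMCNTENCLR);
	val |= BIT(idx);
	writel(val, csr + SKETCH_PMCNTENCLR);
}

The PMINTENSET/PMINTENCLR pair in the matches follows the same shape for the per-counter overflow interrupts.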
839 static inline void xgene_pmu_reset_counters(struct xgene_pmu_dev *pmu_dev) in xgene_pmu_reset_counters() argument
843 val = readl(pmu_dev->inf->csr + PMU_PMCR); in xgene_pmu_reset_counters()
845 writel(val, pmu_dev->inf->csr + PMU_PMCR); in xgene_pmu_reset_counters()
848 static inline void xgene_pmu_start_counters(struct xgene_pmu_dev *pmu_dev) in xgene_pmu_start_counters() argument
852 val = readl(pmu_dev->inf->csr + PMU_PMCR); in xgene_pmu_start_counters()
854 writel(val, pmu_dev->inf->csr + PMU_PMCR); in xgene_pmu_start_counters()
857 static inline void xgene_pmu_stop_counters(struct xgene_pmu_dev *pmu_dev) in xgene_pmu_stop_counters() argument
861 val = readl(pmu_dev->inf->csr + PMU_PMCR); in xgene_pmu_stop_counters()
863 writel(val, pmu_dev->inf->csr + PMU_PMCR); in xgene_pmu_stop_counters()
868 struct xgene_pmu_dev *pmu_dev = to_pmu_dev(pmu); in xgene_perf_pmu_enable() local
869 struct xgene_pmu *xgene_pmu = pmu_dev->parent; in xgene_perf_pmu_enable()
870 bool enabled = !bitmap_empty(pmu_dev->cntr_assign_mask, in xgene_perf_pmu_enable()
871 pmu_dev->max_counters); in xgene_perf_pmu_enable()
876 xgene_pmu->ops->start_counters(pmu_dev); in xgene_perf_pmu_enable()
881 struct xgene_pmu_dev *pmu_dev = to_pmu_dev(pmu); in xgene_perf_pmu_disable() local
882 struct xgene_pmu *xgene_pmu = pmu_dev->parent; in xgene_perf_pmu_disable()
884 xgene_pmu->ops->stop_counters(pmu_dev); in xgene_perf_pmu_disable()
889 struct xgene_pmu_dev *pmu_dev = to_pmu_dev(event->pmu); in xgene_perf_event_init() local
916 event->cpu = cpumask_first(&pmu_dev->parent->cpu); in xgene_perf_event_init()
946 struct xgene_pmu_dev *pmu_dev = to_pmu_dev(event->pmu); in xgene_perf_enable_event() local
947 struct xgene_pmu *xgene_pmu = pmu_dev->parent; in xgene_perf_enable_event()
949 xgene_pmu->ops->write_evttype(pmu_dev, GET_CNTR(event), in xgene_perf_enable_event()
951 xgene_pmu->ops->write_agentmsk(pmu_dev, ~((u32)GET_AGENTID(event))); in xgene_perf_enable_event()
952 if (pmu_dev->inf->type == PMU_TYPE_IOB) in xgene_perf_enable_event()
953 xgene_pmu->ops->write_agent1msk(pmu_dev, in xgene_perf_enable_event()
956 xgene_pmu->ops->enable_counter(pmu_dev, GET_CNTR(event)); in xgene_perf_enable_event()
957 xgene_pmu->ops->enable_counter_int(pmu_dev, GET_CNTR(event)); in xgene_perf_enable_event()
962 struct xgene_pmu_dev *pmu_dev = to_pmu_dev(event->pmu); in xgene_perf_disable_event() local
963 struct xgene_pmu *xgene_pmu = pmu_dev->parent; in xgene_perf_disable_event()
965 xgene_pmu->ops->disable_counter(pmu_dev, GET_CNTR(event)); in xgene_perf_disable_event()
966 xgene_pmu->ops->disable_counter_int(pmu_dev, GET_CNTR(event)); in xgene_perf_disable_event()
971 struct xgene_pmu_dev *pmu_dev = to_pmu_dev(event->pmu); in xgene_perf_event_set_period() local
972 struct xgene_pmu *xgene_pmu = pmu_dev->parent; in xgene_perf_event_set_period()
984 xgene_pmu->ops->write_counter(pmu_dev, hw->idx, val); in xgene_perf_event_set_period()
989 struct xgene_pmu_dev *pmu_dev = to_pmu_dev(event->pmu); in xgene_perf_event_update() local
990 struct xgene_pmu *xgene_pmu = pmu_dev->parent; in xgene_perf_event_update()
996 new_raw_count = xgene_pmu->ops->read_counter(pmu_dev, GET_CNTR(event)); in xgene_perf_event_update()
1002 delta = (new_raw_count - prev_raw_count) & pmu_dev->max_period; in xgene_perf_event_update()
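xgene_perf_event_update() is the standard perf counter-folding step: read the hardware counter, publish it as the new prev_count, and add the masked difference to the event so a counter wrap is accounted for. A sketch of that step, assuming the count comes back through a hypothetical read hook:

#include <linux/perf_event.h>

/* Hypothetical stand-in for xgene_pmu->ops->read_counter(). */
static u64 sketch_read_counter_for(struct perf_event *event);

static void sketch_event_update(struct perf_event *event, u64 max_period)
{
	struct hw_perf_event *hw = &event->hw;
	u64 delta, prev_raw_count, new_raw_count;

again:
	prev_raw_count = local64_read(&hw->prev_count);
	new_raw_count = sketch_read_counter_for(event);

	/* Retry if an interrupt updated prev_count under us. */
	if (local64_cmpxchg(&hw->prev_count, prev_raw_count,
			    new_raw_count) != prev_raw_count)
		goto again;

	/* Masking with the counter's maximum period handles a single wrap. */
	delta = (new_raw_count - prev_raw_count) & max_period;

	local64_add(delta, &event->count);
}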
1014 struct xgene_pmu_dev *pmu_dev = to_pmu_dev(event->pmu); in xgene_perf_start() local
1015 struct xgene_pmu *xgene_pmu = pmu_dev->parent; in xgene_perf_start()
1029 xgene_pmu->ops->write_counter(pmu_dev, GET_CNTR(event), in xgene_perf_start()
1057 struct xgene_pmu_dev *pmu_dev = to_pmu_dev(event->pmu); in xgene_perf_add() local
1063 hw->idx = get_next_avail_cntr(pmu_dev); in xgene_perf_add()
1068 pmu_dev->pmu_counter_event[hw->idx] = event; in xgene_perf_add()
1078 struct xgene_pmu_dev *pmu_dev = to_pmu_dev(event->pmu); in xgene_perf_del() local
1084 clear_avail_cntr(pmu_dev, GET_CNTR(event)); in xgene_perf_del()
1087 pmu_dev->pmu_counter_event[hw->idx] = NULL; in xgene_perf_del()
1090 static int xgene_init_perf(struct xgene_pmu_dev *pmu_dev, char *name) in xgene_init_perf() argument
1094 if (pmu_dev->parent->version == PCP_PMU_V3) in xgene_init_perf()
1095 pmu_dev->max_period = PMU_V3_CNT_MAX_PERIOD; in xgene_init_perf()
1097 pmu_dev->max_period = PMU_CNT_MAX_PERIOD; in xgene_init_perf()
1099 xgene_pmu = pmu_dev->parent; in xgene_init_perf()
1101 pmu_dev->max_counters = 1; in xgene_init_perf()
1103 pmu_dev->max_counters = PMU_MAX_COUNTERS; in xgene_init_perf()
1106 pmu_dev->pmu = (struct pmu) { in xgene_init_perf()
1107 .attr_groups = pmu_dev->attr_groups, in xgene_init_perf()
1121 xgene_pmu->ops->stop_counters(pmu_dev); in xgene_init_perf()
1122 xgene_pmu->ops->reset_counters(pmu_dev); in xgene_init_perf()
1124 return perf_pmu_register(&pmu_dev->pmu, name, -1); in xgene_init_perf()
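The xgene_init_perf() matches outline the registration sequence: size the counters and the maximum period per hardware revision, quiesce and reset the block, then hand a dynamically typed uncore PMU to perf. A sketch of that sequence; the struct layout and the sketch_ callbacks are placeholders for the driver's real xgene_perf_* hooks.

#include <linux/perf_event.h>
#include <linux/sysfs.h>

struct sketch_uncore_pmu {
	struct pmu pmu;
	const struct attribute_group **attr_groups;
};

/* Placeholder perf callbacks; the driver wires in its xgene_perf_* handlers. */
static int  sketch_event_init(struct perf_event *event);
static int  sketch_add(struct perf_event *event, int flags);
static void sketch_del(struct perf_event *event, int flags);
static void sketch_start(struct perf_event *event, int flags);
static void sketch_stop(struct perf_event *event, int flags);
static void sketch_read(struct perf_event *event);
static void sketch_stop_counters(struct sketch_uncore_pmu *pd);
static void sketch_reset_counters(struct sketch_uncore_pmu *pd);

static int sketch_init_perf(struct sketch_uncore_pmu *pd, const char *name)
{
	pd->pmu = (struct pmu) {
		.attr_groups	= pd->attr_groups,
		.task_ctx_nr	= perf_invalid_context,	/* uncore PMU: no per-task events */
		.event_init	= sketch_event_init,
		.add		= sketch_add,
		.del		= sketch_del,
		.start		= sketch_start,
		.stop		= sketch_stop,
		.read		= sketch_read,
	};

	/* Start from a stopped, zeroed counter block before exposing the PMU. */
	sketch_stop_counters(pd);
	sketch_reset_counters(pd);

	/* -1 asks perf for a dynamically allocated PMU type id. */
	return perf_pmu_register(&pd->pmu, name, -1);
}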
1138 ctx->pmu_dev = pmu; in xgene_pmu_dev_add()
1189 static void _xgene_pmu_isr(int irq, struct xgene_pmu_dev *pmu_dev) in _xgene_pmu_isr() argument
1191 struct xgene_pmu *xgene_pmu = pmu_dev->parent; in _xgene_pmu_isr()
1192 void __iomem *csr = pmu_dev->inf->csr; in _xgene_pmu_isr()
1196 xgene_pmu->ops->stop_counters(pmu_dev); in _xgene_pmu_isr()
1215 struct perf_event *event = pmu_dev->pmu_counter_event[idx]; in _xgene_pmu_isr()
1226 xgene_pmu->ops->start_counters(pmu_dev); in _xgene_pmu_isr()
1253 _xgene_pmu_isr(irq, ctx->pmu_dev); in xgene_pmu_isr()
1258 _xgene_pmu_isr(irq, ctx->pmu_dev); in xgene_pmu_isr()
1263 _xgene_pmu_isr(irq, ctx->pmu_dev); in xgene_pmu_isr()
1268 _xgene_pmu_isr(irq, ctx->pmu_dev); in xgene_pmu_isr()
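The _xgene_pmu_isr() matches trace the usual uncore overflow handler: stop the block, fold in and re-arm every counter whose overflow bit is set (looked up through pmu_counter_event[]), then restart counting. A sketch of that flow; the overflow-status register and the helper names are assumptions.

#include <linux/bitops.h>
#include <linux/io.h>
#include <linux/perf_event.h>

#define SKETCH_PMOVSR		0xC80	/* overflow status register: name/offset assumed */
#define SKETCH_MAX_COUNTERS	4

struct sketch_pmu_dev {
	void __iomem *csr;
	unsigned int max_counters;
	u64 max_period;
	struct perf_event *pmu_counter_event[SKETCH_MAX_COUNTERS];
};

static void sketch_stop_counters(struct sketch_pmu_dev *pd);
static void sketch_start_counters(struct sketch_pmu_dev *pd);
static void sketch_event_update(struct perf_event *event, u64 max_period);
static void sketch_event_set_period(struct perf_event *event);

static void sketch_pmu_isr(struct sketch_pmu_dev *pd)
{
	unsigned long pmovsr;
	int idx;

	/* Freeze the block so the counters don't move while being serviced. */
	sketch_stop_counters(pd);

	pmovsr = readl(pd->csr + SKETCH_PMOVSR);
	for_each_set_bit(idx, &pmovsr, pd->max_counters) {
		struct perf_event *event = pd->pmu_counter_event[idx];

		if (!event)
			continue;

		/* Fold in what the counter accumulated, then re-arm it. */
		sketch_event_update(event, pd->max_period);
		sketch_event_set_period(event);
	}

	sketch_start_counters(pd);
}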
1812 perf_pmu_migrate_context(&ctx->pmu_dev->pmu, cpu, target); in xgene_pmu_offline_cpu()
1815 perf_pmu_migrate_context(&ctx->pmu_dev->pmu, cpu, target); in xgene_pmu_offline_cpu()
1818 perf_pmu_migrate_context(&ctx->pmu_dev->pmu, cpu, target); in xgene_pmu_offline_cpu()
1821 perf_pmu_migrate_context(&ctx->pmu_dev->pmu, cpu, target); in xgene_pmu_offline_cpu()
1947 perf_pmu_unregister(&ctx->pmu_dev->pmu); in xgene_pmu_dev_cleanup()