Lines Matching refs:cpu_map_idx

268 static int evsel__write_stat_event(struct evsel *counter, int cpu_map_idx, u32 thread,  in evsel__write_stat_event()  argument
271 struct perf_sample_id *sid = SID(counter, cpu_map_idx, thread); in evsel__write_stat_event()
272 struct perf_cpu cpu = perf_cpu_map__cpu(evsel__cpus(counter), cpu_map_idx); in evsel__write_stat_event()
278 static int read_single_counter(struct evsel *counter, int cpu_map_idx, in read_single_counter() argument
285 perf_counts(counter->counts, cpu_map_idx, thread); in read_single_counter()
294 perf_counts(counter->counts, cpu_map_idx, thread); in read_single_counter()
305 return evsel__read_counter(counter, cpu_map_idx, thread); in read_single_counter()
316 static int read_counter_cpu(struct evsel *counter, struct timespec *rs, int cpu_map_idx) in read_counter_cpu() argument
327 count = perf_counts(counter->counts, cpu_map_idx, thread); in read_counter_cpu()
333 if (!perf_counts__is_loaded(counter->counts, cpu_map_idx, thread) && in read_counter_cpu()
334 read_single_counter(counter, cpu_map_idx, thread, rs)) { in read_counter_cpu()
336 perf_counts(counter->counts, cpu_map_idx, thread)->ena = 0; in read_counter_cpu()
337 perf_counts(counter->counts, cpu_map_idx, thread)->run = 0; in read_counter_cpu()
341 perf_counts__set_loaded(counter->counts, cpu_map_idx, thread, false); in read_counter_cpu()
344 if (evsel__write_stat_event(counter, cpu_map_idx, thread, count)) { in read_counter_cpu()
355 cpu_map_idx).cpu, in read_counter_cpu()
386 evlist_cpu_itr.cpu_map_idx); in read_affinity_counters()
746 evlist_cpu_itr.cpu_map_idx) < 0) { in __run_perf_stat()
792 perf_evsel__close_cpu(&counter->core, evlist_cpu_itr.cpu_map_idx); in __run_perf_stat()
803 evlist_cpu_itr.cpu_map_idx) < 0) { in __run_perf_stat()
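Read together, the read_single_counter() and read_counter_cpu() matches outline perf stat's per-CPU read path: counts live in a (cpu_map_idx, thread) matrix accessed through perf_counts(), a slot is read from the kernel only if a group read has not already loaded it, the loaded flag is cleared for the next interval, and in record mode the value is forwarded as a stat event via evsel__write_stat_event(). A rough reconstruction of that loop follows; only the lines matched above come from the source, while the thread-loop bound, the STAT_RECORD guard, and the error handling are assumptions:

	/* Sketch of read_counter_cpu() assembled from the matched fragments above.
	 * The loop bound, the STAT_RECORD guard and the error paths are
	 * assumptions, not taken from the listing. */
	static int read_counter_cpu(struct evsel *counter, struct timespec *rs, int cpu_map_idx)
	{
		int nthreads = perf_thread_map__nr(counter->core.threads); /* assumed bound */

		for (int thread = 0; thread < nthreads; thread++) {
			struct perf_counts_values *count =
				perf_counts(counter->counts, cpu_map_idx, thread);

			/* Read from the kernel only if a group read has not already
			 * filled this (cpu_map_idx, thread) slot. */
			if (!perf_counts__is_loaded(counter->counts, cpu_map_idx, thread) &&
			    read_single_counter(counter, cpu_map_idx, thread, rs)) {
				perf_counts(counter->counts, cpu_map_idx, thread)->ena = 0;
				perf_counts(counter->counts, cpu_map_idx, thread)->run = 0;
				return -1;
			}

			/* Clear the loaded flag so the next interval reads again. */
			perf_counts__set_loaded(counter->counts, cpu_map_idx, thread, false);

			/* perf stat record: forward the value as a stat event. */
			if (STAT_RECORD &&
			    evsel__write_stat_event(counter, cpu_map_idx, thread, count))
				return -1;
		}

		return 0;
	}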
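The read_affinity_counters() and __run_perf_stat() matches pull cpu_map_idx out of evlist_cpu_itr, i.e. they walk the evlist with the per-CPU iterator so that open/enable/close work is grouped by CPU. A minimal sketch of that pattern, assuming the upstream evlist__for_each_cpu() iterator; do_per_cpu_work() is a hypothetical stand-in for the calls matched above:

	/* Sketch of the iterator usage behind the __run_perf_stat() matches.
	 * do_per_cpu_work() is hypothetical; perf_evsel__close_cpu() is the
	 * call shown in the listing. */
	struct evlist_cpu_iterator evlist_cpu_itr;

	evlist__for_each_cpu(evlist_cpu_itr, evsel_list, affinity) {
		struct evsel *counter = evlist_cpu_itr.evsel;

		/* Each step visits one (evsel, CPU) pair; cpu_map_idx indexes
		 * that evsel's own CPU map, matching the perf_counts() layout
		 * used by read_counter_cpu(). */
		if (do_per_cpu_work(counter, evlist_cpu_itr.cpu_map_idx) < 0)
			perf_evsel__close_cpu(&counter->core, evlist_cpu_itr.cpu_map_idx);
	}

The point the listing illustrates is that cpu_map_idx is an index into the evsel's CPU map, not a logical CPU number; the real CPU is recovered with perf_cpu_map__cpu(evsel__cpus(counter), cpu_map_idx), as in the line 272 and line 355 matches.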