Searched refs: load_sum (Results 1 – 4 of 4), sorted by relevance
/openbmc/linux/kernel/sched/
pelt.c
  115  sa->load_sum = decay_load(sa->load_sum, periods);        in accumulate_sum()
  142  sa->load_sum += load * contrib;                          in accumulate_sum()
  264  sa->load_avg = div_u64(load * sa->load_sum, divider);    in ___update_load_avg()
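The pelt.c hits show the core PELT arithmetic: load_sum is a geometrically decayed running sum, and load_avg is that sum scaled by a divider. The following is a hypothetical userspace sketch of that idea; it mirrors the structure of decay_load()/accumulate_sum()/___update_load_avg() but uses floating point instead of the kernel's fixed-point tables, so the constants and the main() driver are illustrative only.

/*
 * Simplified PELT sketch: contributions halve every 32 periods (y^32 == 0.5),
 * and load_avg converges toward the entity's load once the sum saturates.
 */
#include <math.h>
#include <stdio.h>

int main(void)
{
	double y = pow(0.5, 1.0 / 32.0);	/* decay factor per 1024us period */
	double divider = 1.0 / (1.0 - y);	/* limit of the geometric series */
	double load = 2.0;			/* weight of the tracked entity (assumed) */
	double load_sum = 0.0;

	for (int period = 0; period < 345; period++) {
		load_sum = load_sum * y;	/* cf. decay_load(sa->load_sum, periods) */
		load_sum += 1.0;		/* cf. load * contrib, fully runnable period */
	}

	/* cf. sa->load_avg = div_u64(load * sa->load_sum, divider) */
	printf("load_avg ~= %.3f (converges toward load = %.3f)\n",
	       load * load_sum / divider, load);
	return 0;
}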
fair.c
  3692  cfs_rq->avg.load_sum += se_weight(se) * se->avg.load_sum;                  in enqueue_load_avg()
  3699  sub_positive(&cfs_rq->avg.load_sum, se_weight(se) * se->avg.load_sum);     in dequeue_load_avg()
  3701  cfs_rq->avg.load_sum = max_t(u32, cfs_rq->avg.load_sum,                    in dequeue_load_avg()
  4048  if (sa->load_sum)                                                          in load_avg_is_decayed()
  4310  u64 load_sum = 0;                                                          in update_tg_cfs_load() (local)
  4338  load_sum = div_u64(gcfs_rq->avg.load_sum,                                  in update_tg_cfs_load()
  4343  runnable_sum = min(se->avg.load_sum, load_sum);                            in update_tg_cfs_load()
  4362  delta_sum = load_sum - (s64)se_weight(se) * se->avg.load_sum;              in update_tg_cfs_load()
  4369  cfs_rq->avg.load_sum = max_t(u32, cfs_rq->avg.load_sum,                    in update_tg_cfs_load()
  4570  sa->load_sum = max_t(u32, sa->load_sum, sa->load_avg * PELT_MIN_DIVIDER);  in update_cfs_rq_load_avg()
  [all …]
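The fair.c hits show the aggregation pattern: the runqueue-level load_sum accumulates each entity's weight times its load_sum on enqueue, subtracts it without underflowing on dequeue, and is then floored at load_avg * PELT_MIN_DIVIDER so sum and average stay consistent. Below is a hypothetical, self-contained sketch of that pattern; the types, helper names, and the PELT_MIN_DIVIDER value are stand-ins, not the kernel definitions.

#include <stdint.h>
#include <stdio.h>

#define PELT_MIN_DIVIDER 46718ULL	/* placeholder; the kernel derives it from LOAD_AVG_MAX */

struct avg {
	uint64_t load_sum;
	uint64_t load_avg;
};

/* clamp at zero instead of wrapping, in the spirit of the kernel's sub_positive() */
static void sub_positive(uint64_t *val, uint64_t sub)
{
	*val = (*val > sub) ? *val - sub : 0;
}

/* cf. enqueue_load_avg(): fold the entity's weighted sum into the runqueue sums */
static void enqueue_load(struct avg *cfs_rq, const struct avg *se, uint64_t weight)
{
	cfs_rq->load_avg += se->load_avg;
	cfs_rq->load_sum += weight * se->load_sum;
}

/* cf. dequeue_load_avg(): remove it, then floor load_sum at load_avg * PELT_MIN_DIVIDER */
static void dequeue_load(struct avg *cfs_rq, const struct avg *se, uint64_t weight)
{
	sub_positive(&cfs_rq->load_avg, se->load_avg);
	sub_positive(&cfs_rq->load_sum, weight * se->load_sum);
	if (cfs_rq->load_sum < cfs_rq->load_avg * PELT_MIN_DIVIDER)
		cfs_rq->load_sum = cfs_rq->load_avg * PELT_MIN_DIVIDER;
}

int main(void)
{
	struct avg cfs_rq = { 0, 0 };
	struct avg se = { .load_sum = 40000, .load_avg = 850 };	/* made-up values */

	enqueue_load(&cfs_rq, &se, 1024);
	dequeue_load(&cfs_rq, &se, 1024);
	printf("after enqueue+dequeue: load_sum=%llu load_avg=%llu\n",
	       (unsigned long long)cfs_rq.load_sum,
	       (unsigned long long)cfs_rq.load_avg);
	return 0;
}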
debug.c
  1071  P(se.avg.load_sum);  in proc_sched_show_task()
/openbmc/linux/include/linux/ |
sched.h
  496  u64 load_sum;  (member)
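For context, the sched.h hit is the declaration of load_sum inside struct sched_avg. The reduced sketch below keeps only the two fields the results above actually touch; the real struct also carries runnable/util sums and averages, and its exact layout varies between kernel versions.

#include <stdint.h>

typedef uint64_t u64;

struct sched_avg_sketch {
	u64		load_sum;	/* geometrically decayed running sum (pelt.c) */
	unsigned long	load_avg;	/* load_sum scaled by the divider (___update_load_avg) */
};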