Lines Matching refs:vgpu_data

78 struct vgpu_sched_data *vgpu_data; in vgpu_update_timeslice() local
83 vgpu_data = vgpu->sched_data; in vgpu_update_timeslice()
84 delta_ts = ktime_sub(cur_time, vgpu_data->sched_in_time); in vgpu_update_timeslice()
85 vgpu_data->sched_time = ktime_add(vgpu_data->sched_time, delta_ts); in vgpu_update_timeslice()
86 vgpu_data->left_ts = ktime_sub(vgpu_data->left_ts, delta_ts); in vgpu_update_timeslice()
87 vgpu_data->sched_in_time = cur_time; in vgpu_update_timeslice()
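Taken together, the hits at lines 83-87 form the whole accounting step for a vGPU that is being switched out. Below is a minimal sketch of vgpu_update_timeslice() reconstructed from those matched lines; the function signature and the idle-vGPU guard are assumptions, only the vgpu_data accesses come from the listing.

	/* Sketch only: the signature and the idle-vGPU guard are assumptions. */
	static void vgpu_update_timeslice(struct intel_vgpu *vgpu, ktime_t cur_time)
	{
		ktime_t delta_ts;
		struct vgpu_sched_data *vgpu_data;

		if (!vgpu || vgpu == vgpu->gvt->idle_vgpu)	/* assumed guard */
			return;

		vgpu_data = vgpu->sched_data;
		/* time the vGPU has held the hardware since it was scheduled in */
		delta_ts = ktime_sub(cur_time, vgpu_data->sched_in_time);
		/* charge it to the lifetime runtime and to the remaining budget */
		vgpu_data->sched_time = ktime_add(vgpu_data->sched_time, delta_ts);
		vgpu_data->left_ts = ktime_sub(vgpu_data->left_ts, delta_ts);
		/* restart the measurement window */
		vgpu_data->sched_in_time = cur_time;
	}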
95 struct vgpu_sched_data *vgpu_data; in gvt_balance_timeslice() local
108 vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list); in gvt_balance_timeslice()
109 total_weight += vgpu_data->sched_ctl.weight; in gvt_balance_timeslice()
113 vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list); in gvt_balance_timeslice()
115 total_weight) * vgpu_data->sched_ctl.weight; in gvt_balance_timeslice()
117 vgpu_data->allocated_ts = fair_timeslice; in gvt_balance_timeslice()
118 vgpu_data->left_ts = vgpu_data->allocated_ts; in gvt_balance_timeslice()
122 vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list); in gvt_balance_timeslice()
127 vgpu_data->left_ts += vgpu_data->allocated_ts; in gvt_balance_timeslice()
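Lines 108-118 are the periodic rebalance pass of gvt_balance_timeslice(): the weights of all runnable vGPUs are summed, then each budget is reset to a weight-proportional share of the balance period. Lines 122-127 are the lighter top-up pass run between rebalances. A condensed sketch of the rebalance pass follows; the GVT_TS_BALANCE_PERIOD_MS constant and the stage counter that decides which pass runs are assumptions, while the list walk and the vgpu_data fields are from the listing.

	/* Sketch of the rebalance pass; the period constant is an assumption. */
	#define GVT_TS_BALANCE_PERIOD_MS	100

	static void gvt_balance_timeslice(struct gvt_sched_data *sched_data)
	{
		struct vgpu_sched_data *vgpu_data;
		struct list_head *pos;
		int total_weight = 0;
		ktime_t fair_timeslice;

		/* pass 1: sum the configured weights of every runnable vGPU */
		list_for_each(pos, &sched_data->lru_runq_head) {
			vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list);
			total_weight += vgpu_data->sched_ctl.weight;
		}

		/* pass 2: give each vGPU a weight-proportional slice of the
		 * balance period and restart its budget from that slice */
		list_for_each(pos, &sched_data->lru_runq_head) {
			vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list);
			fair_timeslice = ktime_divns(ms_to_ktime(GVT_TS_BALANCE_PERIOD_MS),
						     total_weight) * vgpu_data->sched_ctl.weight;
			vgpu_data->allocated_ts = fair_timeslice;
			vgpu_data->left_ts = vgpu_data->allocated_ts;
		}
	}

On the intermediate stages only the hit at line 127 runs, topping the budget up in place with vgpu_data->left_ts += vgpu_data->allocated_ts, so unused or overdrawn time carries forward to the next period.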
137 struct vgpu_sched_data *vgpu_data; in try_to_schedule_next_vgpu() local
162 vgpu_data = scheduler->next_vgpu->sched_data; in try_to_schedule_next_vgpu()
163 vgpu_data->sched_in_time = cur_time; in try_to_schedule_next_vgpu()
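The hits at 162-163 mark the hand-off point in try_to_schedule_next_vgpu(): once the next vGPU has been chosen, its accounting clock is restarted so that the next charge in vgpu_update_timeslice() covers only its own run. A minimal sketch of that step; reading cur_time with ktime_get() at this point is an assumption.

	/* from the listing: restart the budget clock for the incoming vGPU */
	cur_time = ktime_get();				/* assumed source of cur_time */
	vgpu_data = scheduler->next_vgpu->sched_data;
	vgpu_data->sched_in_time = cur_time;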
178 struct vgpu_sched_data *vgpu_data; in find_busy_vgpu() local
186 vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list); in find_busy_vgpu()
187 if (!vgpu_has_pending_workload(vgpu_data->vgpu)) in find_busy_vgpu()
190 if (vgpu_data->pri_sched) { in find_busy_vgpu()
191 if (ktime_before(ktime_get(), vgpu_data->pri_time)) { in find_busy_vgpu()
192 vgpu = vgpu_data->vgpu; in find_busy_vgpu()
195 vgpu_data->pri_sched = false; in find_busy_vgpu()
199 if (vgpu_data->left_ts > 0) { in find_busy_vgpu()
200 vgpu = vgpu_data->vgpu; in find_busy_vgpu()
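Lines 186-200 are the selection loop of find_busy_vgpu(). It walks the LRU runqueue and picks the first vGPU that has pending workload and either sits inside its start-up priority window (pri_sched/pri_time) or still has timeslice budget left. A sketch of the loop; the function signature and the return handling around the matched lines are assumptions.

	static struct intel_vgpu *find_busy_vgpu(struct gvt_sched_data *sched_data)
	{
		struct vgpu_sched_data *vgpu_data;
		struct intel_vgpu *vgpu = NULL;
		struct list_head *pos;

		/* walk the LRU runqueue from oldest to newest */
		list_for_each(pos, &sched_data->lru_runq_head) {
			vgpu_data = container_of(pos, struct vgpu_sched_data, lru_list);
			if (!vgpu_has_pending_workload(vgpu_data->vgpu))
				continue;

			if (vgpu_data->pri_sched) {
				/* newly started vGPU: pick it unconditionally
				 * until its priority window expires */
				if (ktime_before(ktime_get(), vgpu_data->pri_time)) {
					vgpu = vgpu_data->vgpu;
					break;
				}
				vgpu_data->pri_sched = false;
			}

			/* otherwise only vGPUs with budget left are eligible */
			if (vgpu_data->left_ts > 0) {
				vgpu = vgpu_data->vgpu;
				break;
			}
		}

		return vgpu;
	}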
215 struct vgpu_sched_data *vgpu_data; in tbs_sched_func() local
225 vgpu_data = vgpu->sched_data; in tbs_sched_func()
226 if (!vgpu_data->pri_sched) { in tbs_sched_func()
228 list_del_init(&vgpu_data->lru_list); in tbs_sched_func()
229 list_add_tail(&vgpu_data->lru_list, in tbs_sched_func()
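Lines 225-229 are the LRU rotation inside the scheduler tick, tbs_sched_func(): the vGPU that just won is moved to the tail of the runqueue so the others drift toward the head and are considered first next time, while a vGPU still inside its priority window is left where it is. A sketch of that fragment; obtaining vgpu from find_busy_vgpu() and publishing it as scheduler->next_vgpu are assumptions.

	vgpu = find_busy_vgpu(sched_data);	/* assumed source of vgpu */
	if (vgpu) {
		scheduler->next_vgpu = vgpu;	/* assumed hand-off to the scheduler */

		vgpu_data = vgpu->sched_data;
		if (!vgpu_data->pri_sched) {
			/* rotate the winner to the LRU tail; vGPUs that have
			 * waited longest end up nearest the head */
			list_del_init(&vgpu_data->lru_list);
			list_add_tail(&vgpu_data->lru_list,
				      &sched_data->lru_runq_head);
		}
	}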
344 struct vgpu_sched_data *vgpu_data = vgpu->sched_data; in tbs_sched_start_schedule() local
347 if (!list_empty(&vgpu_data->lru_list)) in tbs_sched_start_schedule()
351 vgpu_data->pri_time = ktime_add(now, in tbs_sched_start_schedule()
353 vgpu_data->pri_sched = true; in tbs_sched_start_schedule()
355 list_add(&vgpu_data->lru_list, &sched_data->lru_runq_head); in tbs_sched_start_schedule()
360 vgpu_data->active = true; in tbs_sched_start_schedule()
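Lines 344-360 show what tbs_sched_start_schedule() does with vgpu_data: refuse a vGPU that is already queued, grant it a short priority window, put it at the head of the runqueue, and mark it active. A sketch of the function; the return type, the path to sched_data, and the length of the priority window are assumptions.

	#define GVT_SCHED_VGPU_PRI_TIME	2	/* seconds; window length is an assumption */

	static int tbs_sched_start_schedule(struct intel_vgpu *vgpu)
	{
		struct gvt_sched_data *sched_data = vgpu->gvt->scheduler.sched_data;	/* assumed path */
		struct vgpu_sched_data *vgpu_data = vgpu->sched_data;
		ktime_t now;

		if (!list_empty(&vgpu_data->lru_list))
			return -EINVAL;		/* already on the runqueue */

		now = ktime_get();
		/* a newly started vGPU gets a priority window so it is not
		 * starved behind vGPUs that already hold budget */
		vgpu_data->pri_time = ktime_add(now,
						ktime_set(GVT_SCHED_VGPU_PRI_TIME, 0));
		vgpu_data->pri_sched = true;

		/* head insertion: find_busy_vgpu() sees this vGPU first */
		list_add(&vgpu_data->lru_list, &sched_data->lru_runq_head);

		vgpu_data->active = true;
		return 0;
	}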
365 struct vgpu_sched_data *vgpu_data = vgpu->sched_data; in tbs_sched_stop_schedule() local
367 list_del_init(&vgpu_data->lru_list); in tbs_sched_stop_schedule()
368 vgpu_data->active = false; in tbs_sched_stop_schedule()
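Lines 365-368 are the whole vgpu_data side of tbs_sched_stop_schedule(): the vGPU is taken off the runqueue and flagged inactive. Using list_del_init() rather than list_del() matters here, because it leaves the node self-linked so the list_empty() check at line 347 still works if the vGPU is started again. A sketch, with the function body around the matched lines assumed minimal.

	static void tbs_sched_stop_schedule(struct intel_vgpu *vgpu)
	{
		struct vgpu_sched_data *vgpu_data = vgpu->sched_data;

		/* take the vGPU off the LRU runqueue; list_del_init() leaves
		 * the node self-linked so a later restart passes the
		 * list_empty() test in tbs_sched_start_schedule() */
		list_del_init(&vgpu_data->lru_list);
		vgpu_data->active = false;
	}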
426 struct vgpu_sched_data *vgpu_data = vgpu->sched_data; in intel_vgpu_start_schedule() local
429 if (!vgpu_data->active) { in intel_vgpu_start_schedule()
447 struct vgpu_sched_data *vgpu_data = vgpu->sched_data; in intel_vgpu_stop_schedule() local
452 if (!vgpu_data->active) in intel_vgpu_stop_schedule()
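The last two groups (429 and 452) use the active flag only as a guard so that the policy start and stop callbacks run at most once per state change. A sketch of that pattern; the path to the policy ops (scheduler.sched_ops) and the return handling are assumptions.

	int intel_vgpu_start_schedule(struct intel_vgpu *vgpu)
	{
		struct vgpu_sched_data *vgpu_data = vgpu->sched_data;
		int ret = 0;

		if (!vgpu_data->active)		/* only start a vGPU that is not yet scheduled */
			ret = vgpu->gvt->scheduler.sched_ops->start_schedule(vgpu);	/* assumed ops path */

		return ret;
	}

	void intel_vgpu_stop_schedule(struct intel_vgpu *vgpu)
	{
		struct vgpu_sched_data *vgpu_data = vgpu->sched_data;

		if (!vgpu_data->active)
			return;			/* already stopped, nothing to tear down */

		vgpu->gvt->scheduler.sched_ops->stop_schedule(vgpu);	/* assumed ops path */
	}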