Searched refs:drm_sched_entity (Results 1 – 25 of 26) sorted by relevance

/openbmc/linux/include/drm/
gpu_scheduler.h
88 struct drm_sched_entity { struct
260 struct drm_sched_entity *current_entity; argument
354 struct drm_sched_entity *entity;
406 struct drm_sched_entity *s_entity);
529 struct drm_sched_entity *entity,
546 void drm_sched_entity_modify_sched(struct drm_sched_entity *entity,
559 struct drm_sched_entity *entity);
563 struct drm_sched_entity *entity);
565 struct drm_sched_entity *entity);
567 void drm_sched_rq_update_fifo(struct drm_sched_entity *entity, ktime_t ts);
[all …]
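
For context, the prototypes listed above form the entity lifecycle API exported by gpu_scheduler.h. Below is a minimal sketch of how a driver might use them; the function names, the single-scheduler list, and the priority choice are illustrative placeholders rather than code from this tree, and the signatures follow recent mainline kernels.

/* Illustrative sketch only; not taken from any file in these results. */
#include <linux/kernel.h>
#include <drm/gpu_scheduler.h>

static int example_entity_setup(struct drm_gpu_scheduler *sched,
				struct drm_sched_entity *entity)
{
	struct drm_gpu_scheduler *sched_list[] = { sched };

	/* Bind the entity to a single scheduler at normal priority. */
	return drm_sched_entity_init(entity, DRM_SCHED_PRIORITY_NORMAL,
				     sched_list, ARRAY_SIZE(sched_list),
				     NULL /* no guilty counter */);
}

static void example_entity_teardown(struct drm_sched_entity *entity)
{
	/* Flushes outstanding jobs, then releases the entity. */
	drm_sched_entity_destroy(entity);
}
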
/openbmc/linux/drivers/gpu/drm/scheduler/
sched_entity.c
59 int drm_sched_entity_init(struct drm_sched_entity *entity, in drm_sched_entity_init()
68 memset(entity, 0, sizeof(struct drm_sched_entity)); in drm_sched_entity_init()
108 void drm_sched_entity_modify_sched(struct drm_sched_entity *entity, in drm_sched_entity_modify_sched()
121 static bool drm_sched_entity_is_idle(struct drm_sched_entity *entity) in drm_sched_entity_is_idle()
134 bool drm_sched_entity_is_ready(struct drm_sched_entity *entity) in drm_sched_entity_is_ready()
152 int drm_sched_entity_error(struct drm_sched_entity *entity) in drm_sched_entity_error()
217 static void drm_sched_entity_kill(struct drm_sched_entity *entity) in drm_sched_entity_kill()
261 long drm_sched_entity_flush(struct drm_sched_entity *entity, long timeout) in drm_sched_entity_flush()
307 void drm_sched_entity_fini(struct drm_sched_entity *entity) in drm_sched_entity_fini()
334 void drm_sched_entity_destroy(struct drm_sched_entity *entity) in drm_sched_entity_destroy()
[all …]
sched_main.c
82 struct drm_sched_entity *ent_a = rb_entry((a), struct drm_sched_entity, rb_tree_node); in drm_sched_entity_compare_before()
83 struct drm_sched_entity *ent_b = rb_entry((b), struct drm_sched_entity, rb_tree_node); in drm_sched_entity_compare_before()
88 static inline void drm_sched_rq_remove_fifo_locked(struct drm_sched_entity *entity) in drm_sched_rq_remove_fifo_locked()
98 void drm_sched_rq_update_fifo(struct drm_sched_entity *entity, ktime_t ts) in drm_sched_rq_update_fifo()
146 struct drm_sched_entity *entity) in drm_sched_rq_add_entity()
168 struct drm_sched_entity *entity) in drm_sched_rq_remove_entity()
194 static struct drm_sched_entity *
197 struct drm_sched_entity *entity; in drm_sched_rq_select_entity_rr()
238 static struct drm_sched_entity *
245 struct drm_sched_entity *entity; in drm_sched_rq_select_entity_fifo()
[all …]
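
The sched_main.c hits above cover the two run-queue selection paths, round-robin and FIFO. Under the FIFO policy, entities sit in an rbtree ordered by the submission timestamp of their oldest waiting job; the comparator referenced at lines 82–83 reads roughly as below (reconstructed from the fields shown above plus the oldest_job_waiting member of struct drm_sched_entity, not copied verbatim from this tree).

static bool drm_sched_entity_compare_before(struct rb_node *a,
					    const struct rb_node *b)
{
	struct drm_sched_entity *ent_a = rb_entry((a), struct drm_sched_entity, rb_tree_node);
	struct drm_sched_entity *ent_b = rb_entry((b), struct drm_sched_entity, rb_tree_node);

	/* The entity whose oldest pending job was submitted first runs first. */
	return ktime_before(ent_a->oldest_job_waiting, ent_b->oldest_job_waiting);
}
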
gpu_scheduler_trace.h
36 TP_PROTO(struct drm_sched_job *sched_job, struct drm_sched_entity *entity),
39 __field(struct drm_sched_entity *, entity)
63 TP_PROTO(struct drm_sched_job *sched_job, struct drm_sched_entity *entity),
68 TP_PROTO(struct drm_sched_job *sched_job, struct drm_sched_entity *entity),
sched_fence.c
208 struct drm_sched_fence *drm_sched_fence_alloc(struct drm_sched_entity *entity, in drm_sched_fence_alloc()
224 struct drm_sched_entity *entity) in drm_sched_fence_init()
/openbmc/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_ctx.h
41 struct drm_sched_entity entity;
77 u32 ring, struct drm_sched_entity **entity);
79 struct drm_sched_entity *entity,
82 struct drm_sched_entity *entity,
91 struct drm_sched_entity *entity);
amdgpu_job.h
89 struct drm_sched_entity *entity, void *owner,
92 struct drm_sched_entity *entity, void *owner,
amdgpu_job.c
94 struct drm_sched_entity *entity, void *owner, in amdgpu_job_alloc()
122 struct drm_sched_entity *entity, void *owner, in amdgpu_job_alloc_with_ib()
253 struct drm_sched_entity *s_entity) in amdgpu_job_prepare_job()
325 struct drm_sched_entity *s_entity = NULL; in amdgpu_job_stop_all_jobs_on_sched()
amdgpu_ctx.c
434 u32 ring, struct drm_sched_entity **entity) in amdgpu_ctx_get_entity()
437 struct drm_sched_entity *ctx_entity; in amdgpu_ctx_get_entity()
757 struct drm_sched_entity *entity, in amdgpu_ctx_add_fence()
784 struct drm_sched_entity *entity, in amdgpu_ctx_get_fence()
859 struct drm_sched_entity *entity) in amdgpu_ctx_wait_prev_fence()
907 struct drm_sched_entity *entity; in amdgpu_ctx_mgr_entity_flush()
937 struct drm_sched_entity *entity; in amdgpu_ctx_mgr_entity_fini()
amdgpu_ttm.h
67 struct drm_sched_entity high_pr;
69 struct drm_sched_entity low_pr;
amdgpu_cs.h
61 struct drm_sched_entity *entities[AMDGPU_CS_GANG_SIZE];
amdgpu_vce.h
51 struct drm_sched_entity entity;
amdgpu_uvd.h
65 struct drm_sched_entity entity;
amdgpu_vm.h
299 struct drm_sched_entity immediate;
300 struct drm_sched_entity delayed;
amdgpu_cs.c
76 struct drm_sched_entity *entity; in amdgpu_cs_job_idx()
398 struct drm_sched_entity *entity; in amdgpu_cs_p2_dependencies()
1109 struct drm_sched_entity *entity = p->entities[i]; in amdgpu_cs_vm_handling()
1483 struct drm_sched_entity *entity; in amdgpu_cs_wait_ioctl()
1531 struct drm_sched_entity *entity; in amdgpu_cs_get_fence()
amdgpu_vm_sdma.c
56 struct drm_sched_entity *entity = p->immediate ? &p->vm->immediate in amdgpu_vm_sdma_alloc_job()
/openbmc/linux/drivers/gpu/drm/msm/
msm_submitqueue.c
121 static struct drm_sched_entity *
137 struct drm_sched_entity *entity; in get_sched_entity()
msm_gpu.h
430 struct drm_sched_entity *entities[NR_SCHED_PRIORITIES * MSM_GPU_MAX_RINGS];
516 struct drm_sched_entity *entity;
/openbmc/linux/drivers/gpu/drm/nouveau/
nouveau_sched.h
102 struct drm_sched_entity base;
/openbmc/linux/drivers/gpu/drm/lima/
lima_sched.h
37 struct drm_sched_entity base;
/openbmc/linux/drivers/gpu/drm/etnaviv/
etnaviv_drv.h
34 struct drm_sched_entity sched_entity[ETNA_MAX_PIPES];
/openbmc/linux/drivers/gpu/drm/panfrost/
panfrost_device.h
141 struct drm_sched_entity sched_entity[NUM_JOB_SLOTS];
panfrost_job.c
901 struct drm_sched_entity *entity = &panfrost_priv->sched_entity[i]; in panfrost_job_close()
/openbmc/linux/drivers/gpu/drm/v3d/
v3d_drv.h
169 struct drm_sched_entity sched_entity[V3D_MAX_QUEUES];
/openbmc/linux/Documentation/gpu/rfc/
xe.rst
85 drm_sched_entity.
175 drm_sched_entity) and making sure drm_scheduler can cope with the lack of job
