Home
last modified time | relevance | path

Searched refs:sched_job (Results 1 – 15 of 15) sorted by relevance

/openbmc/linux/drivers/gpu/drm/v3d/
H A Dv3d_sched.c28 to_v3d_job(struct drm_sched_job *sched_job) in to_v3d_job() argument
30 return container_of(sched_job, struct v3d_job, base); in to_v3d_job()
34 to_bin_job(struct drm_sched_job *sched_job) in to_bin_job() argument
36 return container_of(sched_job, struct v3d_bin_job, base.base); in to_bin_job()
40 to_render_job(struct drm_sched_job *sched_job) in to_render_job() argument
42 return container_of(sched_job, struct v3d_render_job, base.base); in to_render_job()
46 to_tfu_job(struct drm_sched_job *sched_job) in to_tfu_job() argument
48 return container_of(sched_job, struct v3d_tfu_job, base.base); in to_tfu_job()
52 to_csd_job(struct drm_sched_job *sched_job) in to_csd_job() argument
54 return container_of(sched_job, struct v3d_csd_job, base.base); in to_csd_job()
[all …]
/openbmc/linux/drivers/gpu/drm/etnaviv/
H A Detnaviv_sched.c20 static struct dma_fence *etnaviv_sched_run_job(struct drm_sched_job *sched_job) in etnaviv_sched_run_job() argument
22 struct etnaviv_gem_submit *submit = to_etnaviv_submit(sched_job); in etnaviv_sched_run_job()
25 if (likely(!sched_job->s_fence->finished.error)) in etnaviv_sched_run_job()
34 *sched_job) in etnaviv_sched_timedout_job()
36 struct etnaviv_gem_submit *submit = to_etnaviv_submit(sched_job); in etnaviv_sched_timedout_job()
64 drm_sched_stop(&gpu->sched, sched_job); in etnaviv_sched_timedout_job()
66 if(sched_job) in etnaviv_sched_timedout_job()
67 drm_sched_increase_karma(sched_job); in etnaviv_sched_timedout_job()
79 list_add(&sched_job->list, &sched_job->sched->pending_list); in etnaviv_sched_timedout_job()
83 static void etnaviv_sched_free_job(struct drm_sched_job *sched_job) in etnaviv_sched_free_job() argument
[all …]
H A Detnaviv_sched.h14 struct etnaviv_gem_submit *to_etnaviv_submit(struct drm_sched_job *sched_job) in to_etnaviv_submit() argument
16 return container_of(sched_job, struct etnaviv_gem_submit, sched_job); in to_etnaviv_submit()
H A Detnaviv_gem_submit.c189 ret = drm_sched_job_add_implicit_dependencies(&submit->sched_job, in submit_fence_sync()
536 ret = drm_sched_job_init(&submit->sched_job, in etnaviv_ioctl_gem_submit()
560 ret = drm_sched_job_add_dependency(&submit->sched_job, in etnaviv_ioctl_gem_submit()
620 drm_sched_job_cleanup(&submit->sched_job); in etnaviv_ioctl_gem_submit()
H A Detnaviv_gem.h90 struct drm_sched_job sched_job; member
/openbmc/linux/drivers/gpu/drm/scheduler/
H A Dgpu_scheduler_trace.h36 TP_PROTO(struct drm_sched_job *sched_job, struct drm_sched_entity *entity),
37 TP_ARGS(sched_job, entity),
41 __string(name, sched_job->sched->name)
49 __entry->id = sched_job->id;
50 __entry->fence = &sched_job->s_fence->finished;
51 __assign_str(name, sched_job->sched->name);
54 &sched_job->sched->hw_rq_count);
63 TP_PROTO(struct drm_sched_job *sched_job, struct drm_sched_entity *entity),
64 TP_ARGS(sched_job, entity)
68 TP_PROTO(struct drm_sched_job *sched_job, struc
[all …]
H A Dsched_entity.c33 #define to_drm_sched_job(sched_job) \ argument
34 container_of((sched_job), struct drm_sched_job, queue_node)
456 struct drm_sched_job *sched_job; in drm_sched_entity_pop_job() local
458 sched_job = to_drm_sched_job(spsc_queue_peek(&entity->job_queue)); in drm_sched_entity_pop_job()
459 if (!sched_job) in drm_sched_entity_pop_job()
463 drm_sched_job_dependency(sched_job, entity))) { in drm_sched_entity_pop_job()
464 trace_drm_sched_job_wait_dep(sched_job, entity->dependency); in drm_sched_entity_pop_job()
472 dma_fence_set_error(&sched_job->s_fence->finished, -ECANCELED); in drm_sched_entity_pop_job()
476 dma_fence_get(&sched_job->s_fence->finished)); in drm_sched_entity_pop_job()
503 sched_job->entity = NULL; in drm_sched_entity_pop_job()
[all …]
H A Dsched_main.c67 #define to_drm_sched_job(sched_job) \ argument
68 container_of((sched_job), struct drm_sched_job, queue_node)
1023 struct drm_sched_job *sched_job; in drm_sched_main() local
1039 sched_job = drm_sched_entity_pop_job(entity); in drm_sched_main()
1041 if (!sched_job) { in drm_sched_main()
1046 s_fence = sched_job->s_fence; in drm_sched_main()
1049 drm_sched_job_begin(sched_job); in drm_sched_main()
1051 trace_drm_run_job(sched_job, entity); in drm_sched_main()
1052 fence = sched->ops->run_job(sched_job); in drm_sched_main()
1060 r = dma_fence_add_callback(fence, &sched_job->cb, in drm_sched_main()
[all …]
/openbmc/linux/drivers/gpu/drm/nouveau/
H A Dnouveau_sched.c368 nouveau_sched_run_job(struct drm_sched_job *sched_job) in nouveau_sched_run_job() argument
370 struct nouveau_job *job = to_nouveau_job(sched_job); in nouveau_sched_run_job()
376 nouveau_sched_timedout_job(struct drm_sched_job *sched_job) in nouveau_sched_timedout_job() argument
378 struct drm_gpu_scheduler *sched = sched_job->sched; in nouveau_sched_timedout_job()
379 struct nouveau_job *job = to_nouveau_job(sched_job); in nouveau_sched_timedout_job()
382 drm_sched_stop(sched, sched_job); in nouveau_sched_timedout_job()
395 nouveau_sched_free_job(struct drm_sched_job *sched_job) in nouveau_sched_free_job() argument
397 struct nouveau_job *job = to_nouveau_job(sched_job); in nouveau_sched_free_job()
H A Dnouveau_sched.h13 #define to_nouveau_job(sched_job) \ argument
14 container_of((sched_job), struct nouveau_job, base)
/openbmc/linux/include/drm/
H A Dgpu_scheduler.h405 struct dma_fence *(*prepare_job)(struct drm_sched_job *sched_job,
414 struct dma_fence *(*run_job)(struct drm_sched_job *sched_job);
458 enum drm_gpu_sched_stat (*timedout_job)(struct drm_sched_job *sched_job);
464 void (*free_job)(struct drm_sched_job *sched_job);
579 void drm_sched_entity_push_job(struct drm_sched_job *sched_job);
/openbmc/linux/drivers/gpu/drm/amd/amdgpu/
H A Damdgpu_job.c252 amdgpu_job_prepare_job(struct drm_sched_job *sched_job, in amdgpu_job_prepare_job() argument
256 struct amdgpu_job *job = to_amdgpu_job(sched_job); in amdgpu_job_prepare_job()
282 static struct dma_fence *amdgpu_job_run(struct drm_sched_job *sched_job) in amdgpu_job_run() argument
284 struct amdgpu_ring *ring = to_amdgpu_ring(sched_job->sched); in amdgpu_job_run()
290 job = to_amdgpu_job(sched_job); in amdgpu_job_run()
319 #define to_drm_sched_job(sched_job) \ argument
320 container_of((sched_job), struct drm_sched_job, queue_node)
H A Damdgpu_job.h39 #define to_amdgpu_job(sched_job) \ argument
40 container_of((sched_job), struct amdgpu_job, base)
H A Damdgpu_trace.h518 TP_PROTO(struct amdgpu_job *sched_job, struct dma_fence *fence),
519 TP_ARGS(sched_job, fence),
521 __string(ring, sched_job->base.sched->name)
529 __assign_str(ring, sched_job->base.sched->name);
530 __entry->id = sched_job->base.id;
/openbmc/linux/drivers/gpu/drm/panfrost/
H A Dpanfrost_job.c43 to_panfrost_job(struct drm_sched_job *sched_job) in to_panfrost_job() argument
45 return container_of(sched_job, struct panfrost_job, base); in to_panfrost_job()
348 static void panfrost_job_free(struct drm_sched_job *sched_job) in panfrost_job_free() argument
350 struct panfrost_job *job = to_panfrost_job(sched_job); in panfrost_job_free()
352 drm_sched_job_cleanup(sched_job); in panfrost_job_free()
357 static struct dma_fence *panfrost_job_run(struct drm_sched_job *sched_job) in panfrost_job_run() argument
359 struct panfrost_job *job = to_panfrost_job(sched_job); in panfrost_job_run()
710 *sched_job) in panfrost_job_timedout()
712 struct panfrost_job *job = to_panfrost_job(sched_job); in panfrost_job_timedout()
745 sched_job); in panfrost_job_timedout()
[all …]