Searched refs:hw_fence (Results 1 – 9 of 9) sorted by relevance
169 else if (job->hw_fence.ops) in amdgpu_job_free_resources()
170 f = &job->hw_fence; in amdgpu_job_free_resources()
187 if (!job->hw_fence.ops) in amdgpu_job_free_cb()
190 dma_fence_put(&job->hw_fence); in amdgpu_job_free_cb()
219 if (!job->hw_fence.ops) in amdgpu_job_free()
222 dma_fence_put(&job->hw_fence); in amdgpu_job_free()
160 fence = &job->hw_fence; in amdgpu_fence_emit()
722 job = container_of(old, struct amdgpu_job, hw_fence); in amdgpu_fence_driver_clear_job_fences()
784 struct amdgpu_job *job = container_of(f, struct amdgpu_job, hw_fence); in amdgpu_job_fence_get_timeline_name()
814 struct amdgpu_job *job = container_of(f, struct amdgpu_job, hw_fence); in amdgpu_job_fence_enable_signaling()
849 kfree(container_of(f, struct amdgpu_job, hw_fence)); in amdgpu_job_fence_free()
51 struct dma_fence hw_fence; member
1893 if (preempted && (&job->hw_fence) == fence) in amdgpu_ib_preempt_mark_partial_job()
5319 if (job && dma_fence_is_signaled(&job->hw_fence)) { in amdgpu_device_gpu_recover()
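
The amdgpu hits above all follow one pattern: hw_fence is embedded by value in struct amdgpu_job (the member hit at line 51), so fence-side code that only holds a struct dma_fence pointer recovers the owning job with container_of(), and amdgpu_job_fence_free() simply kfree()s the containing job. Below is a minimal userspace sketch of that pattern; the struct names mirror the kernel ones, but the reduced fields, the job_fence_free() helper, and main() are illustrative stand-ins, not the driver code.

/*
 * Sketch of the embedded-fence pattern: the fence lives by value
 * inside the job, so code holding only a fence pointer can get
 * back to the job with container_of().
 */
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct dma_fence {                 /* stand-in for the kernel struct */
	unsigned long seqno;
};

struct amdgpu_job {                /* reduced to the fields used here */
	int id;
	struct dma_fence hw_fence; /* embedded, not a pointer */
};

/* Mirrors amdgpu_job_fence_free(): freeing the fence frees the job. */
static void job_fence_free(struct dma_fence *f)
{
	free(container_of(f, struct amdgpu_job, hw_fence));
}

int main(void)
{
	struct amdgpu_job *job = calloc(1, sizeof(*job));

	job->id = 42;
	job->hw_fence.seqno = 7;

	/* A fence consumer sees only &job->hw_fence ... */
	struct dma_fence *f = &job->hw_fence;

	/* ... and can still recover the owning job. */
	printf("job %d, seqno %lu\n",
	       container_of(f, struct amdgpu_job, hw_fence)->id,
	       (unsigned long)f->seqno);

	job_fence_free(f);
	return 0;
}

The embedded layout is also consistent with the hw_fence.ops checks in the free paths at lines 169, 187 and 219: an embedded fence that was never initialized has no .ops and must be released with the job rather than through dma_fence_put().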
22 msm_fence_init(submit->hw_fence, fctx); in msm_job_run()
42 return dma_fence_get(submit->hw_fence); in msm_job_run()
44 submit->hw_fence = msm_fence_alloc(); in submit_create()
45 if (IS_ERR(submit->hw_fence)) { in submit_create()
46 ret = PTR_ERR(submit->hw_fence); in submit_create()
53 kfree(submit->hw_fence); in submit_create()
97 if (kref_read(&submit->hw_fence->refcount) == 0) { in __msm_gem_submit_destroy()
98 kfree(submit->hw_fence); in __msm_gem_submit_destroy()
100 dma_fence_put(submit->hw_fence); in __msm_gem_submit_destroy()
263 struct dma_fence *hw_fence; member
716 if (submit && dma_fence_is_signaled(submit->hw_fence)) { in retire_submits()
758 submit->seqno = submit->hw_fence->seqno; in msm_gpu_submit()
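
The msm hits show the opposite choice: hw_fence is a pointer member (line 263), allocated unreferenced in submit_create() and only initialized, and therefore first referenced, when msm_job_run() calls msm_fence_init(). That is why __msm_gem_submit_destroy() checks kref_read() == 0 and kfree()s the fence directly when the submit is destroyed before the job ever ran. Below is a small userspace sketch of that lifecycle; a plain integer stands in for the kernel's kref/dma_fence machinery, and fence_alloc(), fence_init(), fence_put() and submit_destroy() are simplified stand-ins, not the msm API.

/*
 * Sketch of the pointer-fence lifecycle: the fence is allocated with
 * refcount 0 and only becomes a live, refcounted object once the job
 * actually runs, so teardown must pick between free() and put().
 */
#include <stdio.h>
#include <stdlib.h>

struct dma_fence {
	int refcount;              /* stands in for kref/refcount_t */
	unsigned long seqno;
};

struct msm_gem_submit {
	struct dma_fence *hw_fence; /* pointer, unlike amdgpu */
};

/* Mirrors msm_fence_alloc(): allocated with refcount 0. */
static struct dma_fence *fence_alloc(void)
{
	return calloc(1, sizeof(struct dma_fence));
}

/* Mirrors msm_fence_init() run from msm_job_run(): take the first ref. */
static void fence_init(struct dma_fence *f, unsigned long seqno)
{
	f->refcount = 1;
	f->seqno = seqno;
}

static void fence_put(struct dma_fence *f)
{
	if (--f->refcount == 0)
		free(f);
}

/* Mirrors __msm_gem_submit_destroy(): the refcount == 0 special case. */
static void submit_destroy(struct msm_gem_submit *submit)
{
	if (submit->hw_fence->refcount == 0)
		free(submit->hw_fence);      /* job never ran */
	else
		fence_put(submit->hw_fence); /* normal fence teardown */
	free(submit);
}

int main(void)
{
	/* Submit torn down before the job ever ran: raw free() path. */
	struct msm_gem_submit *early = calloc(1, sizeof(*early));
	early->hw_fence = fence_alloc();
	submit_destroy(early);

	/* Submit that ran: fence was initialized, so refcounting applies. */
	struct msm_gem_submit *ran = calloc(1, sizeof(*ran));
	ran->hw_fence = fence_alloc();
	fence_init(ran->hw_fence, 1);
	printf("seqno %lu\n", ran->hw_fence->seqno); /* cf. msm_gpu_submit() */
	submit_destroy(ran);
	return 0;
}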