Lines matching references to "p" (the struct amdgpu_vm_update_params argument) in the amdgpu_vm_sdma.c page-table update path, grouped by function; omitted non-matching lines are marked with "...":
static int amdgpu_vm_sdma_alloc_job(struct amdgpu_vm_update_params *p,
				    unsigned int count)
{
	/* Immediate updates get the immediate IB pool and entity, others the delayed pair. */
	enum amdgpu_ib_pool_type pool = p->immediate ? AMDGPU_IB_POOL_IMMEDIATE
						     : AMDGPU_IB_POOL_DELAYED;
	struct drm_sched_entity *entity = p->immediate ? &p->vm->immediate
						       : &p->vm->delayed;
	...
	/* GART updates stage two extra dwords per entry inside the IB. */
	if (p->pages_addr)
		ndw += count * 2;
	...
	r = amdgpu_job_alloc_with_ib(p->adev, entity, AMDGPU_FENCE_OWNER_VM,
				     ndw * 4, pool, &p->job);
	...
	p->num_dw_left = ndw;
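
Read in isolation, the sizing above starts from a fixed minimum of dwords, adds two per entry when the update has to be staged through GART, and then allocates the IB with ndw * 4 bytes, since a dword is four bytes. A standalone sketch of that estimate; SKETCH_MIN_NUM_DW and SKETCH_MAX_NUM_DW are illustrative placeholders, not the kernel's actual bounds:

#include <stdbool.h>
#include <stdio.h>

#define SKETCH_MIN_NUM_DW 256u          /* assumed lower bound */
#define SKETCH_MAX_NUM_DW (16u * 1024u) /* assumed upper bound */

/* Mirror of the ndw estimate: GART-backed updates stage one 64-bit
 * PTE value (two dwords) per entry inside the IB itself. */
static unsigned int estimate_ndw(unsigned int count, bool has_pages_addr)
{
	unsigned int ndw = SKETCH_MIN_NUM_DW;

	if (has_pages_addr)
		ndw += count * 2;

	return ndw > SKETCH_MAX_NUM_DW ? SKETCH_MAX_NUM_DW : ndw;
}

int main(void)
{
	printf("1024 GART entries -> %u dw\n", estimate_ndw(1024, true));
	return 0;
}
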
static int amdgpu_vm_sdma_prepare(struct amdgpu_vm_update_params *p,
				  struct dma_resv *resv, enum amdgpu_sync_mode sync_mode)
{
	...
	r = amdgpu_vm_sdma_alloc_job(p, 0);
	...
	/* Collect the reservation fences to wait for and attach them to the job. */
	r = amdgpu_sync_resv(p->adev, &sync, resv, sync_mode, p->vm);
	...
	r = amdgpu_sync_push_to_job(&sync, p->job);
	...
	/* On error, drop the half-built job again. */
	p->num_dw_left = 0;
	amdgpu_job_free(p->job);
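
The shape here is allocate-then-attach with unwind: the job is created first, the fences gathered from the reservation object are pushed onto it as dependencies, and on any failure the half-initialised job is freed before the error is returned. A toy version of that pattern; struct job, job_alloc(), job_add_deps() and job_free() are hypothetical stand-ins, not amdgpu API:

#include <stdio.h>
#include <stdlib.h>

struct job { int ndeps; };

static struct job *job_alloc(void) { return calloc(1, sizeof(struct job)); }
static int job_add_deps(struct job *j) { j->ndeps = 2; return 0; /* pretend */ }
static void job_free(struct job *j) { free(j); }

static int prepare(struct job **out)
{
	struct job *j = job_alloc();
	if (!j)
		return -1;

	/* Attach the fences the update must wait for; on failure the
	 * caller must never see the partially initialised job. */
	if (job_add_deps(j)) {
		job_free(j);
		return -1;
	}
	*out = j;
	return 0;
}

int main(void)
{
	struct job *j;

	if (!prepare(&j)) {
		printf("job ready, %d deps\n", j->ndeps);
		job_free(j);
	}
	return 0;
}
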
static int amdgpu_vm_sdma_commit(struct amdgpu_vm_update_params *p,
				 struct dma_fence **fence)
{
	struct amdgpu_ib *ib = p->job->ibs;
	...
	ring = container_of(p->vm->delayed.rq->sched, struct amdgpu_ring,
			    sched);
	...
	WARN_ON(ib->length_dw > p->num_dw_left);
	f = amdgpu_job_submit(p->job);
	if (p->unlocked) {
		/* Remember the fence as the VM's last unlocked update. */
		swap(p->vm->last_unlocked, tmp);
	} else {
		dma_resv_add_fence(p->vm->root.bo->tbo.base.resv, f,
				   DMA_RESV_USAGE_BOOKKEEP);
	}
	if (fence && !p->immediate) {
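
For unlocked updates the VM only remembers the newest fence: take a reference on the new fence, swap it into vm->last_unlocked and drop the reference that was held on the previous one. A toy refcounted version of that swap-and-put idiom; struct fence and its helpers are stand-ins, not dma_fence:

#include <stdio.h>
#include <stdlib.h>

struct fence { int refs; int seq; };

static struct fence *fence_get(struct fence *f) { if (f) f->refs++; return f; }

static void fence_put(struct fence *f)
{
	if (f && --f->refs == 0)
		free(f);
}

/* Replace *last with a new reference to f, releasing the reference
 * held on the old fence, mirroring the swap() + dma_fence_put() pair. */
static void remember_last_unlocked(struct fence **last, struct fence *f)
{
	struct fence *tmp = fence_get(f);
	struct fence *old = *last;

	*last = tmp;
	fence_put(old);
}

int main(void)
{
	struct fence *last = NULL;
	struct fence *f = calloc(1, sizeof(*f));

	f->refs = 1;
	f->seq = 42;
	remember_last_unlocked(&last, f);
	fence_put(f); /* drop the submit-side reference */
	printf("last seq %d, refs %d\n", last->seq, last->refs);
	fence_put(last);
	return 0;
}
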
static void amdgpu_vm_sdma_copy_ptes(struct amdgpu_vm_update_params *p,
				     struct amdgpu_bo *bo, uint64_t pe, unsigned count)
{
	struct amdgpu_ib *ib = p->job->ibs;
	...
	/* The staged PTE values live at the tail of the IB itself. */
	src += p->num_dw_left * 4;
	...
	trace_amdgpu_vm_copy_ptes(pe, src, count, p->immediate);
	amdgpu_vm_copy_pte(p->adev, ib, pe, src, count);
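
The copy source is not some external buffer: it points num_dw_left dwords into the IB itself, at the tail region where amdgpu_vm_sdma_update() staged the PTE values. A minimal sketch of that address arithmetic, with made-up numbers:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t ib_gpu_addr = 0x100000; /* assumed GPU address of the IB */
	unsigned int num_dw_left = 2040; /* dwords still unused at the tail */

	/* Dwords are 4 bytes, so the staged PTEs start this far in. */
	uint64_t src = ib_gpu_addr + (uint64_t)num_dw_left * 4;

	printf("copy source: 0x%llx\n", (unsigned long long)src);
	return 0;
}
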
static void amdgpu_vm_sdma_set_ptes(struct amdgpu_vm_update_params *p,
				    struct amdgpu_bo *bo, uint64_t pe, uint64_t addr,
				    unsigned count, uint32_t incr, uint64_t flags)
{
	struct amdgpu_ib *ib = p->job->ibs;
	...
	trace_amdgpu_vm_set_ptes(pe, addr, count, incr, flags, p->immediate);
	/* Short runs are written inline, longer ones as a stride fill. */
	if (count < 3)
		amdgpu_vm_write_pte(p->adev, ib, pe, addr | flags, count, incr);
	else
		amdgpu_vm_set_pte_pde(p->adev, ib, pe, addr, count, incr, flags);
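
Either path produces the same logical series: count PTEs starting at pe, mapping addr, addr + incr, addr + 2*incr and so on, with flags OR'ed into each value; only the encoding differs (inline values versus a generated stride fill). A CPU-side sketch that expands such a run; the flag constant is made up:

#include <stdint.h>
#include <stdio.h>

/* Expand an (addr, count, incr, flags) run into the individual PTE
 * values the SDMA would write into the page table. */
static void expand_run(uint64_t *pte, uint64_t addr, unsigned count,
		       uint32_t incr, uint64_t flags)
{
	for (unsigned i = 0; i < count; ++i, addr += incr)
		pte[i] = addr | flags;
}

int main(void)
{
	uint64_t pte[4];

	expand_run(pte, 0x200000, 4, 0x1000, 0x1 /* hypothetical VALID flag */);
	for (int i = 0; i < 4; ++i)
		printf("pte[%d] = 0x%llx\n", i, (unsigned long long)pte[i]);
	return 0;
}
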
static int amdgpu_vm_sdma_update(struct amdgpu_vm_update_params *p,
				 struct amdgpu_bo_vm *vmbo, uint64_t pe, uint64_t addr,
				 unsigned count, uint32_t incr, uint64_t flags)
{
	...
	/* Depend on any pending moves of the page-table BO. */
	r = drm_sched_job_add_dependency(&p->job->base, fence);
	...
	do {
		/* Dwords still free in the current IB; when almost none
		 * remain, submit the job and allocate a fresh one. */
		ndw = p->num_dw_left;
		ndw -= p->job->ibs->length_dw;
		...
		r = amdgpu_vm_sdma_commit(p, NULL);
		...
		r = amdgpu_vm_sdma_alloc_job(p, count);
		...
		if (!p->pages_addr) {
			/* No GART translation needed: write the PTEs directly. */
			if (vmbo->shadow)
				amdgpu_vm_sdma_set_ptes(p, vmbo->shadow, pe, addr,
							count, incr, flags);
			amdgpu_vm_sdma_set_ptes(p, bo, pe, addr, count,
						incr, flags);
			...
		}
		/* Otherwise reserve room for the copy commands (twice with a
		 * shadow BO), stash the GART-translated PTEs at the tail of
		 * the IB and have the SDMA copy them into the page table. */
		ndw -= p->adev->vm_manager.vm_pte_funcs->copy_pte_num_dw *
			(vmbo->shadow ? 2 : 1);
		...
		p->num_dw_left -= nptes * 2;
		pte = (uint64_t *)&(p->job->ibs->ptr[p->num_dw_left]);
		...
		pte[i] = amdgpu_vm_map_gart(p->pages_addr, addr);
		...
		if (vmbo->shadow)
			amdgpu_vm_sdma_copy_ptes(p, vmbo->shadow, pe, nptes);
		amdgpu_vm_sdma_copy_ptes(p, bo, pe, nptes);
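
Stripped of driver detail, the loop is batch production: compute how many entries still fit in the current IB, emit at most that many, advance pe by eight bytes per PTE written and continue until count reaches zero, committing and reallocating the job whenever the IB fills up. A condensed CPU-only sketch of that chunking; IB_ROOM_DW is an arbitrary example budget:

#include <stdint.h>
#include <stdio.h>

#define IB_ROOM_DW 32u /* made-up per-IB budget for the example */

int main(void)
{
	unsigned int count = 100; /* PTEs to write */
	uint64_t pe = 0x4000;     /* page-table offset, 8 bytes per PTE */

	while (count) {
		/* Two dwords of IB space per staged 64-bit PTE value. */
		unsigned int nptes = IB_ROOM_DW / 2;

		if (nptes > count)
			nptes = count;

		printf("batch: %3u PTEs at pe 0x%llx\n",
		       nptes, (unsigned long long)pe);

		pe += (uint64_t)nptes * 8; /* next destination in the table */
		count -= nptes;
		/* in the driver, a full IB would be committed here and a
		 * new job allocated before the next batch */
	}
	return 0;
}
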