Lines matching refs: va

68 	} va;  member
172 u64 addr = reg->va.addr; in nouveau_uvma_region_sparse_unref()
173 u64 range = reg->va.range; in nouveau_uvma_region_sparse_unref()
181 u64 addr = uvma->va.va.addr; in nouveau_uvma_vmm_put()
182 u64 range = uvma->va.va.range; in nouveau_uvma_vmm_put()
191 u64 addr = uvma->va.va.addr; in nouveau_uvma_map()
192 u64 offset = uvma->va.gem.offset; in nouveau_uvma_map()
193 u64 range = uvma->va.va.range; in nouveau_uvma_map()
202 u64 addr = uvma->va.va.addr; in nouveau_uvma_unmap()
203 u64 range = uvma->va.va.range; in nouveau_uvma_unmap()
206 if (drm_gpuva_invalidated(&uvma->va)) in nouveau_uvma_unmap()
231 drm_gem_object_get(uvma->va.gem.obj); in nouveau_uvma_gem_get()
237 drm_gem_object_put(uvma->va.gem.obj); in nouveau_uvma_gem_put()
277 u64 addr = reg->va.addr; in __nouveau_uvma_region_insert()
278 u64 range = reg->va.range; in __nouveau_uvma_region_insert()
306 reg->va.addr = addr; in nouveau_uvma_region_insert()
307 reg->va.range = range; in nouveau_uvma_region_insert()
320 MA_STATE(mas, &uvmm->region_mt, reg->va.addr, 0); in nouveau_uvma_region_remove()
375 if (reg->va.addr != addr || in nouveau_uvma_region_find()
376 reg->va.range != range) in nouveau_uvma_region_find()
388 reg->va.addr, in nouveau_uvma_region_empty()
389 reg->va.range); in nouveau_uvma_region_empty()
396 u64 addr = reg->va.addr; in __nouveau_uvma_region_destroy()
397 u64 range = reg->va.range; in __nouveau_uvma_region_destroy()
440 drm_gpuva_remove(&uvma->va); in op_map_prepare_unwind()
445 op_unmap_prepare_unwind(struct drm_gpuva *va) in op_unmap_prepare_unwind() argument
447 drm_gpuva_insert(va->mgr, va); in op_unmap_prepare_unwind()
476 op_unmap_prepare_unwind(r->unmap->va); in nouveau_uvmm_sm_prepare_unwind()
480 op_unmap_prepare_unwind(op->unmap.va); in nouveau_uvmm_sm_prepare_unwind()
505 struct drm_gpuva *va = r->unmap->va; in nouveau_uvmm_sm_prepare_unwind() local
506 u64 ustart = va->va.addr; in nouveau_uvmm_sm_prepare_unwind()
507 u64 urange = va->va.range; in nouveau_uvmm_sm_prepare_unwind()
523 struct drm_gpuva *va = u->va; in nouveau_uvmm_sm_prepare_unwind() local
524 u64 ustart = va->va.addr; in nouveau_uvmm_sm_prepare_unwind()
525 u64 urange = va->va.range; in nouveau_uvmm_sm_prepare_unwind()
592 drm_gpuva_map(&uvmm->umgr, &uvma->va, op); in op_map_prepare()
639 struct drm_gpuva *va = r->unmap->va; in nouveau_uvmm_sm_prepare() local
641 .kind = uvma_from_va(va)->kind, in nouveau_uvmm_sm_prepare()
642 .region = uvma_from_va(va)->region, in nouveau_uvmm_sm_prepare()
644 u64 ustart = va->va.addr; in nouveau_uvmm_sm_prepare()
645 u64 urange = va->va.range; in nouveau_uvmm_sm_prepare()
680 struct drm_gpuva *va = u->va; in nouveau_uvmm_sm_prepare() local
681 u64 ustart = va->va.addr; in nouveau_uvmm_sm_prepare()
682 u64 urange = va->va.range; in nouveau_uvmm_sm_prepare()
701 op_unmap_prepare_unwind(va); in nouveau_uvmm_sm_prepare()
761 return op->remap.unmap->va->gem.obj; in op_gem_obj()
763 return op->unmap.va->gem.obj; in op_gem_obj()
773 struct nouveau_bo *nvbo = nouveau_gem_object(uvma->va.gem.obj); in op_map()
781 struct drm_gpuva *va = u->va; in op_unmap() local
782 struct nouveau_uvma *uvma = uvma_from_va(va); in op_unmap()
793 struct nouveau_uvma *uvma = uvma_from_va(u->va); in op_unmap_range()
796 if (!drm_gpuva_invalidated(u->va)) in op_unmap_range()
805 struct nouveau_uvma *uvma = uvma_from_va(u->va); in op_remap()
806 u64 addr = uvma->va.va.addr; in op_remap()
807 u64 end = uvma->va.va.addr + uvma->va.va.range; in op_remap()
810 addr = r->prev->va.addr + r->prev->va.range; in op_remap()
813 end = r->next->va.addr; in op_remap()
875 struct drm_gpuva *va = r->unmap->va; in nouveau_uvmm_sm_cleanup() local
876 struct nouveau_uvma *uvma = uvma_from_va(va); in nouveau_uvmm_sm_cleanup()
879 u64 addr = va->va.addr; in nouveau_uvmm_sm_cleanup()
880 u64 end = addr + va->va.range; in nouveau_uvmm_sm_cleanup()
883 addr = p->va.addr + p->va.range; in nouveau_uvmm_sm_cleanup()
886 end = n->va.addr; in nouveau_uvmm_sm_cleanup()
897 struct drm_gpuva *va = u->va; in nouveau_uvmm_sm_cleanup() local
898 struct nouveau_uvma *uvma = uvma_from_va(va); in nouveau_uvmm_sm_cleanup()
1004 if (op->va.range > (obj->size - op->gem.offset)) in bind_validate_op()
1008 return nouveau_uvmm_validate_range(uvmm, op->va.addr, op->va.range); in bind_validate_op()
1024 u64 op_addr = op->va.addr; in bind_validate_map_sparse()
1025 u64 op_end = op_addr + op->va.range; in bind_validate_map_sparse()
1072 reg_addr = reg->va.addr; in bind_validate_map_common()
1073 reg_end = reg_addr + reg->va.range; in bind_validate_map_common()
1092 u64 op_addr = op->va.addr; in bind_validate_region()
1093 u64 op_range = op->va.range; in bind_validate_region()
1123 drm_gpuva_link(&new->map->va); in bind_link_gpuvas()
1127 drm_gpuva_link(&new->prev->va); in bind_link_gpuvas()
1129 drm_gpuva_link(&new->next->va); in bind_link_gpuvas()
1130 drm_gpuva_unlink(op->remap.unmap->va); in bind_link_gpuvas()
1133 drm_gpuva_unlink(op->unmap.va); in bind_link_gpuvas()
1183 op->va.addr, in nouveau_uvmm_bind_job_submit()
1184 op->va.range); in nouveau_uvmm_bind_job_submit()
1190 op->reg = nouveau_uvma_region_find(uvmm, op->va.addr, in nouveau_uvmm_bind_job_submit()
1191 op->va.range); in nouveau_uvmm_bind_job_submit()
1198 op->va.addr, in nouveau_uvmm_bind_job_submit()
1199 op->va.range); in nouveau_uvmm_bind_job_submit()
1221 op->va.addr, in nouveau_uvmm_bind_job_submit()
1222 op->va.range); in nouveau_uvmm_bind_job_submit()
1224 u64 reg_addr = reg->va.addr; in nouveau_uvmm_bind_job_submit()
1225 u64 reg_end = reg_addr + reg->va.range; in nouveau_uvmm_bind_job_submit()
1226 u64 op_addr = op->va.addr; in nouveau_uvmm_bind_job_submit()
1227 u64 op_end = op_addr + op->va.range; in nouveau_uvmm_bind_job_submit()
1244 op->va.addr, in nouveau_uvmm_bind_job_submit()
1245 op->va.range, in nouveau_uvmm_bind_job_submit()
1255 op->va.addr, in nouveau_uvmm_bind_job_submit()
1256 op->va.range, in nouveau_uvmm_bind_job_submit()
1268 op->va.addr, in nouveau_uvmm_bind_job_submit()
1269 op->va.range); in nouveau_uvmm_bind_job_submit()
1389 nouveau_uvma_region_destroy(uvmm, op->va.addr, in nouveau_uvmm_bind_job_submit()
1390 op->va.range); in nouveau_uvmm_bind_job_submit()
1400 op->va.addr, in nouveau_uvmm_bind_job_submit()
1401 op->va.range); in nouveau_uvmm_bind_job_submit()
1579 op->va.addr = uop->addr; in bind_job_op_from_uop()
1580 op->va.range = uop->range; in bind_job_op_from_uop()
1781 struct drm_gpuva *va; in nouveau_uvmm_bo_map_all() local
1785 drm_gem_for_each_gpuva(va, obj) { in nouveau_uvmm_bo_map_all()
1786 struct nouveau_uvma *uvma = uvma_from_va(va); in nouveau_uvmm_bo_map_all()
1789 drm_gpuva_invalidate(va, false); in nouveau_uvmm_bo_map_all()
1797 struct drm_gpuva *va; in nouveau_uvmm_bo_unmap_all() local
1801 drm_gem_for_each_gpuva(va, obj) { in nouveau_uvmm_bo_unmap_all()
1802 struct nouveau_uvma *uvma = uvma_from_va(va); in nouveau_uvmm_bo_unmap_all()
1805 drm_gpuva_invalidate(va, true); in nouveau_uvmm_bo_unmap_all()
1873 struct drm_gpuva *va, *next; in nouveau_uvmm_fini() local
1882 drm_gpuva_for_each_va_safe(va, next, &uvmm->umgr) { in nouveau_uvmm_fini()
1883 struct nouveau_uvma *uvma = uvma_from_va(va); in nouveau_uvmm_fini()
1884 struct drm_gem_object *obj = va->gem.obj; in nouveau_uvmm_fini()
1886 if (unlikely(va == &uvmm->umgr.kernel_alloc_node)) in nouveau_uvmm_fini()
1889 drm_gpuva_remove(va); in nouveau_uvmm_fini()
1892 drm_gpuva_unlink(va); in nouveau_uvmm_fini()
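
The doubled ".va" in accesses such as uvma->va.va.addr above falls out of the embedding: struct nouveau_uvma embeds a struct drm_gpuva member named va, and drm_gpuva itself keeps its address/range in a nested va struct, while struct nouveau_uvma_region stores its range in a flat va member (hence reg->va.addr). The standalone C sketch below illustrates that layout and the container_of-style conversion behind uvma_from_va(); the structures are trimmed to the fields this listing touches and are an approximation for illustration, not the exact kernel definitions.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t u64;
typedef uint8_t  u8;

/* Trimmed stand-ins for the kernel types referenced in the listing. */
struct drm_gem_object { size_t size; };

/* Generic GPU VA node; note its own nested 'va' holding addr/range. */
struct drm_gpuva {
	struct {
		u64 addr;   /* start of the mapping */
		u64 range;  /* length of the mapping */
	} va;
	struct {
		u64 offset;                 /* offset into the backing GEM object */
		struct drm_gem_object *obj; /* backing GEM object */
	} gem;
};

/* nouveau_uvma embeds a drm_gpuva called 'va', hence uvma->va.va.addr. */
struct nouveau_uvma {
	struct drm_gpuva va;
	u8 kind;
};

/* nouveau_uvma_region keeps a flat 'va' range, hence reg->va.addr. */
struct nouveau_uvma_region {
	struct {
		u64 addr;
		u64 range;
	} va;
};

/* container_of-style conversion, as used by uvma_from_va() in the listing. */
static struct nouveau_uvma *uvma_from_va(struct drm_gpuva *va)
{
	return (struct nouveau_uvma *)((char *)va -
				       offsetof(struct nouveau_uvma, va));
}

int main(void)
{
	struct nouveau_uvma uvma = {
		.va = { .va = { .addr = 0x1000, .range = 0x2000 } },
	};
	struct drm_gpuva *va = &uvma.va;

	/* Same access patterns as in the listing above. */
	printf("addr=0x%llx range=0x%llx uvma=%p\n",
	       (unsigned long long)uvma.va.va.addr,
	       (unsigned long long)va->va.range,
	       (void *)uvma_from_va(va));
	return 0;
}

Compiled on its own, the sketch prints the same addr/range values through both the nouveau_uvma and the drm_gpuva views, which is all the doubled .va notation in the references above amounts to.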