/openbmc/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
vmm.c
     75  struct nvkm_vmm *vmm;    member
    113  VMM_TRACE(_it->vmm, "%s "f, _buf, ##a); \
    129  if (it->vmm->func->flush) {    in nvkm_vmm_flush()
    131  it->vmm->func->flush(it->vmm, it->flush);    in nvkm_vmm_flush()
    145  struct nvkm_vmm *vmm = it->vmm;    in nvkm_vmm_unref_pdes() local
    159  func->sparse(vmm, pgd->pt[0], pdei, 1);    in nvkm_vmm_unref_pdes()
    162  func->unmap(vmm, pgd->pt[0], pdei, 1);    in nvkm_vmm_unref_pdes()
    170  func->pde(vmm, pgd, pdei);    in nvkm_vmm_unref_pdes()
    177  func->pde(vmm, pgd, pdei);    in nvkm_vmm_unref_pdes()
    190  nvkm_mmu_ptc_put(vmm->mmu, vmm->bootstrapped, &pt);    in nvkm_vmm_unref_pdes()
    [all …]
uvmm.c
     42  return nvkm_vmm_ref(nvkm_uvmm(object)->vmm);    in nvkm_uvmm_search()
     51  struct nvkm_vmm *vmm = uvmm->vmm;    in nvkm_uvmm_mthd_pfnclr() local
     61  if (nvkm_vmm_in_managed_range(vmm, addr, size) && vmm->managed.raw)    in nvkm_uvmm_mthd_pfnclr()
     65  mutex_lock(&vmm->mutex.vmm);    in nvkm_uvmm_mthd_pfnclr()
     66  ret = nvkm_vmm_pfn_unmap(vmm, addr, size);    in nvkm_uvmm_mthd_pfnclr()
     67  mutex_unlock(&vmm->mutex.vmm);    in nvkm_uvmm_mthd_pfnclr()
     79  struct nvkm_vmm *vmm = uvmm->vmm;    in nvkm_uvmm_mthd_pfnmap() local
     94  if (nvkm_vmm_in_managed_range(vmm, addr, size) && vmm->managed.raw)    in nvkm_uvmm_mthd_pfnmap()
     98  mutex_lock(&vmm->mutex.vmm);    in nvkm_uvmm_mthd_pfnmap()
     99  ret = nvkm_vmm_pfn_map(vmm, page, addr, size, phys);    in nvkm_uvmm_mthd_pfnmap()
    [all …]
vmmnv44.c
     27  nv44_vmm_pgt_fill(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in nv44_vmm_pgt_fill() argument
     39  u32 addr = (list ? *list++ : vmm->null) >> 12;    in nv44_vmm_pgt_fill()
     66  VMM_WO032(pt, vmm, pteo + 0x0, tmp[0]);    in nv44_vmm_pgt_fill()
     67  VMM_WO032(pt, vmm, pteo + 0x4, tmp[1]);    in nv44_vmm_pgt_fill()
     68  VMM_WO032(pt, vmm, pteo + 0x8, tmp[2]);    in nv44_vmm_pgt_fill()
     69  VMM_WO032(pt, vmm, pteo + 0xc, tmp[3] | 0x40000000);    in nv44_vmm_pgt_fill()
     73  nv44_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in nv44_vmm_pgt_pte() argument
     82  nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, pten);    in nv44_vmm_pgt_pte()
     90  VMM_WO032(pt, vmm, ptei++ * 4, tmp[0] >> 0 | tmp[1] << 27);    in nv44_vmm_pgt_pte()
     91  VMM_WO032(pt, vmm, ptei++ * 4, tmp[1] >> 5 | tmp[2] << 22);    in nv44_vmm_pgt_pte()
    [all …]
vmmnv50.c
     32  nv50_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in nv50_vmm_pgt_pte() argument
     53  VMM_WO064(pt, vmm, ptei++ * 8, data);    in nv50_vmm_pgt_pte()
     58  nv50_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in nv50_vmm_pgt_sgl() argument
     61  VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);    in nv50_vmm_pgt_sgl()
     65  nv50_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in nv50_vmm_pgt_dma() argument
     69  VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);    in nv50_vmm_pgt_dma()
     73  VMM_WO064(pt, vmm, ptei++ * 8, data);    in nv50_vmm_pgt_dma()
     80  VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);    in nv50_vmm_pgt_dma()
     84  nv50_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in nv50_vmm_pgt_mem() argument
     87  VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte);    in nv50_vmm_pgt_mem()
    [all …]
vmmgp100.c
     34  gp100_vmm_pfn_unmap(struct nvkm_vmm *vmm,    in gp100_vmm_pfn_unmap() argument
     37  struct device *dev = vmm->mmu->subdev.device->dev;    in gp100_vmm_pfn_unmap()
     55  gp100_vmm_pfn_clear(struct nvkm_vmm *vmm,    in gp100_vmm_pfn_clear() argument
     65  VMM_WO064(pt, vmm, ptei * 8, data & ~BIT_ULL(0));    in gp100_vmm_pfn_clear()
     75  gp100_vmm_pgt_pfn(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in gp100_vmm_pgt_pfn() argument
     78  struct device *dev = vmm->mmu->subdev.device->dev;    in gp100_vmm_pgt_pfn()
    109  VMM_WO064(pt, vmm, ptei++ * 8, data);    in gp100_vmm_pgt_pfn()
    115  gp100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in gp100_vmm_pgt_pte() argument
    123  VMM_WO064(pt, vmm, ptei++ * 8, data);    in gp100_vmm_pgt_pte()
    129  gp100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in gp100_vmm_pgt_sgl() argument
    [all …]
vmmgf100.c
     32  gf100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in gf100_vmm_pgt_pte() argument
     44  VMM_WO064(pt, vmm, ptei++ * 8, data);    in gf100_vmm_pgt_pte()
     51  VMM_WO064(pt, vmm, ptei++ * 8, data);    in gf100_vmm_pgt_pte()
     58  gf100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in gf100_vmm_pgt_sgl() argument
     61  VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);    in gf100_vmm_pgt_sgl()
     65  gf100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in gf100_vmm_pgt_dma() argument
     69  VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes);    in gf100_vmm_pgt_dma()
     73  VMM_WO064(pt, vmm, ptei++ * 8, data);    in gf100_vmm_pgt_dma()
     80  VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);    in gf100_vmm_pgt_dma()
     84  gf100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in gf100_vmm_pgt_mem() argument
    [all …]
vmmnv04.c
     28  nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in nv04_vmm_pgt_pte() argument
     33  VMM_WO032(pt, vmm, 8 + ptei++ * 4, data);    in nv04_vmm_pgt_pte()
     39  nv04_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in nv04_vmm_pgt_sgl() argument
     42  VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);    in nv04_vmm_pgt_sgl()
     46  nv04_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in nv04_vmm_pgt_dma() argument
     52  VMM_WO032(pt, vmm, 8 + (ptei++ * 4), *map->dma++ | 0x00000003);    in nv04_vmm_pgt_dma()
     55  VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);    in nv04_vmm_pgt_dma()
     60  nv04_vmm_pgt_unmap(struct nvkm_vmm *vmm,    in nv04_vmm_pgt_unmap() argument
     63  VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes);    in nv04_vmm_pgt_unmap()
     80  nv04_vmm_valid(struct nvkm_vmm *vmm, void *argv, u32 argc,    in nv04_vmm_valid() argument
    [all …]
vmmnv41.c
     27  nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in nv41_vmm_pgt_pte() argument
     32  VMM_WO032(pt, vmm, ptei++ * 4, data);    in nv41_vmm_pgt_pte()
     38  nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in nv41_vmm_pgt_sgl() argument
     41  VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);    in nv41_vmm_pgt_sgl()
     45  nv41_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,    in nv41_vmm_pgt_dma() argument
     52  VMM_WO032(pt, vmm, ptei++ * 4, data);    in nv41_vmm_pgt_dma()
     56  VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);    in nv41_vmm_pgt_dma()
     61  nv41_vmm_pgt_unmap(struct nvkm_vmm *vmm,    in nv41_vmm_pgt_unmap() argument
     64  VMM_FO032(pt, vmm, ptei * 4, 0, ptes);    in nv41_vmm_pgt_unmap()
     81  nv41_vmm_flush(struct nvkm_vmm *vmm, int level)    in nv41_vmm_flush() argument
    [all …]
vmmgm200.c
     28  gm200_vmm_pgt_sparse(struct nvkm_vmm *vmm,    in gm200_vmm_pgt_sparse() argument
     32  VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes);    in gm200_vmm_pgt_sparse()
     53  gm200_vmm_pgd_sparse(struct nvkm_vmm *vmm,    in gm200_vmm_pgd_sparse() argument
     57  VMM_FO064(pt, vmm, pdei * 8, BIT_ULL(35) /* VOL_BIG. */, pdes);    in gm200_vmm_pgd_sparse()
     96  gm200_vmm_join_(struct nvkm_vmm *vmm, struct nvkm_memory *inst, u64 base)    in gm200_vmm_join_() argument
     98  if (vmm->func->page[1].shift == 16)    in gm200_vmm_join_()
    100  return gf100_vmm_join_(vmm, inst, base);    in gm200_vmm_join_()
    104  gm200_vmm_join(struct nvkm_vmm *vmm, struct nvkm_memory *inst)    in gm200_vmm_join() argument
    106  return gm200_vmm_join_(vmm, inst, 0);    in gm200_vmm_join()
vmmtu102.c
     27  tu102_vmm_flush(struct nvkm_vmm *vmm, int depth)    in tu102_vmm_flush() argument
     29  struct nvkm_device *device = vmm->mmu->subdev.device;    in tu102_vmm_flush()
     33  if (atomic_read(&vmm->engref[NVKM_SUBDEV_BAR]))    in tu102_vmm_flush()
     36  mutex_lock(&vmm->mmu->mutex);    in tu102_vmm_flush()
     38  nvkm_wr32(device, 0xb830a0, vmm->pd->pt[0]->addr >> 8);    in tu102_vmm_flush()
     48  mutex_unlock(&vmm->mmu->mutex);    in tu102_vmm_flush()
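The per-generation vmm*.c backends listed above share one pattern: a pgt_pte()-style helper walks a run of PTE slots and packs a physical address plus attribute bits into each entry through the VMM_WO032()/VMM_WO064() write macros, advancing the address by one page per entry. Below is a minimal self-contained sketch of that loop, not the nouveau code itself: the mmu_pt structure, the pt_wo064() helper, and the bit packing are stand-ins invented for illustration.

#include <stdint.h>
#include <stdio.h>

/* Stand-in for struct nvkm_mmu_pt: one 512-entry page table. */
struct mmu_pt {
    uint64_t entry[512];
};

/* Stand-in for VMM_WO064(): write one 64-bit entry at a byte offset. */
static void pt_wo064(struct mmu_pt *pt, uint32_t byteoff, uint64_t data)
{
    pt->entry[byteoff / 8] = data;
}

/*
 * Model of a *_vmm_pgt_pte() helper: starting at PTE index "ptei", write
 * "ptes" entries, packing the physical address and attribute flags into
 * each one and stepping the address by one page per entry.
 */
static void pgt_pte(struct mmu_pt *pt, uint32_t ptei, uint32_t ptes,
                    uint64_t addr, uint32_t page_shift, uint64_t flags)
{
    while (ptes--) {
        uint64_t data = (addr >> 8) | flags;    /* illustrative packing only */

        pt_wo064(pt, ptei++ * 8, data);
        addr += 1ULL << page_shift;             /* next page of the mapping */
    }
}

int main(void)
{
    struct mmu_pt pt = { 0 };
    uint32_t i;

    pgt_pte(&pt, 4, 3, 0x10000000, 12, 0x1 /* "valid" bit */);
    for (i = 4; i < 7; i++)
        printf("pte[%u] = 0x%llx\n", i, (unsigned long long)pt.entry[i]);
    return 0;
}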
/openbmc/linux/drivers/gpu/drm/nouveau/
nouveau_vmm.c
     32  nvif_vmm_unmap(&vma->vmm->vmm, vma->addr);    in nouveau_vma_unmap()
     41  int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp);    in nouveau_vma_map()
     49  nouveau_vma_find(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm)    in nouveau_vma_find() argument
     54  if (vma->vmm == vmm)    in nouveau_vma_find()
     68  nvif_vmm_put(&vma->vmm->vmm, &tmp);    in nouveau_vma_del()
     77  nouveau_vma_new(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm,    in nouveau_vma_new() argument
     85  if ((vma = *pvma = nouveau_vma_find(nvbo, vmm))) {    in nouveau_vma_new()
     92  vma->vmm = vmm;    in nouveau_vma_new()
    101  ret = nvif_vmm_get(&vmm->vmm, LAZY, false, mem->mem.page, 0,    in nouveau_vma_new()
    109  ret = nvif_vmm_get(&vmm->vmm, PTES, false, mem->mem.page, 0,    in nouveau_vma_new()
    [all …]
nouveau_svm.c
    105  NV_DEBUG((s)->vmm->cli->drm, "svm-%p: "f"\n", (s), ##a)
    107  NV_WARN((s)->vmm->cli->drm, "svm-%p: "f"\n", (s), ##a)
    213  mutex_lock(&svmm->vmm->cli->drm->svm->mutex);    in nouveau_svmm_part()
    214  ivmm = nouveau_ivmm_find(svmm->vmm->cli->drm->svm, inst);    in nouveau_svmm_part()
    219  mutex_unlock(&svmm->vmm->cli->drm->svm->mutex);    in nouveau_svmm_part()
    234  mutex_lock(&svmm->vmm->cli->drm->svm->mutex);    in nouveau_svmm_join()
    235  list_add(&ivmm->head, &svmm->vmm->cli->drm->svm->inst);    in nouveau_svmm_join()
    236  mutex_unlock(&svmm->vmm->cli->drm->svm->mutex);    in nouveau_svmm_join()
    246  nvif_object_mthd(&svmm->vmm->vmm.object, NVIF_VMM_V0_PFNCLR,    in nouveau_svmm_invalidate()
    269  if (unlikely(!svmm->vmm))    in nouveau_svmm_invalidate_range_start()
    [all …]
nouveau_chan.c
     99  nouveau_svmm_part(chan->vmm->svmm, chan->inst);    in nouveau_channel_del()
    162  chan->vmm = nouveau_cli_vmm(cli);    in nouveau_channel_prep()
    198  ret = nouveau_vma_new(chan->push.buffer, chan->vmm,    in nouveau_channel_prep()
    213  args.limit = chan->vmm->vmm.limit - 1;    in nouveau_channel_prep()
    243  args.limit = chan->vmm->vmm.limit - 1;    in nouveau_channel_prep()
    316  args.chan.vmm = 0;    in nouveau_channel_ctor()
    321  args.chan.vmm = nvif_handle(&chan->vmm->vmm.object);    in nouveau_channel_ctor()
    404  args.limit = chan->vmm->vmm.limit - 1;    in nouveau_channel_init()
    422  args.limit = chan->vmm->vmm.limit - 1;    in nouveau_channel_init()
    434  args.limit = chan->vmm->vmm.limit - 1;    in nouveau_channel_init()
    [all …]
nouveau_mem.c
     38  struct nvif_vmm *vmm, struct nvif_vma *vma)    in nouveau_mem_map() argument
     46  switch (vmm->object.oclass) {    in nouveau_mem_map()
     75  return nvif_vmm_map(vmm, vma->addr, mem->mem.size, &args, argc, &mem->mem, 0);    in nouveau_mem_map()
     81  nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[1]);    in nouveau_mem_fini()
     82  nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[0]);    in nouveau_mem_fini()
nouveau_bo.c
    207  struct nvif_vmm *vmm = &nouveau_cli_vmm(cli)->vmm;    in nouveau_bo_alloc() local
    260  for (i = 0; i < vmm->page_nr; i++) {    in nouveau_bo_alloc()
    269  (domain & NOUVEAU_GEM_DOMAIN_VRAM) && !vmm->page[i].vram)    in nouveau_bo_alloc()
    272  (!vmm->page[i].host || vmm->page[i].shift > PAGE_SHIFT))    in nouveau_bo_alloc()
    279  if (pi < 0 || !nvbo->comp || vmm->page[i].comp)    in nouveau_bo_alloc()
    283  if (*size >= 1ULL << vmm->page[i].shift)    in nouveau_bo_alloc()
    293  if (nvbo->comp && !vmm->page[pi].comp) {    in nouveau_bo_alloc()
    298  nvbo->page = vmm->page[pi].shift;    in nouveau_bo_alloc()
    301  for (i = 0; i < vmm->page_nr; i++) {    in nouveau_bo_alloc()
    309  if ((domain & NOUVEAU_GEM_DOMAIN_VRAM) && !vmm->page[i].vram)    in nouveau_bo_alloc()
    [all …]
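The nouveau_bo_alloc() excerpts above show how a buffer's page size is chosen: walk the VMM's page-size table, skip sizes the target memory domain cannot use, and stop at the first size the buffer can fill completely. The sketch below is a simplified standalone model of that selection, assuming the table is ordered from largest to smallest shift; the page_desc type and pick_page_index() helper are hypothetical, and the real code's compression handling is omitted.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Stand-in for one entry of the VMM's page-size table. */
struct page_desc {
    uint8_t shift;  /* page size is 1ULL << shift */
    bool vram;      /* usable for VRAM placements */
    bool host;      /* usable for host (system memory) placements */
};

/*
 * Pick the index of the largest usable page size; stop as soon as the
 * buffer is at least one full page of the current size, since the table
 * runs from largest to smallest.
 */
static int pick_page_index(const struct page_desc *page, int page_nr,
                           uint64_t size, bool want_vram, uint8_t cpu_page_shift)
{
    int pi = -1;
    int i;

    for (i = 0; i < page_nr; i++) {
        if (want_vram && !page[i].vram)
            continue;       /* this size has no VRAM support */
        if (!want_vram &&
            (!page[i].host || page[i].shift > cpu_page_shift))
            continue;       /* host mappings can't exceed the CPU page size */

        pi = i;             /* best usable size so far */
        if (size >= 1ULL << page[i].shift)
            break;          /* buffer fills a whole page of this size */
    }
    return pi;
}

int main(void)
{
    const struct page_desc page[] = {
        { 29, true,  false },   /* 512MiB, VRAM only */
        { 16, true,  false },   /* 64KiB,  VRAM only */
        { 12, true,  true  },   /* 4KiB,   VRAM + host */
    };

    /* A 1MiB VRAM buffer is too small for 512MiB pages, so 64KiB wins. */
    printf("page index: %d\n",
           pick_page_index(page, 3, 1ULL << 20, true, 12));
    return 0;
}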
nouveau_uvmm.c
     92  struct nvif_vmm *vmm = &uvmm->vmm.vmm;    in nouveau_uvmm_vmm_sparse_ref() local
     94  return nvif_vmm_raw_sparse(vmm, addr, range, true);    in nouveau_uvmm_vmm_sparse_ref()
    101  struct nvif_vmm *vmm = &uvmm->vmm.vmm;    in nouveau_uvmm_vmm_sparse_unref() local
    103  return nvif_vmm_raw_sparse(vmm, addr, range, false);    in nouveau_uvmm_vmm_sparse_unref()
    110  struct nvif_vmm *vmm = &uvmm->vmm.vmm;    in nouveau_uvmm_vmm_get() local
    112  return nvif_vmm_raw_get(vmm, addr, range, PAGE_SHIFT);    in nouveau_uvmm_vmm_get()
    119  struct nvif_vmm *vmm = &uvmm->vmm.vmm;    in nouveau_uvmm_vmm_put() local
    121  return nvif_vmm_raw_put(vmm, addr, range, PAGE_SHIFT);    in nouveau_uvmm_vmm_put()
    128  struct nvif_vmm *vmm = &uvmm->vmm.vmm;    in nouveau_uvmm_vmm_unmap() local
    130  return nvif_vmm_raw_unmap(vmm, addr, range, PAGE_SHIFT, sparse);    in nouveau_uvmm_vmm_unmap()
    [all …]
/openbmc/linux/drivers/gpu/drm/nouveau/nvif/
vmm.c
     28  nvif_vmm_unmap(struct nvif_vmm *vmm, u64 addr)    in nvif_vmm_unmap() argument
     30  return nvif_object_mthd(&vmm->object, NVIF_VMM_V0_UNMAP,    in nvif_vmm_unmap()
     36  nvif_vmm_map(struct nvif_vmm *vmm, u64 addr, u64 size, void *argv, u32 argc,    in nvif_vmm_map() argument
     57  ret = nvif_object_mthd(&vmm->object, NVIF_VMM_V0_MAP,    in nvif_vmm_map()
     65  nvif_vmm_put(struct nvif_vmm *vmm, struct nvif_vma *vma)    in nvif_vmm_put() argument
     68  WARN_ON(nvif_object_mthd(&vmm->object, NVIF_VMM_V0_PUT,    in nvif_vmm_put()
     77  nvif_vmm_get(struct nvif_vmm *vmm, enum nvif_vmm_get type, bool sparse,    in nvif_vmm_get() argument
     98  ret = nvif_object_mthd(&vmm->object, NVIF_VMM_V0_GET,    in nvif_vmm_get()
    108  nvif_vmm_raw_get(struct nvif_vmm *vmm, u64 addr, u64 size,    in nvif_vmm_raw_get() argument
    119  return nvif_object_mthd(&vmm->object, NVIF_VMM_V0_RAW,    in nvif_vmm_raw_get()
    [all …]
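The nvif_vmm_get()/nvif_vmm_map()/nvif_vmm_unmap()/nvif_vmm_put() wrappers above all forward methods to the VMM object through nvif_object_mthd(), giving callers such as nouveau_vma_new() and nouveau_mem_map() earlier in this listing an allocate, map, unmap, free lifecycle for GPU virtual addresses. The toy model below only illustrates that lifecycle; the toy_vmm types and helpers are invented for the sketch and are not the nvif API.

#include <stdint.h>
#include <stdio.h>

#define TOY_PAGE    0x1000ULL
#define TOY_PAGES   16

/* Toy address space: a bump allocator for VA ranges plus a flat PTE array. */
struct toy_vma { uint64_t addr, size; };
struct toy_vmm {
    uint64_t next;              /* next free virtual address */
    uint64_t pte[TOY_PAGES];    /* 0 = unmapped */
};

/* "get": reserve a range of virtual addresses (no backing yet). */
static int toy_vmm_get(struct toy_vmm *vmm, uint64_t size, struct toy_vma *vma)
{
    if (vmm->next + size > TOY_PAGES * TOY_PAGE)
        return -1;
    vma->addr = vmm->next;
    vma->size = size;
    vmm->next += size;
    return 0;
}

/* "map": point every page of the range at backing memory. */
static void toy_vmm_map(struct toy_vmm *vmm, const struct toy_vma *vma, uint64_t phys)
{
    uint64_t off;

    for (off = 0; off < vma->size; off += TOY_PAGE)
        vmm->pte[(vma->addr + off) / TOY_PAGE] = phys + off;
}

/* "unmap": clear the PTEs; a real "put" would also return the VA range. */
static void toy_vmm_unmap(struct toy_vmm *vmm, const struct toy_vma *vma)
{
    uint64_t off;

    for (off = 0; off < vma->size; off += TOY_PAGE)
        vmm->pte[(vma->addr + off) / TOY_PAGE] = 0;
}

int main(void)
{
    struct toy_vmm vmm = { 0 };
    struct toy_vma vma;

    if (toy_vmm_get(&vmm, 3 * TOY_PAGE, &vma) == 0) {
        toy_vmm_map(&vmm, &vma, 0x80000000ULL);
        printf("va 0x%llx -> pa 0x%llx\n",
               (unsigned long long)vma.addr,
               (unsigned long long)vmm.pte[vma.addr / TOY_PAGE]);
        toy_vmm_unmap(&vmm, &vma);
    }
    return 0;
}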
/openbmc/linux/drivers/gpu/drm/nouveau/nvkm/subdev/bar/
gf100.c
     34  return gf100_bar(base)->bar[1].vmm;    in gf100_bar_bar1_vmm()
     63  return gf100_bar(base)->bar[0].vmm;    in gf100_bar_bar2_vmm()
    103  (bar_nr == 3) ? "bar2" : "bar1", &bar_vm->vmm);    in gf100_bar_oneinit_bar()
    107  atomic_inc(&bar_vm->vmm->engref[NVKM_SUBDEV_BAR]);    in gf100_bar_oneinit_bar()
    108  bar_vm->vmm->debug = bar->base.subdev.debug;    in gf100_bar_oneinit_bar()
    114  ret = nvkm_vmm_boot(bar_vm->vmm);    in gf100_bar_oneinit_bar()
    119  return nvkm_vmm_join(bar_vm->vmm, bar_vm->inst);    in gf100_bar_oneinit_bar()
    153  nvkm_vmm_part(bar->bar[1].vmm, bar->bar[1].inst);    in gf100_bar_dtor()
    154  nvkm_vmm_unref(&bar->bar[1].vmm);    in gf100_bar_dtor()
    157  nvkm_vmm_part(bar->bar[0].vmm, bar->bar[0].inst);    in gf100_bar_dtor()
    [all …]
/openbmc/linux/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
cgrp.c
    102  nvkm_vmm_put(vctx->vmm, &vctx->vma);    in nvkm_cgrp_vctx_put()
    106  if (vctx->vmm) {    in nvkm_cgrp_vctx_put()
    107  atomic_dec(&vctx->vmm->engref[engn->engine->subdev.type]);    in nvkm_cgrp_vctx_put()
    108  nvkm_vmm_unref(&vctx->vmm);    in nvkm_cgrp_vctx_put()
    128  vctx->ectx->engn == engn && vctx->vmm == chan->vmm);    in nvkm_cgrp_vctx_get()
    150  vctx->vmm = nvkm_vmm_ref(chan->vmm);    in nvkm_cgrp_vctx_get()
    155  if (vctx->vmm)    in nvkm_cgrp_vctx_get()
    156  atomic_inc(&vctx->vmm->engref[engn->engine->subdev.type]);    in nvkm_cgrp_vctx_get()
    181  nvkm_vmm_unref(&cgrp->vmm);    in nvkm_cgrp_del()
    219  nvkm_cgrp_new(struct nvkm_runl *runl, const char *name, struct nvkm_vmm *vmm, bool hw,    in nvkm_cgrp_new() argument
    [all …]
chan.c
    104  cctx->vctx->ectx->engn == engn && cctx->vctx->vmm == chan->vmm);    in nvkm_chan_cctx_get()
    285  if (chan->vmm) {    in nvkm_chan_del()
    286  nvkm_vmm_part(chan->vmm, chan->inst->memory);    in nvkm_chan_del()
    287  nvkm_vmm_unref(&chan->vmm);    in nvkm_chan_del()
    347  struct nvkm_cgrp *cgrp, const char *name, bool priv, u32 devm, struct nvkm_vmm *vmm,    in nvkm_chan_new_() argument
    358  (!func->inst->vmm != !vmm) ||    in nvkm_chan_new_()
    365  runl->func->runqs, runq, func->inst->vmm, vmm,    in nvkm_chan_new_()
    393  ret = nvkm_cgrp_new(runl, chan->name, vmm, fifo->func->cgrp.force, &chan->cgrp);    in nvkm_chan_new_()
    401  if (cgrp->runl != runl || cgrp->vmm != vmm) {    in nvkm_chan_new_()
    402  RUNL_DEBUG(runl, "cgrp %d %d", cgrp->runl != runl, cgrp->vmm != vmm);    in nvkm_chan_new_()
    [all …]
ucgrp.c
     86  struct nvkm_vmm *vmm;    in nvkm_ucgrp_new() local
    102  vmm = nvkm_uvmm_search(oclass->client, args->v0.vmm);    in nvkm_ucgrp_new()
    103  if (IS_ERR(vmm))    in nvkm_ucgrp_new()
    104  return PTR_ERR(vmm);    in nvkm_ucgrp_new()
    115  ret = nvkm_cgrp_new(runl, args->v0.name, vmm, true, &ucgrp->cgrp);    in nvkm_ucgrp_new()
    123  nvkm_vmm_unref(&vmm);    in nvkm_ucgrp_new()
/openbmc/linux/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
nv50.c
    120  nv50_instobj_kmap(struct nv50_instobj *iobj, struct nvkm_vmm *vmm)    in nv50_instobj_kmap() argument
    137  while ((ret = nvkm_vmm_get(vmm, 12, size, &bar))) {    in nv50_instobj_kmap()
    158  nvkm_vmm_put(vmm, &ebar);    in nv50_instobj_kmap()
    162  ret = nvkm_memory_map(memory, 0, vmm, bar, NULL, 0);    in nv50_instobj_kmap()
    167  nvkm_vmm_put(vmm, &bar);    in nv50_instobj_kmap()
    178  nvkm_vmm_put(vmm, &iobj->bar);    in nv50_instobj_kmap()
    183  nv50_instobj_map(struct nvkm_memory *memory, u64 offset, struct nvkm_vmm *vmm,    in nv50_instobj_map() argument
    187  return nvkm_memory_map(memory, offset, vmm, vma, argv, argc);    in nv50_instobj_map()
    220  struct nvkm_vmm *vmm;    in nv50_instobj_acquire() local
    240  if ((vmm = nvkm_bar_bar2_vmm(imem->subdev.device))) {    in nv50_instobj_acquire()
    [all …]
/openbmc/linux/drivers/gpu/drm/nouveau/include/nvif/
vmm.h
     50  int nvif_vmm_raw_get(struct nvif_vmm *vmm, u64 addr, u64 size, u8 shift);
     51  int nvif_vmm_raw_put(struct nvif_vmm *vmm, u64 addr, u64 size, u8 shift);
     52  int nvif_vmm_raw_map(struct nvif_vmm *vmm, u64 addr, u64 size, u8 shift,
     54  int nvif_vmm_raw_unmap(struct nvif_vmm *vmm, u64 addr, u64 size,
     56  int nvif_vmm_raw_sparse(struct nvif_vmm *vmm, u64 addr, u64 size, bool ref);
/openbmc/linux/drivers/gpu/drm/
drm_gem_vram_helper.c
    193  struct drm_vram_mm *vmm = dev->vram_mm;    in drm_gem_vram_create() local
    197  if (WARN_ONCE(!vmm, "VRAM MM not initialized"))    in drm_gem_vram_create()
    221  bdev = &vmm->bdev;    in drm_gem_vram_create()
    938  struct drm_vram_mm *vmm = drm_vram_mm_of_bdev(bdev);    in bo_driver_io_mem_reserve() local
    944  mem->bus.offset = (mem->start << PAGE_SHIFT) + vmm->vram_base;    in bo_driver_io_mem_reserve()
    972  struct drm_vram_mm *vmm = entry->dev->vram_mm;    in drm_vram_mm_debugfs() local
    973  struct ttm_resource_manager *man = ttm_manager_type(&vmm->bdev, TTM_PL_VRAM);    in drm_vram_mm_debugfs()
    997  static int drm_vram_mm_init(struct drm_vram_mm *vmm, struct drm_device *dev,    in drm_vram_mm_init() argument
   1002  vmm->vram_base = vram_base;    in drm_vram_mm_init()
   1003  vmm->vram_size = vram_size;    in drm_vram_mm_init()
    [all …]
/openbmc/linux/arch/xtensa/kernel/
syscall.c
     60  struct vm_area_struct *vmm;    in arch_get_unmapped_area() local
     84  for_each_vma(vmi, vmm) {    in arch_get_unmapped_area()
     86  if (addr + len <= vm_start_gap(vmm))    in arch_get_unmapped_area()
     89  addr = vmm->vm_end;    in arch_get_unmapped_area()
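The arch_get_unmapped_area() excerpt above walks the task's VMAs in address order and returns the first gap large enough for the request. A minimal standalone model of that first-fit walk follows; it ignores details such as the stack guard gap that vm_start_gap() accounts for, and the find_unmapped_area() helper here is hypothetical.

#include <stdint.h>
#include <stdio.h>

struct vma { uint64_t start, end; };    /* stand-in for vm_area_struct */

/*
 * First-fit gap search over an address-sorted VMA list: starting at "addr",
 * walk the mappings; if the request fits entirely below the next mapping,
 * take it, otherwise continue just past that mapping's end.
 */
static uint64_t find_unmapped_area(const struct vma *v, int n,
                                   uint64_t addr, uint64_t len)
{
    int i;

    for (i = 0; i < n; i++) {
        if (v[i].end <= addr)
            continue;           /* mapping lies entirely below the hint */
        if (addr + len <= v[i].start)
            return addr;        /* gap before this mapping is big enough */
        addr = v[i].end;        /* retry just past this mapping */
    }
    return addr;                /* gap above the last mapping */
}

int main(void)
{
    const struct vma maps[] = { { 0x1000, 0x3000 }, { 0x5000, 0x9000 } };

    /* Hint 0x2000 overlaps the first mapping, so the gap at 0x3000 is used. */
    printf("0x%llx\n", (unsigned long long)
           find_unmapped_area(maps, 2, 0x2000, 0x2000));
    return 0;
}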