Lines matching refs:base (nouveau nv50 instmem)

Definitions:

  24  #define nv50_instmem(p) container_of((p), struct nv50_instmem, base)
  33          struct nvkm_instmem base;                                    (member)
  43  #define nv50_instobj(p) container_of((p), struct nv50_instobj, base.memory)
  46          struct nvkm_instobj base;                                    (member)
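
The two container_of() macros above are the standard embedded-base downcast used throughout nvkm: each derived type embeds its base object as a member, callers only ever hold the base pointer, and the macro recovers the containing structure from it. A minimal, runnable userspace sketch of the idiom follows; the struct contents are illustrative stand-ins, not the real nvkm definitions.

    #include <stddef.h>
    #include <stdio.h>

    /* Same definition the kernel uses, minus its type-checking extras. */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct nvkm_instmem { int placeholder; };   /* stand-in for the real base */

    struct nv50_instmem {
            struct nvkm_instmem base;           /* embedded base object */
            unsigned long long addr;            /* derived-type state */
    };

    #define nv50_instmem(p) container_of((p), struct nv50_instmem, base)

    int main(void)
    {
            struct nv50_instmem imem = { .addr = ~0ULL };
            struct nvkm_instmem *base = &imem.base; /* only the base escapes */

            /* Recover the containing nv50_instmem from the base pointer. */
            struct nv50_instmem *self = nv50_instmem(base);
            printf("round-trip ok: %d\n", self == &imem);
            return 0;
    }

Because the downcast is pure pointer arithmetic over offsetof(), the base member does not need to be the first field; nv50_instobj() above even reaches through a nested member (base.memory).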

In nv50_instobj_wr32_slow():

  60          struct nvkm_device *device = imem->base.subdev.device;
  61          u64 base = (nvkm_memory_addr(iobj->ram) + offset) & 0xffffff00000ULL;   (local)
  65          spin_lock_irqsave(&imem->base.lock, flags);
  66          if (unlikely(imem->addr != base)) {
  67                  nvkm_wr32(device, 0x001700, base >> 16);
  68                  imem->addr = base;
  71          spin_unlock_irqrestore(&imem->base.lock, flags);

In nv50_instobj_rd32_slow():

  79          struct nvkm_device *device = imem->base.subdev.device;
  80          u64 base = (nvkm_memory_addr(iobj->ram) + offset) & 0xffffff00000ULL;   (local)
  85          spin_lock_irqsave(&imem->base.lock, flags);
  86          if (unlikely(imem->addr != base)) {
  87                  nvkm_wr32(device, 0x001700, base >> 16);
  88                  imem->addr = base;
  91          spin_unlock_irqrestore(&imem->base.lock, flags);
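
Both slow paths have the same shape: round the target down to a 1 MiB-aligned window base (the & 0xffffff00000ULL mask), reprogram window register 0x001700 only when the cached imem->addr differs, and do it all under imem->base.lock with interrupts off. The sketch below models just that window-caching logic in plain C; the mock structure and counter are illustrative, not the driver's types.

    #include <stdint.h>
    #include <stdio.h>

    #define WINDOW_MASK 0xffffff00000ULL    /* 1 MiB-aligned window base */

    struct mock_imem {
            uint64_t addr;          /* cached window base (mirrors imem->addr) */
            unsigned switches;      /* how often the window was reprogrammed */
    };

    static void wr32_slow(struct mock_imem *imem, uint64_t target)
    {
            uint64_t base = target & WINDOW_MASK;

            /* Reprogram the window only when the 1 MiB region changes; the
             * driver does this under imem->base.lock with IRQs disabled,
             * writing base >> 16 to register 0x001700. */
            if (imem->addr != base) {
                    imem->addr = base;
                    imem->switches++;
            }
            /* ...the word is then accessed through the aperture at the
             * low 20 bits of the target address... */
    }

    int main(void)
    {
            /* ~0ULL guarantees the first access reprograms the window, the
             * same trick nv50_instmem_fini() uses to invalidate the cache. */
            struct mock_imem imem = { .addr = ~0ULL };

            wr32_slow(&imem, 0x10000004ULL);   /* switch #1 */
            wr32_slow(&imem, 0x10000008ULL);   /* same window, no switch */
            wr32_slow(&imem, 0x20000000ULL);   /* switch #2 */
            printf("window switches: %u\n", imem.switches);  /* prints 2 */
            return 0;
    }

Note how nv50_instmem_fini() (line 399 below) resets the cache to ~0ULL so the first access after teardown always reprograms the window.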

In nv50_instobj_kmap():

 124          struct nvkm_memory *memory = &iobj->base.memory;
 125          struct nvkm_subdev *subdev = &imem->base.subdev;
 136          mutex_unlock(&imem->base.mutex);
 141          mutex_lock(&imem->base.mutex);
 145                          nvkm_memory_addr(&eobj->base.memory),
 146                          nvkm_memory_size(&eobj->base.memory),
 154          mutex_unlock(&imem->base.mutex);
 163          mutex_lock(&imem->base.mutex);
 166          mutex_unlock(&imem->base.mutex);
 168          mutex_lock(&imem->base.mutex);
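
The alternating mutex_unlock()/mutex_lock() calls at lines 136-168 indicate that imem->base.mutex is dropped around blocking work (mapping the object into BAR2, possibly after evicting another object eobj) and retaken afterwards. Below is a sketch of that general pattern with pthreads standing in for the kernel mutex; the state flag and the revalidation step are illustrative.

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdio.h>

    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
    static bool mapped;     /* state guarded by 'lock' */

    /* Stand-in for a blocking call such as the BAR2 vmm get/map. */
    static int slow_map(void)
    {
            return 0;       /* pretend the mapping succeeded */
    }

    static void kmap_like(void)
    {
            pthread_mutex_lock(&lock);
            while (!mapped) {
                    /* Drop the lock across the blocking call so other
                     * objects are not stalled behind this attempt. */
                    pthread_mutex_unlock(&lock);
                    int ret = slow_map();
                    pthread_mutex_lock(&lock);

                    /* Revalidate after reacquiring: another thread may
                     * have changed the state while the lock was dropped. */
                    if (ret == 0)
                            mapped = true;
            }
            pthread_mutex_unlock(&lock);
    }

    int main(void)
    {
            kmap_like();
            printf("mapped: %d\n", mapped);  /* prints 1 */
            return 0;
    }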

In nv50_instobj_release():

 195          struct nvkm_subdev *subdev = &imem->base.subdev;
 200          if (refcount_dec_and_mutex_lock(&iobj->maps, &imem->base.mutex)) {
 210                  iobj->base.memory.ptrs = NULL;
 211          mutex_unlock(&imem->base.mutex);
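
Line 200 uses refcount_dec_and_mutex_lock(), which drops a reference and takes the mutex only if that reference was the last one, so final teardown (clearing memory.ptrs at line 210, then unlocking at 211) runs serialized while non-final releases stay off the lock. Here is a userspace approximation of those semantics using C11 atomics; this is a sketch of the behavior, not the kernel's implementation.

    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* Drop a reference; return true (with 'm' held) iff it was the last. */
    static bool dec_and_mutex_lock(atomic_int *r, pthread_mutex_t *m)
    {
            int old = atomic_load(r);
            while (old > 1) {       /* fast path: not the last reference */
                    if (atomic_compare_exchange_weak(r, &old, old - 1))
                            return false;
            }
            pthread_mutex_lock(m);  /* maybe last: serialize teardown */
            if (atomic_fetch_sub(r, 1) == 1)
                    return true;    /* caller tears down, then unlocks */
            pthread_mutex_unlock(m); /* raced with a concurrent reference */
            return false;
    }

    int main(void)
    {
            pthread_mutex_t m = PTHREAD_MUTEX_INITIALIZER;
            atomic_int maps = 2;

            printf("last? %d\n", dec_and_mutex_lock(&maps, &m)); /* 0 */
            if (dec_and_mutex_lock(&maps, &m)) {                 /* last */
                    /* ...tear down, e.g. clear the ptrs table... */
                    pthread_mutex_unlock(&m);
            }
            return 0;
    }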

In nv50_instobj_acquire():

 219          struct nvkm_instmem *imem = &iobj->imem->base;
 252                  iobj->base.memory.ptrs = &nv50_instobj_fast;
 254                  iobj->base.memory.ptrs = &nv50_instobj_slow;
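
Lines 252 and 254 select the object's accessor table at acquire time: nv50_instobj_fast when a BAR2 mapping allows direct pointer access, nv50_instobj_slow for the window-based path shown earlier. A compact sketch of such a per-object vtable switch; the types, table contents, and the acquire() signature are illustrative.

    #include <stdbool.h>
    #include <stdio.h>

    struct memory;
    struct memory_ptrs {                    /* accessor vtable */
            unsigned (*rd32)(struct memory *, unsigned offset);
    };

    struct memory {
            const struct memory_ptrs *ptrs; /* chosen at acquire() time */
            unsigned *map;                  /* non-NULL when mapped */
    };

    static unsigned rd32_fast(struct memory *m, unsigned off)
    { return m->map[off / 4]; }             /* direct load through mapping */

    static unsigned rd32_slow(struct memory *m, unsigned off)
    { (void)m; (void)off; return 0; }       /* window-based fallback */

    static const struct memory_ptrs fast = { .rd32 = rd32_fast };
    static const struct memory_ptrs slow = { .rd32 = rd32_slow };

    static void acquire(struct memory *m, bool bar2_mapped)
    {
            /* Same shape as lines 252/254: pick the vtable per object. */
            m->ptrs = bar2_mapped ? &fast : &slow;
    }

    int main(void)
    {
            unsigned backing[4] = { 0xdeadbeef };
            struct memory m = { .map = backing };

            acquire(&m, true);
            printf("0x%08x\n", m.ptrs->rd32(&m, 0));
            return 0;
    }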

In nv50_instobj_boot():

 268          struct nvkm_instmem *imem = &iobj->imem->base;

In nv50_instobj_bar2():

 301          if (nv50_instobj_acquire(&iobj->base.memory)) {
 305          nv50_instobj_release(&iobj->base.memory);

In nv50_instobj_dtor():

 319          struct nvkm_instmem *imem = &iobj->imem->base;
 338          nvkm_instobj_dtor(imem, &iobj->base);

In nv50_instobj_wrap():

 356  nv50_instobj_wrap(struct nvkm_instmem *base,                           (argument)
 359          struct nv50_instmem *imem = nv50_instmem(base);
 364          *pmemory = &iobj->base.memory;
 366          nvkm_instobj_ctor(&nv50_instobj_func, &imem->base, &iobj->base);
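
nv50_instobj_wrap() shows construction going the other way from the container_of() downcast: the derived object is allocated, nvkm_instobj_ctor() wires the function table into the embedded base (line 366), and only the base pointer is published to the caller (line 364). A minimal sketch of that pattern with illustrative stand-in types.

    #include <stdio.h>
    #include <stdlib.h>

    struct instobj { const void *func; };   /* base: holds the vtable */

    struct nv50_instobj {
            struct instobj base;            /* kept first so free(obj) below
                                               on the base pointer is valid */
            void *ram;                      /* derived-only state */
    };

    /* Base-class constructor: the derived ctor passes its vtable down. */
    static void instobj_ctor(const void *func, struct instobj *iobj)
    {
            iobj->func = func;
    }

    static const int nv50_instobj_func = 0; /* stand-in vtable */

    /* Allocate the derived object but publish only the base pointer, as
     * nv50_instobj_wrap() does with *pmemory = &iobj->base.memory. */
    static int nv50_instobj_new(struct instobj **pobj)
    {
            struct nv50_instobj *iobj = calloc(1, sizeof(*iobj));
            if (!iobj)
                    return -1;
            instobj_ctor(&nv50_instobj_func, &iobj->base);
            *pobj = &iobj->base;
            return 0;
    }

    int main(void)
    {
            struct instobj *obj;
            if (nv50_instobj_new(&obj) == 0) {
                    printf("vtable wired: %d\n",
                           obj->func == &nv50_instobj_func);
                    free(obj);
            }
            return 0;
    }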

In nv50_instmem_fini():

 397  nv50_instmem_fini(struct nvkm_instmem *base)                           (argument)
 399          nv50_instmem(base)->addr = ~0ULL;

In nv50_instmem_new():

 418          nvkm_instmem_ctor(&nv50_instmem, device, type, inst, &imem->base);
 420          *pimem = &imem->base;