Lines matching refs:bo in drivers/gpu/drm/radeon/radeon_object.c
(each entry: source line number, the matching code, and the enclosing function)

45 static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
54 struct radeon_bo *bo; in radeon_ttm_bo_destroy() local
56 bo = container_of(tbo, struct radeon_bo, tbo); in radeon_ttm_bo_destroy()
58 mutex_lock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
59 list_del_init(&bo->list); in radeon_ttm_bo_destroy()
60 mutex_unlock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
61 radeon_bo_clear_surface_reg(bo); in radeon_ttm_bo_destroy()
62 WARN_ON_ONCE(!list_empty(&bo->va)); in radeon_ttm_bo_destroy()
63 if (bo->tbo.base.import_attach) in radeon_ttm_bo_destroy()
64 drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg); in radeon_ttm_bo_destroy()
65 drm_gem_object_release(&bo->tbo.base); in radeon_ttm_bo_destroy()
66 kfree(bo); in radeon_ttm_bo_destroy()
69 bool radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo) in radeon_ttm_bo_is_radeon_bo() argument
71 if (bo->destroy == &radeon_ttm_bo_destroy) in radeon_ttm_bo_is_radeon_bo()
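TTM buffer objects carry no type tag, so the driver recognizes its own BOs by comparing the destroy callback, and only then recovers the wrapper with container_of() (as the destroy path above does). A sketch of the complete helper body, consistent with the fragment above:

    bool radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo)
    {
            /* A radeon BO is identified purely by its destroy callback. */
            if (bo->destroy == &radeon_ttm_bo_destroy)
                    return true;
            return false;
    }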
136 struct radeon_bo *bo; in radeon_bo_create() local
152 bo = kzalloc(sizeof(struct radeon_bo), GFP_KERNEL); in radeon_bo_create()
153 if (bo == NULL) in radeon_bo_create()
155 drm_gem_private_object_init(rdev->ddev, &bo->tbo.base, size); in radeon_bo_create()
156 bo->rdev = rdev; in radeon_bo_create()
157 bo->surface_reg = -1; in radeon_bo_create()
158 INIT_LIST_HEAD(&bo->list); in radeon_bo_create()
159 INIT_LIST_HEAD(&bo->va); in radeon_bo_create()
160 bo->initial_domain = domain & (RADEON_GEM_DOMAIN_VRAM | in radeon_bo_create()
164 bo->flags = flags; in radeon_bo_create()
167 bo->flags &= ~(RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC); in radeon_bo_create()
173 bo->flags &= ~(RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC); in radeon_bo_create()
179 bo->flags &= ~(RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC); in radeon_bo_create()
190 if (bo->flags & RADEON_GEM_GTT_WC) in radeon_bo_create()
193 bo->flags &= ~(RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC); in radeon_bo_create()
199 bo->flags &= ~RADEON_GEM_GTT_WC; in radeon_bo_create()
202 radeon_ttm_placement_from_domain(bo, domain); in radeon_bo_create()
205 r = ttm_bo_init_validate(&rdev->mman.bdev, &bo->tbo, type, in radeon_bo_create()
206 &bo->placement, page_align, !kernel, sg, resv, in radeon_bo_create()
212 *bo_ptr = bo; in radeon_bo_create()
214 trace_radeon_bo_create(bo); in radeon_bo_create()
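radeon_bo_create() bundles allocation, GEM object init, flag sanitizing (the repeated WC/UC clearing above), placement setup, and ttm_bo_init_validate(). A minimal usage sketch, assuming a valid struct radeon_device *rdev; the helper name, size, and domain are illustrative, not from the source:

    static int example_create_vram_bo(struct radeon_device *rdev,
                                      struct radeon_bo **out)
    {
            int r;

            /* One page of VRAM, default flags, kernel-internal BO. */
            r = radeon_bo_create(rdev, PAGE_SIZE, PAGE_SIZE, true,
                                 RADEON_GEM_DOMAIN_VRAM, 0, NULL, NULL, out);
            return r;
    }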
219 int radeon_bo_kmap(struct radeon_bo *bo, void **ptr) in radeon_bo_kmap() argument
224 r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_KERNEL, in radeon_bo_kmap()
229 if (bo->kptr) { in radeon_bo_kmap()
231 *ptr = bo->kptr; in radeon_bo_kmap()
235 r = ttm_bo_kmap(&bo->tbo, 0, PFN_UP(bo->tbo.base.size), &bo->kmap); in radeon_bo_kmap()
239 bo->kptr = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem); in radeon_bo_kmap()
241 *ptr = bo->kptr; in radeon_bo_kmap()
243 radeon_bo_check_tiling(bo, 0, 0); in radeon_bo_kmap()
247 void radeon_bo_kunmap(struct radeon_bo *bo) in radeon_bo_kunmap() argument
249 if (bo->kptr == NULL) in radeon_bo_kunmap()
251 bo->kptr = NULL; in radeon_bo_kunmap()
252 radeon_bo_check_tiling(bo, 0, 0); in radeon_bo_kunmap()
253 ttm_bo_kunmap(&bo->kmap); in radeon_bo_kunmap()
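radeon_bo_kmap() first waits on the reservation object's kernel fences, then caches the mapping in bo->kptr, so repeated calls return the same pointer; radeon_bo_kunmap() tears the mapping down. Callers typically hold the BO reservation around the pair. A usage sketch; the helper name is hypothetical:

    static int example_clear_bo(struct radeon_bo *bo)
    {
            void *ptr;
            int r;

            r = radeon_bo_reserve(bo, false);
            if (r)
                    return r;
            r = radeon_bo_kmap(bo, &ptr);   /* repeat calls return the cached bo->kptr */
            if (!r) {
                    memset(ptr, 0, radeon_bo_size(bo));
                    radeon_bo_kunmap(bo);
            }
            radeon_bo_unreserve(bo);
            return r;
    }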
256 struct radeon_bo *radeon_bo_ref(struct radeon_bo *bo) in radeon_bo_ref() argument
258 if (bo == NULL) in radeon_bo_ref()
261 ttm_bo_get(&bo->tbo); in radeon_bo_ref()
262 return bo; in radeon_bo_ref()
265 void radeon_bo_unref(struct radeon_bo **bo) in radeon_bo_unref() argument
269 if ((*bo) == NULL) in radeon_bo_unref()
271 tbo = &((*bo)->tbo); in radeon_bo_unref()
273 *bo = NULL; in radeon_bo_unref()
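radeon_bo_ref()/radeon_bo_unref() are thin wrappers over the TTM reference count; unref takes a pointer-to-pointer and clears the caller's pointer, which is why the fragment above ends with *bo = NULL. A sketch with a hypothetical helper:

    static void example_take_and_drop(struct radeon_bo *bo)
    {
            struct radeon_bo *ref;

            ref = radeon_bo_ref(bo);        /* ttm_bo_get() on &bo->tbo; NULL-safe */
            /* ... use ref ... */
            radeon_bo_unref(&ref);          /* drops the reference and sets ref = NULL */
    }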
276 int radeon_bo_pin_restricted(struct radeon_bo *bo, u32 domain, u64 max_offset, in radeon_bo_pin_restricted() argument
282 if (radeon_ttm_tt_has_userptr(bo->rdev, bo->tbo.ttm)) in radeon_bo_pin_restricted()
285 if (bo->tbo.pin_count) { in radeon_bo_pin_restricted()
286 ttm_bo_pin(&bo->tbo); in radeon_bo_pin_restricted()
288 *gpu_addr = radeon_bo_gpu_offset(bo); in radeon_bo_pin_restricted()
294 domain_start = bo->rdev->mc.vram_start; in radeon_bo_pin_restricted()
296 domain_start = bo->rdev->mc.gtt_start; in radeon_bo_pin_restricted()
298 (radeon_bo_gpu_offset(bo) - domain_start)); in radeon_bo_pin_restricted()
303 if (bo->prime_shared_count && domain == RADEON_GEM_DOMAIN_VRAM) { in radeon_bo_pin_restricted()
308 radeon_ttm_placement_from_domain(bo, domain); in radeon_bo_pin_restricted()
309 for (i = 0; i < bo->placement.num_placement; i++) { in radeon_bo_pin_restricted()
311 if ((bo->placements[i].mem_type == TTM_PL_VRAM) && in radeon_bo_pin_restricted()
312 !(bo->flags & RADEON_GEM_NO_CPU_ACCESS) && in radeon_bo_pin_restricted()
313 (!max_offset || max_offset > bo->rdev->mc.visible_vram_size)) in radeon_bo_pin_restricted()
314 bo->placements[i].lpfn = in radeon_bo_pin_restricted()
315 bo->rdev->mc.visible_vram_size >> PAGE_SHIFT; in radeon_bo_pin_restricted()
317 bo->placements[i].lpfn = max_offset >> PAGE_SHIFT; in radeon_bo_pin_restricted()
320 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in radeon_bo_pin_restricted()
322 ttm_bo_pin(&bo->tbo); in radeon_bo_pin_restricted()
324 *gpu_addr = radeon_bo_gpu_offset(bo); in radeon_bo_pin_restricted()
326 bo->rdev->vram_pin_size += radeon_bo_size(bo); in radeon_bo_pin_restricted()
328 bo->rdev->gart_pin_size += radeon_bo_size(bo); in radeon_bo_pin_restricted()
330 dev_err(bo->rdev->dev, "%p pin failed\n", bo); in radeon_bo_pin_restricted()
335 int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr) in radeon_bo_pin() argument
337 return radeon_bo_pin_restricted(bo, domain, 0, gpu_addr); in radeon_bo_pin()
340 void radeon_bo_unpin(struct radeon_bo *bo) in radeon_bo_unpin() argument
342 ttm_bo_unpin(&bo->tbo); in radeon_bo_unpin()
343 if (!bo->tbo.pin_count) { in radeon_bo_unpin()
344 if (bo->tbo.resource->mem_type == TTM_PL_VRAM) in radeon_bo_unpin()
345 bo->rdev->vram_pin_size -= radeon_bo_size(bo); in radeon_bo_unpin()
347 bo->rdev->gart_pin_size -= radeon_bo_size(bo); in radeon_bo_unpin()
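Pinning validates the BO into the requested domain (optionally below max_offset, e.g. inside the CPU-visible VRAM aperture) and raises the TTM pin count; the vram_pin_size/gart_pin_size accounting above is kept in sync on both the pin and unpin paths. The caller must hold the reservation across both. A sketch of the usual pairing; helper name and domain are illustrative:

    static int example_pin_for_gpu(struct radeon_bo *bo, u64 *gpu_addr)
    {
            int r;

            r = radeon_bo_reserve(bo, false);
            if (r)
                    return r;
            r = radeon_bo_pin(bo, RADEON_GEM_DOMAIN_VRAM, gpu_addr);
            radeon_bo_unreserve(bo);
            return r;
    }

    /* later, under a fresh reservation: radeon_bo_unpin(bo); */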
372 struct radeon_bo *bo, *n; in radeon_bo_force_delete() local
378 list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) { in radeon_bo_force_delete()
380 &bo->tbo.base, bo, (unsigned long)bo->tbo.base.size, in radeon_bo_force_delete()
381 *((unsigned long *)&bo->tbo.base.refcount)); in radeon_bo_force_delete()
382 mutex_lock(&bo->rdev->gem.mutex); in radeon_bo_force_delete()
383 list_del_init(&bo->list); in radeon_bo_force_delete()
384 mutex_unlock(&bo->rdev->gem.mutex); in radeon_bo_force_delete()
386 drm_gem_object_put(&bo->tbo.base); in radeon_bo_force_delete()
488 struct radeon_bo *bo = lobj->robj; in radeon_bo_list_validate() local
489 if (!bo->tbo.pin_count) { in radeon_bo_list_validate()
493 radeon_mem_type_to_domain(bo->tbo.resource->mem_type); in radeon_bo_list_validate()
511 radeon_ttm_placement_from_domain(bo, domain); in radeon_bo_list_validate()
513 radeon_uvd_force_into_uvd_segment(bo, allowed); in radeon_bo_list_validate()
516 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in radeon_bo_list_validate()
530 lobj->gpu_offset = radeon_bo_gpu_offset(bo); in radeon_bo_list_validate()
531 lobj->tiling_flags = bo->tiling_flags; in radeon_bo_list_validate()
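The command-submission validate loop above skips already-pinned BOs, derives a preferred domain from the BO's current memory type, and calls ttm_bo_validate() until placement succeeds. The core call pattern, sketched with an assumed interruptible TTM context and a hypothetical helper:

    static int example_validate(struct radeon_bo *bo, u32 domain)
    {
            struct ttm_operation_ctx ctx = { true, false }; /* interruptible, wait for GPU */
            int r;

            radeon_ttm_placement_from_domain(bo, domain);
            r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
            /* the real loop retries with less-preferred allowed domains on failure */
            return r;
    }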
542 int radeon_bo_get_surface_reg(struct radeon_bo *bo) in radeon_bo_get_surface_reg() argument
544 struct radeon_device *rdev = bo->rdev; in radeon_bo_get_surface_reg()
550 dma_resv_assert_held(bo->tbo.base.resv); in radeon_bo_get_surface_reg()
552 if (!bo->tiling_flags) in radeon_bo_get_surface_reg()
555 if (bo->surface_reg >= 0) { in radeon_bo_get_surface_reg()
556 i = bo->surface_reg; in radeon_bo_get_surface_reg()
564 if (!reg->bo) in radeon_bo_get_surface_reg()
567 old_object = reg->bo; in radeon_bo_get_surface_reg()
578 old_object = reg->bo; in radeon_bo_get_surface_reg()
586 bo->surface_reg = i; in radeon_bo_get_surface_reg()
587 reg->bo = bo; in radeon_bo_get_surface_reg()
590 radeon_set_surface_reg(rdev, i, bo->tiling_flags, bo->pitch, in radeon_bo_get_surface_reg()
591 bo->tbo.resource->start << PAGE_SHIFT, in radeon_bo_get_surface_reg()
592 bo->tbo.base.size); in radeon_bo_get_surface_reg()
596 static void radeon_bo_clear_surface_reg(struct radeon_bo *bo) in radeon_bo_clear_surface_reg() argument
598 struct radeon_device *rdev = bo->rdev; in radeon_bo_clear_surface_reg()
601 if (bo->surface_reg == -1) in radeon_bo_clear_surface_reg()
604 reg = &rdev->surface_regs[bo->surface_reg]; in radeon_bo_clear_surface_reg()
605 radeon_clear_surface_reg(rdev, bo->surface_reg); in radeon_bo_clear_surface_reg()
607 reg->bo = NULL; in radeon_bo_clear_surface_reg()
608 bo->surface_reg = -1; in radeon_bo_clear_surface_reg()
611 int radeon_bo_set_tiling_flags(struct radeon_bo *bo, in radeon_bo_set_tiling_flags() argument
614 struct radeon_device *rdev = bo->rdev; in radeon_bo_set_tiling_flags()
662 r = radeon_bo_reserve(bo, false); in radeon_bo_set_tiling_flags()
665 bo->tiling_flags = tiling_flags; in radeon_bo_set_tiling_flags()
666 bo->pitch = pitch; in radeon_bo_set_tiling_flags()
667 radeon_bo_unreserve(bo); in radeon_bo_set_tiling_flags()
671 void radeon_bo_get_tiling_flags(struct radeon_bo *bo, in radeon_bo_get_tiling_flags() argument
675 dma_resv_assert_held(bo->tbo.base.resv); in radeon_bo_get_tiling_flags()
678 *tiling_flags = bo->tiling_flags; in radeon_bo_get_tiling_flags()
680 *pitch = bo->pitch; in radeon_bo_get_tiling_flags()
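Note the locking asymmetry visible above: radeon_bo_set_tiling_flags() takes and drops the reservation itself, while radeon_bo_get_tiling_flags() only asserts that the caller already holds it. A sketch; the helper name and pitch value are illustrative:

    static int example_tiling(struct radeon_bo *bo)
    {
            uint32_t tiling_flags, pitch;
            int r;

            r = radeon_bo_set_tiling_flags(bo, RADEON_TILING_MACRO, 256);
            if (r)
                    return r;

            r = radeon_bo_reserve(bo, false);       /* the getter asserts resv held */
            if (r)
                    return r;
            radeon_bo_get_tiling_flags(bo, &tiling_flags, &pitch);
            radeon_bo_unreserve(bo);
            return 0;
    }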
683 int radeon_bo_check_tiling(struct radeon_bo *bo, bool has_moved, in radeon_bo_check_tiling() argument
687 dma_resv_assert_held(bo->tbo.base.resv); in radeon_bo_check_tiling()
689 if (!(bo->tiling_flags & RADEON_TILING_SURFACE)) in radeon_bo_check_tiling()
693 radeon_bo_clear_surface_reg(bo); in radeon_bo_check_tiling()
697 if (bo->tbo.resource->mem_type != TTM_PL_VRAM) { in radeon_bo_check_tiling()
701 if (bo->surface_reg >= 0) in radeon_bo_check_tiling()
702 radeon_bo_clear_surface_reg(bo); in radeon_bo_check_tiling()
706 if ((bo->surface_reg >= 0) && !has_moved) in radeon_bo_check_tiling()
709 return radeon_bo_get_surface_reg(bo); in radeon_bo_check_tiling()
712 void radeon_bo_move_notify(struct ttm_buffer_object *bo) in radeon_bo_move_notify() argument
716 if (!radeon_ttm_bo_is_radeon_bo(bo)) in radeon_bo_move_notify()
719 rbo = container_of(bo, struct radeon_bo, tbo); in radeon_bo_move_notify()
724 vm_fault_t radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo) in radeon_bo_fault_reserve_notify() argument
732 if (!radeon_ttm_bo_is_radeon_bo(bo)) in radeon_bo_fault_reserve_notify()
734 rbo = container_of(bo, struct radeon_bo, tbo); in radeon_bo_fault_reserve_notify()
737 if (bo->resource->mem_type != TTM_PL_VRAM) in radeon_bo_fault_reserve_notify()
740 size = bo->resource->size; in radeon_bo_fault_reserve_notify()
741 offset = bo->resource->start << PAGE_SHIFT; in radeon_bo_fault_reserve_notify()
758 r = ttm_bo_validate(bo, &rbo->placement, &ctx); in radeon_bo_fault_reserve_notify()
761 r = ttm_bo_validate(bo, &rbo->placement, &ctx); in radeon_bo_fault_reserve_notify()
763 offset = bo->resource->start << PAGE_SHIFT; in radeon_bo_fault_reserve_notify()
774 ttm_bo_move_to_lru_tail_unlocked(bo); in radeon_bo_fault_reserve_notify()
786 void radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence, in radeon_bo_fence() argument
789 struct dma_resv *resv = bo->tbo.base.resv; in radeon_bo_fence()
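radeon_bo_fence() attaches a fence to the BO's reservation object so later waiters (eviction, CPU access) can synchronize against in-flight GPU work. A sketch of the rest of the body, consistent with recent mainline where the shared flag maps onto the READ vs. WRITE dma_resv usage:

    void radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence,
                         bool shared)
    {
            struct dma_resv *resv = bo->tbo.base.resv;

            /* shared readers vs. an exclusive writer, in dma_resv terms */
            dma_resv_add_fence(resv, &fence->base,
                               shared ? DMA_RESV_USAGE_READ :
                                        DMA_RESV_USAGE_WRITE);
    }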