Lines Matching refs:obj

57 bool i915_gem_object_has_cache_level(const struct drm_i915_gem_object *obj,  in i915_gem_object_has_cache_level()  argument
65 if (obj->pat_set_by_user) in i915_gem_object_has_cache_level()
72 return obj->pat_index == i915_gem_get_pat_index(obj_to_i915(obj), lvl); in i915_gem_object_has_cache_level()
77 struct drm_i915_gem_object *obj; in i915_gem_object_alloc() local
79 obj = kmem_cache_zalloc(slab_objects, GFP_KERNEL); in i915_gem_object_alloc()
80 if (!obj) in i915_gem_object_alloc()
82 obj->base.funcs = &i915_gem_object_funcs; in i915_gem_object_alloc()
84 return obj; in i915_gem_object_alloc()
87 void i915_gem_object_free(struct drm_i915_gem_object *obj) in i915_gem_object_free() argument
89 return kmem_cache_free(slab_objects, obj); in i915_gem_object_free()
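
i915_gem_object_alloc() and i915_gem_object_free() above draw GEM objects from a driver-private slab cache (slab_objects) rather than plain kmalloc, so frequent alloc/free cycles reuse type-sized, cache-hot slots. Below is a minimal sketch of the same pattern; all names here (my_obj, my_cache) are illustrative, not part of the i915 code.

    /*
     * Sketch of a driver-private slab cache, mirroring the slab_objects
     * usage above. Names are illustrative.
     */
    #include <linux/errno.h>
    #include <linux/slab.h>

    struct my_obj {
            int payload;
    };

    static struct kmem_cache *my_cache;

    static int my_cache_init(void)
    {
            /* SLAB_HWCACHE_ALIGN keeps each object cacheline-aligned. */
            my_cache = kmem_cache_create("my_objs", sizeof(struct my_obj),
                                         0, SLAB_HWCACHE_ALIGN, NULL);
            return my_cache ? 0 : -ENOMEM;
    }

    static struct my_obj *my_obj_alloc(void)
    {
            /* zalloc: the object arrives zeroed, as in i915_gem_object_alloc(). */
            return kmem_cache_zalloc(my_cache, GFP_KERNEL);
    }

    static void my_obj_free(struct my_obj *obj)
    {
            kmem_cache_free(my_cache, obj);
    }
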
92 void i915_gem_object_init(struct drm_i915_gem_object *obj, in i915_gem_object_init() argument
100 BUILD_BUG_ON(offsetof(typeof(*obj), base) != in i915_gem_object_init()
101 offsetof(typeof(*obj), __do_not_access.base)); in i915_gem_object_init()
103 spin_lock_init(&obj->vma.lock); in i915_gem_object_init()
104 INIT_LIST_HEAD(&obj->vma.list); in i915_gem_object_init()
106 INIT_LIST_HEAD(&obj->mm.link); in i915_gem_object_init()
108 INIT_LIST_HEAD(&obj->lut_list); in i915_gem_object_init()
109 spin_lock_init(&obj->lut_lock); in i915_gem_object_init()
111 spin_lock_init(&obj->mmo.lock); in i915_gem_object_init()
112 obj->mmo.offsets = RB_ROOT; in i915_gem_object_init()
114 init_rcu_head(&obj->rcu); in i915_gem_object_init()
116 obj->ops = ops; in i915_gem_object_init()
118 obj->flags = flags; in i915_gem_object_init()
120 obj->mm.madv = I915_MADV_WILLNEED; in i915_gem_object_init()
121 INIT_RADIX_TREE(&obj->mm.get_page.radix, GFP_KERNEL | __GFP_NOWARN); in i915_gem_object_init()
122 mutex_init(&obj->mm.get_page.lock); in i915_gem_object_init()
123 INIT_RADIX_TREE(&obj->mm.get_dma_page.radix, GFP_KERNEL | __GFP_NOWARN); in i915_gem_object_init()
124 mutex_init(&obj->mm.get_dma_page.lock); in i915_gem_object_init()
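
The BUILD_BUG_ON at lines 100-101 above is a compile-time layout assertion: the build fails if the embedded base ever moves relative to the __do_not_access alias, so casts between the two views stay valid. A minimal sketch of the idiom, with illustrative struct names:

    /* Compile-time layout assertion, as at lines 100-101 above. */
    #include <linux/build_bug.h>
    #include <linux/stddef.h>

    struct base_part {
            int refcount;
    };

    struct wrapper {
            struct base_part base;  /* must stay the first member */
            int extra;
    };

    static inline void layout_check(void)
    {
            /* The build fails if someone reorders the members. */
            BUILD_BUG_ON(offsetof(struct wrapper, base) != 0);
    }
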
136 void __i915_gem_object_fini(struct drm_i915_gem_object *obj) in __i915_gem_object_fini() argument
138 mutex_destroy(&obj->mm.get_page.lock); in __i915_gem_object_fini()
139 mutex_destroy(&obj->mm.get_dma_page.lock); in __i915_gem_object_fini()
140 dma_resv_fini(&obj->base._resv); in __i915_gem_object_fini()
149 void i915_gem_object_set_cache_coherency(struct drm_i915_gem_object *obj, in i915_gem_object_set_cache_coherency() argument
152 struct drm_i915_private *i915 = to_i915(obj->base.dev); in i915_gem_object_set_cache_coherency()
154 obj->pat_index = i915_gem_get_pat_index(i915, cache_level); in i915_gem_object_set_cache_coherency()
157 obj->cache_coherent = (I915_BO_CACHE_COHERENT_FOR_READ | in i915_gem_object_set_cache_coherency()
160 obj->cache_coherent = I915_BO_CACHE_COHERENT_FOR_READ; in i915_gem_object_set_cache_coherency()
162 obj->cache_coherent = 0; in i915_gem_object_set_cache_coherency()
164 obj->cache_dirty = in i915_gem_object_set_cache_coherency()
165 !(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE) && in i915_gem_object_set_cache_coherency()
177 void i915_gem_object_set_pat_index(struct drm_i915_gem_object *obj, in i915_gem_object_set_pat_index() argument
180 struct drm_i915_private *i915 = to_i915(obj->base.dev); in i915_gem_object_set_pat_index()
182 if (obj->pat_index == pat_index) in i915_gem_object_set_pat_index()
185 obj->pat_index = pat_index; in i915_gem_object_set_pat_index()
188 obj->cache_coherent = (I915_BO_CACHE_COHERENT_FOR_READ | in i915_gem_object_set_pat_index()
191 obj->cache_coherent = I915_BO_CACHE_COHERENT_FOR_READ; in i915_gem_object_set_pat_index()
193 obj->cache_coherent = 0; in i915_gem_object_set_pat_index()
195 obj->cache_dirty = in i915_gem_object_set_pat_index()
196 !(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE) && in i915_gem_object_set_pat_index()
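
i915_gem_object_set_cache_coherency() and i915_gem_object_set_pat_index() end with the same flag derivation: pick the coherency bits for the new cache level or PAT index, then record that a flush may be needed when GPU writes are not snooped. A hedged sketch of that shared tail follows; the branch predicates and the second operand of the final && are elided in this listing, so they appear here only as parameters and a comment.

    /*
     * Sketch of the coherency-flag tail shared by the two setters above.
     * The real branch conditions and the trailing operand of the final
     * && are not shown in this listing; they are parameterized/elided.
     */
    static void sketch_update_coherency(struct drm_i915_gem_object *obj,
                                        bool coherent_for_write,
                                        bool coherent_for_read)
    {
            if (coherent_for_write)
                    obj->cache_coherent = I915_BO_CACHE_COHERENT_FOR_READ |
                                          I915_BO_CACHE_COHERENT_FOR_WRITE;
            else if (coherent_for_read)
                    obj->cache_coherent = I915_BO_CACHE_COHERENT_FOR_READ;
            else
                    obj->cache_coherent = 0;

            /* Non-snooped GPU writes can leave stale CPU cachelines. */
            obj->cache_dirty =
                    !(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE)
                    /* && <second condition elided in this listing> */;
    }
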
200 bool i915_gem_object_can_bypass_llc(struct drm_i915_gem_object *obj) in i915_gem_object_can_bypass_llc() argument
202 struct drm_i915_private *i915 = to_i915(obj->base.dev); in i915_gem_object_can_bypass_llc()
208 if (!(obj->flags & I915_BO_ALLOC_USER)) in i915_gem_object_can_bypass_llc()
214 if (obj->pat_set_by_user) in i915_gem_object_can_bypass_llc()
234 struct drm_i915_gem_object *obj = to_intel_bo(gem); in i915_gem_close_object() local
241 spin_lock(&obj->lut_lock); in i915_gem_close_object()
242 list_for_each_entry_safe(lut, ln, &obj->lut_list, obj_link) { in i915_gem_close_object()
251 if (&ln->obj_link != &obj->lut_list) { in i915_gem_close_object()
253 if (cond_resched_lock(&obj->lut_lock)) in i915_gem_close_object()
258 spin_unlock(&obj->lut_lock); in i915_gem_close_object()
260 spin_lock(&obj->mmo.lock); in i915_gem_close_object()
261 rbtree_postorder_for_each_entry_safe(mmo, mn, &obj->mmo.offsets, offset) in i915_gem_close_object()
263 spin_unlock(&obj->mmo.lock); in i915_gem_close_object()
277 GEM_BUG_ON(vma->obj != obj); in i915_gem_close_object()
285 i915_gem_object_put(obj); in i915_gem_close_object()
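
i915_gem_close_object() drains obj->lut_list under a spinlock, but the list can be long, so it uses cond_resched_lock() (lines 251-253) to drop and retake the lock when a reschedule is due. Below is a simplified sketch of that pattern, reduced to a pop-from-head loop so the walk stays valid across lock drops; types and names are illustrative and this is not the exact control flow of the function above.

    /* Sketch of a cond_resched_lock() drain over a long list. */
    #include <linux/list.h>
    #include <linux/sched.h>
    #include <linux/slab.h>
    #include <linux/spinlock.h>

    struct entry {
            struct list_head link;
    };

    static void drain_list(struct list_head *head, spinlock_t *lock)
    {
            struct entry *e;

            spin_lock(lock);
            while ((e = list_first_entry_or_null(head, struct entry, link))) {
                    list_del(&e->link);
                    kfree(e);       /* kfree() is safe in atomic context */

                    /* Yield if a reschedule is due; always popping the
                     * current head keeps the loop valid even if the lock
                     * was dropped and the list changed meanwhile. */
                    cond_resched_lock(lock);
            }
            spin_unlock(lock);
    }
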
291 struct drm_i915_gem_object *obj = in __i915_gem_free_object_rcu() local
292 container_of(head, typeof(*obj), rcu); in __i915_gem_free_object_rcu()
293 struct drm_i915_private *i915 = to_i915(obj->base.dev); in __i915_gem_free_object_rcu()
295 i915_gem_object_free(obj); in __i915_gem_free_object_rcu()
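
__i915_gem_free_object_rcu() is a call_rcu() callback: it receives the rcu_head embedded in the object and recovers the object itself with container_of(). A minimal sketch of that pairing, with illustrative names:

    /* Sketch of the call_rcu()/container_of() free pattern above. */
    #include <linux/rcupdate.h>
    #include <linux/slab.h>

    struct my_obj {
            struct rcu_head rcu;    /* embedded, like obj->rcu above */
            int payload;
    };

    static void my_obj_free_rcu(struct rcu_head *head)
    {
            /* Recover the containing object from the embedded rcu_head. */
            struct my_obj *obj = container_of(head, struct my_obj, rcu);

            kfree(obj);
    }

    static void my_obj_put_last(struct my_obj *obj)
    {
            /* Readers under rcu_read_lock() may still hold a pointer;
             * defer the free until a grace period has elapsed. */
            call_rcu(&obj->rcu, my_obj_free_rcu);
    }
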
301 static void __i915_gem_object_free_mmaps(struct drm_i915_gem_object *obj) in __i915_gem_object_free_mmaps() argument
305 if (obj->userfault_count && !IS_DGFX(to_i915(obj->base.dev))) in __i915_gem_object_free_mmaps()
306 i915_gem_object_release_mmap_gtt(obj); in __i915_gem_object_free_mmaps()
308 if (!RB_EMPTY_ROOT(&obj->mmo.offsets)) { in __i915_gem_object_free_mmaps()
311 i915_gem_object_release_mmap_offset(obj); in __i915_gem_object_free_mmaps()
314 &obj->mmo.offsets, in __i915_gem_object_free_mmaps()
316 drm_vma_offset_remove(obj->base.dev->vma_offset_manager, in __i915_gem_object_free_mmaps()
320 obj->mmo.offsets = RB_ROOT; in __i915_gem_object_free_mmaps()
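
Both the close path (line 261) and __i915_gem_object_free_mmaps() walk obj->mmo.offsets with rbtree_postorder_for_each_entry_safe(), which visits children before their parent, so each node can be freed as it is visited without rebalancing; the root is then reset to RB_ROOT. A sketch with illustrative names:

    /* Sketch of the post-order rbtree teardown used above. */
    #include <linux/rbtree.h>
    #include <linux/slab.h>

    struct mmo {
            struct rb_node offset;  /* keyed node, as in the mmo rbtree */
    };

    static void free_all_mmos(struct rb_root *root)
    {
            struct mmo *m, *n;

            /* Post-order: both subtrees are visited before the node,
             * so freeing in place never touches a freed parent. */
            rbtree_postorder_for_each_entry_safe(m, n, root, offset)
                    kfree(m);

            *root = RB_ROOT;        /* tree is now empty */
    }
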
333 void __i915_gem_object_pages_fini(struct drm_i915_gem_object *obj) in __i915_gem_object_pages_fini() argument
335 assert_object_held_shared(obj); in __i915_gem_object_pages_fini()
337 if (!list_empty(&obj->vma.list)) { in __i915_gem_object_pages_fini()
340 spin_lock(&obj->vma.lock); in __i915_gem_object_pages_fini()
341 while ((vma = list_first_entry_or_null(&obj->vma.list, in __i915_gem_object_pages_fini()
344 GEM_BUG_ON(vma->obj != obj); in __i915_gem_object_pages_fini()
345 spin_unlock(&obj->vma.lock); in __i915_gem_object_pages_fini()
349 spin_lock(&obj->vma.lock); in __i915_gem_object_pages_fini()
351 spin_unlock(&obj->vma.lock); in __i915_gem_object_pages_fini()
354 __i915_gem_object_free_mmaps(obj); in __i915_gem_object_pages_fini()
356 atomic_set(&obj->mm.pages_pin_count, 0); in __i915_gem_object_pages_fini()
364 if (obj->base.import_attach) in __i915_gem_object_pages_fini()
365 i915_gem_object_lock(obj, NULL); in __i915_gem_object_pages_fini()
367 __i915_gem_object_put_pages(obj); in __i915_gem_object_pages_fini()
369 if (obj->base.import_attach) in __i915_gem_object_pages_fini()
370 i915_gem_object_unlock(obj); in __i915_gem_object_pages_fini()
372 GEM_BUG_ON(i915_gem_object_has_pages(obj)); in __i915_gem_object_pages_fini()
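
__i915_gem_object_pages_fini() pops each VMA off obj->vma.list under the spinlock but releases the lock around the actual destruction (lines 341-349), since tearing a VMA down can sleep. A sketch of that pop-then-unlock loop, with illustrative names:

    /* Sketch of the pop-then-unlock teardown used on obj->vma.list
     * above: detach under the lock, run the (possibly sleeping)
     * teardown outside it. */
    #include <linux/list.h>
    #include <linux/slab.h>
    #include <linux/spinlock.h>

    struct entry {
            struct list_head link;
    };

    static void teardown_entry(struct entry *e)
    {
            kfree(e);       /* stands in for work that may sleep */
    }

    static void teardown_all(struct list_head *head, spinlock_t *lock)
    {
            struct entry *e;

            spin_lock(lock);
            while ((e = list_first_entry_or_null(head, struct entry, link))) {
                    list_del_init(&e->link);
                    spin_unlock(lock);

                    teardown_entry(e);      /* must not hold the spinlock */

                    spin_lock(lock);
            }
            spin_unlock(lock);
    }
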
375 void __i915_gem_free_object(struct drm_i915_gem_object *obj) in __i915_gem_free_object() argument
377 trace_i915_gem_object_destroy(obj); in __i915_gem_free_object()
379 GEM_BUG_ON(!list_empty(&obj->lut_list)); in __i915_gem_free_object()
381 bitmap_free(obj->bit_17); in __i915_gem_free_object()
383 if (obj->base.import_attach) in __i915_gem_free_object()
384 drm_prime_gem_destroy(&obj->base, NULL); in __i915_gem_free_object()
386 drm_gem_free_mmap_offset(&obj->base); in __i915_gem_free_object()
388 if (obj->ops->release) in __i915_gem_free_object()
389 obj->ops->release(obj); in __i915_gem_free_object()
391 if (obj->mm.n_placements > 1) in __i915_gem_free_object()
392 kfree(obj->mm.placements); in __i915_gem_free_object()
394 if (obj->shares_resv_from) in __i915_gem_free_object()
395 i915_vm_resv_put(obj->shares_resv_from); in __i915_gem_free_object()
397 __i915_gem_object_fini(obj); in __i915_gem_free_object()
403 struct drm_i915_gem_object *obj, *on; in __i915_gem_free_objects() local
405 llist_for_each_entry_safe(obj, on, freed, freed) { in __i915_gem_free_objects()
407 if (obj->ops->delayed_free) { in __i915_gem_free_objects()
408 obj->ops->delayed_free(obj); in __i915_gem_free_objects()
412 __i915_gem_object_pages_fini(obj); in __i915_gem_free_objects()
413 __i915_gem_free_object(obj); in __i915_gem_free_objects()
416 call_rcu(&obj->rcu, __i915_gem_free_object_rcu); in __i915_gem_free_objects()
439 struct drm_i915_gem_object *obj = to_intel_bo(gem_obj); in i915_gem_free_object() local
440 struct drm_i915_private *i915 = to_i915(obj->base.dev); in i915_gem_free_object()
442 GEM_BUG_ON(i915_gem_object_is_framebuffer(obj)); in i915_gem_free_object()
463 if (llist_add(&obj->freed, &i915->mm.free_list)) in i915_gem_free_object()
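
Freeing is deferred to a worker through a lock-free llist: llist_add() returns true only when the list was previously empty, which is the cue to queue the worker exactly once per batch (line 463); the worker then detaches the whole batch atomically and walks it with llist_for_each_entry_safe() (line 405). A sketch of both halves, with illustrative names:

    /* Sketch of the lock-free deferred-free scheme above. */
    #include <linux/llist.h>
    #include <linux/slab.h>
    #include <linux/workqueue.h>

    struct my_obj {
            struct llist_node freed;
    };

    static LLIST_HEAD(free_list);

    static void free_worker(struct work_struct *work)
    {
            /* Atomically detach the whole pending batch. */
            struct llist_node *freed = llist_del_all(&free_list);
            struct my_obj *obj, *on;

            llist_for_each_entry_safe(obj, on, freed, freed)
                    kfree(obj);
    }

    static DECLARE_WORK(free_work, free_worker);

    static void my_obj_defer_free(struct my_obj *obj)
    {
            /* llist_add() returns true iff the list was empty, so the
             * worker is scheduled once per batch, as at line 463 above. */
            if (llist_add(&obj->freed, &free_list))
                    schedule_work(&free_work);
    }
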
467 void __i915_gem_object_flush_frontbuffer(struct drm_i915_gem_object *obj, in __i915_gem_object_flush_frontbuffer() argument
472 front = i915_gem_object_get_frontbuffer(obj); in __i915_gem_object_flush_frontbuffer()
479 void __i915_gem_object_invalidate_frontbuffer(struct drm_i915_gem_object *obj, in __i915_gem_object_invalidate_frontbuffer() argument
484 front = i915_gem_object_get_frontbuffer(obj); in __i915_gem_object_invalidate_frontbuffer()
492 i915_gem_object_read_from_page_kmap(struct drm_i915_gem_object *obj, u64 offset, void *dst, int size) in i915_gem_object_read_from_page_kmap() argument
498 src_map = kmap_atomic(i915_gem_object_get_page(obj, idx)); in i915_gem_object_read_from_page_kmap()
501 if (!(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_READ)) in i915_gem_object_read_from_page_kmap()
509 i915_gem_object_read_from_page_iomap(struct drm_i915_gem_object *obj, u64 offset, void *dst, int size) in i915_gem_object_read_from_page_iomap() argument
512 dma_addr_t dma = i915_gem_object_get_dma_address(obj, idx); in i915_gem_object_read_from_page_iomap()
516 src_map = io_mapping_map_wc(&obj->mm.region->iomap, in i915_gem_object_read_from_page_iomap()
517 dma - obj->mm.region->region.start, in i915_gem_object_read_from_page_iomap()
527 static bool object_has_mappable_iomem(struct drm_i915_gem_object *obj) in object_has_mappable_iomem() argument
529 GEM_BUG_ON(!i915_gem_object_has_iomem(obj)); in object_has_mappable_iomem()
531 if (IS_DGFX(to_i915(obj->base.dev))) in object_has_mappable_iomem()
532 return i915_ttm_resource_mappable(i915_gem_to_ttm(obj)->resource); in object_has_mappable_iomem()
551 int i915_gem_object_read_from_page(struct drm_i915_gem_object *obj, u64 offset, void *dst, int size) in i915_gem_object_read_from_page() argument
554 GEM_BUG_ON(offset >= obj->base.size); in i915_gem_object_read_from_page()
556 GEM_BUG_ON(!i915_gem_object_has_pinned_pages(obj)); in i915_gem_object_read_from_page()
558 if (i915_gem_object_has_struct_page(obj)) in i915_gem_object_read_from_page()
559 i915_gem_object_read_from_page_kmap(obj, offset, dst, size); in i915_gem_object_read_from_page()
560 else if (i915_gem_object_has_iomem(obj) && object_has_mappable_iomem(obj)) in i915_gem_object_read_from_page()
561 i915_gem_object_read_from_page_iomap(obj, offset, dst, size); in i915_gem_object_read_from_page()
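
i915_gem_object_read_from_page() dispatches on backing type: objects with struct pages are read through a temporary kernel mapping, flushing the CPU cache first when the object is not coherent for reads, while mappable iomem goes through a write-combined io mapping instead. A sketch of the struct-page path; the helper and its parameters are illustrative, not the i915 signature.

    /* Sketch of the kmap read path above: map one backing page, flush
     * the affected range if reads are not snooped, then copy. */
    #include <linux/highmem.h>
    #include <linux/string.h>
    #include <drm/drm_cache.h>

    static void read_from_page(struct page *page, size_t offset_in_page,
                               void *dst, int size, bool coherent_for_read)
    {
            void *src_map = kmap_atomic(page);
            void *src = src_map + offset_in_page;

            /* GPU writes may bypass the CPU cache; invalidate the
             * stale lines before reading if reads are not snooped. */
            if (!coherent_for_read)
                    drm_clflush_virt_range(src, size);

            memcpy(dst, src, size);

            kunmap_atomic(src_map);
    }
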
581 bool i915_gem_object_evictable(struct drm_i915_gem_object *obj) in i915_gem_object_evictable() argument
584 int pin_count = atomic_read(&obj->mm.pages_pin_count); in i915_gem_object_evictable()
589 spin_lock(&obj->vma.lock); in i915_gem_object_evictable()
590 list_for_each_entry(vma, &obj->vma.list, obj_link) { in i915_gem_object_evictable()
592 spin_unlock(&obj->vma.lock); in i915_gem_object_evictable()
598 spin_unlock(&obj->vma.lock); in i915_gem_object_evictable()
612 bool i915_gem_object_migratable(struct drm_i915_gem_object *obj) in i915_gem_object_migratable() argument
614 struct intel_memory_region *mr = READ_ONCE(obj->mm.region); in i915_gem_object_migratable()
619 return obj->mm.n_placements > 1; in i915_gem_object_migratable()
631 bool i915_gem_object_has_struct_page(const struct drm_i915_gem_object *obj) in i915_gem_object_has_struct_page() argument
634 if (IS_DGFX(to_i915(obj->base.dev)) && in i915_gem_object_has_struct_page()
635 i915_gem_object_evictable((void __force *)obj)) in i915_gem_object_has_struct_page()
636 assert_object_held_shared(obj); in i915_gem_object_has_struct_page()
638 return obj->mem_flags & I915_BO_FLAG_STRUCT_PAGE; in i915_gem_object_has_struct_page()
650 bool i915_gem_object_has_iomem(const struct drm_i915_gem_object *obj) in i915_gem_object_has_iomem() argument
653 if (IS_DGFX(to_i915(obj->base.dev)) && in i915_gem_object_has_iomem()
654 i915_gem_object_evictable((void __force *)obj)) in i915_gem_object_has_iomem()
655 assert_object_held_shared(obj); in i915_gem_object_has_iomem()
657 return obj->mem_flags & I915_BO_FLAG_IOMEM; in i915_gem_object_has_iomem()
677 bool i915_gem_object_can_migrate(struct drm_i915_gem_object *obj, in i915_gem_object_can_migrate() argument
680 struct drm_i915_private *i915 = to_i915(obj->base.dev); in i915_gem_object_can_migrate()
681 unsigned int num_allowed = obj->mm.n_placements; in i915_gem_object_can_migrate()
686 GEM_BUG_ON(obj->mm.madv != I915_MADV_WILLNEED); in i915_gem_object_can_migrate()
692 if (!IS_ALIGNED(obj->base.size, mr->min_page_size)) in i915_gem_object_can_migrate()
695 if (obj->mm.region == mr) in i915_gem_object_can_migrate()
698 if (!i915_gem_object_evictable(obj)) in i915_gem_object_can_migrate()
701 if (!obj->ops->migrate) in i915_gem_object_can_migrate()
704 if (!(obj->flags & I915_BO_ALLOC_USER)) in i915_gem_object_can_migrate()
711 if (mr == obj->mm.placements[i]) in i915_gem_object_can_migrate()
743 int i915_gem_object_migrate(struct drm_i915_gem_object *obj, in i915_gem_object_migrate() argument
747 return __i915_gem_object_migrate(obj, ww, id, obj->flags); in i915_gem_object_migrate()
777 int __i915_gem_object_migrate(struct drm_i915_gem_object *obj, in __i915_gem_object_migrate() argument
782 struct drm_i915_private *i915 = to_i915(obj->base.dev); in __i915_gem_object_migrate()
786 GEM_BUG_ON(obj->mm.madv != I915_MADV_WILLNEED); in __i915_gem_object_migrate()
787 assert_object_held(obj); in __i915_gem_object_migrate()
792 if (!i915_gem_object_can_migrate(obj, id)) in __i915_gem_object_migrate()
795 if (!obj->ops->migrate) { in __i915_gem_object_migrate()
796 if (GEM_WARN_ON(obj->mm.region != mr)) in __i915_gem_object_migrate()
801 return obj->ops->migrate(obj, mr, flags); in __i915_gem_object_migrate()
812 bool i915_gem_object_placement_possible(struct drm_i915_gem_object *obj, in i915_gem_object_placement_possible() argument
817 if (!obj->mm.n_placements) { in i915_gem_object_placement_possible()
820 return i915_gem_object_has_iomem(obj); in i915_gem_object_placement_possible()
822 return i915_gem_object_has_pages(obj); in i915_gem_object_placement_possible()
830 for (i = 0; i < obj->mm.n_placements; i++) { in i915_gem_object_placement_possible()
831 if (obj->mm.placements[i]->type == type) in i915_gem_object_placement_possible()
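
i915_gem_object_can_migrate() (line 711) and i915_gem_object_placement_possible() (lines 830-831) both reduce to a membership test over the object's placement list; the former compares region pointers, the latter region types. A condensed sketch of the pointer form, expressed against the fields shown in this listing:

    /* Sketch of the placement membership test used above. */
    static bool placement_allowed(struct drm_i915_gem_object *obj,
                                  struct intel_memory_region *mr)
    {
            unsigned int i;

            for (i = 0; i < obj->mm.n_placements; i++)
                    if (obj->mm.placements[i] == mr)
                            return true;

            return false;
    }
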
847 bool i915_gem_object_needs_ccs_pages(struct drm_i915_gem_object *obj) in i915_gem_object_needs_ccs_pages() argument
852 if (!HAS_FLAT_CCS(to_i915(obj->base.dev))) in i915_gem_object_needs_ccs_pages()
855 if (obj->flags & I915_BO_ALLOC_CCS_AUX) in i915_gem_object_needs_ccs_pages()
858 for (i = 0; i < obj->mm.n_placements; i++) { in i915_gem_object_needs_ccs_pages()
860 if (obj->mm.placements[i]->type == INTEL_MEMORY_SYSTEM) in i915_gem_object_needs_ccs_pages()
863 obj->mm.placements[i]->type == INTEL_MEMORY_LOCAL) in i915_gem_object_needs_ccs_pages()
906 int i915_gem_object_get_moving_fence(struct drm_i915_gem_object *obj, in i915_gem_object_get_moving_fence() argument
909 return dma_resv_get_singleton(obj->base.resv, DMA_RESV_USAGE_KERNEL, in i915_gem_object_get_moving_fence()
925 int i915_gem_object_wait_moving_fence(struct drm_i915_gem_object *obj, in i915_gem_object_wait_moving_fence() argument
930 assert_object_held(obj); in i915_gem_object_wait_moving_fence()
932 ret = dma_resv_wait_timeout(obj->base.resv, DMA_RESV_USAGE_KERNEL, in i915_gem_object_wait_moving_fence()
936 else if (ret > 0 && i915_gem_object_has_unknown_state(obj)) in i915_gem_object_wait_moving_fence()
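
The moving fence lives in the object's dma_resv under DMA_RESV_USAGE_KERNEL: the getter pulls a singleton fence (line 909), and the wait path blocks on all kernel-usage fences, optionally interruptibly (line 932). A minimal sketch of the wait, assuming only the dma-resv API shown above:

    /* Sketch of the DMA_RESV_USAGE_KERNEL wait above. With
     * MAX_SCHEDULE_TIMEOUT the call waits indefinitely, so a
     * non-negative return means the fences signalled; a negative
     * return is an error (e.g. -ERESTARTSYS when interruptible). */
    #include <linux/dma-resv.h>
    #include <linux/sched.h>

    static int wait_kernel_fences(struct dma_resv *resv, bool intr)
    {
            long ret;

            ret = dma_resv_wait_timeout(resv, DMA_RESV_USAGE_KERNEL,
                                        intr, MAX_SCHEDULE_TIMEOUT);
            if (ret < 0)
                    return ret;

            return 0;
    }
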
950 bool i915_gem_object_has_unknown_state(struct drm_i915_gem_object *obj) in i915_gem_object_has_unknown_state() argument
958 return obj->mm.unknown_state; in i915_gem_object_has_unknown_state()