Lines matching references to obj (each entry shows the source line number, the line text, and the enclosing function; "argument" or "local" marks how obj is declared in that function)

23 static bool gpu_write_needs_clflush(struct drm_i915_gem_object *obj)  in gpu_write_needs_clflush()  argument
25 struct drm_i915_private *i915 = to_i915(obj->base.dev); in gpu_write_needs_clflush()
37 return !(i915_gem_object_has_cache_level(obj, I915_CACHE_NONE) || in gpu_write_needs_clflush()
38 i915_gem_object_has_cache_level(obj, I915_CACHE_WT)); in gpu_write_needs_clflush()
41 bool i915_gem_cpu_write_needs_clflush(struct drm_i915_gem_object *obj) in i915_gem_cpu_write_needs_clflush() argument
43 struct drm_i915_private *i915 = to_i915(obj->base.dev); in i915_gem_cpu_write_needs_clflush()
45 if (obj->cache_dirty) in i915_gem_cpu_write_needs_clflush()
51 if (!(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE)) in i915_gem_cpu_write_needs_clflush()
55 return i915_gem_object_is_framebuffer(obj); in i915_gem_cpu_write_needs_clflush()
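The two predicates above decide whether a clflush is needed around GPU or CPU writes. A minimal sketch of how the listed fragments fit together follows; the early-return bodies and the IS_DGFX() short-circuits are assumptions filled in around the lines shown, not taken from this listing.

static bool gpu_write_needs_clflush(struct drm_i915_gem_object *obj)
{
	struct drm_i915_private *i915 = to_i915(obj->base.dev);

	/* Assumed short-circuit: discrete parts are coherent and never
	 * need a manual flush. */
	if (IS_DGFX(i915))
		return false;

	/* Uncached (NONE) and write-through (WT) objects never leave dirty
	 * CPU cachelines behind; anything CPU-cacheable might. */
	return !(i915_gem_object_has_cache_level(obj, I915_CACHE_NONE) ||
		 i915_gem_object_has_cache_level(obj, I915_CACHE_WT));
}

bool i915_gem_cpu_write_needs_clflush(struct drm_i915_gem_object *obj)
{
	struct drm_i915_private *i915 = to_i915(obj->base.dev);

	/* Already marked dirty: the flush is deferred, nothing more to do. */
	if (obj->cache_dirty)
		return false;

	/* Assumed: discrete parts skip clflush entirely. */
	if (IS_DGFX(i915))
		return false;

	/* CPU writes that the GPU cannot snoop always need a flush. */
	if (!(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE))
		return true;

	/* Otherwise only scanout (framebuffer) objects need flushing, since
	 * the display engine does not snoop the CPU cache. */
	return i915_gem_object_is_framebuffer(obj);
}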
59 flush_write_domain(struct drm_i915_gem_object *obj, unsigned int flush_domains) in flush_write_domain() argument
63 assert_object_held(obj); in flush_write_domain()
65 if (!(obj->write_domain & flush_domains)) in flush_write_domain()
68 switch (obj->write_domain) { in flush_write_domain()
70 spin_lock(&obj->vma.lock); in flush_write_domain()
71 for_each_ggtt_vma(vma, obj) in flush_write_domain()
73 spin_unlock(&obj->vma.lock); in flush_write_domain()
75 i915_gem_object_flush_frontbuffer(obj, ORIGIN_CPU); in flush_write_domain()
83 i915_gem_clflush_object(obj, I915_CLFLUSH_SYNC); in flush_write_domain()
87 if (gpu_write_needs_clflush(obj)) in flush_write_domain()
88 obj->cache_dirty = true; in flush_write_domain()
92 obj->write_domain = 0; in flush_write_domain()
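flush_write_domain() is the common helper the domain-transition paths call to retire a stale write domain before entering a new one. The sketch below is reconstructed around the listed fragments; the I915_GEM_DOMAIN_WC case and the i915_vma_flush_writes() call inside the GGTT loop are assumptions, since those lines do not reference obj and therefore do not appear above.

static void
flush_write_domain(struct drm_i915_gem_object *obj, unsigned int flush_domains)
{
	struct i915_vma *vma;

	assert_object_held(obj);

	/* Nothing to do if the pending write domain is not being flushed. */
	if (!(obj->write_domain & flush_domains))
		return;

	switch (obj->write_domain) {
	case I915_GEM_DOMAIN_GTT:
		/* Flush any CPU writes that went through the GGTT aperture. */
		spin_lock(&obj->vma.lock);
		for_each_ggtt_vma(vma, obj)
			i915_vma_flush_writes(vma);	/* assumed call */
		spin_unlock(&obj->vma.lock);

		i915_gem_object_flush_frontbuffer(obj, ORIGIN_CPU);
		break;

	case I915_GEM_DOMAIN_WC:
		wmb();	/* assumed: write-combine buffers only need a barrier */
		break;

	case I915_GEM_DOMAIN_CPU:
		i915_gem_clflush_object(obj, I915_CLFLUSH_SYNC);
		break;

	case I915_GEM_DOMAIN_RENDER:
		/* GPU writes may leave stale CPU cachelines behind. */
		if (gpu_write_needs_clflush(obj))
			obj->cache_dirty = true;
		break;
	}

	obj->write_domain = 0;
}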
95 static void __i915_gem_object_flush_for_display(struct drm_i915_gem_object *obj) in __i915_gem_object_flush_for_display() argument
101 flush_write_domain(obj, ~I915_GEM_DOMAIN_CPU); in __i915_gem_object_flush_for_display()
102 if (obj->cache_dirty) in __i915_gem_object_flush_for_display()
103 i915_gem_clflush_object(obj, I915_CLFLUSH_FORCE); in __i915_gem_object_flush_for_display()
104 obj->write_domain = 0; in __i915_gem_object_flush_for_display()
107 void i915_gem_object_flush_if_display(struct drm_i915_gem_object *obj) in i915_gem_object_flush_if_display() argument
109 if (!i915_gem_object_is_framebuffer(obj)) in i915_gem_object_flush_if_display()
112 i915_gem_object_lock(obj, NULL); in i915_gem_object_flush_if_display()
113 __i915_gem_object_flush_for_display(obj); in i915_gem_object_flush_if_display()
114 i915_gem_object_unlock(obj); in i915_gem_object_flush_if_display()
117 void i915_gem_object_flush_if_display_locked(struct drm_i915_gem_object *obj) in i915_gem_object_flush_if_display_locked() argument
119 if (i915_gem_object_is_framebuffer(obj)) in i915_gem_object_flush_if_display_locked()
120 __i915_gem_object_flush_for_display(obj); in i915_gem_object_flush_if_display_locked()
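The three display helpers above share one core routine: force everything out to memory so the non-snooping display engine sees the latest data. A sketch, assuming only the locked/unlocked wrappers shown in the listing:

static void __i915_gem_object_flush_for_display(struct drm_i915_gem_object *obj)
{
	/* Flush every write domain except CPU, then clflush unconditionally
	 * if the cachelines might still be dirty. */
	flush_write_domain(obj, ~I915_GEM_DOMAIN_CPU);
	if (obj->cache_dirty)
		i915_gem_clflush_object(obj, I915_CLFLUSH_FORCE);
	obj->write_domain = 0;
}

void i915_gem_object_flush_if_display(struct drm_i915_gem_object *obj)
{
	if (!i915_gem_object_is_framebuffer(obj))
		return;

	i915_gem_object_lock(obj, NULL);
	__i915_gem_object_flush_for_display(obj);
	i915_gem_object_unlock(obj);
}

void i915_gem_object_flush_if_display_locked(struct drm_i915_gem_object *obj)
{
	/* Same as above, but the caller already holds the object lock. */
	if (i915_gem_object_is_framebuffer(obj))
		__i915_gem_object_flush_for_display(obj);
}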
133 i915_gem_object_set_to_wc_domain(struct drm_i915_gem_object *obj, bool write) in i915_gem_object_set_to_wc_domain() argument
137 assert_object_held(obj); in i915_gem_object_set_to_wc_domain()
139 ret = i915_gem_object_wait(obj, in i915_gem_object_set_to_wc_domain()
146 if (obj->write_domain == I915_GEM_DOMAIN_WC) in i915_gem_object_set_to_wc_domain()
157 ret = i915_gem_object_pin_pages(obj); in i915_gem_object_set_to_wc_domain()
161 flush_write_domain(obj, ~I915_GEM_DOMAIN_WC); in i915_gem_object_set_to_wc_domain()
167 if ((obj->read_domains & I915_GEM_DOMAIN_WC) == 0) in i915_gem_object_set_to_wc_domain()
173 GEM_BUG_ON((obj->write_domain & ~I915_GEM_DOMAIN_WC) != 0); in i915_gem_object_set_to_wc_domain()
174 obj->read_domains |= I915_GEM_DOMAIN_WC; in i915_gem_object_set_to_wc_domain()
176 obj->read_domains = I915_GEM_DOMAIN_WC; in i915_gem_object_set_to_wc_domain()
177 obj->write_domain = I915_GEM_DOMAIN_WC; in i915_gem_object_set_to_wc_domain()
178 obj->mm.dirty = true; in i915_gem_object_set_to_wc_domain()
181 i915_gem_object_unpin_pages(obj); in i915_gem_object_set_to_wc_domain()
195 i915_gem_object_set_to_gtt_domain(struct drm_i915_gem_object *obj, bool write) in i915_gem_object_set_to_gtt_domain() argument
199 assert_object_held(obj); in i915_gem_object_set_to_gtt_domain()
201 ret = i915_gem_object_wait(obj, in i915_gem_object_set_to_gtt_domain()
208 if (obj->write_domain == I915_GEM_DOMAIN_GTT) in i915_gem_object_set_to_gtt_domain()
219 ret = i915_gem_object_pin_pages(obj); in i915_gem_object_set_to_gtt_domain()
223 flush_write_domain(obj, ~I915_GEM_DOMAIN_GTT); in i915_gem_object_set_to_gtt_domain()
229 if ((obj->read_domains & I915_GEM_DOMAIN_GTT) == 0) in i915_gem_object_set_to_gtt_domain()
235 GEM_BUG_ON((obj->write_domain & ~I915_GEM_DOMAIN_GTT) != 0); in i915_gem_object_set_to_gtt_domain()
236 obj->read_domains |= I915_GEM_DOMAIN_GTT; in i915_gem_object_set_to_gtt_domain()
240 obj->read_domains = I915_GEM_DOMAIN_GTT; in i915_gem_object_set_to_gtt_domain()
241 obj->write_domain = I915_GEM_DOMAIN_GTT; in i915_gem_object_set_to_gtt_domain()
242 obj->mm.dirty = true; in i915_gem_object_set_to_gtt_domain()
244 spin_lock(&obj->vma.lock); in i915_gem_object_set_to_gtt_domain()
245 for_each_ggtt_vma(vma, obj) in i915_gem_object_set_to_gtt_domain()
248 spin_unlock(&obj->vma.lock); in i915_gem_object_set_to_gtt_domain()
251 i915_gem_object_unpin_pages(obj); in i915_gem_object_set_to_gtt_domain()
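i915_gem_object_set_to_wc_domain() and i915_gem_object_set_to_gtt_domain() follow the same pattern: wait for outstanding work, pin the pages, flush the old write domain, then update read_domains/write_domain. A sketch of the WC variant, reconstructed from the fragments; the mb() barrier and the error paths are assumptions, and the GTT variant differs only in the domain bit plus the GGTT-vma write-tracking loop hinted at by the spin_lock/for_each_ggtt_vma lines above.

int
i915_gem_object_set_to_wc_domain(struct drm_i915_gem_object *obj, bool write)
{
	int ret;

	assert_object_held(obj);

	ret = i915_gem_object_wait(obj,
				   I915_WAIT_INTERRUPTIBLE |
				   (write ? I915_WAIT_ALL : 0),
				   MAX_SCHEDULE_TIMEOUT);
	if (ret)
		return ret;

	/* Already the sole write domain: nothing to do. */
	if (obj->write_domain == I915_GEM_DOMAIN_WC)
		return 0;

	/* Keep the pages resident while the domains are updated. */
	ret = i915_gem_object_pin_pages(obj);
	if (ret)
		return ret;

	flush_write_domain(obj, ~I915_GEM_DOMAIN_WC);

	/* Assumed: a full barrier when first pulling the object into the
	 * WC read domain, to serialise against coherent GPU writes. */
	if ((obj->read_domains & I915_GEM_DOMAIN_WC) == 0)
		mb();

	/* All other write domains should have been flushed by now. */
	GEM_BUG_ON((obj->write_domain & ~I915_GEM_DOMAIN_WC) != 0);
	obj->read_domains |= I915_GEM_DOMAIN_WC;
	if (write) {
		obj->read_domains = I915_GEM_DOMAIN_WC;
		obj->write_domain = I915_GEM_DOMAIN_WC;
		obj->mm.dirty = true;
	}

	i915_gem_object_unpin_pages(obj);
	return 0;
}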
270 int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj, in i915_gem_object_set_cache_level() argument
281 if (i915_gem_object_has_cache_level(obj, cache_level)) in i915_gem_object_set_cache_level()
284 ret = i915_gem_object_wait(obj, in i915_gem_object_set_cache_level()
292 i915_gem_object_set_cache_coherency(obj, cache_level); in i915_gem_object_set_cache_level()
293 obj->cache_dirty = true; in i915_gem_object_set_cache_level()
296 return i915_gem_object_unbind(obj, in i915_gem_object_set_cache_level()
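i915_gem_object_set_cache_level() changes the caching mode for the whole object. Rather than rewriting live PTEs, the listed fragments suggest it records the new coherency state and unbinds every vma so the next bind picks up the new cache level. A sketch; the unbind flags are an assumption.

int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
				    enum i915_cache_level cache_level)
{
	int ret;

	/* No change requested (this also short-circuits for objects whose
	 * PAT index was set by userspace and is treated as immutable). */
	if (i915_gem_object_has_cache_level(obj, cache_level))
		return 0;

	ret = i915_gem_object_wait(obj,
				   I915_WAIT_INTERRUPTIBLE |
				   I915_WAIT_ALL,
				   MAX_SCHEDULE_TIMEOUT);
	if (ret)
		return ret;

	/* Record the new coherency mode and mark the cachelines stale. */
	i915_gem_object_set_cache_coherency(obj, cache_level);
	obj->cache_dirty = true;

	/* Assumed flags: force every vma out so its PTEs are rewritten with
	 * the new cache level when it is next bound. */
	return i915_gem_object_unbind(obj,
				      I915_GEM_OBJECT_UNBIND_ACTIVE |
				      I915_GEM_OBJECT_UNBIND_BARRIER);
}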
305 struct drm_i915_gem_object *obj; in i915_gem_get_caching_ioctl() local
312 obj = i915_gem_object_lookup_rcu(file, args->handle); in i915_gem_get_caching_ioctl()
313 if (!obj) { in i915_gem_get_caching_ioctl()
322 if (obj->pat_set_by_user) { in i915_gem_get_caching_ioctl()
327 if (i915_gem_object_has_cache_level(obj, I915_CACHE_LLC) || in i915_gem_get_caching_ioctl()
328 i915_gem_object_has_cache_level(obj, I915_CACHE_L3_LLC)) in i915_gem_get_caching_ioctl()
330 else if (i915_gem_object_has_cache_level(obj, I915_CACHE_WT)) in i915_gem_get_caching_ioctl()
344 struct drm_i915_gem_object *obj; in i915_gem_set_caching_ioctl() local
377 obj = i915_gem_object_lookup(file, args->handle); in i915_gem_set_caching_ioctl()
378 if (!obj) in i915_gem_set_caching_ioctl()
385 if (obj->pat_set_by_user) { in i915_gem_set_caching_ioctl()
394 if (i915_gem_object_is_proxy(obj)) { in i915_gem_set_caching_ioctl()
399 if (!i915_gem_object_is_userptr(obj) || in i915_gem_set_caching_ioctl()
406 ret = i915_gem_object_lock_interruptible(obj, NULL); in i915_gem_set_caching_ioctl()
410 ret = i915_gem_object_set_cache_level(obj, level); in i915_gem_set_caching_ioctl()
411 i915_gem_object_unlock(obj); in i915_gem_set_caching_ioctl()
414 i915_gem_object_put(obj); in i915_gem_set_caching_ioctl()
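The caching ioctls translate between the legacy I915_CACHING_* uapi values and the internal cache levels; both refuse to touch objects whose PAT index was set directly by userspace. A sketch of the read side; the RCU bracketing, the error codes, and the args->caching assignments are assumptions beyond the lines listed. The set side (not sketched) validates the object (proxy, userptr, platform support), takes the object lock interruptibly, calls i915_gem_object_set_cache_level(), then unlocks and drops the reference, as the fragments above show.

int i915_gem_get_caching_ioctl(struct drm_device *dev, void *data,
			       struct drm_file *file)
{
	struct drm_i915_gem_caching *args = data;
	struct drm_i915_gem_object *obj;
	int err = 0;

	rcu_read_lock();
	obj = i915_gem_object_lookup_rcu(file, args->handle);
	if (!obj) {
		err = -ENOENT;		/* assumed error code */
		goto out;
	}

	/* Objects with a userspace-chosen PAT index opt out of the legacy
	 * caching uapi altogether. */
	if (obj->pat_set_by_user) {
		err = -EOPNOTSUPP;	/* assumed error code */
		goto out;
	}

	if (i915_gem_object_has_cache_level(obj, I915_CACHE_LLC) ||
	    i915_gem_object_has_cache_level(obj, I915_CACHE_L3_LLC))
		args->caching = I915_CACHING_CACHED;
	else if (i915_gem_object_has_cache_level(obj, I915_CACHE_WT))
		args->caching = I915_CACHING_DISPLAY;
	else
		args->caching = I915_CACHING_NONE;
out:
	rcu_read_unlock();
	return err;
}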
425 i915_gem_object_pin_to_display_plane(struct drm_i915_gem_object *obj, in i915_gem_object_pin_to_display_plane() argument
431 struct drm_i915_private *i915 = to_i915(obj->base.dev); in i915_gem_object_pin_to_display_plane()
436 if (HAS_LMEM(i915) && !i915_gem_object_is_lmem(obj)) in i915_gem_object_pin_to_display_plane()
449 ret = i915_gem_object_set_cache_level(obj, in i915_gem_object_pin_to_display_plane()
459 if (i915_gem_object_is_tiled(obj)) in i915_gem_object_pin_to_display_plane()
461 i915_gem_object_get_tile_row_size(obj)); in i915_gem_object_pin_to_display_plane()
477 vma = i915_gem_object_ggtt_pin_ww(obj, ww, view, 0, alignment, in i915_gem_object_pin_to_display_plane()
481 vma = i915_gem_object_ggtt_pin_ww(obj, ww, view, 0, in i915_gem_object_pin_to_display_plane()
489 i915_gem_object_flush_if_display_locked(obj); in i915_gem_object_pin_to_display_plane()
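i915_gem_object_pin_to_display_plane() prepares a framebuffer for scanout: it forces a display-friendly cache level, pins the object into the GGTT (preferring the mappable aperture, then falling back to any GGTT slot), and flushes it for the display engine. A condensed sketch; the ERR_PTR returns, the HAS_WT() cache-level choice, the PIN_MAPPABLE | PIN_NONBLOCK first attempt and the return handling are assumptions around the listed lines.

struct i915_vma *
i915_gem_object_pin_to_display_plane(struct drm_i915_gem_object *obj,
				     struct i915_gem_ww_ctx *ww,
				     u32 alignment,
				     const struct i915_gtt_view *view,
				     unsigned int flags)
{
	struct drm_i915_private *i915 = to_i915(obj->base.dev);
	struct i915_vma *vma;
	int ret;

	/* On discrete, scanout buffers must live in device-local memory. */
	if (HAS_LMEM(i915) && !i915_gem_object_is_lmem(obj))
		return ERR_PTR(-EINVAL);

	/* The display engine does not snoop the CPU cache, so use a
	 * write-through (or uncached) mapping. Assumed selection. */
	ret = i915_gem_object_set_cache_level(obj,
					      HAS_WT(i915) ?
					      I915_CACHE_WT : I915_CACHE_NONE);
	if (ret)
		return ERR_PTR(ret);

	/* Tiled scanout needs at least tile-row alignment. */
	if (i915_gem_object_is_tiled(obj))
		alignment = max(alignment,
				i915_gem_object_get_tile_row_size(obj));

	/* Assumed two-step pinning: try the mappable aperture first, then
	 * accept any GGTT address. */
	vma = i915_gem_object_ggtt_pin_ww(obj, ww, view, 0, alignment,
					  flags | PIN_MAPPABLE | PIN_NONBLOCK);
	if (IS_ERR(vma) && vma != ERR_PTR(-EDEADLK))
		vma = i915_gem_object_ggtt_pin_ww(obj, ww, view, 0,
						  alignment, flags);
	if (IS_ERR(vma))
		return vma;

	/* Make sure the display engine sees fully flushed data. */
	i915_gem_object_flush_if_display_locked(obj);

	return vma;
}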
504 i915_gem_object_set_to_cpu_domain(struct drm_i915_gem_object *obj, bool write) in i915_gem_object_set_to_cpu_domain() argument
508 assert_object_held(obj); in i915_gem_object_set_to_cpu_domain()
510 ret = i915_gem_object_wait(obj, in i915_gem_object_set_to_cpu_domain()
517 flush_write_domain(obj, ~I915_GEM_DOMAIN_CPU); in i915_gem_object_set_to_cpu_domain()
520 if ((obj->read_domains & I915_GEM_DOMAIN_CPU) == 0) { in i915_gem_object_set_to_cpu_domain()
521 i915_gem_clflush_object(obj, I915_CLFLUSH_SYNC); in i915_gem_object_set_to_cpu_domain()
522 obj->read_domains |= I915_GEM_DOMAIN_CPU; in i915_gem_object_set_to_cpu_domain()
528 GEM_BUG_ON(obj->write_domain & ~I915_GEM_DOMAIN_CPU); in i915_gem_object_set_to_cpu_domain()
534 __start_cpu_write(obj); in i915_gem_object_set_to_cpu_domain()
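i915_gem_object_set_to_cpu_domain() mirrors the WC/GTT transitions: flush the old write domain, clflush if the CPU read domain is stale, then mark the object for CPU writes. A sketch built from the fragments; only the early returns on error are filled in as assumptions.

int
i915_gem_object_set_to_cpu_domain(struct drm_i915_gem_object *obj, bool write)
{
	int ret;

	assert_object_held(obj);

	ret = i915_gem_object_wait(obj,
				   I915_WAIT_INTERRUPTIBLE |
				   (write ? I915_WAIT_ALL : 0),
				   MAX_SCHEDULE_TIMEOUT);
	if (ret)
		return ret;

	flush_write_domain(obj, ~I915_GEM_DOMAIN_CPU);

	/* Invalidate stale cachelines when first entering the CPU domain. */
	if ((obj->read_domains & I915_GEM_DOMAIN_CPU) == 0) {
		i915_gem_clflush_object(obj, I915_CLFLUSH_SYNC);
		obj->read_domains |= I915_GEM_DOMAIN_CPU;
	}

	/* All other write domains should be flushed by now. */
	GEM_BUG_ON(obj->write_domain & ~I915_GEM_DOMAIN_CPU);

	/* A CPU write invalidates the GPU read domains at next use. */
	if (write)
		__start_cpu_write(obj);

	return 0;
}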
552 struct drm_i915_gem_object *obj; in i915_gem_set_domain_ioctl() local
574 obj = i915_gem_object_lookup(file, args->handle); in i915_gem_set_domain_ioctl()
575 if (!obj) in i915_gem_set_domain_ioctl()
583 err = i915_gem_object_wait(obj, in i915_gem_set_domain_ioctl()
591 if (i915_gem_object_is_userptr(obj)) { in i915_gem_set_domain_ioctl()
596 err = i915_gem_object_userptr_validate(obj); in i915_gem_set_domain_ioctl()
598 err = i915_gem_object_wait(obj, in i915_gem_set_domain_ioctl()
612 if (i915_gem_object_is_proxy(obj)) { in i915_gem_set_domain_ioctl()
617 err = i915_gem_object_lock_interruptible(obj, NULL); in i915_gem_set_domain_ioctl()
630 err = i915_gem_object_pin_pages(obj); in i915_gem_set_domain_ioctl()
644 if (READ_ONCE(obj->write_domain) == read_domains) in i915_gem_set_domain_ioctl()
648 err = i915_gem_object_set_to_wc_domain(obj, write_domain); in i915_gem_set_domain_ioctl()
650 err = i915_gem_object_set_to_gtt_domain(obj, write_domain); in i915_gem_set_domain_ioctl()
652 err = i915_gem_object_set_to_cpu_domain(obj, write_domain); in i915_gem_set_domain_ioctl()
655 i915_gem_object_unpin_pages(obj); in i915_gem_set_domain_ioctl()
658 i915_gem_object_unlock(obj); in i915_gem_set_domain_ioctl()
661 i915_gem_object_invalidate_frontbuffer(obj, ORIGIN_CPU); in i915_gem_set_domain_ioctl()
664 i915_gem_object_put(obj); in i915_gem_set_domain_ioctl()
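i915_gem_set_domain_ioctl() is the uapi entry point that drives the three set_to_*_domain() helpers above. The control flow, condensed and reconstructed from the fragments; the argument validation, the wait flags, the userptr and proxy handling details, and the exact error codes are assumptions.

int i915_gem_set_domain_ioctl(struct drm_device *dev, void *data,
			      struct drm_file *file)
{
	struct drm_i915_gem_set_domain *args = data;
	u32 read_domains = args->read_domains;
	u32 write_domain = args->write_domain;
	struct drm_i915_gem_object *obj;
	int err;

	obj = i915_gem_object_lookup(file, args->handle);
	if (!obj)
		return -ENOENT;

	/* Assumed: a preliminary wait without holding the object lock, so
	 * other clients are not stalled behind us. */
	err = i915_gem_object_wait(obj,
				   I915_WAIT_INTERRUPTIBLE |
				   (write_domain ? I915_WAIT_ALL : 0),
				   MAX_SCHEDULE_TIMEOUT);
	if (err)
		goto out;

	/* Userptr objects: revalidate the backing pages, then just wait. */
	if (i915_gem_object_is_userptr(obj)) {
		err = i915_gem_object_userptr_validate(obj);
		if (!err)
			err = i915_gem_object_wait(obj,
						   I915_WAIT_INTERRUPTIBLE |
						   (write_domain ? I915_WAIT_ALL : 0),
						   MAX_SCHEDULE_TIMEOUT);
		goto out;
	}

	/* Proxy objects reject domain changes. */
	if (i915_gem_object_is_proxy(obj)) {
		err = -ENXIO;	/* assumed error code */
		goto out;
	}

	err = i915_gem_object_lock_interruptible(obj, NULL);
	if (err)
		goto out;

	err = i915_gem_object_pin_pages(obj);
	if (err)
		goto out_unlock;

	/* Fast path: already in exactly the requested write domain. */
	if (READ_ONCE(obj->write_domain) == read_domains)
		goto out_unpin;

	if (read_domains & I915_GEM_DOMAIN_WC)
		err = i915_gem_object_set_to_wc_domain(obj, write_domain);
	else if (read_domains & I915_GEM_DOMAIN_GTT)
		err = i915_gem_object_set_to_gtt_domain(obj, write_domain);
	else
		err = i915_gem_object_set_to_cpu_domain(obj, write_domain);

out_unpin:
	i915_gem_object_unpin_pages(obj);
out_unlock:
	i915_gem_object_unlock(obj);

	if (!err && write_domain)
		i915_gem_object_invalidate_frontbuffer(obj, ORIGIN_CPU);
out:
	i915_gem_object_put(obj);
	return err;
}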
673 int i915_gem_object_prepare_read(struct drm_i915_gem_object *obj, in i915_gem_object_prepare_read() argument
679 if (!i915_gem_object_has_struct_page(obj)) in i915_gem_object_prepare_read()
682 assert_object_held(obj); in i915_gem_object_prepare_read()
684 ret = i915_gem_object_wait(obj, in i915_gem_object_prepare_read()
690 ret = i915_gem_object_pin_pages(obj); in i915_gem_object_prepare_read()
694 if (obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_READ || in i915_gem_object_prepare_read()
696 ret = i915_gem_object_set_to_cpu_domain(obj, false); in i915_gem_object_prepare_read()
703 flush_write_domain(obj, ~I915_GEM_DOMAIN_CPU); in i915_gem_object_prepare_read()
710 if (!obj->cache_dirty && in i915_gem_object_prepare_read()
711 !(obj->read_domains & I915_GEM_DOMAIN_CPU)) in i915_gem_object_prepare_read()
719 i915_gem_object_unpin_pages(obj); in i915_gem_object_prepare_read()
723 int i915_gem_object_prepare_write(struct drm_i915_gem_object *obj, in i915_gem_object_prepare_write() argument
729 if (!i915_gem_object_has_struct_page(obj)) in i915_gem_object_prepare_write()
732 assert_object_held(obj); in i915_gem_object_prepare_write()
734 ret = i915_gem_object_wait(obj, in i915_gem_object_prepare_write()
741 ret = i915_gem_object_pin_pages(obj); in i915_gem_object_prepare_write()
745 if (obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE || in i915_gem_object_prepare_write()
747 ret = i915_gem_object_set_to_cpu_domain(obj, true); in i915_gem_object_prepare_write()
754 flush_write_domain(obj, ~I915_GEM_DOMAIN_CPU); in i915_gem_object_prepare_write()
761 if (!obj->cache_dirty) { in i915_gem_object_prepare_write()
768 if (!(obj->read_domains & I915_GEM_DOMAIN_CPU)) in i915_gem_object_prepare_write()
773 i915_gem_object_invalidate_frontbuffer(obj, ORIGIN_CPU); in i915_gem_object_prepare_write()
774 obj->mm.dirty = true; in i915_gem_object_prepare_write()
779 i915_gem_object_unpin_pages(obj); in i915_gem_object_prepare_write()
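prepare_read()/prepare_write() are the pread/pwrite (and relocation) helpers: they pin the pages and tell the caller, via *needs_clflush, whether cachelines must be flushed before and/or after the CPU access. A sketch of the write variant; the CLFLUSH_BEFORE/CLFLUSH_AFTER flag names and the clflush-capability check are assumptions filled in between the listed lines. The read variant has the same shape, minus the frontbuffer invalidation, the dirty marking, and the CLFLUSH_AFTER handling.

int i915_gem_object_prepare_write(struct drm_i915_gem_object *obj,
				  unsigned int *needs_clflush)
{
	int ret;

	*needs_clflush = 0;
	if (!i915_gem_object_has_struct_page(obj))
		return -ENODEV;

	assert_object_held(obj);

	ret = i915_gem_object_wait(obj,
				   I915_WAIT_INTERRUPTIBLE |
				   I915_WAIT_ALL,
				   MAX_SCHEDULE_TIMEOUT);
	if (ret)
		return ret;

	ret = i915_gem_object_pin_pages(obj);
	if (ret)
		return ret;

	/* Coherent for CPU writes (or no clflush instruction available):
	 * simply move the object into the CPU domain and be done. */
	if (obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE ||
	    !static_cpu_has(X86_FEATURE_CLFLUSH)) {
		ret = i915_gem_object_set_to_cpu_domain(obj, true);
		if (ret)
			goto err_unpin;
		goto out;
	}

	flush_write_domain(obj, ~I915_GEM_DOMAIN_CPU);

	/* Otherwise stay out of the CPU domain and ask the caller to
	 * clflush manually: after the write always, and before it too if
	 * partially written cachelines could be stale. */
	if (!obj->cache_dirty) {
		*needs_clflush |= CLFLUSH_AFTER;

		if (!(obj->read_domains & I915_GEM_DOMAIN_CPU))
			*needs_clflush |= CLFLUSH_BEFORE;
	}

out:
	i915_gem_object_invalidate_frontbuffer(obj, ORIGIN_CPU);
	obj->mm.dirty = true;
	/* Return with the pages still pinned; the caller unpins. */
	return 0;

err_unpin:
	i915_gem_object_unpin_pages(obj);
	return ret;
}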