Lines matching refs: t (uses of the struct tiled_blits pointer t)

140 static int prepare_blit(const struct tiled_blits *t,  in prepare_blit()
155 struct intel_gt *gt = t->ce->engine->gt; in prepare_blit()
163 *cs++ = i915_mmio_reg_offset(BLIT_CCTL(t->ce->engine->mmio_base)); in prepare_blit()
167 src_pitch = t->width; /* in dwords */ in prepare_blit()
179 dst_pitch = t->width; /* in dwords */ in prepare_blit()
195 *cs++ = t->height << 16 | t->width; in prepare_blit()
226 src_pitch = t->width * 4; in prepare_blit()
232 dst_pitch = t->width * 4; in prepare_blit()
241 *cs++ = t->height << 16 | t->width; in prepare_blit()
260 static void tiled_blits_destroy_buffers(struct tiled_blits *t) in tiled_blits_destroy_buffers()
264 for (i = 0; i < ARRAY_SIZE(t->buffers); i++) in tiled_blits_destroy_buffers()
265 i915_vma_put(t->buffers[i].vma); in tiled_blits_destroy_buffers()
267 i915_vma_put(t->scratch.vma); in tiled_blits_destroy_buffers()
268 i915_vma_put(t->batch); in tiled_blits_destroy_buffers()
272 __create_vma(struct tiled_blits *t, size_t size, bool lmem) in __create_vma()
274 struct drm_i915_private *i915 = t->ce->vm->i915; in __create_vma()
285 vma = i915_vma_instance(obj, t->ce->vm, NULL); in __create_vma()
292 static struct i915_vma *create_vma(struct tiled_blits *t, bool lmem) in create_vma()
294 return __create_vma(t, PAGE_ALIGN(t->width * t->height * 4), lmem); in create_vma()
297 static int tiled_blits_create_buffers(struct tiled_blits *t, in tiled_blits_create_buffers()
301 struct drm_i915_private *i915 = t->ce->engine->i915; in tiled_blits_create_buffers()
304 t->width = width; in tiled_blits_create_buffers()
305 t->height = height; in tiled_blits_create_buffers()
307 t->batch = __create_vma(t, PAGE_SIZE, false); in tiled_blits_create_buffers()
308 if (IS_ERR(t->batch)) in tiled_blits_create_buffers()
309 return PTR_ERR(t->batch); in tiled_blits_create_buffers()
311 t->scratch.vma = create_vma(t, false); in tiled_blits_create_buffers()
312 if (IS_ERR(t->scratch.vma)) { in tiled_blits_create_buffers()
313 i915_vma_put(t->batch); in tiled_blits_create_buffers()
314 return PTR_ERR(t->scratch.vma); in tiled_blits_create_buffers()
317 for (i = 0; i < ARRAY_SIZE(t->buffers); i++) { in tiled_blits_create_buffers()
320 vma = create_vma(t, HAS_LMEM(i915) && i % 2); in tiled_blits_create_buffers()
322 tiled_blits_destroy_buffers(t); in tiled_blits_create_buffers()
326 t->buffers[i].vma = vma; in tiled_blits_create_buffers()
327 t->buffers[i].tiling = in tiled_blits_create_buffers()
331 if (HAS_4TILE(i915) && t->buffers[i].tiling == CLIENT_TILING_Y) in tiled_blits_create_buffers()
332 t->buffers[i].tiling = CLIENT_TILING_4; in tiled_blits_create_buffers()
333 else if (!HAS_4TILE(i915) && t->buffers[i].tiling == CLIENT_TILING_4) in tiled_blits_create_buffers()
334 t->buffers[i].tiling = CLIENT_TILING_Y; in tiled_blits_create_buffers()
340 static void fill_scratch(struct tiled_blits *t, u32 *vaddr, u32 val) in fill_scratch()
344 t->scratch.start_val = val; in fill_scratch()
345 for (i = 0; i < t->width * t->height; i++) in fill_scratch()
348 i915_gem_object_flush_map(t->scratch.vma->obj); in fill_scratch()
423 static int verify_buffer(const struct tiled_blits *t, in verify_buffer()
431 x = i915_prandom_u32_max_state(t->width, prng); in verify_buffer()
432 y = i915_prandom_u32_max_state(t->height, prng); in verify_buffer()
433 p = y * t->width + x; in verify_buffer()
443 p * 4, t->width * 4, in verify_buffer()
479 tiled_blit(struct tiled_blits *t, in tiled_blit()
498 err = i915_vma_pin(t->batch, 0, 0, PIN_USER | PIN_HIGH); in tiled_blit()
504 err = prepare_blit(t, dst, src, t->batch->obj); in tiled_blit()
508 rq = intel_context_create_request(t->ce); in tiled_blit()
514 err = igt_vma_move_to_active_unlocked(t->batch, rq, 0); in tiled_blit()
521 i915_vma_offset(t->batch), in tiled_blit()
522 i915_vma_size(t->batch), in tiled_blit()
532 i915_vma_unpin(t->batch); in tiled_blit()
544 struct tiled_blits *t; in tiled_blits_create()
548 t = kzalloc(sizeof(*t), GFP_KERNEL); in tiled_blits_create()
549 if (!t) in tiled_blits_create()
552 t->ce = intel_context_create(engine); in tiled_blits_create()
553 if (IS_ERR(t->ce)) { in tiled_blits_create()
554 err = PTR_ERR(t->ce); in tiled_blits_create()
558 t->align = i915_vm_min_alignment(t->ce->vm, INTEL_MEMORY_LOCAL); in tiled_blits_create()
559 t->align = max(t->align, in tiled_blits_create()
560 i915_vm_min_alignment(t->ce->vm, INTEL_MEMORY_SYSTEM)); in tiled_blits_create()
562 hole_size = 2 * round_up(WIDTH * HEIGHT * 4, t->align); in tiled_blits_create()
564 hole_size += 2 * t->align; /* padding on either side */ in tiled_blits_create()
566 mutex_lock(&t->ce->vm->mutex); in tiled_blits_create()
568 err = drm_mm_insert_node_in_range(&t->ce->vm->mm, &hole, in tiled_blits_create()
569 hole_size, t->align, in tiled_blits_create()
575 mutex_unlock(&t->ce->vm->mutex); in tiled_blits_create()
581 t->hole = hole.start + t->align; in tiled_blits_create()
582 pr_info("Using hole at %llx\n", t->hole); in tiled_blits_create()
584 err = tiled_blits_create_buffers(t, WIDTH, HEIGHT, prng); in tiled_blits_create()
588 return t; in tiled_blits_create()
591 intel_context_put(t->ce); in tiled_blits_create()
593 kfree(t); in tiled_blits_create()
597 static void tiled_blits_destroy(struct tiled_blits *t) in tiled_blits_destroy()
599 tiled_blits_destroy_buffers(t); in tiled_blits_destroy()
601 intel_context_put(t->ce); in tiled_blits_destroy()
602 kfree(t); in tiled_blits_destroy()
605 static int tiled_blits_prepare(struct tiled_blits *t, in tiled_blits_prepare()
608 u64 offset = round_up(t->width * t->height * 4, t->align); in tiled_blits_prepare()
613 map = i915_gem_object_pin_map_unlocked(t->scratch.vma->obj, I915_MAP_WC); in tiled_blits_prepare()
618 for (i = 0; i < ARRAY_SIZE(t->buffers); i++) { in tiled_blits_prepare()
619 fill_scratch(t, map, prandom_u32_state(prng)); in tiled_blits_prepare()
620 GEM_BUG_ON(verify_buffer(t, &t->scratch, prng)); in tiled_blits_prepare()
622 err = tiled_blit(t, in tiled_blits_prepare()
623 &t->buffers[i], t->hole + offset, in tiled_blits_prepare()
624 &t->scratch, t->hole); in tiled_blits_prepare()
626 err = verify_buffer(t, &t->buffers[i], prng); in tiled_blits_prepare()
633 i915_gem_object_unpin_map(t->scratch.vma->obj); in tiled_blits_prepare()
637 static int tiled_blits_bounce(struct tiled_blits *t, struct rnd_state *prng) in tiled_blits_bounce()
639 u64 offset = round_up(t->width * t->height * 4, 2 * t->align); in tiled_blits_bounce()
644 err = tiled_blit(t, in tiled_blits_bounce()
645 &t->buffers[1], t->hole + offset / 2, in tiled_blits_bounce()
646 &t->buffers[0], t->hole + 2 * offset); in tiled_blits_bounce()
651 t->buffers[2].tiling = t->buffers[0].tiling; in tiled_blits_bounce()
654 err = tiled_blit(t, in tiled_blits_bounce()
655 &t->buffers[2], t->hole + t->align, in tiled_blits_bounce()
656 &t->buffers[1], t->hole + 3 * offset / 2); in tiled_blits_bounce()
660 err = verify_buffer(t, &t->buffers[2], prng); in tiled_blits_bounce()
670 struct tiled_blits *t; in __igt_client_tiled_blits()
673 t = tiled_blits_create(engine, prng); in __igt_client_tiled_blits()
674 if (IS_ERR(t)) in __igt_client_tiled_blits()
675 return PTR_ERR(t); in __igt_client_tiled_blits()
677 err = tiled_blits_prepare(t, prng); in __igt_client_tiled_blits()
681 err = tiled_blits_bounce(t, prng); in __igt_client_tiled_blits()
686 tiled_blits_destroy(t); in __igt_client_tiled_blits()
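
Taken together, the references above show the state that __igt_client_tiled_blits() threads through every helper. The sketch below reconstructs that state purely from the t->... accesses in the listing: the struct name blit_buffer, the exact field types, and the array size of three are assumptions inferred from usage (buffers[0]..buffers[2], t->scratch.start_val, the %llx print of t->hole), not copied from the selftest source.

/*
 * Hedged sketch of the shared state, inferred from the listing above.
 * Assumes the usual kernel types (u32/u64, struct i915_vma, struct
 * intel_context, enum client_tiling) that the listed lines already use.
 */
struct blit_buffer {
	struct i915_vma *vma;		/* t->buffers[i].vma, t->scratch.vma */
	u32 start_val;			/* t->scratch.start_val, set by fill_scratch() */
	enum client_tiling tiling;	/* CLIENT_TILING_Y or CLIENT_TILING_4 */
};

struct tiled_blits {
	struct intel_context *ce;	/* created in tiled_blits_create() */
	struct blit_buffer buffers[3];	/* indices 0..2 are used by tiled_blits_bounce() */
	struct blit_buffer scratch;	/* reference pattern checked by verify_buffer() */
	struct i915_vma *batch;		/* PAGE_SIZE batch written by prepare_blit() */
	u64 hole;			/* drm_mm hole offset, printed with %llx */
	u64 align;			/* max of LMEM and SMEM minimum alignment */
	u32 width;			/* buffers are width * height * 4 bytes */
	u32 height;
};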