/openbmc/linux/drivers/gpu/drm/i915/
i915_gem_gtt.c
    47   GEM_BUG_ON(obj->mm.pages == pages);  in i915_gem_gtt_prepare_pages()
    105  GEM_BUG_ON(!size);  in i915_gem_gtt_reserve()
    106  GEM_BUG_ON(!IS_ALIGNED(size, I915_GTT_PAGE_SIZE));  in i915_gem_gtt_reserve()
    107  GEM_BUG_ON(!IS_ALIGNED(offset, I915_GTT_MIN_ALIGNMENT));  in i915_gem_gtt_reserve()
    108  GEM_BUG_ON(range_overflows(offset, size, vm->total));  in i915_gem_gtt_reserve()
    109  GEM_BUG_ON(vm == &to_gt(vm->i915)->ggtt->alias->vm);  in i915_gem_gtt_reserve()
    110  GEM_BUG_ON(drm_mm_node_allocated(node));  in i915_gem_gtt_reserve()
    134  GEM_BUG_ON(range_overflows(start, len, end));  in random_offset()
    135  GEM_BUG_ON(round_up(start, align) > round_down(end - len, align));  in random_offset()
    202  GEM_BUG_ON(!size);  in i915_gem_gtt_insert()
    [all …]
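Every hit in these listings is a use of GEM_BUG_ON(), i915's debug-only assertion: it documents an invariant of the GEM code and traps when that invariant is violated, but only on kernels built with the GEM debugging option; on production builds the condition is type-checked and then discarded. A minimal sketch of such a macro, assuming the usual CONFIG_DRM_I915_DEBUG_GEM guard (the driver's real definition also emits GEM tracing before calling BUG()):

#include <linux/bug.h>
#include <linux/build_bug.h>
#include <linux/compiler.h>
#include <linux/printk.h>
#include <linux/stringify.h>

#if IS_ENABLED(CONFIG_DRM_I915_DEBUG_GEM)
/* Debug build: report the failed condition and stop. */
#define GEM_BUG_ON(cond) do {                                           \
                if (unlikely((cond))) {                                 \
                        pr_err("GEM_BUG_ON(%s) at %s:%d\n",             \
                               __stringify(cond), __FILE__, __LINE__);  \
                        BUG();                                          \
                }                                                       \
        } while (0)
#else
/* Non-debug build: the condition must still compile, but no code is emitted. */
#define GEM_BUG_ON(cond) BUILD_BUG_ON_INVALID(cond)
#endif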
i915_vma.c
    147  GEM_BUG_ON(vm == &vm->gt->ggtt->alias->vm);  in vma_create()
    174  GEM_BUG_ON(range_overflows_t(u64,  in vma_create()
    180  GEM_BUG_ON(vma->size > obj->base.size);  in vma_create()
    193  GEM_BUG_ON(!IS_ALIGNED(vma->size, I915_GTT_PAGE_SIZE));  in vma_create()
    216  GEM_BUG_ON(!IS_ALIGNED(vma->fence_size, I915_GTT_MIN_ALIGNMENT));  in vma_create()
    221  GEM_BUG_ON(!is_power_of_2(vma->fence_alignment));  in vma_create()
    320  GEM_BUG_ON(view && !i915_is_ggtt_or_dpt(vm));  in i915_vma_instance()
    321  GEM_BUG_ON(!kref_read(&vm->ref));  in i915_vma_instance()
    331  GEM_BUG_ON(!IS_ERR(vma) && i915_vma_compare(vma, vm, view));  in i915_vma_instance()
    474  GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));  in i915_vma_bind()
    [all …]
i915_scheduler.c
    47   GEM_BUG_ON(rb_first_cached(&sched_engine->queue) !=  in assert_priolists()
    54   GEM_BUG_ON(p->priority > last_prio);  in assert_priolists()
    136  GEM_BUG_ON(!locked);  in lock_sched_engine()
    151  GEM_BUG_ON(locked != sched_engine);  in lock_sched_engine()
    167  GEM_BUG_ON(prio == I915_PRIORITY_INVALID);  in __i915_schedule()
    207  GEM_BUG_ON(p == dep); /* no cycles! */  in __i915_schedule()
    224  GEM_BUG_ON(!list_empty(&node->link));  in __i915_schedule()
    253  GEM_BUG_ON(node_to_request(node)->engine->sched_engine !=  in __i915_schedule()
    311  GEM_BUG_ON(!list_empty(&node->signalers_list));  in i915_sched_node_reinit()
    312  GEM_BUG_ON(!list_empty(&node->waiters_list));  in i915_sched_node_reinit()
    [all …]
i915_vma.h
    90   GEM_BUG_ON(!i915_vma_is_ggtt(vma));  in i915_vma_set_ggtt_write()
    109  GEM_BUG_ON(!i915_vma_is_map_and_fenceable(vma));  in i915_vma_set_userfault()
    146  GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));  in i915_vma_size()
    169  GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));  in i915_vma_offset()
    175  GEM_BUG_ON(!i915_vma_is_ggtt(vma));  in i915_ggtt_offset()
    176  GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));  in i915_ggtt_offset()
    177  GEM_BUG_ON(upper_32_bits(i915_vma_offset(vma)));  in i915_ggtt_offset()
    178  GEM_BUG_ON(upper_32_bits(i915_vma_offset(vma) +  in i915_ggtt_offset()
    214  GEM_BUG_ON(view && !i915_is_ggtt_or_dpt(vm));  in i915_vma_compare()
    329  GEM_BUG_ON(!i915_vma_is_pinned(vma));  in __i915_vma_pin()
    [all …]
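A pattern visible throughout the i915_vma.h hits: small inline accessors assert their own preconditions before returning a derived value. i915_ggtt_offset(), for example, insists that the VMA is bound in the GGTT, that its drm_mm node is actually allocated, and that the 64-bit node offset fits in the 32 bits GGTT addresses are carried in, before truncating it. A self-contained sketch of that shape, with invented stand-in types (the driver's real structures are richer):

#include <linux/kernel.h>       /* upper_32_bits(), lower_32_bits() */
#include <linux/types.h>
#include <drm/drm_mm.h>

struct example_vma {
        struct drm_mm_node node;        /* placement inside an address space */
        bool is_ggtt;                   /* bound in the global GTT? */
};

static inline u32 example_ggtt_offset(const struct example_vma *vma)
{
        GEM_BUG_ON(!vma->is_ggtt);
        GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
        GEM_BUG_ON(upper_32_bits(vma->node.start));

        return lower_32_bits(vma->node.start);
}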
i915_active.c
    50   GEM_BUG_ON(!is_barrier(&node->base));  in barrier_to_ll()
    63   GEM_BUG_ON(!is_barrier(&node->base));  in barrier_to_engine()
    132  GEM_BUG_ON(i915_active_is_idle(ref));  in __active_retire()
    138  GEM_BUG_ON(rcu_access_pointer(ref->excl.fence));  in __active_retire()
    154  GEM_BUG_ON(ref->tree.rb_node != &ref->cache->node);  in __active_retire()
    171  GEM_BUG_ON(i915_active_fence_isset(&it->base));  in __active_retire()
    181  GEM_BUG_ON(!atomic_read(&ref->count));  in active_work()
    191  GEM_BUG_ON(!atomic_read(&ref->count));  in active_retire()
    236  GEM_BUG_ON(idx == 0); /* 0 is the unordered timeline, rsvd for cache */  in __active_lookup()
    270  GEM_BUG_ON(i915_active_is_idle(ref));  in __active_lookup()
    [all …]
i915_request.c
    117  GEM_BUG_ON(rq->guc_prio != GUC_PRIO_INIT &&  in i915_fence_release()
    343  #define assert_capture_list_is_null(_rq) GEM_BUG_ON((_rq)->capture_list)
    364  GEM_BUG_ON(!i915_sw_fence_signaled(&rq->submit));  in i915_request_retire()
    379  GEM_BUG_ON(!list_is_first(&rq->link,  in i915_request_retire()
    406  GEM_BUG_ON(!llist_empty(&rq->execute_cb));  in i915_request_retire()
    425  GEM_BUG_ON(!__i915_request_is_complete(rq));  in i915_request_retire_upto()
    429  GEM_BUG_ON(!i915_request_completed(tmp));  in i915_request_retire_upto()
    555  GEM_BUG_ON(!fatal_error(rq->fence.error));  in __i915_request_skip()
    575  GEM_BUG_ON(!IS_ERR_VALUE((long)error));  in i915_request_set_error_once()
    594  GEM_BUG_ON(i915_request_signaled(rq));  in i915_request_mark_eio()
    [all …]
i915_syncmap.c
    101  GEM_BUG_ON(p->height);  in __sync_seqno()
    107  GEM_BUG_ON(!p->height);  in __sync_child()
    120  GEM_BUG_ON(p->height);  in __sync_leaf_idx()
    131  GEM_BUG_ON(p->height);  in __sync_leaf_prefix()
    244  GEM_BUG_ON(__sync_leaf_prefix(p, id) == p->prefix);  in __sync_set()
    300  GEM_BUG_ON(!(p->parent->bitmap & BIT(idx)));  in __sync_set()
    317  GEM_BUG_ON(!p->height);  in __sync_set()
    334  GEM_BUG_ON(p->prefix != __sync_leaf_prefix(p, id));  in __sync_set()
i915_scatterlist.c
    35   GEM_BUG_ON(new_sg); /* Should walk exactly nents and hit the end */  in i915_sg_trim()
    91   GEM_BUG_ON(!max_segment);  in i915_rsgt_from_mm_node()
    126  GEM_BUG_ON(!IS_ALIGNED(sg_dma_address(sg),  in i915_rsgt_from_mm_node()
    178  GEM_BUG_ON(list_empty(blocks));  in i915_rsgt_from_buddy_resource()
    179  GEM_BUG_ON(!max_segment);  in i915_rsgt_from_buddy_resource()
    216  GEM_BUG_ON(!IS_ALIGNED(sg_dma_address(sg),  in i915_rsgt_from_buddy_resource()
/openbmc/linux/drivers/gpu/drm/i915/gt/
intel_ring.c
    31   GEM_BUG_ON(!atomic_read(&ring->pin_count));  in __intel_ring_pin()
    152  GEM_BUG_ON(!is_power_of_2(size));  in intel_engine_create_ring()
    153  GEM_BUG_ON(RING_CTL_SIZE(size) & ~RING_NR_PAGES);  in intel_engine_create_ring()
    203  GEM_BUG_ON(list_empty(&tl->requests));  in wait_for_space()
    226  GEM_BUG_ON(ring->space < bytes);  in wait_for_space()
    240  GEM_BUG_ON(num_dwords & 1);  in intel_ring_begin()
    243  GEM_BUG_ON(total_bytes > ring->effective_size);  in intel_ring_begin()
    279  GEM_BUG_ON(!rq->reserved_space);  in intel_ring_begin()
    290  GEM_BUG_ON(need_wrap > ring->space);  in intel_ring_begin()
    291  GEM_BUG_ON(ring->emit + need_wrap > ring->size);  in intel_ring_begin()
    [all …]
gen8_ppgtt.c
    152  GEM_BUG_ON(start >= end);  in gen8_pd_range()
    166  GEM_BUG_ON(start >= end);  in gen8_pd_contains()
    172  GEM_BUG_ON(start >= end);  in gen8_pt_count()
    242  GEM_BUG_ON(end > vm->total >> GEN8_PTE_SHIFT);  in __gen8_ppgtt_clear()
    248  GEM_BUG_ON(!len || len >= atomic_read(px_used(pd)));  in __gen8_ppgtt_clear()
    277  GEM_BUG_ON(!count || count >= atomic_read(&pt->used));  in __gen8_ppgtt_clear()
    281  GEM_BUG_ON(num_ptes % 16);  in __gen8_ppgtt_clear()
    282  GEM_BUG_ON(pte % 16);  in __gen8_ppgtt_clear()
    306  GEM_BUG_ON(!IS_ALIGNED(start, BIT_ULL(GEN8_PTE_SHIFT)));  in gen8_ppgtt_clear()
    307  GEM_BUG_ON(!IS_ALIGNED(length, BIT_ULL(GEN8_PTE_SHIFT)));  in gen8_ppgtt_clear()
    [all …]
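The gen8_pd_range()/gen8_pt_count() hits guard the per-level walk of the multi-level page tables: every time a [start, end) range is split at a level, the code first asserts start < end. A rough sketch of the kind of per-table count helper those assertions live in, assuming the usual 512 entries per level (the real code derives the shift from its own GEN8_* macros):

#include <linux/log2.h>
#include <linux/types.h>

#define EXAMPLE_PDES 512        /* 2^9 entries per page-table level (assumption) */

/* How many entries of [start, end) fall within the current table? */
static unsigned int example_pt_count(u64 start, u64 end)
{
        GEM_BUG_ON(start >= end);

        /* Range crosses into the next table: clamp it at this table's end. */
        if ((start ^ end) >> ilog2(EXAMPLE_PDES))
                return EXAMPLE_PDES - (start & (EXAMPLE_PDES - 1));

        return end - start;
}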
intel_ring.h
    51   GEM_BUG_ON((rq->ring->vaddr + rq->ring->emit) != cs);  in intel_ring_advance()
    52   GEM_BUG_ON(!IS_ALIGNED(rq->ring->emit, 8)); /* RING_TAIL qword align */  in intel_ring_advance()
    86   GEM_BUG_ON(offset > rq->ring->size);  in intel_ring_offset()
    95   GEM_BUG_ON(!intel_ring_offset_valid(ring, tail));  in assert_ring_tail_valid()
    112  GEM_BUG_ON(cacheline(tail) == cacheline(head) && tail < head);  in assert_ring_tail_valid()
    138  GEM_BUG_ON(!is_power_of_2(size));  in __intel_ring_space()
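__intel_ring_space() and intel_ring_advance() above lean on the ring being a power of two in size: emit offsets stay qword-aligned and free space can be computed with a mask rather than a modulo. A hedged sketch of that free-space computation (the slack the real driver keeps between head and tail differs; here a single byte is reserved so that head == tail means empty):

#include <linux/log2.h>

static inline unsigned int example_ring_space(unsigned int head,
                                              unsigned int tail,
                                              unsigned int size)
{
        /* The mask arithmetic below is only valid for power-of-two sizes. */
        GEM_BUG_ON(!is_power_of_2(size));

        /* One byte is kept free so head == tail unambiguously means empty. */
        return (head - tail - 1) & (size - 1);
}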
intel_timeline.c
    99   GEM_BUG_ON(timeline->hwsp_offset >= hwsp->size);  in intel_timeline_init()
    187  GEM_BUG_ON(!atomic_read(&tl->pin_count));  in __intel_timeline_pin()
    227  GEM_BUG_ON(!atomic_read(&tl->pin_count));  in intel_timeline_reset_seqno()
    282  GEM_BUG_ON(!atomic_read(&tl->active_count));  in intel_timeline_exit()
    301  GEM_BUG_ON(!atomic_read(&tl->pin_count));  in timeline_advance()
    302  GEM_BUG_ON(tl->seqno & tl->has_initial_breadcrumb);  in timeline_advance()
    322  GEM_BUG_ON(i915_seqno_passed(*tl->hwsp_seqno, *seqno));  in __intel_timeline_get_seqno()
    383  GEM_BUG_ON(!atomic_read(&tl->pin_count));  in intel_timeline_unpin()
    396  GEM_BUG_ON(atomic_read(&timeline->pin_count));  in __intel_timeline_free()
    397  GEM_BUG_ON(!list_empty(&timeline->requests));  in __intel_timeline_free()
    [all …]
intel_wopcm.c
    247  GEM_BUG_ON(!wopcm_size);  in intel_wopcm_init()
    248  GEM_BUG_ON(wopcm->guc.base);  in intel_wopcm_init()
    249  GEM_BUG_ON(wopcm->guc.size);  in intel_wopcm_init()
    250  GEM_BUG_ON(guc_fw_size >= wopcm_size);  in intel_wopcm_init()
    251  GEM_BUG_ON(huc_fw_size >= wopcm_size);  in intel_wopcm_init()
    252  GEM_BUG_ON(ctx_rsvd + WOPCM_RESERVED_SIZE >= wopcm_size);  in intel_wopcm_init()
    319  GEM_BUG_ON(!wopcm->guc.base);  in intel_wopcm_init()
    320  GEM_BUG_ON(!wopcm->guc.size);  in intel_wopcm_init()
intel_execlists_submission.c
    203  GEM_BUG_ON(!intel_engine_is_virtual(engine));  in to_virtual_engine()
    385  GEM_BUG_ON(rq_prio(rq) == I915_PRIORITY_INVALID);  in __unwind_incomplete_requests()
    391  GEM_BUG_ON(i915_sched_engine_is_empty(engine->sched_engine));  in __unwind_incomplete_requests()
    494  GEM_BUG_ON(ce->tag <= BITS_PER_LONG);  in __execlists_schedule_in()
    500  GEM_BUG_ON(tag == 0 || tag >= BITS_PER_LONG);  in __execlists_schedule_in()
    510  GEM_BUG_ON(tag >= BITS_PER_LONG);  in __execlists_schedule_in()
    535  GEM_BUG_ON(!intel_engine_pm_is_awake(rq->engine));  in execlists_schedule_in()
    543  GEM_BUG_ON(intel_context_inflight(ce) != rq->engine);  in execlists_schedule_in()
    602  GEM_BUG_ON(ce->inflight != engine);  in __execlists_schedule_out()
    625  GEM_BUG_ON(ccid == 0);  in __execlists_schedule_out()
    [all …]
intel_ggtt_fencing.c
    80   GEM_BUG_ON(!IS_ALIGNED(stride, 128));  in i965_write_fence_reg()
    127  GEM_BUG_ON(!is_power_of_2(stride));  in i915_write_fence_reg()
    211  GEM_BUG_ON(!i915_gem_object_get_stride(vma->obj) ||  in fence_update()
    224  GEM_BUG_ON(vma->fence_size > i915_vma_size(vma));  in fence_update()
    248  GEM_BUG_ON(old->fence != fence);  in fence_update()
    268  GEM_BUG_ON(vma);  in fence_update()
    300  GEM_BUG_ON(fence->vma != vma);  in i915_vma_revoke_fence()
    302  GEM_BUG_ON(!i915_active_is_idle(&fence->active));  in i915_vma_revoke_fence()
    303  GEM_BUG_ON(atomic_read(&fence->pin_count));  in i915_vma_revoke_fence()
    335  GEM_BUG_ON(fence->vma && fence->vma->fence != fence);  in fence_find()
    [all …]
intel_engine_pm.c
    71   GEM_BUG_ON(test_bit(CONTEXT_VALID_BIT, &ce->flags));  in __engine_unpark()
    87   GEM_BUG_ON(ce->timeline->seqno !=  in __engine_unpark()
    123  GEM_BUG_ON(rq->context->active_count != 1);  in __queue_and_release_pm()
    173  GEM_BUG_ON(!intel_context_is_barrier(ce));  in switch_to_kernel_context()
    174  GEM_BUG_ON(ce->timeline->hwsp_ggtt != engine->status_page.vma);  in switch_to_kernel_context()
    209  GEM_BUG_ON(atomic_read(&ce->timeline->active_count) < 0);  in switch_to_kernel_context()
intel_lrc.c
    82    GEM_BUG_ON(!count);  in set_offsets()
    690   GEM_BUG_ON(GRAPHICS_VER(engine->i915) >= 12 &&  in reg_offsets()
    826   GEM_BUG_ON(GRAPHICS_VER(engine->i915) < 8);  in lrc_ring_indirect_offset_default()
    837   GEM_BUG_ON(!size);  in lrc_setup_indirect_ctx()
    838   GEM_BUG_ON(!IS_ALIGNED(size, CACHELINE_BYTES));  in lrc_setup_indirect_ctx()
    839   GEM_BUG_ON(lrc_ring_indirect_ptr(engine) == -1);  in lrc_setup_indirect_ctx()
    843   GEM_BUG_ON(lrc_ring_indirect_offset(engine) == -1);  in lrc_setup_indirect_ctx()
    880   GEM_BUG_ON(lrc_ring_wa_bb_per_ctx(engine) == -1);  in init_wa_bb_regs()
    1004  GEM_BUG_ON(!ce->wa_bb_page);  in context_indirect_bb()
    1066  GEM_BUG_ON(offset_in_page(cs) > DG2_PREDICATE_RESULT_WA);  in setup_predicate_disable_wa()
    [all …]
intel_context.c
    271  GEM_BUG_ON(!intel_context_is_pinned(ce)); /* no overflow! */  in __intel_context_do_pin_ww()
    378  GEM_BUG_ON(!engine->cops);  in intel_context_init()
    379  GEM_BUG_ON(!engine->gt->vm);  in intel_context_init()
    473  GEM_BUG_ON(rq->context == ce);  in intel_context_prepare_remote_request()
    489  GEM_BUG_ON(i915_active_is_idle(&ce->active));  in intel_context_prepare_remote_request()
    537  GEM_BUG_ON(!intel_engine_uses_guc(ce->engine));  in intel_context_get_active_request()
    569  GEM_BUG_ON(intel_context_is_pinned(parent));  in intel_context_bind_parent_child()
    570  GEM_BUG_ON(intel_context_is_child(parent));  in intel_context_bind_parent_child()
    571  GEM_BUG_ON(intel_context_is_pinned(child));  in intel_context_bind_parent_child()
    572  GEM_BUG_ON(intel_context_is_child(child));  in intel_context_bind_parent_child()
    [all …]
/openbmc/linux/drivers/gpu/drm/i915/gem/
i915_gem_object.c
    117  GEM_BUG_ON(flags & ~I915_BO_ALLOC_FLAGS);  in i915_gem_object_init()
    277  GEM_BUG_ON(vma->obj != obj);  in i915_gem_close_object()
    278  GEM_BUG_ON(!atomic_read(&vma->open_count));  in i915_gem_close_object()
    297  GEM_BUG_ON(!atomic_read(&i915->mm.free_count));  in __i915_gem_free_object_rcu()
    344  GEM_BUG_ON(vma->obj != obj);  in __i915_gem_object_pages_fini()
    372  GEM_BUG_ON(i915_gem_object_has_pages(obj));  in __i915_gem_object_pages_fini()
    379  GEM_BUG_ON(!list_empty(&obj->lut_list));  in __i915_gem_free_object()
    442  GEM_BUG_ON(i915_gem_object_is_framebuffer(obj));  in i915_gem_free_object()
    529  GEM_BUG_ON(!i915_gem_object_has_iomem(obj));  in object_has_mappable_iomem()
    553  GEM_BUG_ON(overflows_type(offset >> PAGE_SHIFT, pgoff_t));  in i915_gem_object_read_from_page()
    [all …]
i915_gem_region.c
    49   GEM_BUG_ON(flags & ~I915_BO_ALLOC_FLAGS);  in __i915_gem_object_create_region()
    64   GEM_BUG_ON(overflows_type(default_page_size, u32));  in __i915_gem_object_create_region()
    65   GEM_BUG_ON(!is_power_of_2_u64(default_page_size));  in __i915_gem_object_create_region()
    66   GEM_BUG_ON(default_page_size < PAGE_SIZE);  in __i915_gem_object_create_region()
    73   GEM_BUG_ON(!size);  in __i915_gem_object_create_region()
    74   GEM_BUG_ON(!IS_ALIGNED(size, I915_GTT_MIN_ALIGNMENT));  in __i915_gem_object_create_region()
    122  GEM_BUG_ON(offset == I915_BO_INVALID_OFFSET);  in i915_gem_object_create_region_at()
i915_gem_pages.c
    48   GEM_BUG_ON(!obj->mm.page_sizes.phys);  in __i915_gem_object_set_pages()
    63   GEM_BUG_ON(!HAS_PAGE_SIZES(i915, obj->mm.page_sizes.sg));  in __i915_gem_object_set_pages()
    69   GEM_BUG_ON(i915_gem_object_has_tiling_quirk(obj));  in __i915_gem_object_set_pages()
    71   GEM_BUG_ON(!list_empty(&obj->mm.link));  in __i915_gem_object_set_pages()
    111  GEM_BUG_ON(!err && !i915_gem_object_has_pages(obj));  in ____i915_gem_object_get_pages()
    132  GEM_BUG_ON(i915_gem_object_has_pinned_pages(obj));  in __i915_gem_object_get_pages()
    338  GEM_BUG_ON(type != I915_MAP_WC);  in i915_gem_object_map_pfn()
    380  GEM_BUG_ON(i915_gem_object_has_pinned_pages(obj));  in i915_gem_object_pin_map()
    391  GEM_BUG_ON(!i915_gem_object_has_pages(obj));  in i915_gem_object_pin_map()
    478  GEM_BUG_ON(!i915_gem_object_has_pinned_pages(obj));  in __i915_gem_object_flush_map()
    [all …]
i915_gem_tiling.c
    60   GEM_BUG_ON(!size);  in i915_gem_fence_size()
    65   GEM_BUG_ON(!stride);  in i915_gem_fence_size()
    69   GEM_BUG_ON(!IS_ALIGNED(stride, I965_FENCE_PAGE));  in i915_gem_fence_size()
    98   GEM_BUG_ON(!size);  in i915_gem_fence_alignment()
    199  GEM_BUG_ON(vma->vm != &ggtt->vm);  in i915_gem_object_fence_prepare()
    241  GEM_BUG_ON(!i915_tiling_ok(obj, tiling, stride));  in i915_gem_object_set_tiling()
    242  GEM_BUG_ON(!stride ^ (tiling == I915_TILING_NONE));  in i915_gem_object_set_tiling()
    283  GEM_BUG_ON(!i915_gem_object_has_tiling_quirk(obj));  in i915_gem_object_set_tiling()
    288  GEM_BUG_ON(i915_gem_object_has_tiling_quirk(obj));  in i915_gem_object_set_tiling()
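The tiling hits combine input checks with already-validated-state checks: i915_gem_fence_size() asserts it was handed a non-zero size and a stride aligned to the fence page, while i915_gem_object_set_tiling() asserts the caller ran the validation first; note the "!stride ^ (tiling == I915_TILING_NONE)" invariant, i.e. the stride is zero exactly when tiling is off. A small illustrative validator in that spirit (the tile width and names are assumptions, not the hardware's real constraints):

#include <linux/kernel.h>
#include <linux/types.h>

enum example_tiling { EXAMPLE_TILING_NONE, EXAMPLE_TILING_X, EXAMPLE_TILING_Y };

#define EXAMPLE_TILE_WIDTH 128  /* bytes per tile row, illustrative only */

static bool example_tiling_ok(enum example_tiling tiling, u32 stride)
{
        /* Linear objects carry no stride; tiled ones need an aligned one. */
        if (tiling == EXAMPLE_TILING_NONE)
                return stride == 0;

        return stride && IS_ALIGNED(stride, EXAMPLE_TILE_WIDTH);
}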
/openbmc/linux/drivers/gpu/drm/i915/gt/uc/
intel_uc.c
    98   GEM_BUG_ON(intel_uc_wants_guc(uc));  in __confirm_options()
    99   GEM_BUG_ON(intel_uc_wants_guc_submission(uc));  in __confirm_options()
    100  GEM_BUG_ON(intel_uc_wants_huc(uc));  in __confirm_options()
    101  GEM_BUG_ON(intel_uc_wants_guc_slpc(uc));  in __confirm_options()
    218  GEM_BUG_ON(!intel_guc_ct_enabled(&guc->ct));  in guc_handle_mmio_msg()
    234  GEM_BUG_ON(intel_guc_ct_enabled(&guc->ct));  in guc_enable_communication()
    289  GEM_BUG_ON(!intel_uc_wants_guc(uc));  in __uc_fetch_firmwares()
    329  GEM_BUG_ON(!intel_uc_wants_guc(uc));  in __uc_init()
    362  GEM_BUG_ON(!intel_uc_supports_guc(uc));  in __uc_sanitize()
    386  GEM_BUG_ON(!intel_uc_supports_guc(uc));  in uc_init_wopcm()
    [all …]
intel_guc_submission.c
    363  GEM_BUG_ON(!context_blocked(ce)); /* Overflow check */  in incr_context_blocked()
    370  GEM_BUG_ON(!context_blocked(ce)); /* Underflow check */  in decr_context_blocked()
    443  GEM_BUG_ON(!ce->parallel.guc.parent_page);  in __get_parent_scratch_offset()
    512  GEM_BUG_ON(id >= GUC_MAX_CONTEXT_ID);  in __get_context()
    524  GEM_BUG_ON(index >= GUC_MAX_CONTEXT_ID);  in __get_lrc_desc_v69()
    620  GEM_BUG_ON(g2h_len_dw && !loop);  in guc_submission_send_busy_loop()
    638  GEM_BUG_ON(timeout < 0);  in intel_guc_wait_for_pending_msg()
    703  GEM_BUG_ON(!atomic_read(&ce->guc_id.ref));  in __guc_add_request()
    704  GEM_BUG_ON(context_guc_id_invalid(ce));  in __guc_add_request()
    827  GEM_BUG_ON(!FIELD_FIT(WQ_LEN_MASK, len_dw));  in guc_wq_noop_append()
    [all …]
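incr_context_blocked()/decr_context_blocked() show the counter-guard idiom used around reference-like counts: assert the count is non-zero right after an increment (overflow check) and right before a decrement (underflow check), exactly as the inline comments in the hits say. A minimal sketch of that pattern around a plain atomic counter (names are invented for illustration; the driver's real counter lives inside its scheduling state and is updated under a lock):

#include <linux/atomic.h>

static void example_block_get(atomic_t *blocked)
{
        atomic_inc(blocked);
        GEM_BUG_ON(!atomic_read(blocked));      /* overflow check */
}

static void example_block_put(atomic_t *blocked)
{
        GEM_BUG_ON(!atomic_read(blocked));      /* underflow check */
        atomic_dec(blocked);
}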
intel_guc.c
    57   GEM_BUG_ON(!guc->send_regs.base);  in guc_send_reg()
    58   GEM_BUG_ON(!guc->send_regs.count);  in guc_send_reg()
    59   GEM_BUG_ON(i >= guc->send_regs.count);  in guc_send_reg()
    70   GEM_BUG_ON(!guc->send_regs.base);  in intel_guc_init_send_regs()
    71   GEM_BUG_ON(!guc->send_regs.count);  in intel_guc_init_send_regs()
    241  GEM_BUG_ON(!log->sizes_initialised);  in guc_ctl_log_params_flags()
    400  GEM_BUG_ON(!guc->ads_vma);  in intel_guc_init()
    477  GEM_BUG_ON(!len);  in intel_guc_send_mmio()
    478  GEM_BUG_ON(len > guc->send_regs.count);  in intel_guc_send_mmio()
    480  GEM_BUG_ON(FIELD_GET(GUC_HXG_MSG_0_ORIGIN, request[0]) != GUC_HXG_ORIGIN_HOST);  in intel_guc_send_mmio()
    [all …]