/openbmc/linux/drivers/gpu/drm/i915/selftests/

mock_gem_device.c
    48  struct intel_gt *gt = to_gt(i915);  in mock_device_flush()
    67  intel_gt_driver_remove(to_gt(i915));  in mock_device_release()
    71  mock_fini_ggtt(to_gt(i915)->ggtt);  in mock_device_release()
   117  i915->gt[0] = to_gt(i915);  in mock_gt_probe()
   204  atomic_inc(&to_gt(i915)->wakeref.count); /* disable; no hw support */  in mock_gem_device()
   205  to_gt(i915)->awake = -ENODEV;  in mock_gem_device()
   223  ret = intel_gt_assign_ggtt(to_gt(i915));  in mock_gem_device()
   227  mock_init_ggtt(to_gt(i915));  in mock_gem_device()
   228  to_gt(i915)->vm = i915_vm_get(&to_gt(i915)->ggtt->vm);  in mock_gem_device()
   230  to_gt(i915)->info.engine_mask = BIT(0);  in mock_gem_device()
  [all …]
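Every hit in this index goes through the same small accessor, so the pattern is worth spelling out once. The sketch below is illustrative only: the stand-in structs are heavily simplified assumptions, not the definitions from i915_drv.h and gt/intel_gt_types.h where the real to_gt() and struct intel_gt live. It only shows why call sites write to_gt(i915)->ggtt, to_gt(i915)->uc and so on instead of reaching into drm_i915_private directly, and why mock_gt_probe() above can wire i915->gt[0] up to the value to_gt() returns.

    /* Illustrative stand-ins only -- the real structures are far larger. */
    struct i915_ggtt;

    struct intel_gt {
            struct i915_ggtt *ggtt;         /* per-GT global GTT used by many hits above */
            /* ... engines, uncore, uc, rps, reset state, ... */
    };

    struct drm_i915_private {
            struct intel_gt gt0;            /* root GT, assumed embedded in the device struct */
            struct intel_gt *gt[4];         /* per-tile GT pointers; slot 0 is the root GT */
            /* ... */
    };

    /* Callers ask for "the" GT instead of poking at i915 internals. */
    static inline struct intel_gt *to_gt(struct drm_i915_private *i915)
    {
            return &i915->gt0;              /* assumption: root GT is the embedded gt0 */
    }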
i915_gem.c
    45  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in trash_stolen()
   106  i915_ggtt_suspend(to_gt(i915)->ggtt);  in igt_pm_suspend()
   116  i915_ggtt_suspend(to_gt(i915)->ggtt);  in igt_pm_hibernate()
   132  i915_ggtt_resume(to_gt(i915)->ggtt);  in igt_pm_resume()
   134  setup_private_pat(to_gt(i915));  in igt_pm_resume()
   257  if (intel_gt_is_wedged(to_gt(i915)))  in i915_gem_live_selftests()
mock_uncore.c
    45  intel_uncore_init_early(uncore, to_gt(i915));  in mock_uncore_init()
/openbmc/linux/drivers/gpu/drm/i915/display/

intel_display_reset.c
    19  intel_has_gpu_reset(to_gt(dev_priv)));  in gpu_reset_clobbers_display()
    37  set_bit(I915_RESET_MODESET, &to_gt(dev_priv)->reset.flags);  in intel_display_reset_prepare()
    39  wake_up_bit(&to_gt(dev_priv)->reset.flags, I915_RESET_MODESET);  in intel_display_reset_prepare()
    44  intel_gt_set_wedged(to_gt(dev_priv));  in intel_display_reset_prepare()
    94  if (!test_bit(I915_RESET_MODESET, &to_gt(i915)->reset.flags))  in intel_display_reset_finish()
   134  clear_bit_unlock(I915_RESET_MODESET, &to_gt(i915)->reset.flags);  in intel_display_reset_finish()
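All of the intel_display_reset.c hits revolve around a single bit in the root GT's reset flags. Below is a compressed sketch of that handshake, assuming the standard kernel bitops; the real intel_display_reset_prepare()/_finish() additionally take modeset locks, duplicate and restore atomic state, and may wedge the GT (line 44).

    /* Simplified sketch, not the actual intel_display_reset_{prepare,finish}(). */
    static void display_reset_prepare_sketch(struct drm_i915_private *i915)
    {
            /* Flag that modeset state will be rebuilt around the GPU reset. */
            set_bit(I915_RESET_MODESET, &to_gt(i915)->reset.flags);
            wake_up_bit(&to_gt(i915)->reset.flags, I915_RESET_MODESET);

            /* ... acquire modeset locks, duplicate the atomic state ... */
    }

    static void display_reset_finish_sketch(struct drm_i915_private *i915)
    {
            if (!test_bit(I915_RESET_MODESET, &to_gt(i915)->reset.flags))
                    return;         /* the reset did not clobber the display */

            /* ... restore the saved atomic state ... */

            clear_bit_unlock(I915_RESET_MODESET, &to_gt(i915)->reset.flags);
    }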
/openbmc/linux/drivers/gpu/drm/i915/

i915_getparam.c
    20  const struct sseu_dev_info *sseu = &to_gt(i915)->info.sseu;  in i915_getparam_ioctl()
    38  value = to_gt(i915)->ggtt->num_fences;  in i915_getparam_ioctl()
    89  intel_has_gpu_reset(to_gt(i915));  in i915_getparam_ioctl()
    90  if (value && intel_has_reset_engine(to_gt(i915)))  in i915_getparam_ioctl()
   107  value = intel_huc_check_status(&to_gt(i915)->uc.huc);  in i915_getparam_ioctl()
   181  value = to_gt(i915)->clock_frequency;  in i915_getparam_ioctl()
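The i915_getparam_ioctl() hits all have one shape: a userspace query is answered by reading state that hangs off the root GT. A trimmed, hypothetical version of that dispatch is sketched below; the parameter names come from the i915 uapi header and are not claimed to map one-to-one onto the line numbers above.

    /* Sketch of the dispatch shape only, not the real ioctl body. */
    static int getparam_sketch(struct drm_i915_private *i915, int param, int *value)
    {
            switch (param) {
            case I915_PARAM_NUM_FENCES_AVAIL:
                    *value = to_gt(i915)->ggtt->num_fences;
                    break;
            case I915_PARAM_HUC_STATUS:
                    *value = intel_huc_check_status(&to_gt(i915)->uc.huc);
                    break;
            case I915_PARAM_CS_TIMESTAMP_FREQUENCY:
                    *value = to_gt(i915)->clock_frequency;
                    break;
            default:
                    return -EINVAL;
            }

            return 0;
    }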
i915_debugfs.c
    72  intel_gt_info_print(&to_gt(i915)->info, &p);  in i915_capabilities()
   147  if (IS_GFX_GT_IP_RANGE(to_gt(i915), IP_VER(12, 70), IP_VER(12, 71))) {  in i915_cache_level_str()
   345  gpu = i915_gpu_coredump(to_gt(i915), ALL_ENGINES, CORE_DUMP_FLAG_NONE);  in i915_gpu_info_open()
   404  struct intel_gt *gt = to_gt(i915);  in i915_frequency_info()
   443  swizzle_string(to_gt(dev_priv)->ggtt->bit_6_swizzle_x));  in i915_swizzle_info()
   445  swizzle_string(to_gt(dev_priv)->ggtt->bit_6_swizzle_y));  in i915_swizzle_info()
   492  struct intel_rps *rps = &to_gt(dev_priv)->rps;  in i915_rps_boost_info()
   498  seq_printf(m, "GPU busy? %s\n", str_yes_no(to_gt(dev_priv)->awake));  in i915_rps_boost_info()
   531  seq_printf(m, "GPU idle: %s\n", str_yes_no(!to_gt(dev_priv)->awake));  in i915_runtime_pm_status()
   563  str_yes_no(to_gt(i915)->awake),  in i915_engine_info()
  [all …]
i915_irq.c
   180  struct intel_gt *gt = to_gt(dev_priv);  in ivb_parity_work()
   320  gen6_gt_irq_handler(to_gt(dev_priv), gt_iir);  in valleyview_irq_handler()
   322  gen6_rps_irq_handler(&to_gt(dev_priv)->rps, pm_iir);  in valleyview_irq_handler()
   378  gen8_gt_irq_handler(to_gt(dev_priv), master_ctl);  in cherryview_irq_handler()
   456  gen6_gt_irq_handler(to_gt(i915), gt_iir);  in ilk_irq_handler()
   458  gen5_gt_irq_handler(to_gt(i915), gt_iir);  in ilk_irq_handler()
   476  gen6_rps_irq_handler(&to_gt(i915)->rps, pm_iir);  in ilk_irq_handler()
   527  gen8_gt_irq_handler(to_gt(dev_priv), master_ctl);  in gen8_irq_handler()
   565  struct intel_gt *gt = to_gt(i915);  in gen11_irq_handler()
   621  struct intel_gt *gt = to_gt(i915);  in dg1_irq_handler()
  [all …]
i915_gem_gtt.c
    60  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in i915_gem_gtt_finish_pages()
   109  GEM_BUG_ON(vm == &to_gt(vm->i915)->ggtt->alias->vm);  in i915_gem_gtt_reserve()
   209  GEM_BUG_ON(vm == &to_gt(vm->i915)->ggtt->alias->vm);  in i915_gem_gtt_insert()
i915_gem.c
    96  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in i915_gem_get_aperture_ioctl()
   309  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in i915_gem_gtt_prepare()
   370  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in i915_gem_gtt_cleanup()
   386  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in i915_gem_gtt_pread()
   549  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in i915_gem_gtt_pwrite_fast()
   858  &to_gt(i915)->ggtt->userfault_list, userfault_link)  in i915_gem_runtime_suspend()
   870  for (i = 0; i < to_gt(i915)->ggtt->num_fences; i++) {  in i915_gem_runtime_suspend()
   871  struct i915_fence_reg *reg = &to_gt(i915)->ggtt->fence_regs[i];  in i915_gem_runtime_suspend()
   912  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in i915_gem_object_ggtt_pin_ww()
  1244  i915_ggtt_resume(to_gt(dev_priv)->ggtt);  in i915_gem_init()
intel_gvt.c
    88  *mmio = intel_uncore_read_notrace(to_gt(dev_priv)->uncore,  in save_mmio()
   167  if (intel_uc_wants_guc_submission(&to_gt(dev_priv)->uc)) {  in intel_gvt_init_device()
i915_debugfs_params.c
    43  if (intel_uc_uses_guc_submission(&to_gt(i915)->uc))  in notify_guc()
    44  ret = intel_guc_global_policies_update(&to_gt(i915)->uc.guc);  in notify_guc()
/openbmc/linux/drivers/gpu/drm/i915/gem/

i915_gem_tiling.c
   187  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in i915_gem_object_fence_prepare()
   226  return to_gt(i915)->ggtt->bit_6_swizzle_x == I915_BIT_6_SWIZZLE_9_10_17 &&  in i915_gem_object_needs_bit17_swizzle()
   351  if (!to_gt(dev_priv)->ggtt->num_fences)  in i915_gem_set_tiling_ioctl()
   377  args->swizzle_mode = to_gt(dev_priv)->ggtt->bit_6_swizzle_x;  in i915_gem_set_tiling_ioctl()
   379  args->swizzle_mode = to_gt(dev_priv)->ggtt->bit_6_swizzle_y;  in i915_gem_set_tiling_ioctl()
   434  if (!to_gt(dev_priv)->ggtt->num_fences)  in i915_gem_get_tiling_ioctl()
   450  args->swizzle_mode = to_gt(dev_priv)->ggtt->bit_6_swizzle_x;  in i915_gem_get_tiling_ioctl()
   453  args->swizzle_mode = to_gt(dev_priv)->ggtt->bit_6_swizzle_y;  in i915_gem_get_tiling_ioctl()
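Both tiling ioctls above repeat the same two checks: give up when the GGTT exposes no fence registers, and report the GGTT's bit-6 swizzle pattern for the requested tiling axis. The helper below is hypothetical (no such function exists in the driver) and merely condenses that pattern.

    static int report_swizzle_sketch(struct drm_i915_private *i915,
                                     unsigned int tiling, u32 *swizzle)
    {
            struct i915_ggtt *ggtt = to_gt(i915)->ggtt;

            if (!ggtt->num_fences)          /* no fence registers: tiling unsupported */
                    return -EOPNOTSUPP;

            if (tiling == I915_TILING_X)
                    *swizzle = ggtt->bit_6_swizzle_x;
            else
                    *swizzle = ggtt->bit_6_swizzle_y;

            return 0;
    }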
i915_gem_mman.c
   337  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in vm_fault_gtt()
   458  mutex_lock(&to_gt(i915)->ggtt->vm.mutex);  in vm_fault_gtt()
   460  list_add(&obj->userfault_link, &to_gt(i915)->ggtt->userfault_list);  in vm_fault_gtt()
   461  mutex_unlock(&to_gt(i915)->ggtt->vm.mutex);  in vm_fault_gtt()
   582  mutex_lock(&to_gt(i915)->ggtt->vm.mutex);  in i915_gem_object_release_mmap_gtt()
   600  mutex_unlock(&to_gt(i915)->ggtt->vm.mutex);  in i915_gem_object_release_mmap_gtt()
   735  err = intel_gt_retire_requests_timeout(to_gt(i915), MAX_SCHEDULE_TIMEOUT,  in mmap_offset_attach()
   829  else if (!i915_ggtt_has_aperture(to_gt(i915)->ggtt))  in i915_gem_dumb_mmap_offset()
   877  if (!i915_ggtt_has_aperture(to_gt(i915)->ggtt))  in i915_gem_mmap_offset_ioctl()
  1114  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in i915_gem_fb_mmap()
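The vm_fault_gtt() and i915_gem_object_release_mmap_gtt() hits are two halves of one contract: objects with live GTT mmaps sit on the GGTT's userfault list, and that list is only touched under the GGTT vm.mutex (i915_gem_runtime_suspend() above walks the same list). The publishing side is sketched below with a hypothetical helper name; the real fault handler also reference-counts the object's userfaults.

    static void track_gtt_userfault_sketch(struct drm_i915_gem_object *obj,
                                           struct drm_i915_private *i915)
    {
            struct i915_ggtt *ggtt = to_gt(i915)->ggtt;

            mutex_lock(&ggtt->vm.mutex);
            /* unconditional here; the driver tracks a userfault count first */
            list_add(&obj->userfault_link, &ggtt->userfault_list);
            mutex_unlock(&ggtt->vm.mutex);
    }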
i915_gem_ttm_move.c
   202  if (!to_gt(i915)->migrate.context || intel_gt_is_wedged(to_gt(i915)))  in i915_ttm_accel_move()
   215  intel_engine_pm_get(to_gt(i915)->migrate.context->engine);  in i915_ttm_accel_move()
   216  ret = intel_context_migrate_clear(to_gt(i915)->migrate.context, deps,  in i915_ttm_accel_move()
   229  intel_engine_pm_get(to_gt(i915)->migrate.context->engine);  in i915_ttm_accel_move()
   230  ret = intel_context_migrate_copy(to_gt(i915)->migrate.context,  in i915_ttm_accel_move()
   242  intel_engine_pm_put(to_gt(i915)->migrate.context->engine);  in i915_ttm_accel_move()
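Every accelerated clear/copy in i915_ttm_accel_move() is bracketed the same way: only attempted when the root GT has a migration context and is not wedged, with the migration engine held awake for the duration. A sketch of that bracket follows; the actual intel_context_migrate_clear()/_copy() calls and their dependency and fence plumbing are deliberately elided.

    static int accel_move_sketch(struct drm_i915_private *i915)
    {
            struct intel_context *ce = to_gt(i915)->migrate.context;

            if (!ce || intel_gt_is_wedged(to_gt(i915)))
                    return -EINVAL;         /* caller falls back to a CPU copy */

            intel_engine_pm_get(ce->engine);
            /* ... intel_context_migrate_clear()/_copy() would run here ... */
            intel_engine_pm_put(ce->engine);

            return 0;
    }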
i915_gem_shrinker.c
   152  intel_gt_retire_requests(to_gt(i915));  in i915_gem_shrink()
   400  mutex_lock(&to_gt(i915)->ggtt->vm.mutex);  in i915_gem_shrinker_vmap()
   402  &to_gt(i915)->ggtt->vm.bound_list, vm_link) {  in i915_gem_shrinker_vmap()
   417  mutex_unlock(&to_gt(i915)->ggtt->vm.mutex);  in i915_gem_shrinker_vmap()
i915_gem_phys.c
    82  intel_gt_chipset_flush(to_gt(i915));  in i915_gem_object_get_pages_phys()
   169  intel_gt_chipset_flush(to_gt(i915));  in i915_gem_object_pwrite_phys()
i915_gem_throttle.c
    48  ret = intel_gt_terminally_wedged(to_gt(i915));  in i915_gem_throttle_ioctl()
/openbmc/linux/drivers/gpu/drm/i915/gt/

selftest_gt_pm.c
   191  if (intel_gt_is_wedged(to_gt(i915)))  in intel_gt_pm_live_selftests()
   194  return intel_gt_live_subtests(tests, to_gt(i915));  in intel_gt_pm_live_selftests()
   208  if (intel_gt_is_wedged(to_gt(i915)))  in intel_gt_pm_late_selftests()
   211  return intel_gt_live_subtests(tests, to_gt(i915));  in intel_gt_pm_late_selftests()
mock_engine.c
   348  GEM_BUG_ON(!to_gt(i915)->uncore);  in mock_engine()
   356  engine->base.gt = to_gt(i915);  in mock_engine()
   357  engine->base.uncore = to_gt(i915)->uncore;  in mock_engine()
   380  to_gt(i915)->engine[id] = &engine->base;  in mock_engine()
   381  to_gt(i915)->engine_class[0][id] = &engine->base;  in mock_engine()
selftest_engine.c
    15  struct intel_gt *gt = to_gt(i915);  in intel_engine_live_selftests()
intel_ggtt.c
    95  ret = ggtt_init_hw(to_gt(i915)->ggtt);  in i915_ggtt_init_hw()
   783  ret = init_ggtt(to_gt(i915)->ggtt);  in i915_init_ggtt()
   788  ret = init_aliasing_ppgtt(to_gt(i915)->ggtt);  in i915_init_ggtt()
   790  cleanup_init_ggtt(to_gt(i915)->ggtt);  in i915_init_ggtt()
   843  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in i915_ggtt_driver_release()
   858  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in i915_ggtt_driver_late_release()
  1260  ret = ggtt_probe_hw(to_gt(i915)->ggtt, to_gt(i915));  in i915_ggtt_probe_hw()
intel_sa_media.c
    24  gt->irq_lock = to_gt(i915)->irq_lock;  in intel_sa_mediagt_setup()
selftest_ring_submission.c
   294  if (to_gt(i915)->submission_method > INTEL_SUBMISSION_RING)  in intel_ring_submission_live_selftests()
   297  return intel_gt_live_subtests(tests, to_gt(i915));  in intel_ring_submission_live_selftests()
/openbmc/linux/drivers/gpu/drm/i915/gem/selftests/

i915_gem_mman.c
   155  intel_gt_flush_ggtt_writes(to_gt(i915));  in check_partial_mapping()
   251  intel_gt_flush_ggtt_writes(to_gt(i915));  in check_partial_mappings()
   324  if (!i915_ggtt_has_aperture(to_gt(i915)->ggtt))  in igt_partial_tiling()
   337  (1 + next_prime_number(to_gt(i915)->ggtt->vm.total >> PAGE_SHIFT)) << PAGE_SHIFT);  in igt_partial_tiling()
   383  tile.swizzle = to_gt(i915)->ggtt->bit_6_swizzle_x;  in igt_partial_tiling()
   386  tile.swizzle = to_gt(i915)->ggtt->bit_6_swizzle_y;  in igt_partial_tiling()
   457  if (!i915_ggtt_has_aperture(to_gt(i915)->ggtt))  in igt_smoke_tiling()
   474  (1 + next_prime_number(to_gt(i915)->ggtt->vm.total >> PAGE_SHIFT)) << PAGE_SHIFT);  in igt_smoke_tiling()
   503  tile.swizzle = to_gt(i915)->ggtt->bit_6_swizzle_x;  in igt_smoke_tiling()
   506  tile.swizzle = to_gt(i915)->ggtt->bit_6_swizzle_y;  in igt_smoke_tiling()
  [all …]
/openbmc/linux/drivers/gpu/drm/i915/pxp/

intel_pxp.c
   165  intel_huc_is_loaded_by_gsc(&to_gt(i915)->uc.huc) && intel_uc_uses_huc(&to_gt(i915)->uc))  in find_gt_for_required_teelink()
   166  return to_gt(i915);  in find_gt_for_required_teelink()
   191  if (IS_ENABLED(CONFIG_INTEL_MEI_PXP) && !i915->media_gt && VDBOX_MASK(to_gt(i915)))  in find_gt_for_required_protected_content()
   192  return to_gt(i915);  in find_gt_for_required_protected_content()
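Both PXP finders above end with the same fallback: when the platform has no standalone media GT, the required capability is looked for on the root GT instead. Roughly, as in the hypothetical helper below; the config-option, HuC/GSC and VDBOX checks that gate the real finders are omitted.

    static struct intel_gt *pxp_pick_gt_sketch(struct drm_i915_private *i915)
    {
            if (i915->media_gt)             /* dedicated media tile, when present */
                    return i915->media_gt;

            return to_gt(i915);             /* otherwise fall back to the root GT */
    }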