References to amdgpu_sriov_vf() in /openbmc/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_virt.h
    275  #define amdgpu_sriov_vf(adev) \    (macro)
    285  (amdgpu_sriov_vf((adev)) && !amdgpu_sriov_runtime((adev)))
    288  (amdgpu_sriov_vf((adev)) && \
    292  (amdgpu_sriov_vf((adev)) && \
    296  (amdgpu_sriov_vf((adev)) && \
    300  (amdgpu_sriov_vf((adev)) && \

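Note: the hits at lines 285-300 are compound helpers built on top of amdgpu_sriov_vf(); the listing only shows their first lines. Below is a small, self-contained model of how such capability-flag macros fit together. The struct layout, flag values and the "fullaccess" name are assumptions for illustration (only the expression shown at amdgpu_virt.h:285 is taken from the listing), not the verbatim header.

    /* Simplified model of the amdgpu_virt.h helpers shown above. */
    #include <stdio.h>

    #define MODEL_SRIOV_CAPS_IS_VF   (1u << 2)  /* illustrative flag values */
    #define MODEL_SRIOV_CAPS_RUNTIME (1u << 4)

    struct model_virt   { unsigned int caps; };
    struct model_device { struct model_virt virt; };

    /* VF check: set when this device is an SR-IOV virtual function */
    #define model_sriov_vf(adev)      ((adev)->virt.caps & MODEL_SRIOV_CAPS_IS_VF)
    /* Runtime check: set once the VF's exclusive-access window is over */
    #define model_sriov_runtime(adev) ((adev)->virt.caps & MODEL_SRIOV_CAPS_RUNTIME)
    /* Compound helper matching the expression at amdgpu_virt.h:285 */
    #define model_sriov_fullaccess(adev) \
            (model_sriov_vf((adev)) && !model_sriov_runtime((adev)))

    int main(void)
    {
            struct model_device adev = { .virt = { .caps = MODEL_SRIOV_CAPS_IS_VF } };

            printf("vf=%d fullaccess=%d\n",
                   !!model_sriov_vf(&adev), !!model_sriov_fullaccess(&adev));

            adev.virt.caps |= MODEL_SRIOV_CAPS_RUNTIME;  /* exclusive window over */
            printf("vf=%d fullaccess=%d\n",
                   !!model_sriov_vf(&adev), !!model_sriov_fullaccess(&adev));
            return 0;
    }
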
psp_v11_0_8.c
     37  if (amdgpu_sriov_vf(adev)) {  in psp_v11_0_8_ring_stop()
     68  if (amdgpu_sriov_vf(adev)) {  in psp_v11_0_8_ring_create()
    150  if (amdgpu_sriov_vf(adev))  in psp_v11_0_8_ring_get_wptr()
    162  if (amdgpu_sriov_vf(adev)) {  in psp_v11_0_8_ring_set_wptr()

psp_v12_0.c
    191  if (amdgpu_sriov_vf(psp->adev)) {  in psp_v12_0_ring_create()
    243  if (amdgpu_sriov_vf(adev))  in psp_v12_0_ring_stop()
    254  if (amdgpu_sriov_vf(adev))  in psp_v12_0_ring_stop()
    321  if (amdgpu_sriov_vf(adev))  in psp_v12_0_ring_get_wptr()
    333  if (amdgpu_sriov_vf(adev)) {  in psp_v12_0_ring_set_wptr()

psp_v3_1.c
    196  if (amdgpu_sriov_vf(adev)) {  in psp_v3_1_ring_create()
    258  if (amdgpu_sriov_vf(adev))  in psp_v3_1_ring_stop()
    269  if (amdgpu_sriov_vf(adev))  in psp_v3_1_ring_stop()
    345  if (amdgpu_sriov_vf(adev))  in psp_v3_1_ring_get_wptr()
    356  if (amdgpu_sriov_vf(adev)) {  in psp_v3_1_ring_set_wptr()

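Note: the three psp_v*_ring files above (and psp_v11_0.c and psp_v13_0_4.c further down) branch the same way: ring create/stop and the write-pointer accessors use one set of PSP mailbox/control registers on bare metal and a different, VF-visible set under SR-IOV. A minimal sketch of that shape follows; the register offsets, helper names and struct are invented for illustration and only model the branch that the hits show.

    /* Sketch of the VF/bare-metal split seen in psp_v*_ring_get_wptr()/_set_wptr(). */
    #include <stdio.h>
    #include <stdbool.h>

    #define FAKE_REG_WPTR_BARE_METAL 0x10  /* hypothetical offsets */
    #define FAKE_REG_WPTR_VF         0x20

    struct fake_adev {
            bool         is_sriov_vf;
            unsigned int mmio[64];         /* stand-in register file */
    };

    static unsigned int rreg32(struct fake_adev *a, unsigned int r) { return a->mmio[r]; }
    static void wreg32(struct fake_adev *a, unsigned int r, unsigned int v) { a->mmio[r] = v; }

    static unsigned int ring_get_wptr(struct fake_adev *adev)
    {
            /* a VF reads the write pointer from the register the host exposes to it */
            if (adev->is_sriov_vf)
                    return rreg32(adev, FAKE_REG_WPTR_VF);
            return rreg32(adev, FAKE_REG_WPTR_BARE_METAL);
    }

    static void ring_set_wptr(struct fake_adev *adev, unsigned int wptr)
    {
            if (adev->is_sriov_vf)
                    wreg32(adev, FAKE_REG_WPTR_VF, wptr);
            else
                    wreg32(adev, FAKE_REG_WPTR_BARE_METAL, wptr);
    }

    int main(void)
    {
            struct fake_adev adev = { .is_sriov_vf = true };

            ring_set_wptr(&adev, 8);
            printf("wptr=%u (read back through the VF path)\n", ring_get_wptr(&adev));
            return 0;
    }
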
amdgpu_psp.c
     98  if (amdgpu_sriov_vf(adev)) {  in psp_check_pmfw_centralized_cstate_management()
    182  adev->psp.sup_pd_fw_up = !amdgpu_sriov_vf(adev);  in psp_early_init()
    221  adev->psp.sup_ifwi_up = !amdgpu_sriov_vf(adev);  in psp_early_init()
    235  if (amdgpu_sriov_vf(adev))  in psp_early_init()
    254  pptr = amdgpu_sriov_vf(psp->adev) ? &tmr_buf : NULL;  in psp_free_shared_bufs()
    469  amdgpu_sriov_vf(adev) ?  in psp_sw_init()
    665  psp->cmd_buf_mem->resp.status == PSP_ERR_UNKNOWN_COMMAND) && amdgpu_sriov_vf(psp->adev);  in psp_cmd_submit_buf()
    687  if ((ucode && amdgpu_sriov_vf(psp->adev)) || !timeout) {  in psp_cmd_submit_buf()
    731  if (amdgpu_sriov_vf(psp->adev))  in psp_prep_tmr_cmd_buf()
    804  if (!amdgpu_sriov_vf(psp->adev) &&  in psp_tmr_init()
    [all …]

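Note: the psp_cmd_submit_buf() hit at line 665 shows a recurring idea: when the PSP answers a command with PSP_ERR_UNKNOWN_COMMAND and the device is a VF, the failure is tolerated rather than treated as fatal, since the host-owned PSP may not expose every command to guests. The sketch below models only that decision; the status values, names and return convention are placeholders.

    /* Sketch of the "tolerate unsupported PSP commands on a VF" decision. */
    #include <stdio.h>
    #include <stdbool.h>

    #define FAKE_PSP_OK              0x0
    #define FAKE_PSP_UNKNOWN_COMMAND 0xFFFF0003  /* placeholder status code */

    static int submit_psp_cmd(bool is_sriov_vf, unsigned int resp_status)
    {
            /* on a VF, an "unknown command" response is skipped, not fatal */
            bool skip_unsupport = (resp_status == FAKE_PSP_UNKNOWN_COMMAND) && is_sriov_vf;

            if (resp_status != FAKE_PSP_OK && !skip_unsupport) {
                    fprintf(stderr, "PSP command failed, status 0x%x\n", resp_status);
                    return -1;
            }
            return 0;
    }

    int main(void)
    {
            printf("bare metal: %d\n", submit_psp_cmd(false, FAKE_PSP_UNKNOWN_COMMAND)); /* error   */
            printf("sriov vf:   %d\n", submit_psp_cmd(true,  FAKE_PSP_UNKNOWN_COMMAND)); /* skipped */
            return 0;
    }
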
gmc_v11_0.c
    109  if (!amdgpu_sriov_vf(adev)) {  in gmc_v11_0_process_interrupt()
    136  if (!amdgpu_sriov_vf(adev))  in gmc_v11_0_process_interrupt()
    158  if (!amdgpu_sriov_vf(adev)) {  in gmc_v11_0_set_irq_funcs()
    175  (!amdgpu_sriov_vf(adev)));  in gmc_v11_0_use_invalidate_semaphore()
    257  !amdgpu_sriov_vf(adev)) {  in gmc_v11_0_flush_vm_hub()
    298  (amdgpu_sriov_runtime(adev) || !amdgpu_sriov_vf(adev))) {  in gmc_v11_0_flush_gpu_tlb()
    687  if (amdgpu_sriov_vf(adev))  in gmc_v11_0_vram_gtt_location()
    811  if (!amdgpu_sriov_vf(adev)) {  in gmc_v11_0_sw_init()
    891  if (amdgpu_sriov_vf(adev)) {  in gmc_v11_0_init_golden_registers()
    970  if (amdgpu_sriov_vf(adev)) {  in gmc_v11_0_hw_fini()

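Note: the guard shown at gmc_v11_0.c:298 (and the similar hits in gmc_v9_0.c and gmc_v10_0.c) combines the two virtualization checks in the TLB-flush path: reading the condition, the ring-assisted invalidation path is only considered when the device is not a VF, or the VF is already in runtime (shared) mode; otherwise the flush falls back to direct register writes. The sketch below models only that predicate and nothing of the surrounding flush machinery, which is not shown in the listing.

    /* Model of the flush-path selection suggested by gmc_v11_0.c:298. */
    #include <stdio.h>
    #include <stdbool.h>

    enum flush_path { FLUSH_VIA_RING, FLUSH_VIA_MMIO };

    static enum flush_path pick_flush_path(bool ring_ready, bool sriov_vf, bool sriov_runtime)
    {
            if (ring_ready && (sriov_runtime || !sriov_vf))
                    return FLUSH_VIA_RING;   /* packet-based invalidation */
            return FLUSH_VIA_MMIO;           /* direct register write + poll */
    }

    int main(void)
    {
            /* VF in its exclusive-access window: direct MMIO is still used */
            printf("%d\n", pick_flush_path(true, true, false) == FLUSH_VIA_MMIO);
            /* bare metal with a ready ring: ring path */
            printf("%d\n", pick_flush_path(true, false, false) == FLUSH_VIA_RING);
            return 0;
    }
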
athub_v1_0.c
     68  if (amdgpu_sriov_vf(adev))  in athub_v1_0_set_clockgating()
     94  if (amdgpu_sriov_vf(adev))  in athub_v1_0_get_clockgating()

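Note: athub_v1_0.c, and the mmhub, soc15, soc21 and nv entries further down, gate both the set_clockgating and get_clockgating callbacks on amdgpu_sriov_vf(). The usual shape is that a VF neither programs nor reports gating, because the host owns clock and power gating. The sketch below assumes that reading; the struct, flag value and callback signatures are illustrative only.

    /* Sketch of the clockgating pattern behind the athub/mmhub/soc hits. */
    #include <stdio.h>
    #include <stdbool.h>

    #define MODEL_CG_SUPPORT_ATHUB_MGCG (1u << 0)  /* illustrative flag */

    struct model_adev {
            bool         is_sriov_vf;
            unsigned int cg_flags;   /* what the hardware is configured to gate */
    };

    static int set_clockgating(struct model_adev *adev, bool enable)
    {
            if (adev->is_sriov_vf)
                    return 0;                          /* host programs gating */
            adev->cg_flags = enable ? MODEL_CG_SUPPORT_ATHUB_MGCG : 0;
            return 0;
    }

    static void get_clockgating(struct model_adev *adev, unsigned int *flags)
    {
            if (adev->is_sriov_vf) {
                    *flags = 0;                        /* report nothing on a VF */
                    return;
            }
            *flags = adev->cg_flags;
    }

    int main(void)
    {
            struct model_adev vf = { .is_sriov_vf = true };
            unsigned int flags = 0xdead;

            set_clockgating(&vf, true);
            get_clockgating(&vf, &flags);
            printf("flags on VF: 0x%x\n", flags);      /* 0x0 */
            return 0;
    }
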
psp_v13_0_4.c
    200  if (amdgpu_sriov_vf(adev)) {  in psp_v13_0_4_ring_stop()
    231  if (amdgpu_sriov_vf(adev)) {  in psp_v13_0_4_ring_create()
    313  if (amdgpu_sriov_vf(adev))  in psp_v13_0_4_ring_get_wptr()
    325  if (amdgpu_sriov_vf(adev)) {  in psp_v13_0_4_ring_set_wptr()

mmhub_v1_0.c
    115  if (amdgpu_sriov_vf(adev))  in mmhub_v1_0_init_system_aperture_regs()
    161  if (amdgpu_sriov_vf(adev))  in mmhub_v1_0_init_cache_regs()
    213  if (amdgpu_sriov_vf(adev))  in mmhub_v1_0_disable_identity_aperture()
    304  if (amdgpu_sriov_vf(adev))  in mmhub_v1_0_update_power_gating()
    315  if (amdgpu_sriov_vf(adev)) {  in mmhub_v1_0_gart_enable()
    361  if (!amdgpu_sriov_vf(adev)) {  in mmhub_v1_0_gart_disable()
    380  if (amdgpu_sriov_vf(adev))  in mmhub_v1_0_set_fault_enable_default()
    531  if (amdgpu_sriov_vf(adev))  in mmhub_v1_0_set_clockgating()
    556  if (amdgpu_sriov_vf(adev))  in mmhub_v1_0_get_clockgating()

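Note: in mmhub_v1_0.c and the mmhub_v2_0/v3_0/v3_0_2 files below, the aperture, cache and fault-control register setup is bracketed by amdgpu_sriov_vf() checks, the usual reason being that the host has already programmed (or owns) those registers, so the VF skips the writes. A minimal sketch of that early-return shape, with a made-up register block standing in for the real MMHUB registers:

    /* Shape of the mmhub_v*_init_*_regs() hits: a VF skips host-owned setup. */
    #include <stdio.h>
    #include <stdbool.h>

    struct fake_mmhub_regs {
            unsigned int sys_aperture_low;
            unsigned int sys_aperture_high;
            unsigned int cache_ctrl;
    };

    static void init_cache_regs(bool is_sriov_vf, struct fake_mmhub_regs *regs)
    {
            if (is_sriov_vf)
                    return;            /* host-programmed; the VF must not rewrite these */

            regs->cache_ctrl = 0x3;    /* illustrative value */
    }

    int main(void)
    {
            struct fake_mmhub_regs regs = { 0 };

            init_cache_regs(true, &regs);
            printf("cache_ctrl after VF init: 0x%x\n", regs.cache_ctrl);  /* untouched */
            return 0;
    }
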
nv.c
    221  if (amdgpu_sriov_vf(adev)) {  in nv_query_video_codecs()
    645  if (!amdgpu_sriov_vf(adev)) {  in nv_common_early_init()
    741  if (amdgpu_sriov_vf(adev))  in nv_common_early_init()
    762  if (amdgpu_sriov_vf(adev)) {  in nv_common_early_init()
    946  if (amdgpu_sriov_vf(adev)) {  in nv_common_early_init()
    958  if (amdgpu_sriov_vf(adev)) {  in nv_common_late_init()
    987  if (amdgpu_sriov_vf(adev))  in nv_common_sw_init()
   1016  if (adev->nbio.funcs->remap_hdp_registers && !amdgpu_sriov_vf(adev))  in nv_common_hw_init()
   1073  if (amdgpu_sriov_vf(adev))  in nv_common_set_clockgating_state()
   1110  if (amdgpu_sriov_vf(adev))  in nv_common_get_clockgating_state()

amdgpu_device.c
   1116  if (amdgpu_sriov_vf(adev))  in amdgpu_device_resize_fb_bar()
   1210  if (amdgpu_sriov_vf(adev))  in amdgpu_device_need_post()
   1885  if (amdgpu_sriov_vf(adev) && !adev->enable_virtual_display) {
   2034  if (amdgpu_sriov_vf(adev)) {  in amdgpu_device_ip_early_init()
   2108  if (amdgpu_sriov_vf(adev) || sched_policy == KFD_SCHED_POLICY_NO_HWS)  in amdgpu_device_ip_early_init()
   2110  if (amdgpu_sriov_vf(adev) && adev->asic_type == CHIP_SIENNA_CICHLID)  in amdgpu_device_ip_early_init()
   2157  if (amdgpu_sriov_vf(adev))  in amdgpu_device_ip_early_init()
   2182  (amdgpu_sriov_vf(adev) && (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP)) ||  in amdgpu_device_ip_hw_init_phase1()
   2257  if (!amdgpu_sriov_vf(adev) || adev->asic_type == CHIP_TONGA)  in amdgpu_device_init_schedulers()
   2349  if (amdgpu_sriov_vf(ade  in amdgpu_device_ip_init()
    [all …]

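Note: the hit at amdgpu_device.c:2182 suggests that under SR-IOV the PSP IP block is pulled into the early hardware-init phase alongside the blocks normally initialized first, presumably because firmware loading on a VF has to go through PSP before the other blocks come up. The reduced model below assumes that reading; the block-type enum and the exact set of phase-1 blocks are simplifications, not the driver's real tables.

    /* Model of the phase-1 selection suggested by amdgpu_device.c:2182. */
    #include <stdio.h>
    #include <stdbool.h>

    enum ip_block_type { IP_COMMON, IP_GMC, IP_IH, IP_PSP, IP_GFX, IP_SDMA };

    static bool hw_init_in_phase1(enum ip_block_type t, bool sriov_vf)
    {
            return t == IP_COMMON || t == IP_GMC || t == IP_IH ||
                   (sriov_vf && t == IP_PSP);   /* PSP joins phase 1 only on a VF */
    }

    int main(void)
    {
            printf("PSP in phase1 (bare metal): %d\n", hw_init_in_phase1(IP_PSP, false));
            printf("PSP in phase1 (SR-IOV VF):  %d\n", hw_init_in_phase1(IP_PSP, true));
            return 0;
    }
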
amdgpu_vf_error.c
     36  if (!amdgpu_sriov_vf(adev))  in amdgpu_vf_error_put()
     57  if ((NULL == adev) || (!amdgpu_sriov_vf(adev)) ||  in amdgpu_vf_error_trans_all()

vega20_ih.c
    122  if (amdgpu_sriov_vf(adev) && amdgpu_sriov_reg_indirect_ih(adev)) {  in vega20_ih_toggle_ring_interrupts()
    131  if (amdgpu_sriov_vf(adev) && amdgpu_sriov_reg_indirect_ih(adev)) {  in vega20_ih_toggle_ring_interrupts()
    147  if (amdgpu_sriov_vf(adev)) {  in vega20_ih_toggle_ring_interrupts()
    262  if (amdgpu_sriov_vf(adev)) {  in vega20_ih_enable_ring()
    352  if (!amdgpu_sriov_vf(adev))  in vega20_ih_irq_init()
    508  if (amdgpu_sriov_vf(adev))  in vega20_ih_set_rptr()

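Note: vega20_ih.c, ih_v6_0.c and navi10_ih.c all pair amdgpu_sriov_vf() with amdgpu_sriov_reg_indirect_ih(). When both hold, the interrupt-handler ring-control registers are programmed indirectly through a firmware request instead of a plain MMIO store. The sketch below models only that routing decision; the request function is a stub and the register offset is made up, where the real files forward the write to the PSP.

    /* Sketch of the "indirect IH register write" split in the IH files above. */
    #include <stdio.h>
    #include <stdbool.h>

    static int firmware_reg_program(unsigned int reg, unsigned int val)
    {
            printf("firmware request: reg 0x%x <- 0x%x\n", reg, val);
            return 0;   /* pretend the host accepted it */
    }

    static void mmio_write(unsigned int reg, unsigned int val)
    {
            printf("direct MMIO:      reg 0x%x <- 0x%x\n", reg, val);
    }

    static int write_ih_rb_cntl(bool sriov_vf, bool reg_indirect_ih,
                                unsigned int reg, unsigned int val)
    {
            if (sriov_vf && reg_indirect_ih)
                    return firmware_reg_program(reg, val);
            mmio_write(reg, val);
            return 0;
    }

    int main(void)
    {
            write_ih_rb_cntl(true,  true,  0x1a0, 0x1);  /* VF, indirect path  */
            write_ih_rb_cntl(false, false, 0x1a0, 0x1);  /* bare metal, direct */
            return 0;
    }
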
ih_v6_0.c
    108  if (amdgpu_sriov_vf(adev) && amdgpu_sriov_reg_indirect_ih(adev)) {  in force_update_wptr_for_self_int()
    144  if (amdgpu_sriov_vf(adev) && amdgpu_sriov_reg_indirect_ih(adev)) {  in ih_v6_0_toggle_ring_interrupts()
    153  if (amdgpu_sriov_vf(adev) && amdgpu_sriov_reg_indirect_ih(adev)) {  in ih_v6_0_toggle_ring_interrupts()
    170  if (amdgpu_sriov_vf(adev) && amdgpu_sriov_reg_indirect_ih(adev)) {  in ih_v6_0_toggle_ring_interrupts()
    286  if (amdgpu_sriov_vf(adev) && amdgpu_sriov_reg_indirect_ih(adev)) {  in ih_v6_0_enable_ring()
    503  if (amdgpu_sriov_vf(adev))  in ih_v6_0_set_rptr()

mmhub_v3_0.c
    172  if (amdgpu_sriov_vf(adev))  in mmhub_v3_0_init_system_aperture_regs()
    238  if (amdgpu_sriov_vf(adev))  in mmhub_v3_0_init_cache_regs()
    299  if (amdgpu_sriov_vf(adev))  in mmhub_v3_0_disable_identity_aperture()
    437  if (amdgpu_sriov_vf(adev))  in mmhub_v3_0_set_fault_enable_default()
    623  if (amdgpu_sriov_vf(adev))  in mmhub_v3_0_set_clockgating()
    641  if (amdgpu_sriov_vf(adev))  in mmhub_v3_0_get_clockgating()

mmhub_v2_0.c
    223  if (!amdgpu_sriov_vf(adev)) {  in mmhub_v2_0_init_system_aperture_regs()
    281  if (amdgpu_sriov_vf(adev))  in mmhub_v2_0_init_cache_regs()
    342  if (amdgpu_sriov_vf(adev))  in mmhub_v2_0_disable_identity_aperture()
    480  if (amdgpu_sriov_vf(adev))  in mmhub_v2_0_set_fault_enable_default()
    651  if (amdgpu_sriov_vf(adev))  in mmhub_v2_0_set_clockgating()
    676  if (amdgpu_sriov_vf(adev))  in mmhub_v2_0_get_clockgating()

psp_v11_0.c
    267  if (amdgpu_sriov_vf(adev))  in psp_v11_0_ring_stop()
    278  if (amdgpu_sriov_vf(adev))  in psp_v11_0_ring_stop()
    296  if (amdgpu_sriov_vf(adev)) {  in psp_v11_0_ring_create()
    569  if (amdgpu_sriov_vf(adev))  in psp_v11_0_ring_get_wptr()
    581  if (amdgpu_sriov_vf(adev)) {  in psp_v11_0_ring_set_wptr()

sdma_v5_0.c
    205  if (amdgpu_sriov_vf(adev))  in sdma_v5_0_init_golden_registers()
    624  if (!amdgpu_sriov_vf(adev)) {  in sdma_v5_0_ctx_switch_enable()
    638  if (!amdgpu_sriov_vf(adev))  in sdma_v5_0_ctx_switch_enable()
    662  if (amdgpu_sriov_vf(adev))  in sdma_v5_0_enable()
    695  if (!amdgpu_sriov_vf(adev))  in sdma_v5_0_gfx_resume()
    747  if (!amdgpu_sriov_vf(adev)) { /* only bare-metal use register write for wptr */  in sdma_v5_0_gfx_resume()
    772  if (amdgpu_sriov_vf(adev))  in sdma_v5_0_gfx_resume()
    778  if (!amdgpu_sriov_vf(adev)) {  in sdma_v5_0_gfx_resume()
    801  if (!amdgpu_sriov_vf(adev)) {  in sdma_v5_0_gfx_resume()
    820  if (amdgpu_sriov_vf(adev)) { /* bare-metal sequence doesn't need below to lines */  in sdma_v5_0_gfx_resume()
    [all …]

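Note: the comment quoted at sdma_v5_0.c:747 ("only bare-metal use register write for wptr") points at a common split in the ring bring-up: the write pointer always reaches the engine through the doorbell, while the extra write-pointer register write is a bare-metal-only step. The sketch below is a loose model of that split under those assumptions; the struct and field names are stand-ins, not the driver's ring structures.

    /* Loose model of the wptr handling split hinted at by sdma_v5_0.c:747. */
    #include <stdio.h>
    #include <stdbool.h>

    struct fake_sdma_ring {
            bool         is_sriov_vf;
            unsigned int doorbell;   /* stand-in for the doorbell write      */
            unsigned int wptr_reg;   /* stand-in for the RB_WPTR register    */
    };

    static void ring_program_wptr(struct fake_sdma_ring *ring, unsigned int wptr)
    {
            if (!ring->is_sriov_vf)
                    ring->wptr_reg = wptr;   /* register write: bare metal only */
            ring->doorbell = wptr;           /* doorbell kick in both modes     */
    }

    int main(void)
    {
            struct fake_sdma_ring vf = { .is_sriov_vf = true };

            ring_program_wptr(&vf, 32);
            printf("doorbell=%u wptr_reg=%u\n", vf.doorbell, vf.wptr_reg);
            return 0;
    }
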
gmc_v9_0.c
    647  if (amdgpu_sriov_vf(adev))  in gmc_v9_0_process_interrupt()
    737  if (!amdgpu_sriov_vf(adev) &&  in gmc_v9_0_set_irq_funcs()
    779  (!amdgpu_sriov_vf(adev)) &&  in gmc_v9_0_use_invalidate_semaphore()
    848  (amdgpu_sriov_runtime(adev) || !amdgpu_sriov_vf(adev)) &&  in gmc_v9_0_flush_gpu_tlb()
    955  u32 usec_timeout = amdgpu_sriov_vf(adev) ? SRIOV_USEC_TIMEOUT : adev->usec_timeout;  in gmc_v9_0_flush_gpu_tlb_pasid()
   1431  if (amdgpu_sriov_vf(adev))  in gmc_v9_0_query_memory_partition()
   1641  if (!amdgpu_sriov_vf(adev) &&  in gmc_v9_0_late_init()
   1734  if ((!amdgpu_sriov_vf(adev) &&  in gmc_v9_0_mc_init()
   1988  if (amdgpu_sriov_vf(adev))  in gmc_v9_0_init_mem_ranges()
   2006  if (!amdgpu_sriov_vf(adev)) {  in gmc_v9_4_3_init_vram_info()
    [all …]

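Note: the ternary at gmc_v9_0.c:955 (also at gmc_v10_0.c:424 below) widens the poll timeout when running as a VF, presumably because the host can deschedule the VF mid-wait. The sketch below demonstrates that selection with a simulated wait; the numeric budgets and the polled "register" are illustrative, only the ternary itself comes from the listing.

    /* Sketch of the timeout selection at gmc_v9_0.c:955 / gmc_v10_0.c:424. */
    #include <stdio.h>
    #include <stdbool.h>

    #define MODEL_USEC_TIMEOUT       100000   /* illustrative bare-metal budget */
    #define MODEL_SRIOV_USEC_TIMEOUT 1000000  /* illustrative widened VF budget */

    static int wait_for_ack(bool is_sriov_vf, unsigned int acked_after_usec)
    {
            unsigned int budget = is_sriov_vf ? MODEL_SRIOV_USEC_TIMEOUT
                                              : MODEL_USEC_TIMEOUT;
            unsigned int waited;

            for (waited = 0; waited < budget; waited++) {
                    if (waited >= acked_after_usec)
                            return 0;        /* "register" signalled completion */
            }
            return -1;                       /* timed out */
    }

    int main(void)
    {
            /* an ack that arrives late succeeds only with the widened VF budget */
            printf("bare metal: %d\n", wait_for_ack(false, 500000));
            printf("sriov vf:   %d\n", wait_for_ack(true,  500000));
            return 0;
    }
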
soc15.c
    936  if (!amdgpu_sriov_vf(adev)) {  in soc15_common_early_init()
   1187  if (!amdgpu_sriov_vf(adev)) {  in soc15_common_early_init()
   1197  if (amdgpu_sriov_vf(adev)) {  in soc15_common_early_init()
   1209  if (amdgpu_sriov_vf(adev))  in soc15_common_late_init()
   1224  if (amdgpu_sriov_vf(adev))  in soc15_common_sw_init()
   1249  if (!amdgpu_sriov_vf(adev)) {  in soc15_sdma_doorbell_range_init()
   1270  if (adev->nbio.funcs->remap_hdp_registers && !amdgpu_sriov_vf(adev))  in soc15_common_hw_init()
   1299  if (amdgpu_sriov_vf(adev))  in soc15_common_hw_fini()
   1397  if (amdgpu_sriov_vf(adev))  in soc15_common_set_clockgating_state()
   1449  if (amdgpu_sriov_vf(adev))  in soc15_common_get_clockgating_state()

mmhub_v3_0_2.c
    170  if (!amdgpu_sriov_vf(adev)) {  in mmhub_v3_0_2_init_system_aperture_regs()
    230  if (amdgpu_sriov_vf(adev))  in mmhub_v3_0_2_init_cache_regs()
    291  if (amdgpu_sriov_vf(adev))  in mmhub_v3_0_2_disable_identity_aperture()
    429  if (amdgpu_sriov_vf(adev))  in mmhub_v3_0_2_set_fault_enable_default()
    546  if (amdgpu_sriov_vf(adev))  in mmhub_v3_0_2_set_clockgating()

gmc_v10_0.c
    141  if (!amdgpu_sriov_vf(adev)) {  in gmc_v10_0_process_interrupt()
    171  if (!amdgpu_sriov_vf(adev))  in gmc_v10_0_process_interrupt()
    193  if (!amdgpu_sriov_vf(adev)) {  in gmc_v10_0_set_irq_funcs()
    210  (!amdgpu_sriov_vf(adev)));  in gmc_v10_0_use_invalidate_semaphore()
    340  (amdgpu_sriov_runtime(adev) || !amdgpu_sriov_vf(adev)) &&  in gmc_v10_0_flush_gpu_tlb()
    424  u32 usec_timeout = amdgpu_sriov_vf(adev) ? SRIOV_USEC_TIMEOUT : adev->usec_timeout;  in gmc_v10_0_flush_gpu_tlb_pasid()
    954  if (!amdgpu_sriov_vf(adev)) {  in gmc_v10_0_sw_init()
   1136  if (amdgpu_sriov_vf(adev)) {  in gmc_v10_0_hw_fini()

sdma_v5_2.c
    450  if (!amdgpu_sriov_vf(adev)) {  in sdma_v5_2_ctx_switch_enable()
    478  if (!amdgpu_sriov_vf(adev)) {  in sdma_v5_2_enable()
    510  if (!amdgpu_sriov_vf(adev))  in sdma_v5_2_gfx_resume()
    560  if (!amdgpu_sriov_vf(adev)) { /* only bare-metal use register write for wptr */  in sdma_v5_2_gfx_resume()
    582  if (amdgpu_sriov_vf(adev))  in sdma_v5_2_gfx_resume()
    590  if (!amdgpu_sriov_vf(adev)) {  in sdma_v5_2_gfx_resume()
    632  if (amdgpu_sriov_vf(adev)) { /* bare-metal sequence doesn't need below to lines */  in sdma_v5_2_gfx_resume()
    748  if (amdgpu_sriov_vf(adev)) {  in sdma_v5_2_start()
   1300  if (amdgpu_sriov_vf(adev)) {  in sdma_v5_2_hw_fini()
   1413  if (!amdgpu_sriov_vf(adev)) {  in sdma_v5_2_set_trap_irq_state()
    [all …]

soc21.c
    160  if (amdgpu_sriov_vf(adev)) {  in soc21_query_video_codecs()
    694  if (amdgpu_sriov_vf(adev)) {  in soc21_common_early_init()
    706  if (amdgpu_sriov_vf(adev)) {  in soc21_common_late_init()
    744  if (amdgpu_sriov_vf(adev))  in soc21_common_sw_init()
    767  if (adev->nbio.funcs->remap_hdp_registers && !amdgpu_sriov_vf(adev))  in soc21_common_hw_init()
    787  if (amdgpu_sriov_vf(adev)) {  in soc21_common_hw_fini()

navi10_ih.c
    123  if (amdgpu_sriov_vf(adev) && amdgpu_sriov_reg_indirect_ih(adev)) {  in force_update_wptr_for_self_int()
    133  if (amdgpu_sriov_vf(adev) && amdgpu_sriov_reg_indirect_ih(adev)) {  in force_update_wptr_for_self_int()
    168  if (amdgpu_sriov_vf(adev) && amdgpu_sriov_reg_indirect_ih(adev)) {  in navi10_ih_toggle_ring_interrupts()
    282  if (amdgpu_sriov_vf(adev) && amdgpu_sriov_reg_indirect_ih(adev)) {  in navi10_ih_enable_ring()
    502  if (amdgpu_sriov_vf(adev))  in navi10_ih_set_rptr()