/openbmc/linux/drivers/gpu/drm/amd/amdgpu/ (cross-references to inst_idx)

vcn_v4_0.c
    66:  int inst_idx, struct dpg_pause_state *new_state);
   436:  static void vcn_v4_0_mc_resume_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect)
   446:  WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
   447:      VCN, inst_idx, regUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW),
   448:      (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_lo), 0, indirect);
   449:  WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
   450:      VCN, inst_idx, regUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH),
   451:      (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_hi), 0, indirect);
   452:  WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
   453:      VCN, inst_idx, regUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
   [all …]

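The vcn_v4_0.c excerpt above (and the vcn_v3_0.c and vcn_v2_5.c hits that follow) shows the per-instance DPG-mode resume pattern: the firmware TMR address for instance inst_idx is split into low and high halves and written into that instance's VCPU cache BAR registers, either directly or packed into the indirect SRAM stream when indirect is set. Below is a minimal userspace sketch of that pattern; dpg_write(), struct vcn_inst and the register names are invented stand-ins for illustration, not the real WREG32_SOC15_DPG_MODE machinery.

/* Illustrative sketch only: models per-instance low/high address programming,
 * with an "indirect" flag that would normally queue the write instead of
 * issuing it immediately. */
#include <stdint.h>
#include <stdio.h>

#define REG_CACHE_BAR_LOW   0x10u  /* hypothetical per-instance register offsets */
#define REG_CACHE_BAR_HIGH  0x11u
#define REG_CACHE_OFFSET0   0x12u

struct vcn_inst {
    uint64_t tmr_mc_addr;          /* firmware image address for this instance */
};

/* Either "write" the register now or note that it would be queued for the
 * indirect SRAM command stream. */
static void dpg_write(int inst_idx, unsigned int reg, unsigned int val, int indirect)
{
    printf("inst %d: %s reg 0x%02x <= 0x%08x\n",
           inst_idx, indirect ? "queue" : "write", reg, val);
}

static void mc_resume_dpg_mode(const struct vcn_inst *inst, int inst_idx,
                               int indirect)
{
    /* Split the 64-bit firmware address into the low/high BAR registers,
     * mirroring the tmr_mc_addr_lo / tmr_mc_addr_hi pair in the excerpt. */
    dpg_write(inst_idx, REG_CACHE_BAR_LOW,
              (unsigned int)(inst->tmr_mc_addr & 0xffffffffu), indirect);
    dpg_write(inst_idx, REG_CACHE_BAR_HIGH,
              (unsigned int)(inst->tmr_mc_addr >> 32), indirect);
    dpg_write(inst_idx, REG_CACHE_OFFSET0, 0, indirect);
}

int main(void)
{
    struct vcn_inst inst0 = { .tmr_mc_addr = 0x0000008012345000ull };

    mc_resume_dpg_mode(&inst0, 0, 1 /* indirect */);
    return 0;
}
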
vcn_v3_0.c
    75:  int inst_idx, struct dpg_pause_state *new_state);
   498:  static void vcn_v3_0_mc_resume_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect)
   506:  WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
   507:      VCN, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW),
   508:      (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_lo), 0, indirect);
   509:  WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
   510:      VCN, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH),
   511:      (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_hi), 0, indirect);
   512:  WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
   513:      VCN, inst_idx, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect);
   [all …]

vcn_v2_5.c
    64:  int inst_idx, struct dpg_pause_state *new_state);
   469:  static void vcn_v2_5_mc_resume_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect)
   477:  WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
   479:      (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_lo), 0, indirect);
   480:  WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
   482:      (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_hi), 0, indirect);
   483:  WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
   486:  WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
   488:  WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
   490:  WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
   [all …]

vcn_v4_0_3.c
    54:  int inst_idx, struct dpg_pause_state *new_state);
    58:  int inst_idx, bool indirect);
    80:  static int vcn_v4_0_3_fw_shared_init(struct amdgpu_device *adev, int inst_idx)
    84:  fw_shared = adev->vcn.inst[inst_idx].fw_shared.cpu_addr;
    89:  amdgpu_vcn_fwlog_init(&adev->vcn.inst[inst_idx]);
   344:  static void vcn_v4_0_3_mc_resume(struct amdgpu_device *adev, int inst_idx)
   352:  vcn_inst = GET_INST(VCN, inst_idx);
   357:  (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx]
   361:  (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx]
   367:  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr));
   [all …]

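vcn_v4_0_3.c (like jpeg_v4_0_3.c further down) adds a remapping step: the logical inst_idx used for driver bookkeeping is translated to a physical engine number via GET_INST(VCN, inst_idx) before any register block is addressed. The sketch below illustrates only that logical-to-physical translation; the mapping table and get_inst() helper are hypothetical, not the driver's actual mapping structure.

/* Illustrative sketch of logical-to-physical instance remapping. */
#include <stdio.h>

#define MAX_VCN_INST 4

/* Example only: logical instance i happens to live on physical engine map[i]. */
static const int vcn_logical_to_phys[MAX_VCN_INST] = { 0, 2, 1, 3 };

static int get_inst(int inst_idx)
{
    return vcn_logical_to_phys[inst_idx];
}

int main(void)
{
    for (int inst_idx = 0; inst_idx < MAX_VCN_INST; inst_idx++) {
        int vcn_inst = get_inst(inst_idx);   /* mirrors GET_INST(VCN, inst_idx) */
        printf("logical VCN %d -> physical engine %d\n", inst_idx, vcn_inst);
    }
    return 0;
}
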
amdgpu_vcn.h
    81:  #define RREG32_SOC15_DPG_MODE_1_0(ip, inst_idx, reg, mask, sram_sel) \
    82:      ({ WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_MASK, mask); \
    83:         WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_CTL, \
    85:             ((adev->reg_offset[ip##_HWIP][inst_idx][reg##_BASE_IDX] + reg) \
    88:         RREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_DATA); \
    91:  #define WREG32_SOC15_DPG_MODE_1_0(ip, inst_idx, reg, value, mask, sram_sel) \
    93:      WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_DATA, value); \
    94:      WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_MASK, mask); \
    95:      WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_CTL, \
    97:      ((adev->reg_offset[ip##_HWIP][inst_idx][reg##_BASE_IDX] + reg) \
    [all …]

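These macros implement an indirect register window: the per-instance target offset (looked up through adev->reg_offset[...]) is programmed via UVD_DPG_LMA_CTL together with a mask, and the payload then moves through UVD_DPG_LMA_DATA. The self-contained sketch below models that mask/ctl/data idiom; the window layout, the CTL_WRITE_EN flag and the ctl_commit() behaviour are simplified assumptions, not the real UVD_DPG_LMA encoding.

/* Illustrative model of an indirect mask/ctl/data access window. */
#include <stdint.h>
#include <stdio.h>

#define LMA_MASK 0
#define LMA_CTL  1
#define LMA_DATA 2
#define CTL_WRITE_EN (1u << 31)     /* hypothetical "this is a write" flag */

static uint32_t window[3];          /* the three visible window registers */
static uint32_t backing[256];       /* registers reachable only indirectly */

/* Model of the hardware reacting to a CTL write: move data in or out. */
static void ctl_commit(void)
{
    uint32_t off = window[LMA_CTL] & 0xff;

    if (window[LMA_CTL] & CTL_WRITE_EN)
        backing[off] = window[LMA_DATA] & window[LMA_MASK];
    else
        window[LMA_DATA] = backing[off];
}

/* Same ordering as the write macro: DATA, then MASK, then CTL. */
static void indirect_write(uint32_t off, uint32_t val, uint32_t mask)
{
    window[LMA_DATA] = val;
    window[LMA_MASK] = mask;
    window[LMA_CTL]  = CTL_WRITE_EN | off;
    ctl_commit();
}

static uint32_t indirect_read(uint32_t off)
{
    window[LMA_MASK] = 0xffffffffu;
    window[LMA_CTL]  = off;
    ctl_commit();
    return window[LMA_DATA];
}

int main(void)
{
    indirect_write(0x42, 0xdeadbeef, 0xffffffffu);
    printf("reg 0x42 = 0x%08x\n", (unsigned int)indirect_read(0x42));
    return 0;
}
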
vcn_v1_0.c
    54:  int inst_idx, struct dpg_pause_state *new_state);
  1212:  int inst_idx, struct dpg_pause_state *new_state)    in vcn_v1_0_pause_dpg_mode()
  1220:  if (adev->vcn.inst[inst_idx].pause_state.fw_based != new_state->fw_based) {
  1222:      adev->vcn.inst[inst_idx].pause_state.fw_based,
  1223:      adev->vcn.inst[inst_idx].pause_state.jpeg,
  1272:  adev->vcn.inst[inst_idx].pause_state.fw_based = new_state->fw_based;
  1276:  if (adev->vcn.inst[inst_idx].pause_state.jpeg != new_state->jpeg) {
  1278:      adev->vcn.inst[inst_idx].pause_state.fw_based,
  1279:      adev->vcn.inst[inst_idx].pause_state.jpeg,
  1333:  adev->vcn.inst[inst_idx].pause_state.jpeg = new_state->jpeg;

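vcn_v1_0_pause_dpg_mode() keeps the last pause state it programmed in adev->vcn.inst[inst_idx].pause_state and only reprograms the hardware when the requested state differs, tracking the decode (fw_based) and JPEG paths separately. A minimal sketch of that compare-then-update flow follows; the bool fields and printf() calls stand in for the real state enums and register writes.

/* Illustrative sketch of per-instance pause-state caching. */
#include <stdbool.h>
#include <stdio.h>

struct pause_state {
    bool fw_based;
    bool jpeg;
};

struct vcn_inst {
    struct pause_state pause_state;   /* last state programmed to hardware */
};

static void pause_dpg_mode(struct vcn_inst *inst, int inst_idx,
                           const struct pause_state *new_state)
{
    if (inst->pause_state.fw_based != new_state->fw_based) {
        printf("inst %d: fw_based %d -> %d\n", inst_idx,
               inst->pause_state.fw_based, new_state->fw_based);
        /* ...decode-pause register programming would go here... */
        inst->pause_state.fw_based = new_state->fw_based;
    }

    if (inst->pause_state.jpeg != new_state->jpeg) {
        printf("inst %d: jpeg %d -> %d\n", inst_idx,
               inst->pause_state.jpeg, new_state->jpeg);
        /* ...JPEG-pause register programming would go here... */
        inst->pause_state.jpeg = new_state->jpeg;
    }
}

int main(void)
{
    struct vcn_inst inst0 = { .pause_state = { false, false } };
    struct pause_state want = { .fw_based = true, .jpeg = false };

    pause_dpg_mode(&inst0, 0, &want);
    pause_dpg_mode(&inst0, 0, &want);   /* second call changes nothing */
    return 0;
}
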
aqua_vanjaram.c
    70:  uint32_t inst_idx, struct amdgpu_ring *ring)    in aqua_vanjaram_set_xcp_id()
    80:  inst_mask = 1 << inst_idx;
    95:  inst_mask = 1 << (inst_idx * 2);

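Here inst_mask selects a single instance bit, doubled in position when engines are enumerated in pairs. The sketch below is an assumption about how such a mask might be consumed to find the owning partition; the partition table and find_xcp_id() helper are made up for illustration.

/* Illustrative sketch of the single-bit instance mask idiom. */
#include <stdint.h>
#include <stdio.h>

#define NUM_XCP 2

/* Hypothetical per-partition instance masks: XCP0 owns inst 0-1, XCP1 owns 2-3. */
static const uint32_t xcp_inst_mask[NUM_XCP] = { 0x3, 0xc };

static int find_xcp_id(uint32_t inst_idx, int paired)
{
    /* Paired engines advertise only the even bit, as in the
     * "1 << (inst_idx * 2)" case from the excerpt. */
    uint32_t inst_mask = paired ? 1u << (inst_idx * 2) : 1u << inst_idx;

    for (int xcp = 0; xcp < NUM_XCP; xcp++)
        if (xcp_inst_mask[xcp] & inst_mask)
            return xcp;
    return -1;
}

int main(void)
{
    for (uint32_t i = 0; i < 4; i++)
        printf("inst %u -> xcp %d\n", (unsigned int)i, find_xcp_id(i, 0));
    return 0;
}
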
amdgpu_vcn.c
  1249:  int amdgpu_vcn_psp_update_sram(struct amdgpu_device *adev, int inst_idx,
  1254:      (inst_idx ? AMDGPU_UCODE_ID_VCN1_RAM :
  1256:      .mc_addr = adev->vcn.inst[inst_idx].dpg_sram_gpu_addr,
  1257:      .ucode_size = ((uintptr_t)adev->vcn.inst[inst_idx].dpg_sram_curr_addr -
  1258:                     (uintptr_t)adev->vcn.inst[inst_idx].dpg_sram_cpu_addr),

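The .ucode_size initializer measures the DPG command stream built for instance inst_idx by subtracting the CPU-visible base pointer from the current write pointer, and inst_idx also selects between the VCN0 and VCN1 ucode IDs. The sketch below reproduces just the pointer-difference sizing idiom with a simplified, hypothetical stream builder.

/* Illustrative sketch of sizing a packed command stream by pointer difference. */
#include <stdint.h>
#include <stdio.h>

struct dpg_sram {
    uint32_t buf[64];        /* CPU-visible backing for the command stream */
    uint32_t *cpu_addr;      /* start of the stream */
    uint32_t *curr_addr;     /* one past the last packed dword */
};

static void sram_init(struct dpg_sram *s)
{
    s->cpu_addr = s->buf;
    s->curr_addr = s->buf;
}

static void sram_pack(struct dpg_sram *s, uint32_t reg, uint32_t val)
{
    *s->curr_addr++ = reg;
    *s->curr_addr++ = val;
}

int main(void)
{
    struct dpg_sram s;

    sram_init(&s);
    sram_pack(&s, 0x10, 0x1234);
    sram_pack(&s, 0x11, 0x5678);

    /* Same pointer-difference idiom as the .ucode_size initializer above. */
    size_t ucode_size = (uintptr_t)s.curr_addr - (uintptr_t)s.cpu_addr;
    printf("command stream size: %zu bytes\n", ucode_size);
    return 0;
}
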
jpeg_v4_0_3.c
   414:  static void jpeg_v4_0_3_disable_clock_gating(struct amdgpu_device *adev, int inst_idx)
   419:  jpeg_inst = GET_INST(JPEG, inst_idx);
   439:  static void jpeg_v4_0_3_enable_clock_gating(struct amdgpu_device *adev, int inst_idx)
   444:  jpeg_inst = GET_INST(JPEG, inst_idx);

vcn_v2_0.c
    62:  int inst_idx, struct dpg_pause_state *new_state);
  1202:  int inst_idx, struct dpg_pause_state *new_state)    in vcn_v2_0_pause_dpg_mode()
  1209:  if (adev->vcn.inst[inst_idx].pause_state.fw_based != new_state->fw_based) {
  1211:      adev->vcn.inst[inst_idx].pause_state.fw_based, new_state->fw_based);
  1272:  adev->vcn.inst[inst_idx].pause_state.fw_based = new_state->fw_based;