Lines Matching +full:0 +full:x431 — a tokenized search for the literal 0x431 in drivers/gpu/drm/amd/amdgpu/vcn_v3_0.c; only line 52 (mmUVD_LMI_RBC_IB_VMID_INTERNAL_OFFSET) contains both tokens, the remaining lines match the bare token 0.
41 #define VCN_VID_SOC_ADDRESS_2_0 0x1fa00
42 #define VCN1_VID_SOC_ADDRESS_3_0 0x48200
44 #define mmUVD_CONTEXT_ID_INTERNAL_OFFSET 0x27
45 #define mmUVD_GPCOM_VCPU_CMD_INTERNAL_OFFSET 0x0f
46 #define mmUVD_GPCOM_VCPU_DATA0_INTERNAL_OFFSET 0x10
47 #define mmUVD_GPCOM_VCPU_DATA1_INTERNAL_OFFSET 0x11
48 #define mmUVD_NO_OP_INTERNAL_OFFSET 0x29
49 #define mmUVD_GP_SCRATCH8_INTERNAL_OFFSET 0x66
50 #define mmUVD_SCRATCH9_INTERNAL_OFFSET 0xc01d
52 #define mmUVD_LMI_RBC_IB_VMID_INTERNAL_OFFSET 0x431
53 #define mmUVD_LMI_RBC_IB_64BIT_BAR_LOW_INTERNAL_OFFSET 0x3b4
54 #define mmUVD_LMI_RBC_IB_64BIT_BAR_HIGH_INTERNAL_OFFSET 0x3b5
55 #define mmUVD_RBC_IB_SIZE_INTERNAL_OFFSET 0x25c
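Note: these *_INTERNAL_OFFSET values are indices into the firmware's internal register space, not MMIO offsets. Per the upstream file, vcn_v3_0_sw_init() publishes them through adev->vcn.internal so the common VCN code can emit PACKET0 writes without knowing the per-generation numbering. A minimal sketch of that wiring, assuming the field names of struct amdgpu_vcn_reg in amdgpu_vcn.h:

	/* sketch: map the generation-specific indices into the generic table */
	adev->vcn.internal.context_id = mmUVD_CONTEXT_ID_INTERNAL_OFFSET;
	adev->vcn.internal.nop = mmUVD_NO_OP_INTERNAL_OFFSET;
	adev->vcn.internal.ib_vmid = mmUVD_LMI_RBC_IB_VMID_INTERNAL_OFFSET;
	adev->vcn.internal.ib_bar_low = mmUVD_LMI_RBC_IB_64BIT_BAR_LOW_INTERNAL_OFFSET;
	adev->vcn.internal.ib_bar_high = mmUVD_LMI_RBC_IB_64BIT_BAR_HIGH_INTERNAL_OFFSET;
	adev->vcn.internal.ib_size = mmUVD_RBC_IB_SIZE_INTERNAL_OFFSET;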
60 #define RDECODE_MSG_CREATE 0x00000000
61 #define RDECODE_MESSAGE_CREATE 0x00000001
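Note: the two RDECODE defines are consumed in vcn_v3_0_dec_msg() (see the hits around line 1835 below): the parser walks the buffers attached to a decode message and, unless the stream type is one of the exempt codecs (0x7, 0x10, 0x11), pins the job to a single VCN instance. Roughly, assuming the upstream control flow:

	/* sketch: skip everything except session-create descriptors */
	for (i = 0, msg = &msg[6]; i < num_buffers; ++i, msg += 4) {
		if (msg[0] != RDECODE_MESSAGE_CREATE)
			continue;
		/* resolve the referenced create descriptor into 'create'
		 * (buffer-mapping lookup elided here), then: */
		if (create[0] == 0x7 || create[0] == 0x10 || create[0] == 0x11)
			continue;	/* these stream types may use any instance */
		r = vcn_v3_0_limit_sched(p, job);
		if (r)
			return r;
	}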
94 adev->vcn.harvest_config = 0; in vcn_v3_0_early_init()
103 if (adev->ip_versions[UVD_HWIP][0] == IP_VERSION(3, 0, 33)) in vcn_v3_0_early_init()
104 adev->vcn.num_enc_rings = 0; in vcn_v3_0_early_init()
127 int vcn_doorbell_index = 0; in vcn_v3_0_sw_init()
153 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v3_0_sw_init()
183 atomic_set(&adev->vcn.inst[i].sched_score, 0); in vcn_v3_0_sw_init()
192 ring->vm_hub = AMDGPU_MMHUB0(0); in vcn_v3_0_sw_init()
194 r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst[i].irq, 0, in vcn_v3_0_sw_init()
200 for (j = 0; j < adev->vcn.num_enc_rings; ++j) { in vcn_v3_0_sw_init()
216 ring->vm_hub = AMDGPU_MMHUB0(0); in vcn_v3_0_sw_init()
218 r = amdgpu_ring_init(adev, ring, 512, &adev->vcn.inst[i].irq, 0, in vcn_v3_0_sw_init()
230 if (adev->ip_versions[UVD_HWIP][0] == IP_VERSION(3, 1, 2)) in vcn_v3_0_sw_init()
232 else if (adev->ip_versions[UVD_HWIP][0] == IP_VERSION(3, 1, 1)) in vcn_v3_0_sw_init()
247 return 0; in vcn_v3_0_sw_init()
263 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v3_0_sw_fini()
269 fw_shared->present_flag_0 = 0; in vcn_v3_0_sw_fini()
307 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v3_0_hw_init()
317 ring->wptr = 0; in vcn_v3_0_hw_init()
318 ring->wptr_old = 0; in vcn_v3_0_hw_init()
323 for (j = 0; j < adev->vcn.num_enc_rings; ++j) { in vcn_v3_0_hw_init()
330 ring->wptr = 0; in vcn_v3_0_hw_init()
331 ring->wptr_old = 0; in vcn_v3_0_hw_init()
338 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v3_0_hw_init()
351 for (j = 0; j < adev->vcn.num_enc_rings; ++j) { in vcn_v3_0_hw_init()
382 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v3_0_hw_fini()
395 return 0; in vcn_v3_0_hw_fini()
453 /* cache window 0: fw */ in vcn_v3_0_mc_resume()
459 WREG32_SOC15(VCN, inst, mmUVD_VCPU_CACHE_OFFSET0, 0); in vcn_v3_0_mc_resume()
460 offset = 0; in vcn_v3_0_mc_resume()
477 WREG32_SOC15(VCN, inst, mmUVD_VCPU_CACHE_OFFSET1, 0); in vcn_v3_0_mc_resume()
485 WREG32_SOC15(VCN, inst, mmUVD_VCPU_CACHE_OFFSET2, 0); in vcn_v3_0_mc_resume()
493 WREG32_SOC15(VCN, inst, mmUVD_VCPU_NONCACHE_OFFSET0, 0); in vcn_v3_0_mc_resume()
503 /* cache window 0: fw */ in vcn_v3_0_mc_resume_dpg_mode()
508 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_lo), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
511 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_hi), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
513 VCN, inst_idx, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
516 VCN, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), 0, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
518 VCN, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), 0, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
520 VCN, inst_idx, mmUVD_VCPU_CACHE_OFFSET0), 0, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
522 offset = 0; in vcn_v3_0_mc_resume_dpg_mode()
526 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
529 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
533 AMDGPU_UVD_FIRMWARE_OFFSET >> 3, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
538 VCN, inst_idx, mmUVD_VCPU_CACHE_SIZE0), size, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
541 VCN, inst_idx, mmUVD_VCPU_CACHE_SIZE0), 0, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
547 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
550 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
552 VCN, inst_idx, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
555 VCN, inst_idx, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_LOW), 0, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
557 VCN, inst_idx, mmUVD_LMI_VCPU_CACHE1_64BIT_BAR_HIGH), 0, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
559 VCN, inst_idx, mmUVD_VCPU_CACHE_OFFSET1), 0, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
562 VCN, inst_idx, mmUVD_VCPU_CACHE_SIZE1), AMDGPU_VCN_STACK_SIZE, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
567 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
570 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
572 VCN, inst_idx, mmUVD_VCPU_CACHE_OFFSET2), 0, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
574 VCN, inst_idx, mmUVD_VCPU_CACHE_SIZE2), AMDGPU_VCN_CONTEXT_SIZE, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
579 lower_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
582 upper_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
584 VCN, inst_idx, mmUVD_VCPU_NONCACHE_OFFSET0), 0, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
587 AMDGPU_GPU_PAGE_ALIGN(sizeof(struct amdgpu_fw_shared)), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
591 UVD, inst_idx, mmUVD_GFX10_ADDR_CONFIG), adev->gfx.config.gb_addr_config, 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
596 uint32_t data = 0; in vcn_v3_0_disable_static_power_gating()
616 UVD_PGFSM_STATUS__UVDM_UVDU_UVDLM_PWR_ON_3_0, 0x3F3FFFFF); in vcn_v3_0_disable_static_power_gating()
633 SOC15_WAIT_ON_RREG(VCN, inst, mmUVD_PGFSM_STATUS, 0, 0x3F3FFFFF); in vcn_v3_0_disable_static_power_gating()
637 data &= ~0x103; in vcn_v3_0_disable_static_power_gating()
686 SOC15_WAIT_ON_RREG(VCN, inst, mmUVD_PGFSM_STATUS, data, 0x3F3FFFFF); in vcn_v3_0_enable_static_power_gating()
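Note: both static power-gating paths follow the same request/acknowledge pattern: write the desired tile states to mmUVD_PGFSM_CONFIG, then let SOC15_WAIT_ON_RREG() poll mmUVD_PGFSM_STATUS (masked with 0x3F3FFFFF) until it matches, returning -ETIMEDOUT otherwise. A sketch of the power-up direction, assuming the usual CONFIG/STATUS register pairing:

	/* sketch: request power-up of the UVD power tiles ... */
	WREG32_SOC15(VCN, inst, mmUVD_PGFSM_CONFIG, data);
	/* ... and wait until the PGFSM reports them ON */
	SOC15_WAIT_ON_RREG(VCN, inst, mmUVD_PGFSM_STATUS,
			   UVD_PGFSM_STATUS__UVDM_UVDU_UVDLM_PWR_ON_3_0, 0x3F3FFFFF);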
736 SOC15_WAIT_ON_RREG(VCN, inst, mmUVD_CGC_GATE, 0, 0xFFFFFFFF); in vcn_v3_0_disable_clock_gating()
829 uint32_t reg_data = 0; in vcn_v3_0_clock_gating_dpg_mode()
835 reg_data = 0 << UVD_CGC_CTRL__DYN_CLOCK_MODE__SHIFT; in vcn_v3_0_clock_gating_dpg_mode()
863 VCN, inst_idx, mmUVD_CGC_GATE), 0, sram_sel, indirect); in vcn_v3_0_clock_gating_dpg_mode()
871 VCN, inst_idx, mmUVD_SUVD_CGC_CTRL), 0, sram_sel, indirect); in vcn_v3_0_clock_gating_dpg_mode()
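Note: in DPG mode these writes go through WREG32_SOC15_DPG_MODE(): with indirect set, each (register, value) pair is appended to the instance's DPG SRAM table instead of hitting MMIO, and the whole table is committed later (see the amdgpu_vcn_psp_update_sram() hit at line 1040). A sketch of one such staged write, following the pattern visible above:

	/* sketch: stage the CGC control value into the DPG SRAM table
	 * (indirect=1) or write it immediately (indirect=0) */
	WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET(
		VCN, inst_idx, mmUVD_CGC_CTRL), reg_data, sram_sel, indirect);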
891 data |= 0 << UVD_CGC_CTRL__DYN_CLOCK_MODE__SHIFT; in vcn_v3_0_enable_clock_gating()
961 vcn_v3_0_clock_gating_dpg_mode(adev, 0, inst_idx, indirect); in vcn_v3_0_start_dpg_mode()
964 tmp = (0xFF << UVD_VCPU_CNTL__PRB_TIMEOUT_VAL__SHIFT); in vcn_v3_0_start_dpg_mode()
968 VCN, inst_idx, mmUVD_VCPU_CNTL), tmp, 0, indirect); in vcn_v3_0_start_dpg_mode()
972 VCN, inst_idx, mmUVD_MASTINT_EN), 0, 0, indirect); in vcn_v3_0_start_dpg_mode()
975 tmp = (0x8 | UVD_LMI_CTRL__WRITE_CLEAN_TIMER_EN_MASK | in vcn_v3_0_start_dpg_mode()
982 0x00100000L); in vcn_v3_0_start_dpg_mode()
984 VCN, inst_idx, mmUVD_LMI_CTRL), tmp, 0, indirect); in vcn_v3_0_start_dpg_mode()
988 0x2 << UVD_MPC_CNTL__REPLACEMENT_MODE__SHIFT, 0, indirect); in vcn_v3_0_start_dpg_mode()
992 ((0x1 << UVD_MPC_SET_MUXA0__VARA_1__SHIFT) | in vcn_v3_0_start_dpg_mode()
993 (0x2 << UVD_MPC_SET_MUXA0__VARA_2__SHIFT) | in vcn_v3_0_start_dpg_mode()
994 (0x3 << UVD_MPC_SET_MUXA0__VARA_3__SHIFT) | in vcn_v3_0_start_dpg_mode()
995 (0x4 << UVD_MPC_SET_MUXA0__VARA_4__SHIFT)), 0, indirect); in vcn_v3_0_start_dpg_mode()
999 ((0x1 << UVD_MPC_SET_MUXB0__VARB_1__SHIFT) | in vcn_v3_0_start_dpg_mode()
1000 (0x2 << UVD_MPC_SET_MUXB0__VARB_2__SHIFT) | in vcn_v3_0_start_dpg_mode()
1001 (0x3 << UVD_MPC_SET_MUXB0__VARB_3__SHIFT) | in vcn_v3_0_start_dpg_mode()
1002 (0x4 << UVD_MPC_SET_MUXB0__VARB_4__SHIFT)), 0, indirect); in vcn_v3_0_start_dpg_mode()
1006 ((0x0 << UVD_MPC_SET_MUX__SET_0__SHIFT) | in vcn_v3_0_start_dpg_mode()
1007 (0x1 << UVD_MPC_SET_MUX__SET_1__SHIFT) | in vcn_v3_0_start_dpg_mode()
1008 (0x2 << UVD_MPC_SET_MUX__SET_2__SHIFT)), 0, indirect); in vcn_v3_0_start_dpg_mode()
1013 VCN, inst_idx, mmUVD_REG_XX_MASK), 0x10, 0, indirect); in vcn_v3_0_start_dpg_mode()
1015 VCN, inst_idx, mmUVD_RBC_XX_IB_REG_CHECK), 0x3, 0, indirect); in vcn_v3_0_start_dpg_mode()
1019 VCN, inst_idx, mmUVD_LMI_CTRL2), 0, 0, indirect); in vcn_v3_0_start_dpg_mode()
1023 VCN, inst_idx, mmUVD_RB_ARB_CTRL), 0, 0, indirect); in vcn_v3_0_start_dpg_mode()
1025 tmp = (0xFF << UVD_VCPU_CNTL__PRB_TIMEOUT_VAL__SHIFT); in vcn_v3_0_start_dpg_mode()
1028 VCN, inst_idx, mmUVD_VCPU_CNTL), tmp, 0, indirect); in vcn_v3_0_start_dpg_mode()
1033 UVD_MASTINT_EN__VCPU_EN_MASK, 0, indirect); in vcn_v3_0_start_dpg_mode()
1037 VCN, inst_idx, mmUVD_VCPU_CNTL), tmp, 0, indirect); in vcn_v3_0_start_dpg_mode()
1040 amdgpu_vcn_psp_update_sram(adev, inst_idx, 0); in vcn_v3_0_start_dpg_mode()
1045 tmp = REG_SET_FIELD(0, UVD_RBC_RB_CNTL, RB_BUFSZ, rb_bufsz); in vcn_v3_0_start_dpg_mode()
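Note: REG_SET_FIELD() assembles the value field-by-field from the generated __SHIFT/_MASK defines. The match filter hides the neighbouring lines, which in the upstream file force the ring buffer controller idle before the ring is programmed; approximately:

	/* sketch: force RBC into idle state before programming the ring */
	rb_bufsz = order_base_2(ring->ring_size);
	tmp = REG_SET_FIELD(0, UVD_RBC_RB_CNTL, RB_BUFSZ, rb_bufsz);
	tmp = REG_SET_FIELD(tmp, UVD_RBC_RB_CNTL, RB_BLKSZ, 1);
	tmp = REG_SET_FIELD(tmp, UVD_RBC_RB_CNTL, RB_NO_FETCH, 1);
	tmp = REG_SET_FIELD(tmp, UVD_RBC_RB_CNTL, RB_NO_UPDATE, 1);
	tmp = REG_SET_FIELD(tmp, UVD_RBC_RB_CNTL, RB_RPTR_WR_EN, 1);
	WREG32_SOC15(VCN, inst_idx, mmUVD_RBC_RB_CNTL, tmp);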
1059 WREG32_SOC15(VCN, inst_idx, mmUVD_RBC_RB_WPTR_CNTL, 0); in vcn_v3_0_start_dpg_mode()
1072 WREG32_SOC15(VCN, inst_idx, mmUVD_RBC_RB_RPTR, 0); in vcn_v3_0_start_dpg_mode()
1074 WREG32_SOC15(VCN, inst_idx, mmUVD_SCRATCH2, 0); in vcn_v3_0_start_dpg_mode()
1081 fw_shared->rb.rptr = 0; in vcn_v3_0_start_dpg_mode()
1089 0, ~UVD_POWER_STATUS__STALL_DPG_POWER_UP_MASK); in vcn_v3_0_start_dpg_mode()
1091 return 0; in vcn_v3_0_start_dpg_mode()
1104 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v3_0_start()
1128 WREG32_P(SOC15_REG_OFFSET(VCN, i, mmUVD_MASTINT_EN), 0, in vcn_v3_0_start()
1132 WREG32_P(SOC15_REG_OFFSET(VCN, i, mmUVD_LMI_CTRL2), 0, in vcn_v3_0_start()
1151 tmp |= 0x2 << UVD_MPC_CNTL__REPLACEMENT_MODE__SHIFT; in vcn_v3_0_start()
1156 ((0x1 << UVD_MPC_SET_MUXA0__VARA_1__SHIFT) | in vcn_v3_0_start()
1157 (0x2 << UVD_MPC_SET_MUXA0__VARA_2__SHIFT) | in vcn_v3_0_start()
1158 (0x3 << UVD_MPC_SET_MUXA0__VARA_3__SHIFT) | in vcn_v3_0_start()
1159 (0x4 << UVD_MPC_SET_MUXA0__VARA_4__SHIFT))); in vcn_v3_0_start()
1163 ((0x1 << UVD_MPC_SET_MUXB0__VARB_1__SHIFT) | in vcn_v3_0_start()
1164 (0x2 << UVD_MPC_SET_MUXB0__VARB_2__SHIFT) | in vcn_v3_0_start()
1165 (0x3 << UVD_MPC_SET_MUXB0__VARB_3__SHIFT) | in vcn_v3_0_start()
1166 (0x4 << UVD_MPC_SET_MUXB0__VARB_4__SHIFT))); in vcn_v3_0_start()
1170 ((0x0 << UVD_MPC_SET_MUX__SET_0__SHIFT) | in vcn_v3_0_start()
1171 (0x1 << UVD_MPC_SET_MUX__SET_1__SHIFT) | in vcn_v3_0_start()
1172 (0x2 << UVD_MPC_SET_MUX__SET_2__SHIFT))); in vcn_v3_0_start()
1181 WREG32_P(SOC15_REG_OFFSET(VCN, i, mmUVD_RB_ARB_CTRL), 0, in vcn_v3_0_start()
1185 WREG32_P(SOC15_REG_OFFSET(VCN, i, mmUVD_VCPU_CNTL), 0, in vcn_v3_0_start()
1188 for (j = 0; j < 10; ++j) { in vcn_v3_0_start()
1191 for (k = 0; k < 100; ++k) { in vcn_v3_0_start()
1197 r = 0; in vcn_v3_0_start()
1206 WREG32_P(SOC15_REG_OFFSET(VCN, i, mmUVD_VCPU_CNTL), 0, in vcn_v3_0_start()
1224 WREG32_P(SOC15_REG_OFFSET(VCN, i, mmUVD_STATUS), 0, in vcn_v3_0_start()
1227 WREG32_SOC15(VCN, i, mmUVD_LMI_RBC_RB_VMID, 0); in vcn_v3_0_start()
1232 tmp = REG_SET_FIELD(0, UVD_RBC_RB_CNTL, RB_BUFSZ, rb_bufsz); in vcn_v3_0_start()
1249 WREG32_SOC15(VCN, i, mmUVD_RBC_RB_RPTR, 0); in vcn_v3_0_start()
1251 WREG32_SOC15(VCN, i, mmUVD_SCRATCH2, 0); in vcn_v3_0_start()
1258 if (adev->ip_versions[UVD_HWIP][0] != IP_VERSION(3, 0, 33)) { in vcn_v3_0_start()
1260 ring = &adev->vcn.inst[i].ring_enc[0]; in vcn_v3_0_start()
1279 return 0; in vcn_v3_0_start()
1299 direct_wt = { {0} }; in vcn_v3_0_start_sriov()
1301 direct_rd_mod_wt = { {0} }; in vcn_v3_0_start_sriov()
1302 struct mmsch_v3_0_cmd_end end = { {0} }; in vcn_v3_0_start_sriov()
1314 for (i = 0; i < MMSCH_V3_0_VCN_INSTANCES; i++) { in vcn_v3_0_start_sriov()
1315 header.inst[i].init_status = 0; in vcn_v3_0_start_sriov()
1316 header.inst[i].table_offset = 0; in vcn_v3_0_start_sriov()
1317 header.inst[i].table_size = 0; in vcn_v3_0_start_sriov()
1322 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v3_0_start_sriov()
1326 table_size = 0; in vcn_v3_0_start_sriov()
1341 offset = 0; in vcn_v3_0_start_sriov()
1344 0); in vcn_v3_0_start_sriov()
1371 0); in vcn_v3_0_start_sriov()
1386 0); in vcn_v3_0_start_sriov()
1391 for (j = 0; j < adev->vcn.num_enc_rings; ++j) { in vcn_v3_0_start_sriov()
1393 ring->wptr = 0; in vcn_v3_0_start_sriov()
1407 ring->wptr = 0; in vcn_v3_0_start_sriov()
1417 tmp = REG_SET_FIELD(0, UVD_RBC_RB_CNTL, RB_BUFSZ, tmp); in vcn_v3_0_start_sriov()
1430 header.inst[i].init_status = 0; in vcn_v3_0_start_sriov()
1441 /* message MMSCH (in VCN[0]) to initialize this client in vcn_v3_0_start_sriov()
1446 WREG32_SOC15(VCN, 0, mmMMSCH_VF_CTX_ADDR_LO, lower_32_bits(ctx_addr)); in vcn_v3_0_start_sriov()
1447 WREG32_SOC15(VCN, 0, mmMMSCH_VF_CTX_ADDR_HI, upper_32_bits(ctx_addr)); in vcn_v3_0_start_sriov()
1450 tmp = RREG32_SOC15(VCN, 0, mmMMSCH_VF_VMID); in vcn_v3_0_start_sriov()
1453 tmp |= (0 << MMSCH_VF_VMID__VF_CTX_VMID__SHIFT); in vcn_v3_0_start_sriov()
1454 WREG32_SOC15(VCN, 0, mmMMSCH_VF_VMID, tmp); in vcn_v3_0_start_sriov()
1458 WREG32_SOC15(VCN, 0, mmMMSCH_VF_CTX_SIZE, size); in vcn_v3_0_start_sriov()
1461 WREG32_SOC15(VCN, 0, mmMMSCH_VF_MAILBOX_RESP, 0); in vcn_v3_0_start_sriov()
1466 param = 0x10000001; in vcn_v3_0_start_sriov()
1467 WREG32_SOC15(VCN, 0, mmMMSCH_VF_MAILBOX_HOST, param); in vcn_v3_0_start_sriov()
1468 tmp = 0; in vcn_v3_0_start_sriov()
1470 resp = 0; in vcn_v3_0_start_sriov()
1473 resp = RREG32_SOC15(VCN, 0, mmMMSCH_VF_MAILBOX_RESP); in vcn_v3_0_start_sriov()
1482 "(expected=0x%08x, readback=0x%08x)\n", in vcn_v3_0_start_sriov()
1488 return 0; in vcn_v3_0_start_sriov()
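Note: under SR-IOV the VF never programs the VCN blocks directly; it builds an init table in its context buffer and hands it to the MMSCH, which applies it on the VF's behalf. The handshake visible in the hits above reduces to: publish the table address and size, clear the mailbox, write the init command, and poll for param + 1 as the acknowledgement. A condensed sketch, assuming the upstream timeout handling:

	/* sketch: hand the init table to the MMSCH and wait for its ack */
	WREG32_SOC15(VCN, 0, mmMMSCH_VF_CTX_ADDR_LO, lower_32_bits(ctx_addr));
	WREG32_SOC15(VCN, 0, mmMMSCH_VF_CTX_ADDR_HI, upper_32_bits(ctx_addr));
	WREG32_SOC15(VCN, 0, mmMMSCH_VF_CTX_SIZE, size);
	WREG32_SOC15(VCN, 0, mmMMSCH_VF_MAILBOX_RESP, 0);	/* clear stale ack */

	param = 0x10000001;	/* "init table ready" */
	expected = param + 1;	/* MMSCH acknowledges with param + 1 */
	WREG32_SOC15(VCN, 0, mmMMSCH_VF_MAILBOX_HOST, param);
	for (tmp = 0; tmp < timeout; tmp += 10) {
		resp = RREG32_SOC15(VCN, 0, mmMMSCH_VF_MAILBOX_RESP);
		if (resp == expected)
			break;
		udelay(10);
	}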
1504 SOC15_WAIT_ON_RREG(VCN, inst_idx, mmUVD_RB_RPTR, tmp, 0xFFFFFFFF); in vcn_v3_0_stop_dpg_mode()
1507 SOC15_WAIT_ON_RREG(VCN, inst_idx, mmUVD_RB_RPTR2, tmp, 0xFFFFFFFF); in vcn_v3_0_stop_dpg_mode()
1509 tmp = RREG32_SOC15(VCN, inst_idx, mmUVD_RBC_RB_WPTR) & 0x7FFFFFFF; in vcn_v3_0_stop_dpg_mode()
1510 SOC15_WAIT_ON_RREG(VCN, inst_idx, mmUVD_RBC_RB_RPTR, tmp, 0xFFFFFFFF); in vcn_v3_0_stop_dpg_mode()
1516 WREG32_P(SOC15_REG_OFFSET(VCN, inst_idx, mmUVD_POWER_STATUS), 0, in vcn_v3_0_stop_dpg_mode()
1519 return 0; in vcn_v3_0_stop_dpg_mode()
1525 int i, r = 0; in vcn_v3_0_stop()
1527 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v3_0_stop()
1537 r = SOC15_WAIT_ON_RREG(VCN, i, mmUVD_STATUS, UVD_STATUS__IDLE, 0x7); in vcn_v3_0_stop()
1570 WREG32_P(SOC15_REG_OFFSET(VCN, i, mmUVD_VCPU_CNTL), 0, in vcn_v3_0_stop()
1582 WREG32_SOC15(VCN, i, mmUVD_STATUS, 0); in vcn_v3_0_stop()
1594 return 0; in vcn_v3_0_stop()
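Note: the stop path mirrors start in reverse: wait for UVD_STATUS to read idle, wait for the LMI to drain, block VCPU register access via mmUVD_RB_ARB_CTRL, put the VCPU into block reset, gate its clock, and finally clear mmUVD_STATUS (the line-1582 hit above). A sketch of the tail of that sequence, assuming the usual mask names:

	/* sketch: reset the VCPU, then gate its clock */
	WREG32_P(SOC15_REG_OFFSET(VCN, i, mmUVD_VCPU_CNTL),
		 UVD_VCPU_CNTL__BLK_RST_MASK, ~UVD_VCPU_CNTL__BLK_RST_MASK);
	WREG32_P(SOC15_REG_OFFSET(VCN, i, mmUVD_VCPU_CNTL),
		 0, ~UVD_VCPU_CNTL__CLK_EN_MASK);
	/* clear the status register so is_idle/resume see a clean block */
	WREG32_SOC15(VCN, i, mmUVD_STATUS, 0);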
1602 uint32_t reg_data = 0; in vcn_v3_0_pause_dpg_mode()
1613 ret_code = SOC15_WAIT_ON_RREG(VCN, inst_idx, mmUVD_POWER_STATUS, 0x1, in vcn_v3_0_pause_dpg_mode()
1631 if (adev->ip_versions[UVD_HWIP][0] != IP_VERSION(3, 0, 33)) { in vcn_v3_0_pause_dpg_mode()
1635 ring = &adev->vcn.inst[inst_idx].ring_enc[0]; in vcn_v3_0_pause_dpg_mode()
1636 ring->wptr = 0; in vcn_v3_0_pause_dpg_mode()
1646 ring->wptr = 0; in vcn_v3_0_pause_dpg_mode()
1661 0, ~UVD_POWER_STATUS__STALL_DPG_POWER_UP_MASK); in vcn_v3_0_pause_dpg_mode()
1674 return 0; in vcn_v3_0_pause_dpg_mode()
1738 .align_mask = 0x3f,
1780 return 0; in vcn_v3_0_limit_sched()
1798 DRM_ERROR("Can't find BO for addr 0x%08Lx\n", addr); in vcn_v3_0_dec_msg()
1804 if (addr & 0x7) { in vcn_v3_0_dec_msg()
1835 for (i = 0, msg = &msg[6]; i < num_buffers; ++i, msg += 4) { in vcn_v3_0_dec_msg()
1838 if (msg[0] != RDECODE_MESSAGE_CREATE) in vcn_v3_0_dec_msg()
1852 if (create[0] == 0x7 || create[0] == 0x10 || create[0] == 0x11) in vcn_v3_0_dec_msg()
1870 uint32_t msg_lo = 0, msg_hi = 0; in vcn_v3_0_ring_patch_cs_in_place()
1876 return 0; in vcn_v3_0_ring_patch_cs_in_place()
1878 for (i = 0; i < ib->length_dw; i += 2) { in vcn_v3_0_ring_patch_cs_in_place()
1882 if (reg == PACKET0(p->adev->vcn.internal.data0, 0)) { in vcn_v3_0_ring_patch_cs_in_place()
1884 } else if (reg == PACKET0(p->adev->vcn.internal.data1, 0)) { in vcn_v3_0_ring_patch_cs_in_place()
1886 } else if (reg == PACKET0(p->adev->vcn.internal.cmd, 0) && in vcn_v3_0_ring_patch_cs_in_place()
1887 val == 0) { in vcn_v3_0_ring_patch_cs_in_place()
1894 return 0; in vcn_v3_0_ring_patch_cs_in_place()
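Note: the decode ring's IB stream is a sequence of PACKET0 (register, value) pairs, which is why the parser above steps by two. It latches writes to the GPCOM DATA0/DATA1 registers as the low/high halves of a message-buffer address and, when a GPCOM command write with value 0 arrives, validates the buffer that address points at. Roughly, assuming the upstream control flow:

		/* sketch: body of the i += 2 loop over (reg, value) pairs */
		reg = ib->ptr[i];
		val = ib->ptr[i + 1];
		if (reg == PACKET0(p->adev->vcn.internal.data0, 0)) {
			msg_lo = val;	/* low half of the message address */
		} else if (reg == PACKET0(p->adev->vcn.internal.data1, 0)) {
			msg_hi = val;	/* high half */
		} else if (reg == PACKET0(p->adev->vcn.internal.cmd, 0) &&
			   val == 0) {
			/* command 0 == "process message buffer": check it */
			r = vcn_v3_0_dec_msg(p, job,
					     ((u64)msg_hi) << 32 | msg_lo);
			if (r)
				return r;
		}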
1899 .align_mask = 0xf,
1939 if (ring == &adev->vcn.inst[ring->me].ring_enc[0]) in vcn_v3_0_enc_ring_get_rptr()
1956 if (ring == &adev->vcn.inst[ring->me].ring_enc[0]) { in vcn_v3_0_enc_ring_get_wptr()
1980 if (ring == &adev->vcn.inst[ring->me].ring_enc[0]) { in vcn_v3_0_enc_ring_set_wptr()
1999 .align_mask = 0x3f,
2030 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v3_0_set_dec_ring_funcs()
2048 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v3_0_set_enc_ring_funcs()
2052 for (j = 0; j < adev->vcn.num_enc_rings; ++j) { in vcn_v3_0_set_enc_ring_funcs()
2056 if (adev->vcn.num_enc_rings > 0) in vcn_v3_0_set_enc_ring_funcs()
2066 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v3_0_is_idle()
2079 int i, ret = 0; in vcn_v3_0_wait_for_idle()
2081 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v3_0_wait_for_idle()
2101 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v3_0_set_clockgating_state()
2114 return 0; in vcn_v3_0_set_clockgating_state()
2129 return 0; in vcn_v3_0_set_powergating_state()
2133 return 0; in vcn_v3_0_set_powergating_state()
2151 return 0; in vcn_v3_0_set_interrupt_state()
2162 ip_instance = 0; in vcn_v3_0_process_interrupt()
2169 return 0; in vcn_v3_0_process_interrupt()
2179 amdgpu_fence_process(&adev->vcn.inst[ip_instance].ring_enc[0]); in vcn_v3_0_process_interrupt()
2186 entry->src_id, entry->src_data[0]); in vcn_v3_0_process_interrupt()
2190 return 0; in vcn_v3_0_process_interrupt()
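Note: the interrupt handler fans out on entry->src_id: the system-message source signals the decode ring's fences, and the two encode sources map to ring_enc[0] and ring_enc[1] (the line-2179 hit above is the first of those). A sketch of the dispatch, assuming the VCN 2.0 source-ID names this file shares:

	switch (entry->src_id) {
	case VCN_2_0__SRCID__UVD_SYSTEM_MESSAGE_INTERRUPT:
		amdgpu_fence_process(&adev->vcn.inst[ip_instance].ring_dec);
		break;
	case VCN_2_0__SRCID__UVD_ENC_GENERAL_PURPOSE:
		amdgpu_fence_process(&adev->vcn.inst[ip_instance].ring_enc[0]);
		break;
	case VCN_2_0__SRCID__UVD_ENC_LOW_LATENCY:
		amdgpu_fence_process(&adev->vcn.inst[ip_instance].ring_enc[1]);
		break;
	default:
		DRM_ERROR("Unhandled interrupt: %d %d\n",
			  entry->src_id, entry->src_data[0]);
		break;
	}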
2202 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v3_0_set_irq_funcs()
2234 .minor = 0,
2235 .rev = 0,