/*
 * Copyright 2012-17 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "dcn20_hubp.h"

#include "dm_services.h"
#include "dce_calcs.h"
#include "reg_helper.h"
#include "basics/conversion.h"

#define DC_LOGGER_INIT(logger)

#define REG(reg)\
	hubp2->hubp_regs->reg

#define CTX \
	hubp2->base.ctx

#undef FN
#define FN(reg_name, field_name) \
	hubp2->hubp_shift->field_name, hubp2->hubp_mask->field_name

void hubp2_set_vm_system_aperture_settings(struct hubp *hubp,
		struct vm_system_aperture_param *apt)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	PHYSICAL_ADDRESS_LOC mc_vm_apt_default;
	PHYSICAL_ADDRESS_LOC mc_vm_apt_low;
	PHYSICAL_ADDRESS_LOC mc_vm_apt_high;

	// The format of default addr is 48:12 of the 48 bit addr
	mc_vm_apt_default.quad_part = apt->sys_default.quad_part >> 12;

	// The format of high/low are 48:18 of the 48 bit addr
	mc_vm_apt_low.quad_part = apt->sys_low.quad_part >> 18;
	mc_vm_apt_high.quad_part = apt->sys_high.quad_part >> 18;

	REG_UPDATE_2(DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_MSB,
		DCN_VM_SYSTEM_APERTURE_DEFAULT_SYSTEM, 1, /* 1 = system physical memory */
		DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_MSB, mc_vm_apt_default.high_part);

	REG_SET(DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_LSB, 0,
		DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_LSB, mc_vm_apt_default.low_part);

	REG_SET(DCN_VM_SYSTEM_APERTURE_LOW_ADDR, 0,
		MC_VM_SYSTEM_APERTURE_LOW_ADDR, mc_vm_apt_low.quad_part);

	REG_SET(DCN_VM_SYSTEM_APERTURE_HIGH_ADDR, 0,
		MC_VM_SYSTEM_APERTURE_HIGH_ADDR, mc_vm_apt_high.quad_part);

	REG_SET_2(DCN_VM_MX_L1_TLB_CNTL, 0,
		ENABLE_L1_TLB, 1,
		SYSTEM_ACCESS_MODE, 0x3);
}

void hubp2_program_deadline(
		struct hubp *hubp,
		struct _vcs_dpi_display_dlg_regs_st *dlg_attr,
		struct _vcs_dpi_display_ttu_regs_st *ttu_attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	/* DLG - Per hubp */
	REG_SET_2(BLANK_OFFSET_0, 0,
		REFCYC_H_BLANK_END, dlg_attr->refcyc_h_blank_end,
		DLG_V_BLANK_END, dlg_attr->dlg_vblank_end);

	REG_SET(BLANK_OFFSET_1, 0,
		MIN_DST_Y_NEXT_START, dlg_attr->min_dst_y_next_start);

	REG_SET(DST_DIMENSIONS, 0,
		REFCYC_PER_HTOTAL, dlg_attr->refcyc_per_htotal);

	REG_SET_2(DST_AFTER_SCALER, 0,
		REFCYC_X_AFTER_SCALER, dlg_attr->refcyc_x_after_scaler,
		DST_Y_AFTER_SCALER, dlg_attr->dst_y_after_scaler);
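
	/* The "refcyc" deadline values programmed above and below are
	 * expressed in reference clock cycles; REF_FREQ_TO_PIX_FREQ holds
	 * the fixed-point reference-clock to pixel-clock ratio that relates
	 * them to pixel timing.
	 */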

	REG_SET(REF_FREQ_TO_PIX_FREQ, 0,
		REF_FREQ_TO_PIX_FREQ, dlg_attr->ref_freq_to_pix_freq);

	/* DLG - Per luma/chroma */
	REG_SET(VBLANK_PARAMETERS_1, 0,
		REFCYC_PER_PTE_GROUP_VBLANK_L, dlg_attr->refcyc_per_pte_group_vblank_l);

	if (REG(NOM_PARAMETERS_0))
		REG_SET(NOM_PARAMETERS_0, 0,
			DST_Y_PER_PTE_ROW_NOM_L, dlg_attr->dst_y_per_pte_row_nom_l);

	if (REG(NOM_PARAMETERS_1))
		REG_SET(NOM_PARAMETERS_1, 0,
			REFCYC_PER_PTE_GROUP_NOM_L, dlg_attr->refcyc_per_pte_group_nom_l);

	REG_SET(NOM_PARAMETERS_4, 0,
		DST_Y_PER_META_ROW_NOM_L, dlg_attr->dst_y_per_meta_row_nom_l);

	REG_SET(NOM_PARAMETERS_5, 0,
		REFCYC_PER_META_CHUNK_NOM_L, dlg_attr->refcyc_per_meta_chunk_nom_l);

	REG_SET_2(PER_LINE_DELIVERY, 0,
		REFCYC_PER_LINE_DELIVERY_L, dlg_attr->refcyc_per_line_delivery_l,
		REFCYC_PER_LINE_DELIVERY_C, dlg_attr->refcyc_per_line_delivery_c);

	REG_SET(VBLANK_PARAMETERS_2, 0,
		REFCYC_PER_PTE_GROUP_VBLANK_C, dlg_attr->refcyc_per_pte_group_vblank_c);

	if (REG(NOM_PARAMETERS_2))
		REG_SET(NOM_PARAMETERS_2, 0,
			DST_Y_PER_PTE_ROW_NOM_C, dlg_attr->dst_y_per_pte_row_nom_c);

	if (REG(NOM_PARAMETERS_3))
		REG_SET(NOM_PARAMETERS_3, 0,
			REFCYC_PER_PTE_GROUP_NOM_C, dlg_attr->refcyc_per_pte_group_nom_c);

	REG_SET(NOM_PARAMETERS_6, 0,
		DST_Y_PER_META_ROW_NOM_C, dlg_attr->dst_y_per_meta_row_nom_c);

	REG_SET(NOM_PARAMETERS_7, 0,
		REFCYC_PER_META_CHUNK_NOM_C, dlg_attr->refcyc_per_meta_chunk_nom_c);

	/* TTU - per hubp */
	REG_SET_2(DCN_TTU_QOS_WM, 0,
		QoS_LEVEL_LOW_WM, ttu_attr->qos_level_low_wm,
		QoS_LEVEL_HIGH_WM, ttu_attr->qos_level_high_wm);

	/* TTU - per luma/chroma */
	/* Assumed surf0 is luma and 1 is chroma */

	REG_SET_3(DCN_SURF0_TTU_CNTL0, 0,
		REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_l,
		QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_l,
		QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_l);

	REG_SET_3(DCN_SURF1_TTU_CNTL0, 0,
		REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_c,
		QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_c,
		QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_c);

	REG_SET_3(DCN_CUR0_TTU_CNTL0, 0,
		REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_cur0,
		QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_cur0,
		QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_cur0);

	REG_SET(FLIP_PARAMETERS_1, 0,
		REFCYC_PER_PTE_GROUP_FLIP_L, dlg_attr->refcyc_per_pte_group_flip_l);
}

void hubp2_vready_at_or_After_vsync(struct hubp *hubp,
		struct _vcs_dpi_display_pipe_dest_params_st *pipe_dest)
{
	uint32_t value = 0;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	/* disable_dlg_test_mode Set 9th bit to 1 to disable "dv" mode */
	REG_WRITE(HUBPREQ_DEBUG_DB, 1 << 8);

	/*
	 * if (VSTARTUP_START - (VREADY_OFFSET+VUPDATE_WIDTH+VUPDATE_OFFSET)/htotal)
	 *		<= OTG_V_BLANK_END
	 *	Set HUBP_VREADY_AT_OR_AFTER_VSYNC = 1
	 * else
	 *	Set HUBP_VREADY_AT_OR_AFTER_VSYNC = 0
	 */
	if ((pipe_dest->vstartup_start - (pipe_dest->vready_offset+pipe_dest->vupdate_width
		+ pipe_dest->vupdate_offset) / pipe_dest->htotal) <= pipe_dest->vblank_end) {
		value = 1;
	} else
		value = 0;
	REG_UPDATE(DCHUBP_CNTL, HUBP_VREADY_AT_OR_AFTER_VSYNC, value);
}

void hubp2_program_requestor(
		struct hubp *hubp,
		struct _vcs_dpi_display_rq_regs_st *rq_regs)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE(HUBPRET_CONTROL,
			DET_BUF_PLANE1_BASE_ADDRESS, rq_regs->plane1_base_address);
	REG_SET_4(DCN_EXPANSION_MODE, 0,
			DRQ_EXPANSION_MODE, rq_regs->drq_expansion_mode,
			PRQ_EXPANSION_MODE, rq_regs->prq_expansion_mode,
			MRQ_EXPANSION_MODE, rq_regs->mrq_expansion_mode,
			CRQ_EXPANSION_MODE, rq_regs->crq_expansion_mode);
	REG_SET_8(DCHUBP_REQ_SIZE_CONFIG, 0,
		CHUNK_SIZE, rq_regs->rq_regs_l.chunk_size,
		MIN_CHUNK_SIZE, rq_regs->rq_regs_l.min_chunk_size,
		META_CHUNK_SIZE, rq_regs->rq_regs_l.meta_chunk_size,
		MIN_META_CHUNK_SIZE, rq_regs->rq_regs_l.min_meta_chunk_size,
		DPTE_GROUP_SIZE, rq_regs->rq_regs_l.dpte_group_size,
		MPTE_GROUP_SIZE, rq_regs->rq_regs_l.mpte_group_size,
		SWATH_HEIGHT, rq_regs->rq_regs_l.swath_height,
		PTE_ROW_HEIGHT_LINEAR, rq_regs->rq_regs_l.pte_row_height_linear);
	REG_SET_8(DCHUBP_REQ_SIZE_CONFIG_C, 0,
		CHUNK_SIZE_C, rq_regs->rq_regs_c.chunk_size,
		MIN_CHUNK_SIZE_C, rq_regs->rq_regs_c.min_chunk_size,
		META_CHUNK_SIZE_C, rq_regs->rq_regs_c.meta_chunk_size,
		MIN_META_CHUNK_SIZE_C, rq_regs->rq_regs_c.min_meta_chunk_size,
		DPTE_GROUP_SIZE_C, rq_regs->rq_regs_c.dpte_group_size,
		MPTE_GROUP_SIZE_C, rq_regs->rq_regs_c.mpte_group_size,
		SWATH_HEIGHT_C, rq_regs->rq_regs_c.swath_height,
		PTE_ROW_HEIGHT_LINEAR_C, rq_regs->rq_regs_c.pte_row_height_linear);
}

static void hubp2_setup(
		struct hubp *hubp,
		struct _vcs_dpi_display_dlg_regs_st *dlg_attr,
		struct _vcs_dpi_display_ttu_regs_st *ttu_attr,
		struct _vcs_dpi_display_rq_regs_st *rq_regs,
		struct _vcs_dpi_display_pipe_dest_params_st *pipe_dest)
{
	/* The OTG is locked when this function is called and the registers
	 * are double buffered, so there is no need to disable the requestors.
	 */

	hubp2_vready_at_or_After_vsync(hubp, pipe_dest);
	hubp2_program_requestor(hubp, rq_regs);
	hubp2_program_deadline(hubp, dlg_attr, ttu_attr);
}

void hubp2_setup_interdependent(
		struct hubp *hubp,
		struct _vcs_dpi_display_dlg_regs_st *dlg_attr,
		struct _vcs_dpi_display_ttu_regs_st *ttu_attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_SET_2(PREFETCH_SETTINGS, 0,
			DST_Y_PREFETCH, dlg_attr->dst_y_prefetch,
			VRATIO_PREFETCH, dlg_attr->vratio_prefetch);

	REG_SET(PREFETCH_SETTINGS_C, 0,
			VRATIO_PREFETCH_C, dlg_attr->vratio_prefetch_c);

	REG_SET_2(VBLANK_PARAMETERS_0, 0,
			DST_Y_PER_VM_VBLANK, dlg_attr->dst_y_per_vm_vblank,
			DST_Y_PER_ROW_VBLANK, dlg_attr->dst_y_per_row_vblank);

	REG_SET_2(FLIP_PARAMETERS_0, 0,
			DST_Y_PER_VM_FLIP, dlg_attr->dst_y_per_vm_flip,
			DST_Y_PER_ROW_FLIP, dlg_attr->dst_y_per_row_flip);

	REG_SET(VBLANK_PARAMETERS_3, 0,
			REFCYC_PER_META_CHUNK_VBLANK_L, dlg_attr->refcyc_per_meta_chunk_vblank_l);

	REG_SET(VBLANK_PARAMETERS_4, 0,
			REFCYC_PER_META_CHUNK_VBLANK_C, dlg_attr->refcyc_per_meta_chunk_vblank_c);

	REG_SET(FLIP_PARAMETERS_2, 0,
			REFCYC_PER_META_CHUNK_FLIP_L, dlg_attr->refcyc_per_meta_chunk_flip_l);

	REG_SET_2(PER_LINE_DELIVERY_PRE, 0,
			REFCYC_PER_LINE_DELIVERY_PRE_L, dlg_attr->refcyc_per_line_delivery_pre_l,
			REFCYC_PER_LINE_DELIVERY_PRE_C, dlg_attr->refcyc_per_line_delivery_pre_c);

	REG_SET(DCN_SURF0_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE,
		ttu_attr->refcyc_per_req_delivery_pre_l);
	REG_SET(DCN_SURF1_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE,
		ttu_attr->refcyc_per_req_delivery_pre_c);
	REG_SET(DCN_CUR0_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE, ttu_attr->refcyc_per_req_delivery_pre_cur0);
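
	/* DCN_CUR1_TTU_CNTL1 below mirrors the CUR0 programming above for the
	 * second cursor, and DCN_GLOBAL_TTU_CNTL then applies the vblank and
	 * flip QoS settings to the HUBP as a whole.
	 */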
285 REG_SET(DCN_CUR1_TTU_CNTL1, 0, 286 REFCYC_PER_REQ_DELIVERY_PRE, ttu_attr->refcyc_per_req_delivery_pre_cur1); 287 288 REG_SET_2(DCN_GLOBAL_TTU_CNTL, 0, 289 MIN_TTU_VBLANK, ttu_attr->min_ttu_vblank, 290 QoS_LEVEL_FLIP, ttu_attr->qos_level_flip); 291 } 292 293 /* DCN2 (GFX10), the following GFX fields are deprecated. They can be set but they will not be used: 294 * NUM_BANKS 295 * NUM_SE 296 * NUM_RB_PER_SE 297 * RB_ALIGNED 298 * Other things can be defaulted, since they never change: 299 * PIPE_ALIGNED = 0 300 * META_LINEAR = 0 301 * In GFX10, only these apply: 302 * PIPE_INTERLEAVE 303 * NUM_PIPES 304 * MAX_COMPRESSED_FRAGS 305 * SW_MODE 306 */ 307 static void hubp2_program_tiling( 308 struct dcn20_hubp *hubp2, 309 const union dc_tiling_info *info, 310 const enum surface_pixel_format pixel_format) 311 { 312 REG_UPDATE_3(DCSURF_ADDR_CONFIG, 313 NUM_PIPES, log_2(info->gfx9.num_pipes), 314 PIPE_INTERLEAVE, info->gfx9.pipe_interleave, 315 MAX_COMPRESSED_FRAGS, log_2(info->gfx9.max_compressed_frags)); 316 317 REG_UPDATE_4(DCSURF_TILING_CONFIG, 318 SW_MODE, info->gfx9.swizzle, 319 META_LINEAR, 0, 320 RB_ALIGNED, 0, 321 PIPE_ALIGNED, 0); 322 } 323 324 void hubp2_program_size( 325 struct hubp *hubp, 326 enum surface_pixel_format format, 327 const struct plane_size *plane_size, 328 struct dc_plane_dcc_param *dcc) 329 { 330 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 331 uint32_t pitch, meta_pitch, pitch_c, meta_pitch_c; 332 bool use_pitch_c = false; 333 334 /* Program data and meta surface pitch (calculation from addrlib) 335 * 444 or 420 luma 336 */ 337 use_pitch_c = format >= SURFACE_PIXEL_FORMAT_VIDEO_BEGIN 338 && format < SURFACE_PIXEL_FORMAT_SUBSAMPLE_END; 339 #if defined(CONFIG_DRM_AMD_DC_DCN3_0) 340 use_pitch_c = use_pitch_c 341 || (format == SURFACE_PIXEL_FORMAT_GRPH_RGBE_ALPHA); 342 #endif 343 if (use_pitch_c) { 344 ASSERT(plane_size->chroma_pitch != 0); 345 /* Chroma pitch zero can cause system hang! 
*/ 346 347 pitch = plane_size->surface_pitch - 1; 348 meta_pitch = dcc->meta_pitch - 1; 349 pitch_c = plane_size->chroma_pitch - 1; 350 meta_pitch_c = dcc->meta_pitch_c - 1; 351 } else { 352 pitch = plane_size->surface_pitch - 1; 353 meta_pitch = dcc->meta_pitch - 1; 354 pitch_c = 0; 355 meta_pitch_c = 0; 356 } 357 358 if (!dcc->enable) { 359 meta_pitch = 0; 360 meta_pitch_c = 0; 361 } 362 363 REG_UPDATE_2(DCSURF_SURFACE_PITCH, 364 PITCH, pitch, META_PITCH, meta_pitch); 365 366 use_pitch_c = format >= SURFACE_PIXEL_FORMAT_VIDEO_BEGIN; 367 #if defined(CONFIG_DRM_AMD_DC_DCN3_0) 368 use_pitch_c = use_pitch_c 369 || (format == SURFACE_PIXEL_FORMAT_GRPH_RGBE_ALPHA); 370 #endif 371 if (use_pitch_c) 372 REG_UPDATE_2(DCSURF_SURFACE_PITCH_C, 373 PITCH_C, pitch_c, META_PITCH_C, meta_pitch_c); 374 } 375 376 void hubp2_program_rotation( 377 struct hubp *hubp, 378 enum dc_rotation_angle rotation, 379 bool horizontal_mirror) 380 { 381 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 382 uint32_t mirror; 383 384 385 if (horizontal_mirror) 386 mirror = 1; 387 else 388 mirror = 0; 389 390 /* Program rotation angle and horz mirror - no mirror */ 391 if (rotation == ROTATION_ANGLE_0) 392 REG_UPDATE_2(DCSURF_SURFACE_CONFIG, 393 ROTATION_ANGLE, 0, 394 H_MIRROR_EN, mirror); 395 else if (rotation == ROTATION_ANGLE_90) 396 REG_UPDATE_2(DCSURF_SURFACE_CONFIG, 397 ROTATION_ANGLE, 1, 398 H_MIRROR_EN, mirror); 399 else if (rotation == ROTATION_ANGLE_180) 400 REG_UPDATE_2(DCSURF_SURFACE_CONFIG, 401 ROTATION_ANGLE, 2, 402 H_MIRROR_EN, mirror); 403 else if (rotation == ROTATION_ANGLE_270) 404 REG_UPDATE_2(DCSURF_SURFACE_CONFIG, 405 ROTATION_ANGLE, 3, 406 H_MIRROR_EN, mirror); 407 } 408 409 void hubp2_dcc_control(struct hubp *hubp, bool enable, 410 enum hubp_ind_block_size independent_64b_blks) 411 { 412 uint32_t dcc_en = enable ? 1 : 0; 413 uint32_t dcc_ind_64b_blk = independent_64b_blks ? 
1 : 0; 414 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 415 416 REG_UPDATE_4(DCSURF_SURFACE_CONTROL, 417 PRIMARY_SURFACE_DCC_EN, dcc_en, 418 PRIMARY_SURFACE_DCC_IND_64B_BLK, dcc_ind_64b_blk, 419 SECONDARY_SURFACE_DCC_EN, dcc_en, 420 SECONDARY_SURFACE_DCC_IND_64B_BLK, dcc_ind_64b_blk); 421 } 422 423 void hubp2_program_pixel_format( 424 struct hubp *hubp, 425 enum surface_pixel_format format) 426 { 427 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 428 uint32_t red_bar = 3; 429 uint32_t blue_bar = 2; 430 431 /* swap for ABGR format */ 432 if (format == SURFACE_PIXEL_FORMAT_GRPH_ABGR8888 433 || format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010 434 || format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS 435 || format == SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F) { 436 red_bar = 2; 437 blue_bar = 3; 438 } 439 440 REG_UPDATE_2(HUBPRET_CONTROL, 441 CROSSBAR_SRC_CB_B, blue_bar, 442 CROSSBAR_SRC_CR_R, red_bar); 443 444 /* Mapping is same as ipp programming (cnvc) */ 445 446 switch (format) { 447 case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555: 448 REG_UPDATE(DCSURF_SURFACE_CONFIG, 449 SURFACE_PIXEL_FORMAT, 1); 450 break; 451 case SURFACE_PIXEL_FORMAT_GRPH_RGB565: 452 REG_UPDATE(DCSURF_SURFACE_CONFIG, 453 SURFACE_PIXEL_FORMAT, 3); 454 break; 455 case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888: 456 case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888: 457 REG_UPDATE(DCSURF_SURFACE_CONFIG, 458 SURFACE_PIXEL_FORMAT, 8); 459 break; 460 case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010: 461 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010: 462 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS: 463 REG_UPDATE(DCSURF_SURFACE_CONFIG, 464 SURFACE_PIXEL_FORMAT, 10); 465 break; 466 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616: 467 REG_UPDATE(DCSURF_SURFACE_CONFIG, 468 SURFACE_PIXEL_FORMAT, 22); 469 break; 470 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F: 471 case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:/*we use crossbar already*/ 472 REG_UPDATE(DCSURF_SURFACE_CONFIG, 473 SURFACE_PIXEL_FORMAT, 24); 474 break; 475 476 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr: 477 REG_UPDATE(DCSURF_SURFACE_CONFIG, 478 SURFACE_PIXEL_FORMAT, 65); 479 break; 480 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb: 481 REG_UPDATE(DCSURF_SURFACE_CONFIG, 482 SURFACE_PIXEL_FORMAT, 64); 483 break; 484 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr: 485 REG_UPDATE(DCSURF_SURFACE_CONFIG, 486 SURFACE_PIXEL_FORMAT, 67); 487 break; 488 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb: 489 REG_UPDATE(DCSURF_SURFACE_CONFIG, 490 SURFACE_PIXEL_FORMAT, 66); 491 break; 492 case SURFACE_PIXEL_FORMAT_VIDEO_AYCrCb8888: 493 REG_UPDATE(DCSURF_SURFACE_CONFIG, 494 SURFACE_PIXEL_FORMAT, 12); 495 break; 496 case SURFACE_PIXEL_FORMAT_GRPH_RGB111110_FIX: 497 REG_UPDATE(DCSURF_SURFACE_CONFIG, 498 SURFACE_PIXEL_FORMAT, 112); 499 break; 500 case SURFACE_PIXEL_FORMAT_GRPH_BGR101111_FIX: 501 REG_UPDATE(DCSURF_SURFACE_CONFIG, 502 SURFACE_PIXEL_FORMAT, 113); 503 break; 504 case SURFACE_PIXEL_FORMAT_VIDEO_ACrYCb2101010: 505 REG_UPDATE(DCSURF_SURFACE_CONFIG, 506 SURFACE_PIXEL_FORMAT, 114); 507 break; 508 case SURFACE_PIXEL_FORMAT_GRPH_RGB111110_FLOAT: 509 REG_UPDATE(DCSURF_SURFACE_CONFIG, 510 SURFACE_PIXEL_FORMAT, 118); 511 break; 512 case SURFACE_PIXEL_FORMAT_GRPH_BGR101111_FLOAT: 513 REG_UPDATE(DCSURF_SURFACE_CONFIG, 514 SURFACE_PIXEL_FORMAT, 119); 515 break; 516 #if defined(CONFIG_DRM_AMD_DC_DCN3_0) 517 case SURFACE_PIXEL_FORMAT_GRPH_RGBE: 518 REG_UPDATE_2(DCSURF_SURFACE_CONFIG, 519 SURFACE_PIXEL_FORMAT, 116, 520 ALPHA_PLANE_EN, 0); 521 break; 522 case SURFACE_PIXEL_FORMAT_GRPH_RGBE_ALPHA: 523 
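		/* Same hardware surface format code as RGBE above, but with the
		 * separate alpha plane enabled.
		 */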
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 116,
				ALPHA_PLANE_EN, 1);
		break;
#endif
	default:
		BREAK_TO_DEBUGGER();
		break;
	}

	/* No need to program the xbar in DCN 1.0 */
}

void hubp2_program_surface_config(
	struct hubp *hubp,
	enum surface_pixel_format format,
	union dc_tiling_info *tiling_info,
	struct plane_size *plane_size,
	enum dc_rotation_angle rotation,
	struct dc_plane_dcc_param *dcc,
	bool horizontal_mirror,
	unsigned int compat_level)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	hubp2_dcc_control(hubp, dcc->enable, dcc->independent_64b_blks);
	hubp2_program_tiling(hubp2, tiling_info, format);
	hubp2_program_size(hubp, format, plane_size, dcc);
	hubp2_program_rotation(hubp, rotation, horizontal_mirror);
	hubp2_program_pixel_format(hubp, format);
}

enum cursor_lines_per_chunk hubp2_get_lines_per_chunk(
		unsigned int cursor_width,
		enum dc_cursor_color_format cursor_mode)
{
	enum cursor_lines_per_chunk line_per_chunk = CURSOR_LINE_PER_CHUNK_16;

	if (cursor_mode == CURSOR_MODE_MONO)
		line_per_chunk = CURSOR_LINE_PER_CHUNK_16;
	else if (cursor_mode == CURSOR_MODE_COLOR_1BIT_AND ||
		cursor_mode == CURSOR_MODE_COLOR_PRE_MULTIPLIED_ALPHA ||
		cursor_mode == CURSOR_MODE_COLOR_UN_PRE_MULTIPLIED_ALPHA) {
		if (cursor_width >= 1 && cursor_width <= 32)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_16;
		else if (cursor_width >= 33 && cursor_width <= 64)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_8;
		else if (cursor_width >= 65 && cursor_width <= 128)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_4;
		else if (cursor_width >= 129 && cursor_width <= 256)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_2;
	} else if (cursor_mode == CURSOR_MODE_COLOR_64BIT_FP_PRE_MULTIPLIED ||
		cursor_mode == CURSOR_MODE_COLOR_64BIT_FP_UN_PRE_MULTIPLIED) {
		if (cursor_width >= 1 && cursor_width <= 16)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_16;
		else if (cursor_width >= 17 && cursor_width <= 32)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_8;
		else if (cursor_width >= 33 && cursor_width <= 64)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_4;
		else if (cursor_width >= 65 && cursor_width <= 128)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_2;
		else if (cursor_width >= 129 && cursor_width <= 256)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_1;
	}

	return line_per_chunk;
}

void hubp2_cursor_set_attributes(
		struct hubp *hubp,
		const struct dc_cursor_attributes *attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	enum cursor_pitch hw_pitch = hubp1_get_cursor_pitch(attr->pitch);
	enum cursor_lines_per_chunk lpc = hubp2_get_lines_per_chunk(
			attr->width, attr->color_format);

	hubp->curs_attr = *attr;

	REG_UPDATE(CURSOR_SURFACE_ADDRESS_HIGH,
			CURSOR_SURFACE_ADDRESS_HIGH, attr->address.high_part);
	REG_UPDATE(CURSOR_SURFACE_ADDRESS,
			CURSOR_SURFACE_ADDRESS, attr->address.low_part);

	REG_UPDATE_2(CURSOR_SIZE,
			CURSOR_WIDTH, attr->width,
			CURSOR_HEIGHT, attr->height);

	REG_UPDATE_4(CURSOR_CONTROL,
			CURSOR_MODE, attr->color_format,
			CURSOR_2X_MAGNIFY, attr->attribute_flags.bits.ENABLE_MAGNIFICATION,
			CURSOR_PITCH, hw_pitch,
			CURSOR_LINES_PER_CHUNK, lpc);

	REG_SET_2(CURSOR_SETTINGS, 0,
			/* no shift of the cursor HDL schedule */
			CURSOR0_DST_Y_OFFSET, 0,
			/* used to shift the cursor chunk request deadline */
CURSOR0_CHUNK_HDL_ADJUST, 3); 622 } 623 624 void hubp2_dmdata_set_attributes( 625 struct hubp *hubp, 626 const struct dc_dmdata_attributes *attr) 627 { 628 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 629 630 if (attr->dmdata_mode == DMDATA_HW_MODE) { 631 /* set to HW mode */ 632 REG_UPDATE(DMDATA_CNTL, 633 DMDATA_MODE, 1); 634 635 /* for DMDATA flip, need to use SURFACE_UPDATE_LOCK */ 636 REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_UPDATE_LOCK, 1); 637 638 /* toggle DMDATA_UPDATED and set repeat and size */ 639 REG_UPDATE(DMDATA_CNTL, 640 DMDATA_UPDATED, 0); 641 REG_UPDATE_3(DMDATA_CNTL, 642 DMDATA_UPDATED, 1, 643 DMDATA_REPEAT, attr->dmdata_repeat, 644 DMDATA_SIZE, attr->dmdata_size); 645 646 /* set DMDATA address */ 647 REG_WRITE(DMDATA_ADDRESS_LOW, attr->address.low_part); 648 REG_UPDATE(DMDATA_ADDRESS_HIGH, 649 DMDATA_ADDRESS_HIGH, attr->address.high_part); 650 651 REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_UPDATE_LOCK, 0); 652 653 } else { 654 /* set to SW mode before loading data */ 655 REG_SET(DMDATA_CNTL, 0, 656 DMDATA_MODE, 0); 657 /* toggle DMDATA_SW_UPDATED to start loading sequence */ 658 REG_UPDATE(DMDATA_SW_CNTL, 659 DMDATA_SW_UPDATED, 0); 660 REG_UPDATE_3(DMDATA_SW_CNTL, 661 DMDATA_SW_UPDATED, 1, 662 DMDATA_SW_REPEAT, attr->dmdata_repeat, 663 DMDATA_SW_SIZE, attr->dmdata_size); 664 /* load data into hubp dmdata buffer */ 665 hubp2_dmdata_load(hubp, attr->dmdata_size, attr->dmdata_sw_data); 666 } 667 668 /* Note that DL_DELTA must be programmed if we want to use TTU mode */ 669 REG_SET_3(DMDATA_QOS_CNTL, 0, 670 DMDATA_QOS_MODE, attr->dmdata_qos_mode, 671 DMDATA_QOS_LEVEL, attr->dmdata_qos_level, 672 DMDATA_DL_DELTA, attr->dmdata_dl_delta); 673 } 674 675 void hubp2_dmdata_load( 676 struct hubp *hubp, 677 uint32_t dmdata_sw_size, 678 const uint32_t *dmdata_sw_data) 679 { 680 int i; 681 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 682 683 /* load dmdata into HUBP buffer in SW mode */ 684 for (i = 0; i < dmdata_sw_size / 4; i++) 685 REG_WRITE(DMDATA_SW_DATA, dmdata_sw_data[i]); 686 } 687 688 bool hubp2_dmdata_status_done(struct hubp *hubp) 689 { 690 uint32_t status; 691 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 692 693 REG_GET(DMDATA_STATUS, DMDATA_DONE, &status); 694 return (status == 1); 695 } 696 697 bool hubp2_program_surface_flip_and_addr( 698 struct hubp *hubp, 699 const struct dc_plane_address *address, 700 bool flip_immediate) 701 { 702 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 703 704 //program flip type 705 REG_UPDATE(DCSURF_FLIP_CONTROL, 706 SURFACE_FLIP_TYPE, flip_immediate); 707 708 // Program VMID reg 709 REG_UPDATE(VMID_SETTINGS_0, 710 VMID, address->vmid); 711 712 if (address->type == PLN_ADDR_TYPE_GRPH_STEREO) { 713 REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_MODE_FOR_STEREOSYNC, 0x1); 714 REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_IN_STEREOSYNC, 0x1); 715 716 } else { 717 // turn off stereo if not in stereo 718 REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_MODE_FOR_STEREOSYNC, 0x0); 719 REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_IN_STEREOSYNC, 0x0); 720 } 721 722 723 724 /* HW automatically latch rest of address register on write to 725 * DCSURF_PRIMARY_SURFACE_ADDRESS if SURFACE_UPDATE_LOCK is not used 726 * 727 * program high first and then the low addr, order matters! 
728 */ 729 switch (address->type) { 730 case PLN_ADDR_TYPE_GRAPHICS: 731 /* DCN1.0 does not support const color 732 * TODO: program DCHUBBUB_RET_PATH_DCC_CFGx_0/1 733 * base on address->grph.dcc_const_color 734 * x = 0, 2, 4, 6 for pipe 0, 1, 2, 3 for rgb and luma 735 * x = 1, 3, 5, 7 for pipe 0, 1, 2, 3 for chroma 736 */ 737 738 if (address->grph.addr.quad_part == 0) 739 break; 740 741 REG_UPDATE_2(DCSURF_SURFACE_CONTROL, 742 PRIMARY_SURFACE_TMZ, address->tmz_surface, 743 PRIMARY_META_SURFACE_TMZ, address->tmz_surface); 744 745 if (address->grph.meta_addr.quad_part != 0) { 746 REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0, 747 PRIMARY_META_SURFACE_ADDRESS_HIGH, 748 address->grph.meta_addr.high_part); 749 750 REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0, 751 PRIMARY_META_SURFACE_ADDRESS, 752 address->grph.meta_addr.low_part); 753 } 754 755 REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0, 756 PRIMARY_SURFACE_ADDRESS_HIGH, 757 address->grph.addr.high_part); 758 759 REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0, 760 PRIMARY_SURFACE_ADDRESS, 761 address->grph.addr.low_part); 762 break; 763 case PLN_ADDR_TYPE_VIDEO_PROGRESSIVE: 764 if (address->video_progressive.luma_addr.quad_part == 0 765 || address->video_progressive.chroma_addr.quad_part == 0) 766 break; 767 768 REG_UPDATE_4(DCSURF_SURFACE_CONTROL, 769 PRIMARY_SURFACE_TMZ, address->tmz_surface, 770 PRIMARY_SURFACE_TMZ_C, address->tmz_surface, 771 PRIMARY_META_SURFACE_TMZ, address->tmz_surface, 772 PRIMARY_META_SURFACE_TMZ_C, address->tmz_surface); 773 774 if (address->video_progressive.luma_meta_addr.quad_part != 0) { 775 REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH_C, 0, 776 PRIMARY_META_SURFACE_ADDRESS_HIGH_C, 777 address->video_progressive.chroma_meta_addr.high_part); 778 779 REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_C, 0, 780 PRIMARY_META_SURFACE_ADDRESS_C, 781 address->video_progressive.chroma_meta_addr.low_part); 782 783 REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0, 784 PRIMARY_META_SURFACE_ADDRESS_HIGH, 785 address->video_progressive.luma_meta_addr.high_part); 786 787 REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0, 788 PRIMARY_META_SURFACE_ADDRESS, 789 address->video_progressive.luma_meta_addr.low_part); 790 } 791 792 REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C, 0, 793 PRIMARY_SURFACE_ADDRESS_HIGH_C, 794 address->video_progressive.chroma_addr.high_part); 795 796 REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_C, 0, 797 PRIMARY_SURFACE_ADDRESS_C, 798 address->video_progressive.chroma_addr.low_part); 799 800 REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0, 801 PRIMARY_SURFACE_ADDRESS_HIGH, 802 address->video_progressive.luma_addr.high_part); 803 804 REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0, 805 PRIMARY_SURFACE_ADDRESS, 806 address->video_progressive.luma_addr.low_part); 807 break; 808 case PLN_ADDR_TYPE_GRPH_STEREO: 809 if (address->grph_stereo.left_addr.quad_part == 0) 810 break; 811 if (address->grph_stereo.right_addr.quad_part == 0) 812 break; 813 814 REG_UPDATE_8(DCSURF_SURFACE_CONTROL, 815 PRIMARY_SURFACE_TMZ, address->tmz_surface, 816 PRIMARY_SURFACE_TMZ_C, address->tmz_surface, 817 PRIMARY_META_SURFACE_TMZ, address->tmz_surface, 818 PRIMARY_META_SURFACE_TMZ_C, address->tmz_surface, 819 SECONDARY_SURFACE_TMZ, address->tmz_surface, 820 SECONDARY_SURFACE_TMZ_C, address->tmz_surface, 821 SECONDARY_META_SURFACE_TMZ, address->tmz_surface, 822 SECONDARY_META_SURFACE_TMZ_C, address->tmz_surface); 823 824 if (address->grph_stereo.right_meta_addr.quad_part != 0) { 825 826 REG_SET(DCSURF_SECONDARY_META_SURFACE_ADDRESS_HIGH, 0, 827 
SECONDARY_META_SURFACE_ADDRESS_HIGH, 828 address->grph_stereo.right_meta_addr.high_part); 829 830 REG_SET(DCSURF_SECONDARY_META_SURFACE_ADDRESS, 0, 831 SECONDARY_META_SURFACE_ADDRESS, 832 address->grph_stereo.right_meta_addr.low_part); 833 } 834 if (address->grph_stereo.left_meta_addr.quad_part != 0) { 835 836 REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0, 837 PRIMARY_META_SURFACE_ADDRESS_HIGH, 838 address->grph_stereo.left_meta_addr.high_part); 839 840 REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0, 841 PRIMARY_META_SURFACE_ADDRESS, 842 address->grph_stereo.left_meta_addr.low_part); 843 } 844 845 REG_SET(DCSURF_SECONDARY_SURFACE_ADDRESS_HIGH, 0, 846 SECONDARY_SURFACE_ADDRESS_HIGH, 847 address->grph_stereo.right_addr.high_part); 848 849 REG_SET(DCSURF_SECONDARY_SURFACE_ADDRESS, 0, 850 SECONDARY_SURFACE_ADDRESS, 851 address->grph_stereo.right_addr.low_part); 852 853 REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0, 854 PRIMARY_SURFACE_ADDRESS_HIGH, 855 address->grph_stereo.left_addr.high_part); 856 857 REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0, 858 PRIMARY_SURFACE_ADDRESS, 859 address->grph_stereo.left_addr.low_part); 860 break; 861 default: 862 BREAK_TO_DEBUGGER(); 863 break; 864 } 865 866 hubp->request_address = *address; 867 868 return true; 869 } 870 871 void hubp2_enable_triplebuffer( 872 struct hubp *hubp, 873 bool enable) 874 { 875 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 876 uint32_t triple_buffer_en = 0; 877 bool tri_buffer_en; 878 879 REG_GET(DCSURF_FLIP_CONTROL2, SURFACE_TRIPLE_BUFFER_ENABLE, &triple_buffer_en); 880 tri_buffer_en = (triple_buffer_en == 1); 881 if (tri_buffer_en != enable) { 882 REG_UPDATE(DCSURF_FLIP_CONTROL2, 883 SURFACE_TRIPLE_BUFFER_ENABLE, enable ? DC_TRIPLEBUFFER_ENABLE : DC_TRIPLEBUFFER_DISABLE); 884 } 885 } 886 887 bool hubp2_is_triplebuffer_enabled( 888 struct hubp *hubp) 889 { 890 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 891 uint32_t triple_buffer_en = 0; 892 893 REG_GET(DCSURF_FLIP_CONTROL2, SURFACE_TRIPLE_BUFFER_ENABLE, &triple_buffer_en); 894 895 return (bool)triple_buffer_en; 896 } 897 898 void hubp2_set_flip_control_surface_gsl(struct hubp *hubp, bool enable) 899 { 900 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 901 902 REG_UPDATE(DCSURF_FLIP_CONTROL2, SURFACE_GSL_ENABLE, enable ? 1 : 0); 903 } 904 905 bool hubp2_is_flip_pending(struct hubp *hubp) 906 { 907 uint32_t flip_pending = 0; 908 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 909 struct dc_plane_address earliest_inuse_address; 910 911 if (hubp && hubp->power_gated) 912 return false; 913 914 REG_GET(DCSURF_FLIP_CONTROL, 915 SURFACE_FLIP_PENDING, &flip_pending); 916 917 REG_GET(DCSURF_SURFACE_EARLIEST_INUSE, 918 SURFACE_EARLIEST_INUSE_ADDRESS, &earliest_inuse_address.grph.addr.low_part); 919 920 REG_GET(DCSURF_SURFACE_EARLIEST_INUSE_HIGH, 921 SURFACE_EARLIEST_INUSE_ADDRESS_HIGH, &earliest_inuse_address.grph.addr.high_part); 922 923 if (flip_pending) 924 return true; 925 926 if (earliest_inuse_address.grph.addr.quad_part != hubp->request_address.grph.addr.quad_part) 927 return true; 928 929 return false; 930 } 931 932 void hubp2_set_blank(struct hubp *hubp, bool blank) 933 { 934 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 935 uint32_t blank_en = blank ? 1 : 0; 936 937 REG_UPDATE_2(DCHUBP_CNTL, 938 HUBP_BLANK_EN, blank_en, 939 HUBP_TTU_DISABLE, blank_en); 940 941 if (blank) { 942 uint32_t reg_val = REG_READ(DCHUBP_CNTL); 943 944 if (reg_val) { 945 /* init sequence workaround: in case HUBP is 946 * power gated, this wait would timeout. 
			 *
			 * we just wrote reg_val to non-0; if it stays 0,
			 * it means the HUBP is gated
			 */
			REG_WAIT(DCHUBP_CNTL,
					HUBP_NO_OUTSTANDING_REQ, 1,
					1, 200);
		}

		hubp->mpcc_id = 0xf;
		hubp->opp_id = OPP_ID_INVALID;
	}
}

void hubp2_cursor_set_position(
		struct hubp *hubp,
		const struct dc_cursor_position *pos,
		const struct dc_cursor_mi_param *param)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	int src_x_offset = pos->x - pos->x_hotspot - param->viewport.x;
	int src_y_offset = pos->y - pos->y_hotspot - param->viewport.y;
	int x_hotspot = pos->x_hotspot;
	int y_hotspot = pos->y_hotspot;
	int cursor_height = (int)hubp->curs_attr.height;
	int cursor_width = (int)hubp->curs_attr.width;
	uint32_t dst_x_offset;
	uint32_t cur_en = pos->enable ? 1 : 0;

	/*
	 * Guard against cursor_set_position() being called with invalid
	 * attributes
	 *
	 * TODO: Look at combining cursor_set_position() and
	 * cursor_set_attributes() into cursor_update()
	 */
	if (hubp->curs_attr.address.quad_part == 0)
		return;

	// Rotated cursor width/height and hotspot tweaks for offset calculation
	if (param->rotation == ROTATION_ANGLE_90 || param->rotation == ROTATION_ANGLE_270) {
		swap(cursor_height, cursor_width);
		if (param->rotation == ROTATION_ANGLE_90) {
			src_x_offset = pos->x - pos->y_hotspot - param->viewport.x;
			src_y_offset = pos->y - pos->x_hotspot - param->viewport.y;
		}
	} else if (param->rotation == ROTATION_ANGLE_180) {
		src_x_offset = pos->x - param->viewport.x;
		src_y_offset = pos->y - param->viewport.y;
	}

	if (param->mirror) {
		x_hotspot = param->viewport.width - x_hotspot;
		src_x_offset = param->viewport.x + param->viewport.width - src_x_offset;
	}

	dst_x_offset = (src_x_offset >= 0) ? src_x_offset : 0;
	dst_x_offset *= param->ref_clk_khz;
	dst_x_offset /= param->pixel_clk_khz;

	ASSERT(param->h_scale_ratio.value);

	if (param->h_scale_ratio.value)
		dst_x_offset = dc_fixpt_floor(dc_fixpt_div(
				dc_fixpt_from_int(dst_x_offset),
				param->h_scale_ratio));

	if (src_x_offset >= (int)param->viewport.width)
		cur_en = 0;  /* not visible beyond right edge */

	if (src_x_offset + cursor_width <= 0)
		cur_en = 0;  /* not visible beyond left edge */

	if (src_y_offset >= (int)param->viewport.height)
		cur_en = 0;  /* not visible beyond bottom edge */

	if (src_y_offset + cursor_height <= 0)
		cur_en = 0;  /* not visible beyond top edge */

	if (cur_en && REG_READ(CURSOR_SURFACE_ADDRESS) == 0)
		hubp->funcs->set_cursor_attributes(hubp, &hubp->curs_attr);

	REG_UPDATE(CURSOR_CONTROL,
			CURSOR_ENABLE, cur_en);

	REG_SET_2(CURSOR_POSITION, 0,
			CURSOR_X_POSITION, pos->x,
			CURSOR_Y_POSITION, pos->y);

	REG_SET_2(CURSOR_HOT_SPOT, 0,
			CURSOR_HOT_SPOT_X, x_hotspot,
			CURSOR_HOT_SPOT_Y, y_hotspot);

	REG_SET(CURSOR_DST_OFFSET, 0,
			CURSOR_DST_X_OFFSET, dst_x_offset);
	/* TODO Handle surface pixel formats other than 4:4:4 */
}

void hubp2_clk_cntl(struct hubp *hubp, bool enable)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t clk_enable = enable ?
1 : 0; 1049 1050 REG_UPDATE(HUBP_CLK_CNTL, HUBP_CLOCK_ENABLE, clk_enable); 1051 } 1052 1053 void hubp2_vtg_sel(struct hubp *hubp, uint32_t otg_inst) 1054 { 1055 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 1056 1057 REG_UPDATE(DCHUBP_CNTL, HUBP_VTG_SEL, otg_inst); 1058 } 1059 1060 void hubp2_clear_underflow(struct hubp *hubp) 1061 { 1062 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 1063 1064 REG_UPDATE(DCHUBP_CNTL, HUBP_UNDERFLOW_CLEAR, 1); 1065 } 1066 1067 void hubp2_read_state_common(struct hubp *hubp) 1068 { 1069 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 1070 struct dcn_hubp_state *s = &hubp2->state; 1071 struct _vcs_dpi_display_dlg_regs_st *dlg_attr = &s->dlg_attr; 1072 struct _vcs_dpi_display_ttu_regs_st *ttu_attr = &s->ttu_attr; 1073 struct _vcs_dpi_display_rq_regs_st *rq_regs = &s->rq_regs; 1074 1075 /* Requester */ 1076 REG_GET(HUBPRET_CONTROL, 1077 DET_BUF_PLANE1_BASE_ADDRESS, &rq_regs->plane1_base_address); 1078 REG_GET_4(DCN_EXPANSION_MODE, 1079 DRQ_EXPANSION_MODE, &rq_regs->drq_expansion_mode, 1080 PRQ_EXPANSION_MODE, &rq_regs->prq_expansion_mode, 1081 MRQ_EXPANSION_MODE, &rq_regs->mrq_expansion_mode, 1082 CRQ_EXPANSION_MODE, &rq_regs->crq_expansion_mode); 1083 1084 /* DLG - Per hubp */ 1085 REG_GET_2(BLANK_OFFSET_0, 1086 REFCYC_H_BLANK_END, &dlg_attr->refcyc_h_blank_end, 1087 DLG_V_BLANK_END, &dlg_attr->dlg_vblank_end); 1088 1089 REG_GET(BLANK_OFFSET_1, 1090 MIN_DST_Y_NEXT_START, &dlg_attr->min_dst_y_next_start); 1091 1092 REG_GET(DST_DIMENSIONS, 1093 REFCYC_PER_HTOTAL, &dlg_attr->refcyc_per_htotal); 1094 1095 REG_GET_2(DST_AFTER_SCALER, 1096 REFCYC_X_AFTER_SCALER, &dlg_attr->refcyc_x_after_scaler, 1097 DST_Y_AFTER_SCALER, &dlg_attr->dst_y_after_scaler); 1098 1099 if (REG(PREFETCH_SETTINS)) 1100 REG_GET_2(PREFETCH_SETTINS, 1101 DST_Y_PREFETCH, &dlg_attr->dst_y_prefetch, 1102 VRATIO_PREFETCH, &dlg_attr->vratio_prefetch); 1103 else 1104 REG_GET_2(PREFETCH_SETTINGS, 1105 DST_Y_PREFETCH, &dlg_attr->dst_y_prefetch, 1106 VRATIO_PREFETCH, &dlg_attr->vratio_prefetch); 1107 1108 REG_GET_2(VBLANK_PARAMETERS_0, 1109 DST_Y_PER_VM_VBLANK, &dlg_attr->dst_y_per_vm_vblank, 1110 DST_Y_PER_ROW_VBLANK, &dlg_attr->dst_y_per_row_vblank); 1111 1112 REG_GET(REF_FREQ_TO_PIX_FREQ, 1113 REF_FREQ_TO_PIX_FREQ, &dlg_attr->ref_freq_to_pix_freq); 1114 1115 /* DLG - Per luma/chroma */ 1116 REG_GET(VBLANK_PARAMETERS_1, 1117 REFCYC_PER_PTE_GROUP_VBLANK_L, &dlg_attr->refcyc_per_pte_group_vblank_l); 1118 1119 REG_GET(VBLANK_PARAMETERS_3, 1120 REFCYC_PER_META_CHUNK_VBLANK_L, &dlg_attr->refcyc_per_meta_chunk_vblank_l); 1121 1122 if (REG(NOM_PARAMETERS_0)) 1123 REG_GET(NOM_PARAMETERS_0, 1124 DST_Y_PER_PTE_ROW_NOM_L, &dlg_attr->dst_y_per_pte_row_nom_l); 1125 1126 if (REG(NOM_PARAMETERS_1)) 1127 REG_GET(NOM_PARAMETERS_1, 1128 REFCYC_PER_PTE_GROUP_NOM_L, &dlg_attr->refcyc_per_pte_group_nom_l); 1129 1130 REG_GET(NOM_PARAMETERS_4, 1131 DST_Y_PER_META_ROW_NOM_L, &dlg_attr->dst_y_per_meta_row_nom_l); 1132 1133 REG_GET(NOM_PARAMETERS_5, 1134 REFCYC_PER_META_CHUNK_NOM_L, &dlg_attr->refcyc_per_meta_chunk_nom_l); 1135 1136 REG_GET_2(PER_LINE_DELIVERY_PRE, 1137 REFCYC_PER_LINE_DELIVERY_PRE_L, &dlg_attr->refcyc_per_line_delivery_pre_l, 1138 REFCYC_PER_LINE_DELIVERY_PRE_C, &dlg_attr->refcyc_per_line_delivery_pre_c); 1139 1140 REG_GET_2(PER_LINE_DELIVERY, 1141 REFCYC_PER_LINE_DELIVERY_L, &dlg_attr->refcyc_per_line_delivery_l, 1142 REFCYC_PER_LINE_DELIVERY_C, &dlg_attr->refcyc_per_line_delivery_c); 1143 1144 if (REG(PREFETCH_SETTINS_C)) 1145 REG_GET(PREFETCH_SETTINS_C, 1146 VRATIO_PREFETCH_C, 
&dlg_attr->vratio_prefetch_c); 1147 else 1148 REG_GET(PREFETCH_SETTINGS_C, 1149 VRATIO_PREFETCH_C, &dlg_attr->vratio_prefetch_c); 1150 1151 REG_GET(VBLANK_PARAMETERS_2, 1152 REFCYC_PER_PTE_GROUP_VBLANK_C, &dlg_attr->refcyc_per_pte_group_vblank_c); 1153 1154 REG_GET(VBLANK_PARAMETERS_4, 1155 REFCYC_PER_META_CHUNK_VBLANK_C, &dlg_attr->refcyc_per_meta_chunk_vblank_c); 1156 1157 if (REG(NOM_PARAMETERS_2)) 1158 REG_GET(NOM_PARAMETERS_2, 1159 DST_Y_PER_PTE_ROW_NOM_C, &dlg_attr->dst_y_per_pte_row_nom_c); 1160 1161 if (REG(NOM_PARAMETERS_3)) 1162 REG_GET(NOM_PARAMETERS_3, 1163 REFCYC_PER_PTE_GROUP_NOM_C, &dlg_attr->refcyc_per_pte_group_nom_c); 1164 1165 REG_GET(NOM_PARAMETERS_6, 1166 DST_Y_PER_META_ROW_NOM_C, &dlg_attr->dst_y_per_meta_row_nom_c); 1167 1168 REG_GET(NOM_PARAMETERS_7, 1169 REFCYC_PER_META_CHUNK_NOM_C, &dlg_attr->refcyc_per_meta_chunk_nom_c); 1170 1171 /* TTU - per hubp */ 1172 REG_GET_2(DCN_TTU_QOS_WM, 1173 QoS_LEVEL_LOW_WM, &ttu_attr->qos_level_low_wm, 1174 QoS_LEVEL_HIGH_WM, &ttu_attr->qos_level_high_wm); 1175 1176 REG_GET_2(DCN_GLOBAL_TTU_CNTL, 1177 MIN_TTU_VBLANK, &ttu_attr->min_ttu_vblank, 1178 QoS_LEVEL_FLIP, &ttu_attr->qos_level_flip); 1179 1180 /* TTU - per luma/chroma */ 1181 /* Assumed surf0 is luma and 1 is chroma */ 1182 1183 REG_GET_3(DCN_SURF0_TTU_CNTL0, 1184 REFCYC_PER_REQ_DELIVERY, &ttu_attr->refcyc_per_req_delivery_l, 1185 QoS_LEVEL_FIXED, &ttu_attr->qos_level_fixed_l, 1186 QoS_RAMP_DISABLE, &ttu_attr->qos_ramp_disable_l); 1187 1188 REG_GET(DCN_SURF0_TTU_CNTL1, 1189 REFCYC_PER_REQ_DELIVERY_PRE, 1190 &ttu_attr->refcyc_per_req_delivery_pre_l); 1191 1192 REG_GET_3(DCN_SURF1_TTU_CNTL0, 1193 REFCYC_PER_REQ_DELIVERY, &ttu_attr->refcyc_per_req_delivery_c, 1194 QoS_LEVEL_FIXED, &ttu_attr->qos_level_fixed_c, 1195 QoS_RAMP_DISABLE, &ttu_attr->qos_ramp_disable_c); 1196 1197 REG_GET(DCN_SURF1_TTU_CNTL1, 1198 REFCYC_PER_REQ_DELIVERY_PRE, 1199 &ttu_attr->refcyc_per_req_delivery_pre_c); 1200 1201 /* Rest of hubp */ 1202 REG_GET(DCSURF_SURFACE_CONFIG, 1203 SURFACE_PIXEL_FORMAT, &s->pixel_format); 1204 1205 REG_GET(DCSURF_SURFACE_EARLIEST_INUSE_HIGH, 1206 SURFACE_EARLIEST_INUSE_ADDRESS_HIGH, &s->inuse_addr_hi); 1207 1208 REG_GET(DCSURF_SURFACE_EARLIEST_INUSE, 1209 SURFACE_EARLIEST_INUSE_ADDRESS, &s->inuse_addr_lo); 1210 1211 REG_GET_2(DCSURF_PRI_VIEWPORT_DIMENSION, 1212 PRI_VIEWPORT_WIDTH, &s->viewport_width, 1213 PRI_VIEWPORT_HEIGHT, &s->viewport_height); 1214 1215 REG_GET_2(DCSURF_SURFACE_CONFIG, 1216 ROTATION_ANGLE, &s->rotation_angle, 1217 H_MIRROR_EN, &s->h_mirror_en); 1218 1219 REG_GET(DCSURF_TILING_CONFIG, 1220 SW_MODE, &s->sw_mode); 1221 1222 REG_GET(DCSURF_SURFACE_CONTROL, 1223 PRIMARY_SURFACE_DCC_EN, &s->dcc_en); 1224 1225 REG_GET_3(DCHUBP_CNTL, 1226 HUBP_BLANK_EN, &s->blank_en, 1227 HUBP_TTU_DISABLE, &s->ttu_disable, 1228 HUBP_UNDERFLOW_STATUS, &s->underflow_status); 1229 1230 REG_GET(HUBP_CLK_CNTL, 1231 HUBP_CLOCK_ENABLE, &s->clock_en); 1232 1233 REG_GET(DCN_GLOBAL_TTU_CNTL, 1234 MIN_TTU_VBLANK, &s->min_ttu_vblank); 1235 1236 REG_GET_2(DCN_TTU_QOS_WM, 1237 QoS_LEVEL_LOW_WM, &s->qos_level_low_wm, 1238 QoS_LEVEL_HIGH_WM, &s->qos_level_high_wm); 1239 1240 } 1241 1242 void hubp2_read_state(struct hubp *hubp) 1243 { 1244 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 1245 struct dcn_hubp_state *s = &hubp2->state; 1246 struct _vcs_dpi_display_rq_regs_st *rq_regs = &s->rq_regs; 1247 1248 hubp2_read_state_common(hubp); 1249 1250 REG_GET_8(DCHUBP_REQ_SIZE_CONFIG, 1251 CHUNK_SIZE, &rq_regs->rq_regs_l.chunk_size, 1252 MIN_CHUNK_SIZE, &rq_regs->rq_regs_l.min_chunk_size, 1253 
META_CHUNK_SIZE, &rq_regs->rq_regs_l.meta_chunk_size, 1254 MIN_META_CHUNK_SIZE, &rq_regs->rq_regs_l.min_meta_chunk_size, 1255 DPTE_GROUP_SIZE, &rq_regs->rq_regs_l.dpte_group_size, 1256 MPTE_GROUP_SIZE, &rq_regs->rq_regs_l.mpte_group_size, 1257 SWATH_HEIGHT, &rq_regs->rq_regs_l.swath_height, 1258 PTE_ROW_HEIGHT_LINEAR, &rq_regs->rq_regs_l.pte_row_height_linear); 1259 1260 REG_GET_8(DCHUBP_REQ_SIZE_CONFIG_C, 1261 CHUNK_SIZE_C, &rq_regs->rq_regs_c.chunk_size, 1262 MIN_CHUNK_SIZE_C, &rq_regs->rq_regs_c.min_chunk_size, 1263 META_CHUNK_SIZE_C, &rq_regs->rq_regs_c.meta_chunk_size, 1264 MIN_META_CHUNK_SIZE_C, &rq_regs->rq_regs_c.min_meta_chunk_size, 1265 DPTE_GROUP_SIZE_C, &rq_regs->rq_regs_c.dpte_group_size, 1266 MPTE_GROUP_SIZE_C, &rq_regs->rq_regs_c.mpte_group_size, 1267 SWATH_HEIGHT_C, &rq_regs->rq_regs_c.swath_height, 1268 PTE_ROW_HEIGHT_LINEAR_C, &rq_regs->rq_regs_c.pte_row_height_linear); 1269 1270 } 1271 1272 void hubp2_validate_dml_output(struct hubp *hubp, 1273 struct dc_context *ctx, 1274 struct _vcs_dpi_display_rq_regs_st *dml_rq_regs, 1275 struct _vcs_dpi_display_dlg_regs_st *dml_dlg_attr, 1276 struct _vcs_dpi_display_ttu_regs_st *dml_ttu_attr) 1277 { 1278 struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp); 1279 struct _vcs_dpi_display_rq_regs_st rq_regs = {0}; 1280 struct _vcs_dpi_display_dlg_regs_st dlg_attr = {0}; 1281 struct _vcs_dpi_display_ttu_regs_st ttu_attr = {0}; 1282 DC_LOGGER_INIT(ctx->logger); 1283 DC_LOG_DEBUG("DML Validation | Running Validation"); 1284 1285 /* Requestor Regs */ 1286 REG_GET(HUBPRET_CONTROL, 1287 DET_BUF_PLANE1_BASE_ADDRESS, &rq_regs.plane1_base_address); 1288 REG_GET_4(DCN_EXPANSION_MODE, 1289 DRQ_EXPANSION_MODE, &rq_regs.drq_expansion_mode, 1290 PRQ_EXPANSION_MODE, &rq_regs.prq_expansion_mode, 1291 MRQ_EXPANSION_MODE, &rq_regs.mrq_expansion_mode, 1292 CRQ_EXPANSION_MODE, &rq_regs.crq_expansion_mode); 1293 REG_GET_8(DCHUBP_REQ_SIZE_CONFIG, 1294 CHUNK_SIZE, &rq_regs.rq_regs_l.chunk_size, 1295 MIN_CHUNK_SIZE, &rq_regs.rq_regs_l.min_chunk_size, 1296 META_CHUNK_SIZE, &rq_regs.rq_regs_l.meta_chunk_size, 1297 MIN_META_CHUNK_SIZE, &rq_regs.rq_regs_l.min_meta_chunk_size, 1298 DPTE_GROUP_SIZE, &rq_regs.rq_regs_l.dpte_group_size, 1299 MPTE_GROUP_SIZE, &rq_regs.rq_regs_l.mpte_group_size, 1300 SWATH_HEIGHT, &rq_regs.rq_regs_l.swath_height, 1301 PTE_ROW_HEIGHT_LINEAR, &rq_regs.rq_regs_l.pte_row_height_linear); 1302 REG_GET_8(DCHUBP_REQ_SIZE_CONFIG_C, 1303 CHUNK_SIZE_C, &rq_regs.rq_regs_c.chunk_size, 1304 MIN_CHUNK_SIZE_C, &rq_regs.rq_regs_c.min_chunk_size, 1305 META_CHUNK_SIZE_C, &rq_regs.rq_regs_c.meta_chunk_size, 1306 MIN_META_CHUNK_SIZE_C, &rq_regs.rq_regs_c.min_meta_chunk_size, 1307 DPTE_GROUP_SIZE_C, &rq_regs.rq_regs_c.dpte_group_size, 1308 MPTE_GROUP_SIZE_C, &rq_regs.rq_regs_c.mpte_group_size, 1309 SWATH_HEIGHT_C, &rq_regs.rq_regs_c.swath_height, 1310 PTE_ROW_HEIGHT_LINEAR_C, &rq_regs.rq_regs_c.pte_row_height_linear); 1311 1312 if (rq_regs.plane1_base_address != dml_rq_regs->plane1_base_address) 1313 DC_LOG_DEBUG("DML Validation | HUBPRET_CONTROL:DET_BUF_PLANE1_BASE_ADDRESS - Expected: %u Actual: %u\n", 1314 dml_rq_regs->plane1_base_address, rq_regs.plane1_base_address); 1315 if (rq_regs.drq_expansion_mode != dml_rq_regs->drq_expansion_mode) 1316 DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:DRQ_EXPANSION_MODE - Expected: %u Actual: %u\n", 1317 dml_rq_regs->drq_expansion_mode, rq_regs.drq_expansion_mode); 1318 if (rq_regs.prq_expansion_mode != dml_rq_regs->prq_expansion_mode) 1319 DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:MRQ_EXPANSION_MODE - 
Expected: %u Actual: %u\n", 1320 dml_rq_regs->prq_expansion_mode, rq_regs.prq_expansion_mode); 1321 if (rq_regs.mrq_expansion_mode != dml_rq_regs->mrq_expansion_mode) 1322 DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:DET_BUF_PLANE1_BASE_ADDRESS - Expected: %u Actual: %u\n", 1323 dml_rq_regs->mrq_expansion_mode, rq_regs.mrq_expansion_mode); 1324 if (rq_regs.crq_expansion_mode != dml_rq_regs->crq_expansion_mode) 1325 DC_LOG_DEBUG("DML Validation | DCN_EXPANSION_MODE:CRQ_EXPANSION_MODE - Expected: %u Actual: %u\n", 1326 dml_rq_regs->crq_expansion_mode, rq_regs.crq_expansion_mode); 1327 1328 if (rq_regs.rq_regs_l.chunk_size != dml_rq_regs->rq_regs_l.chunk_size) 1329 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:CHUNK_SIZE - Expected: %u Actual: %u\n", 1330 dml_rq_regs->rq_regs_l.chunk_size, rq_regs.rq_regs_l.chunk_size); 1331 if (rq_regs.rq_regs_l.min_chunk_size != dml_rq_regs->rq_regs_l.min_chunk_size) 1332 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:MIN_CHUNK_SIZE - Expected: %u Actual: %u\n", 1333 dml_rq_regs->rq_regs_l.min_chunk_size, rq_regs.rq_regs_l.min_chunk_size); 1334 if (rq_regs.rq_regs_l.meta_chunk_size != dml_rq_regs->rq_regs_l.meta_chunk_size) 1335 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:META_CHUNK_SIZE - Expected: %u Actual: %u\n", 1336 dml_rq_regs->rq_regs_l.meta_chunk_size, rq_regs.rq_regs_l.meta_chunk_size); 1337 if (rq_regs.rq_regs_l.min_meta_chunk_size != dml_rq_regs->rq_regs_l.min_meta_chunk_size) 1338 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:MIN_META_CHUNK_SIZE - Expected: %u Actual: %u\n", 1339 dml_rq_regs->rq_regs_l.min_meta_chunk_size, rq_regs.rq_regs_l.min_meta_chunk_size); 1340 if (rq_regs.rq_regs_l.dpte_group_size != dml_rq_regs->rq_regs_l.dpte_group_size) 1341 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:DPTE_GROUP_SIZE - Expected: %u Actual: %u\n", 1342 dml_rq_regs->rq_regs_l.dpte_group_size, rq_regs.rq_regs_l.dpte_group_size); 1343 if (rq_regs.rq_regs_l.mpte_group_size != dml_rq_regs->rq_regs_l.mpte_group_size) 1344 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:MPTE_GROUP_SIZE - Expected: %u Actual: %u\n", 1345 dml_rq_regs->rq_regs_l.mpte_group_size, rq_regs.rq_regs_l.mpte_group_size); 1346 if (rq_regs.rq_regs_l.swath_height != dml_rq_regs->rq_regs_l.swath_height) 1347 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:SWATH_HEIGHT - Expected: %u Actual: %u\n", 1348 dml_rq_regs->rq_regs_l.swath_height, rq_regs.rq_regs_l.swath_height); 1349 if (rq_regs.rq_regs_l.pte_row_height_linear != dml_rq_regs->rq_regs_l.pte_row_height_linear) 1350 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG_C:PTE_ROW_HEIGHT_LINEAR - Expected: %u Actual: %u\n", 1351 dml_rq_regs->rq_regs_l.pte_row_height_linear, rq_regs.rq_regs_l.pte_row_height_linear); 1352 1353 if (rq_regs.rq_regs_c.chunk_size != dml_rq_regs->rq_regs_c.chunk_size) 1354 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:CHUNK_SIZE_C - Expected: %u Actual: %u\n", 1355 dml_rq_regs->rq_regs_c.chunk_size, rq_regs.rq_regs_c.chunk_size); 1356 if (rq_regs.rq_regs_c.min_chunk_size != dml_rq_regs->rq_regs_c.min_chunk_size) 1357 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:MIN_CHUNK_SIZE_C - Expected: %u Actual: %u\n", 1358 dml_rq_regs->rq_regs_c.min_chunk_size, rq_regs.rq_regs_c.min_chunk_size); 1359 if (rq_regs.rq_regs_c.meta_chunk_size != dml_rq_regs->rq_regs_c.meta_chunk_size) 1360 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:META_CHUNK_SIZE_C - Expected: %u Actual: %u\n", 1361 dml_rq_regs->rq_regs_c.meta_chunk_size, 
rq_regs.rq_regs_c.meta_chunk_size); 1362 if (rq_regs.rq_regs_c.min_meta_chunk_size != dml_rq_regs->rq_regs_c.min_meta_chunk_size) 1363 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:MIN_META_CHUNK_SIZE_C - Expected: %u Actual: %u\n", 1364 dml_rq_regs->rq_regs_c.min_meta_chunk_size, rq_regs.rq_regs_c.min_meta_chunk_size); 1365 if (rq_regs.rq_regs_c.dpte_group_size != dml_rq_regs->rq_regs_c.dpte_group_size) 1366 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:DPTE_GROUP_SIZE_C - Expected: %u Actual: %u\n", 1367 dml_rq_regs->rq_regs_c.dpte_group_size, rq_regs.rq_regs_c.dpte_group_size); 1368 if (rq_regs.rq_regs_c.mpte_group_size != dml_rq_regs->rq_regs_c.mpte_group_size) 1369 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:MPTE_GROUP_SIZE_C - Expected: %u Actual: %u\n", 1370 dml_rq_regs->rq_regs_c.mpte_group_size, rq_regs.rq_regs_c.mpte_group_size); 1371 if (rq_regs.rq_regs_c.swath_height != dml_rq_regs->rq_regs_c.swath_height) 1372 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:SWATH_HEIGHT_C - Expected: %u Actual: %u\n", 1373 dml_rq_regs->rq_regs_c.swath_height, rq_regs.rq_regs_c.swath_height); 1374 if (rq_regs.rq_regs_c.pte_row_height_linear != dml_rq_regs->rq_regs_c.pte_row_height_linear) 1375 DC_LOG_DEBUG("DML Validation | DCHUBP_REQ_SIZE_CONFIG:PTE_ROW_HEIGHT_LINEAR_C - Expected: %u Actual: %u\n", 1376 dml_rq_regs->rq_regs_c.pte_row_height_linear, rq_regs.rq_regs_c.pte_row_height_linear); 1377 1378 /* DLG - Per hubp */ 1379 REG_GET_2(BLANK_OFFSET_0, 1380 REFCYC_H_BLANK_END, &dlg_attr.refcyc_h_blank_end, 1381 DLG_V_BLANK_END, &dlg_attr.dlg_vblank_end); 1382 REG_GET(BLANK_OFFSET_1, 1383 MIN_DST_Y_NEXT_START, &dlg_attr.min_dst_y_next_start); 1384 REG_GET(DST_DIMENSIONS, 1385 REFCYC_PER_HTOTAL, &dlg_attr.refcyc_per_htotal); 1386 REG_GET_2(DST_AFTER_SCALER, 1387 REFCYC_X_AFTER_SCALER, &dlg_attr.refcyc_x_after_scaler, 1388 DST_Y_AFTER_SCALER, &dlg_attr.dst_y_after_scaler); 1389 REG_GET(REF_FREQ_TO_PIX_FREQ, 1390 REF_FREQ_TO_PIX_FREQ, &dlg_attr.ref_freq_to_pix_freq); 1391 1392 if (dlg_attr.refcyc_h_blank_end != dml_dlg_attr->refcyc_h_blank_end) 1393 DC_LOG_DEBUG("DML Validation | BLANK_OFFSET_0:REFCYC_H_BLANK_END - Expected: %u Actual: %u\n", 1394 dml_dlg_attr->refcyc_h_blank_end, dlg_attr.refcyc_h_blank_end); 1395 if (dlg_attr.dlg_vblank_end != dml_dlg_attr->dlg_vblank_end) 1396 DC_LOG_DEBUG("DML Validation | BLANK_OFFSET_0:DLG_V_BLANK_END - Expected: %u Actual: %u\n", 1397 dml_dlg_attr->dlg_vblank_end, dlg_attr.dlg_vblank_end); 1398 if (dlg_attr.min_dst_y_next_start != dml_dlg_attr->min_dst_y_next_start) 1399 DC_LOG_DEBUG("DML Validation | BLANK_OFFSET_1:MIN_DST_Y_NEXT_START - Expected: %u Actual: %u\n", 1400 dml_dlg_attr->min_dst_y_next_start, dlg_attr.min_dst_y_next_start); 1401 if (dlg_attr.refcyc_per_htotal != dml_dlg_attr->refcyc_per_htotal) 1402 DC_LOG_DEBUG("DML Validation | DST_DIMENSIONS:REFCYC_PER_HTOTAL - Expected: %u Actual: %u\n", 1403 dml_dlg_attr->refcyc_per_htotal, dlg_attr.refcyc_per_htotal); 1404 if (dlg_attr.refcyc_x_after_scaler != dml_dlg_attr->refcyc_x_after_scaler) 1405 DC_LOG_DEBUG("DML Validation | DST_AFTER_SCALER:REFCYC_X_AFTER_SCALER - Expected: %u Actual: %u\n", 1406 dml_dlg_attr->refcyc_x_after_scaler, dlg_attr.refcyc_x_after_scaler); 1407 if (dlg_attr.dst_y_after_scaler != dml_dlg_attr->dst_y_after_scaler) 1408 DC_LOG_DEBUG("DML Validation | DST_AFTER_SCALER:DST_Y_AFTER_SCALER - Expected: %u Actual: %u\n", 1409 dml_dlg_attr->dst_y_after_scaler, dlg_attr.dst_y_after_scaler); 1410 if (dlg_attr.ref_freq_to_pix_freq != 
dml_dlg_attr->ref_freq_to_pix_freq) 1411 DC_LOG_DEBUG("DML Validation | REF_FREQ_TO_PIX_FREQ:REF_FREQ_TO_PIX_FREQ - Expected: %u Actual: %u\n", 1412 dml_dlg_attr->ref_freq_to_pix_freq, dlg_attr.ref_freq_to_pix_freq); 1413 1414 /* DLG - Per luma/chroma */ 1415 REG_GET(VBLANK_PARAMETERS_1, 1416 REFCYC_PER_PTE_GROUP_VBLANK_L, &dlg_attr.refcyc_per_pte_group_vblank_l); 1417 if (REG(NOM_PARAMETERS_0)) 1418 REG_GET(NOM_PARAMETERS_0, 1419 DST_Y_PER_PTE_ROW_NOM_L, &dlg_attr.dst_y_per_pte_row_nom_l); 1420 if (REG(NOM_PARAMETERS_1)) 1421 REG_GET(NOM_PARAMETERS_1, 1422 REFCYC_PER_PTE_GROUP_NOM_L, &dlg_attr.refcyc_per_pte_group_nom_l); 1423 REG_GET(NOM_PARAMETERS_4, 1424 DST_Y_PER_META_ROW_NOM_L, &dlg_attr.dst_y_per_meta_row_nom_l); 1425 REG_GET(NOM_PARAMETERS_5, 1426 REFCYC_PER_META_CHUNK_NOM_L, &dlg_attr.refcyc_per_meta_chunk_nom_l); 1427 REG_GET_2(PER_LINE_DELIVERY, 1428 REFCYC_PER_LINE_DELIVERY_L, &dlg_attr.refcyc_per_line_delivery_l, 1429 REFCYC_PER_LINE_DELIVERY_C, &dlg_attr.refcyc_per_line_delivery_c); 1430 REG_GET_2(PER_LINE_DELIVERY_PRE, 1431 REFCYC_PER_LINE_DELIVERY_PRE_L, &dlg_attr.refcyc_per_line_delivery_pre_l, 1432 REFCYC_PER_LINE_DELIVERY_PRE_C, &dlg_attr.refcyc_per_line_delivery_pre_c); 1433 REG_GET(VBLANK_PARAMETERS_2, 1434 REFCYC_PER_PTE_GROUP_VBLANK_C, &dlg_attr.refcyc_per_pte_group_vblank_c); 1435 if (REG(NOM_PARAMETERS_2)) 1436 REG_GET(NOM_PARAMETERS_2, 1437 DST_Y_PER_PTE_ROW_NOM_C, &dlg_attr.dst_y_per_pte_row_nom_c); 1438 if (REG(NOM_PARAMETERS_3)) 1439 REG_GET(NOM_PARAMETERS_3, 1440 REFCYC_PER_PTE_GROUP_NOM_C, &dlg_attr.refcyc_per_pte_group_nom_c); 1441 REG_GET(NOM_PARAMETERS_6, 1442 DST_Y_PER_META_ROW_NOM_C, &dlg_attr.dst_y_per_meta_row_nom_c); 1443 REG_GET(NOM_PARAMETERS_7, 1444 REFCYC_PER_META_CHUNK_NOM_C, &dlg_attr.refcyc_per_meta_chunk_nom_c); 1445 REG_GET(VBLANK_PARAMETERS_3, 1446 REFCYC_PER_META_CHUNK_VBLANK_L, &dlg_attr.refcyc_per_meta_chunk_vblank_l); 1447 REG_GET(VBLANK_PARAMETERS_4, 1448 REFCYC_PER_META_CHUNK_VBLANK_C, &dlg_attr.refcyc_per_meta_chunk_vblank_c); 1449 1450 if (dlg_attr.refcyc_per_pte_group_vblank_l != dml_dlg_attr->refcyc_per_pte_group_vblank_l) 1451 DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_1:REFCYC_PER_PTE_GROUP_VBLANK_L - Expected: %u Actual: %u\n", 1452 dml_dlg_attr->refcyc_per_pte_group_vblank_l, dlg_attr.refcyc_per_pte_group_vblank_l); 1453 if (dlg_attr.dst_y_per_pte_row_nom_l != dml_dlg_attr->dst_y_per_pte_row_nom_l) 1454 DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_0:DST_Y_PER_PTE_ROW_NOM_L - Expected: %u Actual: %u\n", 1455 dml_dlg_attr->dst_y_per_pte_row_nom_l, dlg_attr.dst_y_per_pte_row_nom_l); 1456 if (dlg_attr.refcyc_per_pte_group_nom_l != dml_dlg_attr->refcyc_per_pte_group_nom_l) 1457 DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_1:REFCYC_PER_PTE_GROUP_NOM_L - Expected: %u Actual: %u\n", 1458 dml_dlg_attr->refcyc_per_pte_group_nom_l, dlg_attr.refcyc_per_pte_group_nom_l); 1459 if (dlg_attr.dst_y_per_meta_row_nom_l != dml_dlg_attr->dst_y_per_meta_row_nom_l) 1460 DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_4:DST_Y_PER_META_ROW_NOM_L - Expected: %u Actual: %u\n", 1461 dml_dlg_attr->dst_y_per_meta_row_nom_l, dlg_attr.dst_y_per_meta_row_nom_l); 1462 if (dlg_attr.refcyc_per_meta_chunk_nom_l != dml_dlg_attr->refcyc_per_meta_chunk_nom_l) 1463 DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_5:REFCYC_PER_META_CHUNK_NOM_L - Expected: %u Actual: %u\n", 1464 dml_dlg_attr->refcyc_per_meta_chunk_nom_l, dlg_attr.refcyc_per_meta_chunk_nom_l); 1465 if (dlg_attr.refcyc_per_line_delivery_l != dml_dlg_attr->refcyc_per_line_delivery_l) 1466 
DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY:REFCYC_PER_LINE_DELIVERY_L - Expected: %u Actual: %u\n", 1467 dml_dlg_attr->refcyc_per_line_delivery_l, dlg_attr.refcyc_per_line_delivery_l); 1468 if (dlg_attr.refcyc_per_line_delivery_c != dml_dlg_attr->refcyc_per_line_delivery_c) 1469 DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY:REFCYC_PER_LINE_DELIVERY_C - Expected: %u Actual: %u\n", 1470 dml_dlg_attr->refcyc_per_line_delivery_c, dlg_attr.refcyc_per_line_delivery_c); 1471 if (dlg_attr.refcyc_per_pte_group_vblank_c != dml_dlg_attr->refcyc_per_pte_group_vblank_c) 1472 DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_2:REFCYC_PER_PTE_GROUP_VBLANK_C - Expected: %u Actual: %u\n", 1473 dml_dlg_attr->refcyc_per_pte_group_vblank_c, dlg_attr.refcyc_per_pte_group_vblank_c); 1474 if (dlg_attr.dst_y_per_pte_row_nom_c != dml_dlg_attr->dst_y_per_pte_row_nom_c) 1475 DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_2:DST_Y_PER_PTE_ROW_NOM_C - Expected: %u Actual: %u\n", 1476 dml_dlg_attr->dst_y_per_pte_row_nom_c, dlg_attr.dst_y_per_pte_row_nom_c); 1477 if (dlg_attr.refcyc_per_pte_group_nom_c != dml_dlg_attr->refcyc_per_pte_group_nom_c) 1478 DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_3:REFCYC_PER_PTE_GROUP_NOM_C - Expected: %u Actual: %u\n", 1479 dml_dlg_attr->refcyc_per_pte_group_nom_c, dlg_attr.refcyc_per_pte_group_nom_c); 1480 if (dlg_attr.dst_y_per_meta_row_nom_c != dml_dlg_attr->dst_y_per_meta_row_nom_c) 1481 DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_6:DST_Y_PER_META_ROW_NOM_C - Expected: %u Actual: %u\n", 1482 dml_dlg_attr->dst_y_per_meta_row_nom_c, dlg_attr.dst_y_per_meta_row_nom_c); 1483 if (dlg_attr.refcyc_per_meta_chunk_nom_c != dml_dlg_attr->refcyc_per_meta_chunk_nom_c) 1484 DC_LOG_DEBUG("DML Validation | NOM_PARAMETERS_7:REFCYC_PER_META_CHUNK_NOM_C - Expected: %u Actual: %u\n", 1485 dml_dlg_attr->refcyc_per_meta_chunk_nom_c, dlg_attr.refcyc_per_meta_chunk_nom_c); 1486 if (dlg_attr.refcyc_per_line_delivery_pre_l != dml_dlg_attr->refcyc_per_line_delivery_pre_l) 1487 DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY_PRE:REFCYC_PER_LINE_DELIVERY_PRE_L - Expected: %u Actual: %u\n", 1488 dml_dlg_attr->refcyc_per_line_delivery_pre_l, dlg_attr.refcyc_per_line_delivery_pre_l); 1489 if (dlg_attr.refcyc_per_line_delivery_pre_c != dml_dlg_attr->refcyc_per_line_delivery_pre_c) 1490 DC_LOG_DEBUG("DML Validation | PER_LINE_DELIVERY_PRE:REFCYC_PER_LINE_DELIVERY_PRE_C - Expected: %u Actual: %u\n", 1491 dml_dlg_attr->refcyc_per_line_delivery_pre_c, dlg_attr.refcyc_per_line_delivery_pre_c); 1492 if (dlg_attr.refcyc_per_meta_chunk_vblank_l != dml_dlg_attr->refcyc_per_meta_chunk_vblank_l) 1493 DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_3:REFCYC_PER_META_CHUNK_VBLANK_L - Expected: %u Actual: %u\n", 1494 dml_dlg_attr->refcyc_per_meta_chunk_vblank_l, dlg_attr.refcyc_per_meta_chunk_vblank_l); 1495 if (dlg_attr.refcyc_per_meta_chunk_vblank_c != dml_dlg_attr->refcyc_per_meta_chunk_vblank_c) 1496 DC_LOG_DEBUG("DML Validation | VBLANK_PARAMETERS_4:REFCYC_PER_META_CHUNK_VBLANK_C - Expected: %u Actual: %u\n", 1497 dml_dlg_attr->refcyc_per_meta_chunk_vblank_c, dlg_attr.refcyc_per_meta_chunk_vblank_c); 1498 1499 /* TTU - per hubp */ 1500 REG_GET_2(DCN_TTU_QOS_WM, 1501 QoS_LEVEL_LOW_WM, &ttu_attr.qos_level_low_wm, 1502 QoS_LEVEL_HIGH_WM, &ttu_attr.qos_level_high_wm); 1503 1504 if (ttu_attr.qos_level_low_wm != dml_ttu_attr->qos_level_low_wm) 1505 DC_LOG_DEBUG("DML Validation | DCN_TTU_QOS_WM:QoS_LEVEL_LOW_WM - Expected: %u Actual: %u\n", 1506 dml_ttu_attr->qos_level_low_wm, ttu_attr.qos_level_low_wm); 1507 if 
	if (ttu_attr.qos_level_high_wm != dml_ttu_attr->qos_level_high_wm)
		DC_LOG_DEBUG("DML Validation | DCN_TTU_QOS_WM:QoS_LEVEL_HIGH_WM - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_level_high_wm, ttu_attr.qos_level_high_wm);

	/* TTU - per luma/chroma */
	/* Assumed surf0 is luma and 1 is chroma */
	REG_GET_3(DCN_SURF0_TTU_CNTL0,
		REFCYC_PER_REQ_DELIVERY, &ttu_attr.refcyc_per_req_delivery_l,
		QoS_LEVEL_FIXED, &ttu_attr.qos_level_fixed_l,
		QoS_RAMP_DISABLE, &ttu_attr.qos_ramp_disable_l);
	REG_GET_3(DCN_SURF1_TTU_CNTL0,
		REFCYC_PER_REQ_DELIVERY, &ttu_attr.refcyc_per_req_delivery_c,
		QoS_LEVEL_FIXED, &ttu_attr.qos_level_fixed_c,
		QoS_RAMP_DISABLE, &ttu_attr.qos_ramp_disable_c);
	REG_GET_3(DCN_CUR0_TTU_CNTL0,
		REFCYC_PER_REQ_DELIVERY, &ttu_attr.refcyc_per_req_delivery_cur0,
		QoS_LEVEL_FIXED, &ttu_attr.qos_level_fixed_cur0,
		QoS_RAMP_DISABLE, &ttu_attr.qos_ramp_disable_cur0);
	REG_GET(FLIP_PARAMETERS_1,
		REFCYC_PER_PTE_GROUP_FLIP_L, &dlg_attr.refcyc_per_pte_group_flip_l);
	REG_GET(DCN_CUR0_TTU_CNTL1,
		REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_cur0);
	REG_GET(DCN_CUR1_TTU_CNTL1,
		REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_cur1);
	REG_GET(DCN_SURF0_TTU_CNTL1,
		REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_l);
	REG_GET(DCN_SURF1_TTU_CNTL1,
		REFCYC_PER_REQ_DELIVERY_PRE, &ttu_attr.refcyc_per_req_delivery_pre_c);

	if (ttu_attr.refcyc_per_req_delivery_l != dml_ttu_attr->refcyc_per_req_delivery_l)
		DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL0:REFCYC_PER_REQ_DELIVERY - Expected: %u Actual: %u\n",
				dml_ttu_attr->refcyc_per_req_delivery_l, ttu_attr.refcyc_per_req_delivery_l);
	if (ttu_attr.qos_level_fixed_l != dml_ttu_attr->qos_level_fixed_l)
		DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL0:QoS_LEVEL_FIXED - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_level_fixed_l, ttu_attr.qos_level_fixed_l);
	if (ttu_attr.qos_ramp_disable_l != dml_ttu_attr->qos_ramp_disable_l)
		DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL0:QoS_RAMP_DISABLE - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_ramp_disable_l, ttu_attr.qos_ramp_disable_l);
	if (ttu_attr.refcyc_per_req_delivery_c != dml_ttu_attr->refcyc_per_req_delivery_c)
		DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL0:REFCYC_PER_REQ_DELIVERY - Expected: %u Actual: %u\n",
				dml_ttu_attr->refcyc_per_req_delivery_c, ttu_attr.refcyc_per_req_delivery_c);
	if (ttu_attr.qos_level_fixed_c != dml_ttu_attr->qos_level_fixed_c)
		DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL0:QoS_LEVEL_FIXED - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_level_fixed_c, ttu_attr.qos_level_fixed_c);
	if (ttu_attr.qos_ramp_disable_c != dml_ttu_attr->qos_ramp_disable_c)
		DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL0:QoS_RAMP_DISABLE - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_ramp_disable_c, ttu_attr.qos_ramp_disable_c);
	if (ttu_attr.refcyc_per_req_delivery_cur0 != dml_ttu_attr->refcyc_per_req_delivery_cur0)
		DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL0:REFCYC_PER_REQ_DELIVERY - Expected: %u Actual: %u\n",
				dml_ttu_attr->refcyc_per_req_delivery_cur0, ttu_attr.refcyc_per_req_delivery_cur0);
	if (ttu_attr.qos_level_fixed_cur0 != dml_ttu_attr->qos_level_fixed_cur0)
		DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL0:QoS_LEVEL_FIXED - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_level_fixed_cur0,
				ttu_attr.qos_level_fixed_cur0);
	if (ttu_attr.qos_ramp_disable_cur0 != dml_ttu_attr->qos_ramp_disable_cur0)
		DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL0:QoS_RAMP_DISABLE - Expected: %u Actual: %u\n",
				dml_ttu_attr->qos_ramp_disable_cur0, ttu_attr.qos_ramp_disable_cur0);
	if (dlg_attr.refcyc_per_pte_group_flip_l != dml_dlg_attr->refcyc_per_pte_group_flip_l)
		DC_LOG_DEBUG("DML Validation | FLIP_PARAMETERS_1:REFCYC_PER_PTE_GROUP_FLIP_L - Expected: %u Actual: %u\n",
				dml_dlg_attr->refcyc_per_pte_group_flip_l, dlg_attr.refcyc_per_pte_group_flip_l);
	if (ttu_attr.refcyc_per_req_delivery_pre_cur0 != dml_ttu_attr->refcyc_per_req_delivery_pre_cur0)
		DC_LOG_DEBUG("DML Validation | DCN_CUR0_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n",
				dml_ttu_attr->refcyc_per_req_delivery_pre_cur0, ttu_attr.refcyc_per_req_delivery_pre_cur0);
	if (ttu_attr.refcyc_per_req_delivery_pre_cur1 != dml_ttu_attr->refcyc_per_req_delivery_pre_cur1)
		DC_LOG_DEBUG("DML Validation | DCN_CUR1_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n",
				dml_ttu_attr->refcyc_per_req_delivery_pre_cur1, ttu_attr.refcyc_per_req_delivery_pre_cur1);
	if (ttu_attr.refcyc_per_req_delivery_pre_l != dml_ttu_attr->refcyc_per_req_delivery_pre_l)
		DC_LOG_DEBUG("DML Validation | DCN_SURF0_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n",
				dml_ttu_attr->refcyc_per_req_delivery_pre_l, ttu_attr.refcyc_per_req_delivery_pre_l);
	if (ttu_attr.refcyc_per_req_delivery_pre_c != dml_ttu_attr->refcyc_per_req_delivery_pre_c)
		DC_LOG_DEBUG("DML Validation | DCN_SURF1_TTU_CNTL1:REFCYC_PER_REQ_DELIVERY_PRE - Expected: %u Actual: %u\n",
				dml_ttu_attr->refcyc_per_req_delivery_pre_c, ttu_attr.refcyc_per_req_delivery_pre_c);
}

static struct hubp_funcs dcn20_hubp_funcs = {
	.hubp_enable_tripleBuffer = hubp2_enable_triplebuffer,
	.hubp_is_triplebuffer_enabled = hubp2_is_triplebuffer_enabled,
	.hubp_program_surface_flip_and_addr = hubp2_program_surface_flip_and_addr,
	.hubp_program_surface_config = hubp2_program_surface_config,
	.hubp_is_flip_pending = hubp2_is_flip_pending,
	.hubp_setup = hubp2_setup,
	.hubp_setup_interdependent = hubp2_setup_interdependent,
	.hubp_set_vm_system_aperture_settings = hubp2_set_vm_system_aperture_settings,
	.set_blank = hubp2_set_blank,
	.dcc_control = hubp2_dcc_control,
	.mem_program_viewport = min_set_viewport,
	.set_cursor_attributes = hubp2_cursor_set_attributes,
	.set_cursor_position = hubp2_cursor_set_position,
	.hubp_clk_cntl = hubp2_clk_cntl,
	.hubp_vtg_sel = hubp2_vtg_sel,
	.dmdata_set_attributes = hubp2_dmdata_set_attributes,
	.dmdata_load = hubp2_dmdata_load,
	.dmdata_status_done = hubp2_dmdata_status_done,
	.hubp_read_state = hubp2_read_state,
	.hubp_clear_underflow = hubp2_clear_underflow,
	.hubp_set_flip_control_surface_gsl = hubp2_set_flip_control_surface_gsl,
	.hubp_init = hubp1_init,
	.validate_dml_output = hubp2_validate_dml_output,
};

bool hubp2_construct(
	struct dcn20_hubp *hubp2,
	struct dc_context *ctx,
	uint32_t inst,
	const struct dcn_hubp2_registers *hubp_regs,
	const struct dcn_hubp2_shift *hubp_shift,
	const struct dcn_hubp2_mask *hubp_mask)
{
	hubp2->base.funcs = &dcn20_hubp_funcs;
	hubp2->base.ctx = ctx;
	hubp2->hubp_regs = hubp_regs;
	hubp2->hubp_shift = hubp_shift;
	hubp2->hubp_mask = hubp_mask;
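	/*
	 * inst identifies this HUBP instance; the OPP and MPCC ids below are
	 * left unassigned (OPP_ID_INVALID / 0xf sentinel) until pipe resources
	 * are mapped to this HUBP.
	 */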
	hubp2->base.inst = inst;
	hubp2->base.opp_id = OPP_ID_INVALID;
	hubp2->base.mpcc_id = 0xf;

	return true;
}
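/*
 * Usage sketch (illustrative only, not part of this file): a DCN 2.0 resource
 * constructor would typically allocate the wrapper struct and hand
 * hubp2_construct() the SoC-specific register/shift/mask tables. The
 * hubp_regs/hubp_shift/hubp_mask names below are placeholders for whatever
 * tables the resource file defines.
 *
 *	struct dcn20_hubp *hubp2 = kzalloc(sizeof(*hubp2), GFP_KERNEL);
 *
 *	if (!hubp2)
 *		return NULL;
 *	if (hubp2_construct(hubp2, ctx, inst,
 *			&hubp_regs[inst], &hubp_shift, &hubp_mask))
 *		return &hubp2->base;
 *	kfree(hubp2);
 *	return NULL;
 */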