/*
 * Copyright 2012-17 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "dcn20_hubp.h"

#include "dm_services.h"
#include "dce_calcs.h"
#include "reg_helper.h"
#include "basics/conversion.h"

#define REG(reg)\
	hubp2->hubp_regs->reg

#define CTX \
	hubp2->base.ctx

#undef FN
#define FN(reg_name, field_name) \
	hubp2->hubp_shift->field_name, hubp2->hubp_mask->field_name

void hubp2_update_dchub(
	struct hubp *hubp,
	struct dchub_init_data *dh_data)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	if (REG(DCN_VM_FB_LOCATION_TOP) == 0)
		return;

	switch (dh_data->fb_mode) {
	case FRAME_BUFFER_MODE_ZFB_ONLY:
		/* For ZFB case need to put DCHUB FB BASE and TOP upside down to indicate ZFB mode */
		REG_UPDATE(DCN_VM_FB_LOCATION_TOP,
				FB_TOP, 0);

		REG_UPDATE(DCN_VM_FB_LOCATION_BASE,
				FB_BASE, 0xFFFFFF);

		/* This field defines the 24 MSBs, bits [47:24] of the 48 bit AGP Base */
		REG_UPDATE(DCN_VM_AGP_BASE,
				AGP_BASE, dh_data->zfb_phys_addr_base >> 24);

		/* This field defines the bottom range of the AGP aperture and represents
		 * the 24 MSBs, bits [47:24] of the 48 address bits
		 */
		REG_UPDATE(DCN_VM_AGP_BOT,
				AGP_BOT, dh_data->zfb_mc_base_addr >> 24);

		/* This field defines the top range of the AGP aperture and represents
		 * the 24 MSBs, bits [47:24] of the 48 address bits
		 */
		REG_UPDATE(DCN_VM_AGP_TOP,
				AGP_TOP, (dh_data->zfb_mc_base_addr +
						dh_data->zfb_size_in_byte - 1) >> 24);
		break;
	case FRAME_BUFFER_MODE_MIXED_ZFB_AND_LOCAL:
		/* Should not touch FB LOCATION (done by VBIOS on AsicInit table) */

		/* This field defines the 24 MSBs, bits [47:24] of the 48 bit AGP Base */
		REG_UPDATE(DCN_VM_AGP_BASE,
				AGP_BASE, dh_data->zfb_phys_addr_base >> 24);

		/* This field defines the bottom range of the AGP aperture and represents
		 * the 24 MSBs, bits [47:24] of the 48 address bits
		 */
		REG_UPDATE(DCN_VM_AGP_BOT,
				AGP_BOT, dh_data->zfb_mc_base_addr >> 24);

		/* This field defines the top range of the AGP aperture and represents
		 * the 24 MSBs, bits [47:24] of the 48 address bits
		 */
		REG_UPDATE(DCN_VM_AGP_TOP,
				AGP_TOP, (dh_data->zfb_mc_base_addr +
						dh_data->zfb_size_in_byte - 1) >> 24);
		break;
	case FRAME_BUFFER_MODE_LOCAL_ONLY:
		/* Should not touch FB LOCATION (should be done by VBIOS) */

		/* This field defines the 24 MSBs, bits [47:24] of the 48 bit AGP Base */
		REG_UPDATE(DCN_VM_AGP_BASE,
				AGP_BASE, 0);

		/* This field defines the bottom range of the AGP aperture and represents
		 * the 24 MSBs, bits [47:24] of the 48 address bits
		 */
		REG_UPDATE(DCN_VM_AGP_BOT,
				AGP_BOT, 0xFFFFFF);

		/* This field defines the top range of the AGP aperture and represents
		 * the 24 MSBs, bits [47:24] of the 48 address bits
		 */
		REG_UPDATE(DCN_VM_AGP_TOP,
				AGP_TOP, 0);
		break;
	default:
		break;
	}

	dh_data->dchub_initialzied = true;
	dh_data->dchub_info_valid = false;
}

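/* Program the DCN VM system aperture registers: the default (fault) address
 * MSB/LSB, the low/high aperture bounds, and enable the L1 TLB with system
 * access mode 0x3. The default address uses bits 48:12 of the 48 bit address,
 * the low/high bounds use bits 48:18.
 */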
void hubp2_set_vm_system_aperture_settings(struct hubp *hubp,
		struct vm_system_aperture_param *apt)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	PHYSICAL_ADDRESS_LOC mc_vm_apt_default;
	PHYSICAL_ADDRESS_LOC mc_vm_apt_low;
	PHYSICAL_ADDRESS_LOC mc_vm_apt_high;

	// The format of default addr is 48:12 of the 48 bit addr
	mc_vm_apt_default.quad_part = apt->sys_default.quad_part >> 12;

	// The format of high/low addrs is 48:18 of the 48 bit addr
	mc_vm_apt_low.quad_part = apt->sys_low.quad_part >> 18;
	mc_vm_apt_high.quad_part = apt->sys_high.quad_part >> 18;

	REG_UPDATE_2(DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_MSB,
			DCN_VM_SYSTEM_APERTURE_DEFAULT_SYSTEM, 1, /* 1 = system physical memory */
			DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_MSB, mc_vm_apt_default.high_part);

	REG_SET(DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_LSB, 0,
			DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_LSB, mc_vm_apt_default.low_part);

	REG_SET(DCN_VM_SYSTEM_APERTURE_LOW_ADDR, 0,
			MC_VM_SYSTEM_APERTURE_LOW_ADDR, mc_vm_apt_low.quad_part);

	REG_SET(DCN_VM_SYSTEM_APERTURE_HIGH_ADDR, 0,
			MC_VM_SYSTEM_APERTURE_HIGH_ADDR, mc_vm_apt_high.quad_part);

	REG_SET_2(DCN_VM_MX_L1_TLB_CNTL, 0,
			ENABLE_L1_TLB, 1,
			SYSTEM_ACCESS_MODE, 0x3);
}

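/* Program the per-pipe DLG and TTU deadline registers from the precomputed
 * dlg_attr and ttu_attr values, covering both the luma and chroma planes.
 */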
void hubp2_program_deadline(
		struct hubp *hubp,
		struct _vcs_dpi_display_dlg_regs_st *dlg_attr,
		struct _vcs_dpi_display_ttu_regs_st *ttu_attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	/* DLG - Per hubp */
	REG_SET_2(BLANK_OFFSET_0, 0,
		REFCYC_H_BLANK_END, dlg_attr->refcyc_h_blank_end,
		DLG_V_BLANK_END, dlg_attr->dlg_vblank_end);

	REG_SET(BLANK_OFFSET_1, 0,
		MIN_DST_Y_NEXT_START, dlg_attr->min_dst_y_next_start);

	REG_SET(DST_DIMENSIONS, 0,
		REFCYC_PER_HTOTAL, dlg_attr->refcyc_per_htotal);

	REG_SET_2(DST_AFTER_SCALER, 0,
		REFCYC_X_AFTER_SCALER, dlg_attr->refcyc_x_after_scaler,
		DST_Y_AFTER_SCALER, dlg_attr->dst_y_after_scaler);

	REG_SET(REF_FREQ_TO_PIX_FREQ, 0,
		REF_FREQ_TO_PIX_FREQ, dlg_attr->ref_freq_to_pix_freq);

	/* DLG - Per luma/chroma */
	REG_SET(VBLANK_PARAMETERS_1, 0,
		REFCYC_PER_PTE_GROUP_VBLANK_L, dlg_attr->refcyc_per_pte_group_vblank_l);

	if (REG(NOM_PARAMETERS_0))
		REG_SET(NOM_PARAMETERS_0, 0,
			DST_Y_PER_PTE_ROW_NOM_L, dlg_attr->dst_y_per_pte_row_nom_l);

	if (REG(NOM_PARAMETERS_1))
		REG_SET(NOM_PARAMETERS_1, 0,
			REFCYC_PER_PTE_GROUP_NOM_L, dlg_attr->refcyc_per_pte_group_nom_l);

	REG_SET(NOM_PARAMETERS_4, 0,
		DST_Y_PER_META_ROW_NOM_L, dlg_attr->dst_y_per_meta_row_nom_l);

	REG_SET(NOM_PARAMETERS_5, 0,
		REFCYC_PER_META_CHUNK_NOM_L, dlg_attr->refcyc_per_meta_chunk_nom_l);

	REG_SET_2(PER_LINE_DELIVERY, 0,
		REFCYC_PER_LINE_DELIVERY_L, dlg_attr->refcyc_per_line_delivery_l,
		REFCYC_PER_LINE_DELIVERY_C, dlg_attr->refcyc_per_line_delivery_c);

	REG_SET(VBLANK_PARAMETERS_2, 0,
		REFCYC_PER_PTE_GROUP_VBLANK_C, dlg_attr->refcyc_per_pte_group_vblank_c);

	if (REG(NOM_PARAMETERS_2))
		REG_SET(NOM_PARAMETERS_2, 0,
			DST_Y_PER_PTE_ROW_NOM_C, dlg_attr->dst_y_per_pte_row_nom_c);

	if (REG(NOM_PARAMETERS_3))
		REG_SET(NOM_PARAMETERS_3, 0,
			REFCYC_PER_PTE_GROUP_NOM_C, dlg_attr->refcyc_per_pte_group_nom_c);

	REG_SET(NOM_PARAMETERS_6, 0,
		DST_Y_PER_META_ROW_NOM_C, dlg_attr->dst_y_per_meta_row_nom_c);

	REG_SET(NOM_PARAMETERS_7, 0,
		REFCYC_PER_META_CHUNK_NOM_C, dlg_attr->refcyc_per_meta_chunk_nom_c);

	/* TTU - per hubp */
	REG_SET_2(DCN_TTU_QOS_WM, 0,
		QoS_LEVEL_LOW_WM, ttu_attr->qos_level_low_wm,
		QoS_LEVEL_HIGH_WM, ttu_attr->qos_level_high_wm);

	/* TTU - per luma/chroma */
	/* Assumed surf0 is luma and 1 is chroma */

	REG_SET_3(DCN_SURF0_TTU_CNTL0, 0,
		REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_l,
		QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_l,
		QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_l);

	REG_SET_3(DCN_SURF1_TTU_CNTL0, 0,
		REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_c,
		QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_c,
		QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_c);

	REG_SET_3(DCN_CUR0_TTU_CNTL0, 0,
		REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_cur0,
		QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_cur0,
		QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_cur0);

	REG_SET(FLIP_PARAMETERS_1, 0,
		REFCYC_PER_PTE_GROUP_FLIP_L, dlg_attr->refcyc_per_pte_group_flip_l);
}

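/* Determine whether VREADY falls at or after VSYNC for the given pipe timing
 * and program HUBP_VREADY_AT_OR_AFTER_VSYNC accordingly; also disables the
 * DLG "dv" test mode via HUBPREQ_DEBUG_DB.
 */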
void hubp2_vready_at_or_After_vsync(struct hubp *hubp,
		struct _vcs_dpi_display_pipe_dest_params_st *pipe_dest)
{
	uint32_t value = 0;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	/* disable_dlg_test_mode: set the 9th bit to 1 to disable "dv" mode */
	REG_WRITE(HUBPREQ_DEBUG_DB, 1 << 8);

	/*
	 * if (VSTARTUP_START - (VREADY_OFFSET+VUPDATE_WIDTH+VUPDATE_OFFSET)/htotal)
	 *		<= OTG_V_BLANK_END
	 *	Set HUBP_VREADY_AT_OR_AFTER_VSYNC = 1
	 * else
	 *	Set HUBP_VREADY_AT_OR_AFTER_VSYNC = 0
	 */
	if ((pipe_dest->vstartup_start - (pipe_dest->vready_offset + pipe_dest->vupdate_width
			+ pipe_dest->vupdate_offset) / pipe_dest->htotal) <= pipe_dest->vblank_end)
		value = 1;
	else
		value = 0;

	REG_UPDATE(DCHUBP_CNTL, HUBP_VREADY_AT_OR_AFTER_VSYNC, value);
}

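/* Program the requestor-side registers: the plane 1 DET buffer base address,
 * the expansion modes, and the luma/chroma request size configuration.
 */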
void hubp2_program_requestor(
		struct hubp *hubp,
		struct _vcs_dpi_display_rq_regs_st *rq_regs)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE(HUBPRET_CONTROL,
			DET_BUF_PLANE1_BASE_ADDRESS, rq_regs->plane1_base_address);
	REG_SET_4(DCN_EXPANSION_MODE, 0,
			DRQ_EXPANSION_MODE, rq_regs->drq_expansion_mode,
			PRQ_EXPANSION_MODE, rq_regs->prq_expansion_mode,
			MRQ_EXPANSION_MODE, rq_regs->mrq_expansion_mode,
			CRQ_EXPANSION_MODE, rq_regs->crq_expansion_mode);
	REG_SET_8(DCHUBP_REQ_SIZE_CONFIG, 0,
		CHUNK_SIZE, rq_regs->rq_regs_l.chunk_size,
		MIN_CHUNK_SIZE, rq_regs->rq_regs_l.min_chunk_size,
		META_CHUNK_SIZE, rq_regs->rq_regs_l.meta_chunk_size,
		MIN_META_CHUNK_SIZE, rq_regs->rq_regs_l.min_meta_chunk_size,
		DPTE_GROUP_SIZE, rq_regs->rq_regs_l.dpte_group_size,
		MPTE_GROUP_SIZE, rq_regs->rq_regs_l.mpte_group_size,
		SWATH_HEIGHT, rq_regs->rq_regs_l.swath_height,
		PTE_ROW_HEIGHT_LINEAR, rq_regs->rq_regs_l.pte_row_height_linear);
	REG_SET_8(DCHUBP_REQ_SIZE_CONFIG_C, 0,
		CHUNK_SIZE_C, rq_regs->rq_regs_c.chunk_size,
		MIN_CHUNK_SIZE_C, rq_regs->rq_regs_c.min_chunk_size,
		META_CHUNK_SIZE_C, rq_regs->rq_regs_c.meta_chunk_size,
		MIN_META_CHUNK_SIZE_C, rq_regs->rq_regs_c.min_meta_chunk_size,
		DPTE_GROUP_SIZE_C, rq_regs->rq_regs_c.dpte_group_size,
		MPTE_GROUP_SIZE_C, rq_regs->rq_regs_c.mpte_group_size,
		SWATH_HEIGHT_C, rq_regs->rq_regs_c.swath_height,
		PTE_ROW_HEIGHT_LINEAR_C, rq_regs->rq_regs_c.pte_row_height_linear);
}

static void hubp2_setup(
		struct hubp *hubp,
		struct _vcs_dpi_display_dlg_regs_st *dlg_attr,
		struct _vcs_dpi_display_ttu_regs_st *ttu_attr,
		struct _vcs_dpi_display_rq_regs_st *rq_regs,
		struct _vcs_dpi_display_pipe_dest_params_st *pipe_dest)
{
	/* OTG is locked when this function is called. Registers are double
	 * buffered, so disabling the requestors is not needed.
	 */
	hubp2_vready_at_or_After_vsync(hubp, pipe_dest);
	hubp2_program_requestor(hubp, rq_regs);
	hubp2_program_deadline(hubp, dlg_attr, ttu_attr);
}

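/* Program the interdependent subset of DLG/TTU registers: prefetch settings,
 * vblank and flip parameters, pre-delivery rates, and the global TTU
 * watermarks.
 */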
void hubp2_setup_interdependent(
		struct hubp *hubp,
		struct _vcs_dpi_display_dlg_regs_st *dlg_attr,
		struct _vcs_dpi_display_ttu_regs_st *ttu_attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_SET_2(PREFETCH_SETTINGS, 0,
			DST_Y_PREFETCH, dlg_attr->dst_y_prefetch,
			VRATIO_PREFETCH, dlg_attr->vratio_prefetch);

	REG_SET(PREFETCH_SETTINGS_C, 0,
			VRATIO_PREFETCH_C, dlg_attr->vratio_prefetch_c);

	REG_SET_2(VBLANK_PARAMETERS_0, 0,
		DST_Y_PER_VM_VBLANK, dlg_attr->dst_y_per_vm_vblank,
		DST_Y_PER_ROW_VBLANK, dlg_attr->dst_y_per_row_vblank);

	REG_SET_2(FLIP_PARAMETERS_0, 0,
		DST_Y_PER_VM_FLIP, dlg_attr->dst_y_per_vm_flip,
		DST_Y_PER_ROW_FLIP, dlg_attr->dst_y_per_row_flip);

	REG_SET(VBLANK_PARAMETERS_3, 0,
		REFCYC_PER_META_CHUNK_VBLANK_L, dlg_attr->refcyc_per_meta_chunk_vblank_l);

	REG_SET(VBLANK_PARAMETERS_4, 0,
		REFCYC_PER_META_CHUNK_VBLANK_C, dlg_attr->refcyc_per_meta_chunk_vblank_c);

	REG_SET(FLIP_PARAMETERS_2, 0,
		REFCYC_PER_META_CHUNK_FLIP_L, dlg_attr->refcyc_per_meta_chunk_flip_l);

	REG_SET_2(PER_LINE_DELIVERY_PRE, 0,
		REFCYC_PER_LINE_DELIVERY_PRE_L, dlg_attr->refcyc_per_line_delivery_pre_l,
		REFCYC_PER_LINE_DELIVERY_PRE_C, dlg_attr->refcyc_per_line_delivery_pre_c);

	REG_SET(DCN_SURF0_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE,
		ttu_attr->refcyc_per_req_delivery_pre_l);
	REG_SET(DCN_SURF1_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE,
		ttu_attr->refcyc_per_req_delivery_pre_c);
	REG_SET(DCN_CUR0_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE, ttu_attr->refcyc_per_req_delivery_pre_cur0);
	REG_SET(DCN_CUR1_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE, ttu_attr->refcyc_per_req_delivery_pre_cur1);

	REG_SET_2(DCN_GLOBAL_TTU_CNTL, 0,
		MIN_TTU_VBLANK, ttu_attr->min_ttu_vblank,
		QoS_LEVEL_FLIP, ttu_attr->qos_level_flip);
}

/* In DCN2 (GFX10), the following GFX fields are deprecated. They can be set
 * but they will not be used:
 *	NUM_BANKS
 *	NUM_SE
 *	NUM_RB_PER_SE
 *	RB_ALIGNED
 * Other fields can be defaulted, since they never change:
 *	PIPE_ALIGNED = 0
 *	META_LINEAR = 0
 * In GFX10, only these apply:
 *	PIPE_INTERLEAVE
 *	NUM_PIPES
 *	MAX_COMPRESSED_FRAGS
 *	SW_MODE
 */
static void hubp2_program_tiling(
		struct dcn20_hubp *hubp2,
		const union dc_tiling_info *info,
		const enum surface_pixel_format pixel_format)
{
	REG_UPDATE_3(DCSURF_ADDR_CONFIG,
			NUM_PIPES, log_2(info->gfx9.num_pipes),
			PIPE_INTERLEAVE, info->gfx9.pipe_interleave,
			MAX_COMPRESSED_FRAGS, log_2(info->gfx9.max_compressed_frags));

	REG_UPDATE_4(DCSURF_TILING_CONFIG,
			SW_MODE, info->gfx9.swizzle,
			META_LINEAR, 0,
			RB_ALIGNED, 0,
			PIPE_ALIGNED, 0);
}

void hubp2_program_size(
	struct hubp *hubp,
	enum surface_pixel_format format,
	const union plane_size *plane_size,
	struct dc_plane_dcc_param *dcc)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t pitch, meta_pitch, pitch_c, meta_pitch_c;
	bool use_pitch_c = false;

	/* Program data and meta surface pitch (calculation from addrlib)
	 * 444 or 420 luma
	 */
	use_pitch_c = format >= SURFACE_PIXEL_FORMAT_VIDEO_BEGIN
		&& format < SURFACE_PIXEL_FORMAT_SUBSAMPLE_END;
	if (use_pitch_c) {
		ASSERT(plane_size->video.chroma_pitch != 0);
		/* Chroma pitch zero can cause system hang! */

		pitch = plane_size->video.luma_pitch - 1;
		meta_pitch = dcc->video.meta_pitch_l - 1;
		pitch_c = plane_size->video.chroma_pitch - 1;
		meta_pitch_c = dcc->video.meta_pitch_c - 1;
	} else {
		pitch = plane_size->grph.surface_pitch - 1;
		meta_pitch = dcc->grph.meta_pitch - 1;
		pitch_c = 0;
		meta_pitch_c = 0;
	}

	if (!dcc->enable) {
		meta_pitch = 0;
		meta_pitch_c = 0;
	}

	REG_UPDATE_2(DCSURF_SURFACE_PITCH,
			PITCH, pitch, META_PITCH, meta_pitch);

	use_pitch_c = format >= SURFACE_PIXEL_FORMAT_VIDEO_BEGIN;
	if (use_pitch_c)
		REG_UPDATE_2(DCSURF_SURFACE_PITCH_C,
			PITCH_C, pitch_c, META_PITCH_C, meta_pitch_c);
}

void hubp2_program_rotation(
	struct hubp *hubp,
	enum dc_rotation_angle rotation,
	bool horizontal_mirror)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t mirror;

	if (horizontal_mirror)
		mirror = 1;
	else
		mirror = 0;

	/* Program rotation angle and horizontal mirror */
	if (rotation == ROTATION_ANGLE_0)
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				ROTATION_ANGLE, 0,
				H_MIRROR_EN, mirror);
	else if (rotation == ROTATION_ANGLE_90)
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				ROTATION_ANGLE, 1,
				H_MIRROR_EN, mirror);
	else if (rotation == ROTATION_ANGLE_180)
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				ROTATION_ANGLE, 2,
				H_MIRROR_EN, mirror);
	else if (rotation == ROTATION_ANGLE_270)
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				ROTATION_ANGLE, 3,
				H_MIRROR_EN, mirror);
}

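/* Enable or disable DCC on the primary and secondary surfaces and select
 * whether independent 64B blocks are used.
 */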
void hubp2_dcc_control(struct hubp *hubp, bool enable,
		bool independent_64b_blks)
{
	uint32_t dcc_en = enable ? 1 : 0;
	uint32_t dcc_ind_64b_blk = independent_64b_blks ? 1 : 0;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE_4(DCSURF_SURFACE_CONTROL,
			PRIMARY_SURFACE_DCC_EN, dcc_en,
			PRIMARY_SURFACE_DCC_IND_64B_BLK, dcc_ind_64b_blk,
			SECONDARY_SURFACE_DCC_EN, dcc_en,
			SECONDARY_SURFACE_DCC_IND_64B_BLK, dcc_ind_64b_blk);
}

void hubp2_program_pixel_format(
	struct hubp *hubp,
	enum surface_pixel_format format)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t red_bar = 3;
	uint32_t blue_bar = 2;

	/* swap for ABGR format */
	if (format == SURFACE_PIXEL_FORMAT_GRPH_ABGR8888
			|| format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010
			|| format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS
			|| format == SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F) {
		red_bar = 2;
		blue_bar = 3;
	}

	REG_UPDATE_2(HUBPRET_CONTROL,
			CROSSBAR_SRC_CB_B, blue_bar,
			CROSSBAR_SRC_CR_R, red_bar);

	/* Mapping is same as ipp programming (cnvc) */

	switch (format) {
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 1);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 3);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 8);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 10);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 22);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F: /* we use crossbar already */
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 24);
		break;

	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 65);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 64);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 67);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 66);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_AYCrCb8888:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 12);
		break;
#if defined(CONFIG_DRM_AMD_DC_DCN2_0)
	case SURFACE_PIXEL_FORMAT_GRPH_RGB111110_FIX:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 112);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_BGR101111_FIX:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 113);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_ACrYCb2101010:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 114);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_RGB111110_FLOAT:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 118);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_BGR101111_FLOAT:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 119);
		break;
#endif
	default:
		BREAK_TO_DEBUGGER();
		break;
	}

	/* don't see the need to program the xbar in DCN 1.0 */
}

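/* Program the complete surface configuration for this HUBP: DCC, tiling,
 * size, rotation/mirror and pixel format, in that order.
 */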
void hubp2_program_surface_config(
	struct hubp *hubp,
	enum surface_pixel_format format,
	union dc_tiling_info *tiling_info,
	union plane_size *plane_size,
	enum dc_rotation_angle rotation,
	struct dc_plane_dcc_param *dcc,
	bool horizontal_mirror,
	unsigned int compat_level)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	hubp2_dcc_control(hubp, dcc->enable, dcc->grph.independent_64b_blks);
	hubp2_program_tiling(hubp2, tiling_info, format);
	hubp2_program_size(hubp, format, plane_size, dcc);
	hubp2_program_rotation(hubp, rotation, horizontal_mirror);
	hubp2_program_pixel_format(hubp, format);
}

enum cursor_lines_per_chunk hubp2_get_lines_per_chunk(
	unsigned int cursor_width,
	enum dc_cursor_color_format cursor_mode)
{
	enum cursor_lines_per_chunk line_per_chunk = CURSOR_LINE_PER_CHUNK_16;

	if (cursor_mode == CURSOR_MODE_MONO)
		line_per_chunk = CURSOR_LINE_PER_CHUNK_16;
	else if (cursor_mode == CURSOR_MODE_COLOR_1BIT_AND ||
		 cursor_mode == CURSOR_MODE_COLOR_PRE_MULTIPLIED_ALPHA ||
		 cursor_mode == CURSOR_MODE_COLOR_UN_PRE_MULTIPLIED_ALPHA) {
		if (cursor_width >= 1 && cursor_width <= 32)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_16;
		else if (cursor_width >= 33 && cursor_width <= 64)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_8;
		else if (cursor_width >= 65 && cursor_width <= 128)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_4;
		else if (cursor_width >= 129 && cursor_width <= 256)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_2;
	} else if (cursor_mode == CURSOR_MODE_COLOR_64BIT_FP_PRE_MULTIPLIED ||
		   cursor_mode == CURSOR_MODE_COLOR_64BIT_FP_UN_PRE_MULTIPLIED) {
		if (cursor_width >= 1 && cursor_width <= 16)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_16;
		else if (cursor_width >= 17 && cursor_width <= 32)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_8;
		else if (cursor_width >= 33 && cursor_width <= 64)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_4;
		else if (cursor_width >= 65 && cursor_width <= 128)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_2;
		else if (cursor_width >= 129 && cursor_width <= 256)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_1;
	}

	return line_per_chunk;
}

void hubp2_cursor_set_attributes(
		struct hubp *hubp,
		const struct dc_cursor_attributes *attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	enum cursor_pitch hw_pitch = hubp1_get_cursor_pitch(attr->pitch);
	enum cursor_lines_per_chunk lpc = hubp2_get_lines_per_chunk(
			attr->width, attr->color_format);

	hubp->curs_attr = *attr;

	REG_UPDATE(CURSOR_SURFACE_ADDRESS_HIGH,
			CURSOR_SURFACE_ADDRESS_HIGH, attr->address.high_part);
	REG_UPDATE(CURSOR_SURFACE_ADDRESS,
			CURSOR_SURFACE_ADDRESS, attr->address.low_part);

	REG_UPDATE_2(CURSOR_SIZE,
			CURSOR_WIDTH, attr->width,
			CURSOR_HEIGHT, attr->height);

	REG_UPDATE_4(CURSOR_CONTROL,
			CURSOR_MODE, attr->color_format,
			CURSOR_2X_MAGNIFY, attr->attribute_flags.bits.ENABLE_MAGNIFICATION,
			CURSOR_PITCH, hw_pitch,
			CURSOR_LINES_PER_CHUNK, lpc);

	REG_SET_2(CURSOR_SETTINGS, 0,
			/* no shift of the cursor HDL schedule */
			CURSOR0_DST_Y_OFFSET, 0,
			/* used to shift the cursor chunk request deadline */
			CURSOR0_CHUNK_HDL_ADJUST, 3);
}

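/* Configure DMDATA transmission. In HW mode the metadata is fetched from the
 * address in attr; in SW mode it is loaded directly into the HUBP DMDATA
 * buffer. QoS mode, level and deadline delta are programmed in both cases.
 */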
void hubp2_dmdata_set_attributes(
		struct hubp *hubp,
		const struct dc_dmdata_attributes *attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	if (attr->dmdata_mode == DMDATA_HW_MODE) {
		/* set to HW mode */
		REG_UPDATE(DMDATA_CNTL,
				DMDATA_MODE, 1);

		/* for DMDATA flip, need to use SURFACE_UPDATE_LOCK */
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_UPDATE_LOCK, 1);

		/* toggle DMDATA_UPDATED and set repeat and size */
		REG_UPDATE(DMDATA_CNTL,
				DMDATA_UPDATED, 0);
		REG_UPDATE_3(DMDATA_CNTL,
				DMDATA_UPDATED, 1,
				DMDATA_REPEAT, attr->dmdata_repeat,
				DMDATA_SIZE, attr->dmdata_size);

		/* set DMDATA address */
		REG_WRITE(DMDATA_ADDRESS_LOW, attr->address.low_part);
		REG_UPDATE(DMDATA_ADDRESS_HIGH,
				DMDATA_ADDRESS_HIGH, attr->address.high_part);

		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_UPDATE_LOCK, 0);

	} else {
		/* set to SW mode before loading data */
		REG_SET(DMDATA_CNTL, 0,
				DMDATA_MODE, 0);
		/* toggle DMDATA_SW_UPDATED to start loading sequence */
		REG_UPDATE(DMDATA_SW_CNTL,
				DMDATA_SW_UPDATED, 0);
		REG_UPDATE_3(DMDATA_SW_CNTL,
				DMDATA_SW_UPDATED, 1,
				DMDATA_SW_REPEAT, attr->dmdata_repeat,
				DMDATA_SW_SIZE, attr->dmdata_size);
		/* load data into hubp dmdata buffer */
		hubp2_dmdata_load(hubp, attr->dmdata_size, attr->dmdata_sw_data);
	}

	/* Note that DL_DELTA must be programmed if we want to use TTU mode */
	REG_SET_3(DMDATA_QOS_CNTL, 0,
			DMDATA_QOS_MODE, attr->dmdata_qos_mode,
			DMDATA_QOS_LEVEL, attr->dmdata_qos_level,
			DMDATA_DL_DELTA, attr->dmdata_dl_delta);
}

void hubp2_dmdata_load(
		struct hubp *hubp,
		uint32_t dmdata_sw_size,
		const uint32_t *dmdata_sw_data)
{
	int i;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	/* load dmdata into HUBP buffer in SW mode */
	for (i = 0; i < dmdata_sw_size / 4; i++)
		REG_WRITE(DMDATA_SW_DATA, dmdata_sw_data[i]);
}

bool hubp2_dmdata_status_done(struct hubp *hubp)
{
	uint32_t status;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_GET(DMDATA_STATUS, DMDATA_DONE, &status);
	return (status == 1);
}

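/* Program the flip type, VMID and surface/meta addresses for the requested
 * plane address type (graphics, video progressive or stereo). The high part
 * of each address pair must be written before the low part; the write to the
 * primary surface address register latches the set.
 */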
bool hubp2_program_surface_flip_and_addr(
	struct hubp *hubp,
	const struct dc_plane_address *address,
	bool flip_immediate)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	/* program flip type */
	REG_UPDATE(DCSURF_FLIP_CONTROL,
			SURFACE_FLIP_TYPE, flip_immediate);

	/* program VMID reg */
	REG_UPDATE(VMID_SETTINGS_0,
			VMID, address->vmid);

	if (address->type == PLN_ADDR_TYPE_GRPH_STEREO) {
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_MODE_FOR_STEREOSYNC, 0x1);
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_IN_STEREOSYNC, 0x1);
	} else {
		/* turn off stereo if not in stereo */
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_MODE_FOR_STEREOSYNC, 0x0);
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_IN_STEREOSYNC, 0x0);
	}

	/* HW automatically latches the rest of the address registers on write to
	 * DCSURF_PRIMARY_SURFACE_ADDRESS if SURFACE_UPDATE_LOCK is not used.
	 *
	 * Program the high part first and then the low addr, order matters!
	 */
	switch (address->type) {
	case PLN_ADDR_TYPE_GRAPHICS:
		/* DCN1.0 does not support const color
		 * TODO: program DCHUBBUB_RET_PATH_DCC_CFGx_0/1
		 * based on address->grph.dcc_const_color
		 * x = 0, 2, 4, 6 for pipe 0, 1, 2, 3 for rgb and luma
		 * x = 1, 3, 5, 7 for pipe 0, 1, 2, 3 for chroma
		 */

		if (address->grph.addr.quad_part == 0)
			break;

		REG_UPDATE_2(DCSURF_SURFACE_CONTROL,
				PRIMARY_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ, address->tmz_surface);

		if (address->grph.meta_addr.quad_part != 0) {
			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0,
					PRIMARY_META_SURFACE_ADDRESS_HIGH,
					address->grph.meta_addr.high_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0,
					PRIMARY_META_SURFACE_ADDRESS,
					address->grph.meta_addr.low_part);
		}

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0,
				PRIMARY_SURFACE_ADDRESS_HIGH,
				address->grph.addr.high_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0,
				PRIMARY_SURFACE_ADDRESS,
				address->grph.addr.low_part);
		break;
	case PLN_ADDR_TYPE_VIDEO_PROGRESSIVE:
		if (address->video_progressive.luma_addr.quad_part == 0
				|| address->video_progressive.chroma_addr.quad_part == 0)
			break;

		REG_UPDATE_4(DCSURF_SURFACE_CONTROL,
				PRIMARY_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_SURFACE_TMZ_C, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ_C, address->tmz_surface);

		if (address->video_progressive.luma_meta_addr.quad_part != 0) {
			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH_C, 0,
					PRIMARY_META_SURFACE_ADDRESS_HIGH_C,
					address->video_progressive.chroma_meta_addr.high_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_C, 0,
					PRIMARY_META_SURFACE_ADDRESS_C,
					address->video_progressive.chroma_meta_addr.low_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0,
					PRIMARY_META_SURFACE_ADDRESS_HIGH,
					address->video_progressive.luma_meta_addr.high_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0,
					PRIMARY_META_SURFACE_ADDRESS,
					address->video_progressive.luma_meta_addr.low_part);
		}

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C, 0,
				PRIMARY_SURFACE_ADDRESS_HIGH_C,
				address->video_progressive.chroma_addr.high_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_C, 0,
				PRIMARY_SURFACE_ADDRESS_C,
				address->video_progressive.chroma_addr.low_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0,
				PRIMARY_SURFACE_ADDRESS_HIGH,
				address->video_progressive.luma_addr.high_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0,
				PRIMARY_SURFACE_ADDRESS,
				address->video_progressive.luma_addr.low_part);
		break;
	case PLN_ADDR_TYPE_GRPH_STEREO:
		if (address->grph_stereo.left_addr.quad_part == 0)
			break;
		if (address->grph_stereo.right_addr.quad_part == 0)
			break;

		REG_UPDATE_8(DCSURF_SURFACE_CONTROL,
				PRIMARY_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_SURFACE_TMZ_C, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ_C, address->tmz_surface,
				SECONDARY_SURFACE_TMZ, address->tmz_surface,
				SECONDARY_SURFACE_TMZ_C, address->tmz_surface,
				SECONDARY_META_SURFACE_TMZ, address->tmz_surface,
				SECONDARY_META_SURFACE_TMZ_C, address->tmz_surface);

		if (address->grph_stereo.right_meta_addr.quad_part != 0) {
			REG_SET(DCSURF_SECONDARY_META_SURFACE_ADDRESS_HIGH, 0,
					SECONDARY_META_SURFACE_ADDRESS_HIGH,
					address->grph_stereo.right_meta_addr.high_part);

			REG_SET(DCSURF_SECONDARY_META_SURFACE_ADDRESS, 0,
					SECONDARY_META_SURFACE_ADDRESS,
					address->grph_stereo.right_meta_addr.low_part);
		}
		if (address->grph_stereo.left_meta_addr.quad_part != 0) {
			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0,
					PRIMARY_META_SURFACE_ADDRESS_HIGH,
					address->grph_stereo.left_meta_addr.high_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0,
					PRIMARY_META_SURFACE_ADDRESS,
					address->grph_stereo.left_meta_addr.low_part);
		}

		REG_SET(DCSURF_SECONDARY_SURFACE_ADDRESS_HIGH, 0,
				SECONDARY_SURFACE_ADDRESS_HIGH,
				address->grph_stereo.right_addr.high_part);

		REG_SET(DCSURF_SECONDARY_SURFACE_ADDRESS, 0,
				SECONDARY_SURFACE_ADDRESS,
				address->grph_stereo.right_addr.low_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0,
				PRIMARY_SURFACE_ADDRESS_HIGH,
				address->grph_stereo.left_addr.high_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0,
				PRIMARY_SURFACE_ADDRESS,
				address->grph_stereo.left_addr.low_part);
		break;
	default:
		BREAK_TO_DEBUGGER();
		break;
	}

	hubp->request_address = *address;

	return true;
}

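/* Enable or disable surface triple buffering, only touching the register
 * when the requested state differs from the current one.
 */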
void hubp2_enable_triplebuffer(
	struct hubp *hubp,
	bool enable)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t triple_buffer_en = 0;
	bool tri_buffer_en;

	REG_GET(DCSURF_FLIP_CONTROL2, SURFACE_TRIPLE_BUFFER_ENABLE, &triple_buffer_en);
	tri_buffer_en = (triple_buffer_en == 1);
	if (tri_buffer_en != enable) {
		REG_UPDATE(DCSURF_FLIP_CONTROL2,
			SURFACE_TRIPLE_BUFFER_ENABLE, enable ? DC_TRIPLEBUFFER_ENABLE : DC_TRIPLEBUFFER_DISABLE);
	}
}

bool hubp2_is_triplebuffer_enabled(
	struct hubp *hubp)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t triple_buffer_en = 0;

	REG_GET(DCSURF_FLIP_CONTROL2, SURFACE_TRIPLE_BUFFER_ENABLE, &triple_buffer_en);

	return (bool)triple_buffer_en;
}

void hubp2_set_flip_control_surface_gsl(struct hubp *hubp, bool enable)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE(DCSURF_FLIP_CONTROL2, SURFACE_GSL_ENABLE, enable ? 1 : 0);
}

bool hubp2_is_flip_pending(struct hubp *hubp)
{
	uint32_t flip_pending = 0;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	struct dc_plane_address earliest_inuse_address;

	REG_GET(DCSURF_FLIP_CONTROL,
			SURFACE_FLIP_PENDING, &flip_pending);

	REG_GET(DCSURF_SURFACE_EARLIEST_INUSE,
			SURFACE_EARLIEST_INUSE_ADDRESS, &earliest_inuse_address.grph.addr.low_part);

	REG_GET(DCSURF_SURFACE_EARLIEST_INUSE_HIGH,
			SURFACE_EARLIEST_INUSE_ADDRESS_HIGH, &earliest_inuse_address.grph.addr.high_part);

	if (flip_pending)
		return true;

	if (earliest_inuse_address.grph.addr.quad_part != hubp->request_address.grph.addr.quad_part)
		return true;

	return false;
}

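/* Blank or unblank the HUBP. When blanking, wait for outstanding requests to
 * drain (unless the HUBP is power gated) and invalidate the cached MPCC and
 * OPP ids.
 */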
void hubp2_set_blank(struct hubp *hubp, bool blank)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t blank_en = blank ? 1 : 0;

	REG_UPDATE_2(DCHUBP_CNTL,
			HUBP_BLANK_EN, blank_en,
			HUBP_TTU_DISABLE, blank_en);

	if (blank) {
		uint32_t reg_val = REG_READ(DCHUBP_CNTL);

		if (reg_val) {
			/* init sequence workaround: in case HUBP is
			 * power gated, this wait would time out.
			 *
			 * We just wrote the register to a non-zero value;
			 * if it reads back as 0 the HUBP is gated, so only
			 * wait when the read-back is non-zero.
			 */
			REG_WAIT(DCHUBP_CNTL,
					HUBP_NO_OUTSTANDING_REQ, 1,
					1, 200);
		}

		hubp->mpcc_id = 0xf;
		hubp->opp_id = OPP_ID_INVALID;
	}
}

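/* Program the cursor position, hotspot and destination X offset, taking the
 * pipe rotation, mirroring and horizontal scale ratio into account, and
 * disable the cursor while it lies entirely outside the viewport.
 */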
void hubp2_cursor_set_position(
		struct hubp *hubp,
		const struct dc_cursor_position *pos,
		const struct dc_cursor_mi_param *param)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	int src_x_offset = pos->x - pos->x_hotspot - param->viewport.x;
	int src_y_offset = pos->y - pos->y_hotspot - param->viewport.y;
	int x_hotspot = pos->x_hotspot;
	int y_hotspot = pos->y_hotspot;
	uint32_t dst_x_offset;
	uint32_t cur_en = pos->enable ? 1 : 0;

	/*
	 * Guard against cursor_set_position() being called with invalid
	 * attributes
	 *
	 * TODO: Look at combining cursor_set_position() and
	 * cursor_set_attributes() into cursor_update()
	 */
	if (hubp->curs_attr.address.quad_part == 0)
		return;

	if (param->rotation == ROTATION_ANGLE_90 || param->rotation == ROTATION_ANGLE_270) {
		src_x_offset = pos->y - pos->y_hotspot - param->viewport.x;
		y_hotspot = pos->x_hotspot;
		x_hotspot = pos->y_hotspot;
	}

	if (param->mirror) {
		x_hotspot = param->viewport.width - x_hotspot;
		src_x_offset = param->viewport.x + param->viewport.width - src_x_offset;
	}

	dst_x_offset = (src_x_offset >= 0) ? src_x_offset : 0;
	dst_x_offset *= param->ref_clk_khz;
	dst_x_offset /= param->pixel_clk_khz;

	ASSERT(param->h_scale_ratio.value);

	if (param->h_scale_ratio.value)
		dst_x_offset = dc_fixpt_floor(dc_fixpt_div(
				dc_fixpt_from_int(dst_x_offset),
				param->h_scale_ratio));

	if (src_x_offset >= (int)param->viewport.width)
		cur_en = 0;  /* not visible beyond right edge */

	if (src_x_offset + (int)hubp->curs_attr.width <= 0)
		cur_en = 0;  /* not visible beyond left edge */

	if (src_y_offset >= (int)param->viewport.height)
		cur_en = 0;  /* not visible beyond bottom edge */

	if (src_y_offset + (int)hubp->curs_attr.height <= 0)
		cur_en = 0;  /* not visible beyond top edge */

	if (cur_en && REG_READ(CURSOR_SURFACE_ADDRESS) == 0)
		hubp->funcs->set_cursor_attributes(hubp, &hubp->curs_attr);

	REG_UPDATE(CURSOR_CONTROL,
			CURSOR_ENABLE, cur_en);

	REG_SET_2(CURSOR_POSITION, 0,
			CURSOR_X_POSITION, pos->x,
			CURSOR_Y_POSITION, pos->y);

	REG_SET_2(CURSOR_HOT_SPOT, 0,
			CURSOR_HOT_SPOT_X, x_hotspot,
			CURSOR_HOT_SPOT_Y, y_hotspot);

	REG_SET(CURSOR_DST_OFFSET, 0,
			CURSOR_DST_X_OFFSET, dst_x_offset);
	/* TODO Handle surface pixel formats other than 4:4:4 */
}

void hubp2_clk_cntl(struct hubp *hubp, bool enable)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t clk_enable = enable ? 1 : 0;

	REG_UPDATE(HUBP_CLK_CNTL, HUBP_CLOCK_ENABLE, clk_enable);
}

void hubp2_vtg_sel(struct hubp *hubp, uint32_t otg_inst)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE(DCHUBP_CNTL, HUBP_VTG_SEL, otg_inst);
}

void hubp2_clear_underflow(struct hubp *hubp)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE(DCHUBP_CNTL, HUBP_UNDERFLOW_CLEAR, 1);
}

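/* Read back the register state shared across DCN2.x HUBP variants
 * (requestor, DLG, TTU and surface configuration) into hubp2->state.
 */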
void hubp2_read_state_common(struct hubp *hubp)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	struct dcn_hubp_state *s = &hubp2->state;
	struct _vcs_dpi_display_dlg_regs_st *dlg_attr = &s->dlg_attr;
	struct _vcs_dpi_display_ttu_regs_st *ttu_attr = &s->ttu_attr;
	struct _vcs_dpi_display_rq_regs_st *rq_regs = &s->rq_regs;

	/* Requester */
	REG_GET(HUBPRET_CONTROL,
			DET_BUF_PLANE1_BASE_ADDRESS, &rq_regs->plane1_base_address);
	REG_GET_4(DCN_EXPANSION_MODE,
			DRQ_EXPANSION_MODE, &rq_regs->drq_expansion_mode,
			PRQ_EXPANSION_MODE, &rq_regs->prq_expansion_mode,
			MRQ_EXPANSION_MODE, &rq_regs->mrq_expansion_mode,
			CRQ_EXPANSION_MODE, &rq_regs->crq_expansion_mode);

	/* DLG - Per hubp */
	REG_GET_2(BLANK_OFFSET_0,
		REFCYC_H_BLANK_END, &dlg_attr->refcyc_h_blank_end,
		DLG_V_BLANK_END, &dlg_attr->dlg_vblank_end);

	REG_GET(BLANK_OFFSET_1,
		MIN_DST_Y_NEXT_START, &dlg_attr->min_dst_y_next_start);

	REG_GET(DST_DIMENSIONS,
		REFCYC_PER_HTOTAL, &dlg_attr->refcyc_per_htotal);

	REG_GET_2(DST_AFTER_SCALER,
		REFCYC_X_AFTER_SCALER, &dlg_attr->refcyc_x_after_scaler,
		DST_Y_AFTER_SCALER, &dlg_attr->dst_y_after_scaler);

	if (REG(PREFETCH_SETTINS))
		REG_GET_2(PREFETCH_SETTINS,
			DST_Y_PREFETCH, &dlg_attr->dst_y_prefetch,
			VRATIO_PREFETCH, &dlg_attr->vratio_prefetch);
	else
		REG_GET_2(PREFETCH_SETTINGS,
			DST_Y_PREFETCH, &dlg_attr->dst_y_prefetch,
			VRATIO_PREFETCH, &dlg_attr->vratio_prefetch);

	REG_GET_2(VBLANK_PARAMETERS_0,
		DST_Y_PER_VM_VBLANK, &dlg_attr->dst_y_per_vm_vblank,
		DST_Y_PER_ROW_VBLANK, &dlg_attr->dst_y_per_row_vblank);

	REG_GET(REF_FREQ_TO_PIX_FREQ,
		REF_FREQ_TO_PIX_FREQ, &dlg_attr->ref_freq_to_pix_freq);

	/* DLG - Per luma/chroma */
	REG_GET(VBLANK_PARAMETERS_1,
		REFCYC_PER_PTE_GROUP_VBLANK_L, &dlg_attr->refcyc_per_pte_group_vblank_l);

	REG_GET(VBLANK_PARAMETERS_3,
		REFCYC_PER_META_CHUNK_VBLANK_L, &dlg_attr->refcyc_per_meta_chunk_vblank_l);

	if (REG(NOM_PARAMETERS_0))
		REG_GET(NOM_PARAMETERS_0,
			DST_Y_PER_PTE_ROW_NOM_L, &dlg_attr->dst_y_per_pte_row_nom_l);

	if (REG(NOM_PARAMETERS_1))
		REG_GET(NOM_PARAMETERS_1,
			REFCYC_PER_PTE_GROUP_NOM_L, &dlg_attr->refcyc_per_pte_group_nom_l);

	REG_GET(NOM_PARAMETERS_4,
		DST_Y_PER_META_ROW_NOM_L, &dlg_attr->dst_y_per_meta_row_nom_l);

	REG_GET(NOM_PARAMETERS_5,
		REFCYC_PER_META_CHUNK_NOM_L, &dlg_attr->refcyc_per_meta_chunk_nom_l);

	REG_GET_2(PER_LINE_DELIVERY_PRE,
		REFCYC_PER_LINE_DELIVERY_PRE_L, &dlg_attr->refcyc_per_line_delivery_pre_l,
		REFCYC_PER_LINE_DELIVERY_PRE_C, &dlg_attr->refcyc_per_line_delivery_pre_c);

	REG_GET_2(PER_LINE_DELIVERY,
		REFCYC_PER_LINE_DELIVERY_L, &dlg_attr->refcyc_per_line_delivery_l,
		REFCYC_PER_LINE_DELIVERY_C, &dlg_attr->refcyc_per_line_delivery_c);

	if (REG(PREFETCH_SETTINS_C))
		REG_GET(PREFETCH_SETTINS_C,
			VRATIO_PREFETCH_C, &dlg_attr->vratio_prefetch_c);
	else
		REG_GET(PREFETCH_SETTINGS_C,
			VRATIO_PREFETCH_C, &dlg_attr->vratio_prefetch_c);

	REG_GET(VBLANK_PARAMETERS_2,
		REFCYC_PER_PTE_GROUP_VBLANK_C, &dlg_attr->refcyc_per_pte_group_vblank_c);

	REG_GET(VBLANK_PARAMETERS_4,
		REFCYC_PER_META_CHUNK_VBLANK_C, &dlg_attr->refcyc_per_meta_chunk_vblank_c);

	if (REG(NOM_PARAMETERS_2))
		REG_GET(NOM_PARAMETERS_2,
			DST_Y_PER_PTE_ROW_NOM_C, &dlg_attr->dst_y_per_pte_row_nom_c);

	if (REG(NOM_PARAMETERS_3))
		REG_GET(NOM_PARAMETERS_3,
			REFCYC_PER_PTE_GROUP_NOM_C, &dlg_attr->refcyc_per_pte_group_nom_c);

	REG_GET(NOM_PARAMETERS_6,
		DST_Y_PER_META_ROW_NOM_C, &dlg_attr->dst_y_per_meta_row_nom_c);

	REG_GET(NOM_PARAMETERS_7,
		REFCYC_PER_META_CHUNK_NOM_C, &dlg_attr->refcyc_per_meta_chunk_nom_c);

	/* TTU - per hubp */
	REG_GET_2(DCN_TTU_QOS_WM,
		QoS_LEVEL_LOW_WM, &ttu_attr->qos_level_low_wm,
		QoS_LEVEL_HIGH_WM, &ttu_attr->qos_level_high_wm);

	REG_GET_2(DCN_GLOBAL_TTU_CNTL,
		MIN_TTU_VBLANK, &ttu_attr->min_ttu_vblank,
		QoS_LEVEL_FLIP, &ttu_attr->qos_level_flip);

	/* TTU - per luma/chroma */
	/* Assumed surf0 is luma and 1 is chroma */

	REG_GET_3(DCN_SURF0_TTU_CNTL0,
		REFCYC_PER_REQ_DELIVERY, &ttu_attr->refcyc_per_req_delivery_l,
		QoS_LEVEL_FIXED, &ttu_attr->qos_level_fixed_l,
		QoS_RAMP_DISABLE, &ttu_attr->qos_ramp_disable_l);

	REG_GET(DCN_SURF0_TTU_CNTL1,
		REFCYC_PER_REQ_DELIVERY_PRE,
		&ttu_attr->refcyc_per_req_delivery_pre_l);

	REG_GET_3(DCN_SURF1_TTU_CNTL0,
		REFCYC_PER_REQ_DELIVERY, &ttu_attr->refcyc_per_req_delivery_c,
		QoS_LEVEL_FIXED, &ttu_attr->qos_level_fixed_c,
		QoS_RAMP_DISABLE, &ttu_attr->qos_ramp_disable_c);

	REG_GET(DCN_SURF1_TTU_CNTL1,
		REFCYC_PER_REQ_DELIVERY_PRE,
		&ttu_attr->refcyc_per_req_delivery_pre_c);

	/* Rest of hubp */
	REG_GET(DCSURF_SURFACE_CONFIG,
			SURFACE_PIXEL_FORMAT, &s->pixel_format);

	REG_GET(DCSURF_SURFACE_EARLIEST_INUSE_HIGH,
			SURFACE_EARLIEST_INUSE_ADDRESS_HIGH, &s->inuse_addr_hi);

	REG_GET(DCSURF_SURFACE_EARLIEST_INUSE,
			SURFACE_EARLIEST_INUSE_ADDRESS, &s->inuse_addr_lo);

	REG_GET_2(DCSURF_PRI_VIEWPORT_DIMENSION,
			PRI_VIEWPORT_WIDTH, &s->viewport_width,
			PRI_VIEWPORT_HEIGHT, &s->viewport_height);

	REG_GET_2(DCSURF_SURFACE_CONFIG,
			ROTATION_ANGLE, &s->rotation_angle,
			H_MIRROR_EN, &s->h_mirror_en);

	REG_GET(DCSURF_TILING_CONFIG,
			SW_MODE, &s->sw_mode);

	REG_GET(DCSURF_SURFACE_CONTROL,
			PRIMARY_SURFACE_DCC_EN, &s->dcc_en);

	REG_GET_3(DCHUBP_CNTL,
			HUBP_BLANK_EN, &s->blank_en,
			HUBP_TTU_DISABLE, &s->ttu_disable,
			HUBP_UNDERFLOW_STATUS, &s->underflow_status);

	REG_GET(DCN_GLOBAL_TTU_CNTL,
			MIN_TTU_VBLANK, &s->min_ttu_vblank);

	REG_GET_2(DCN_TTU_QOS_WM,
			QoS_LEVEL_LOW_WM, &s->qos_level_low_wm,
			QoS_LEVEL_HIGH_WM, &s->qos_level_high_wm);
}

void hubp2_read_state(struct hubp *hubp)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	struct dcn_hubp_state *s = &hubp2->state;
	struct _vcs_dpi_display_rq_regs_st *rq_regs = &s->rq_regs;

	hubp2_read_state_common(hubp);

	REG_GET_8(DCHUBP_REQ_SIZE_CONFIG,
		CHUNK_SIZE, &rq_regs->rq_regs_l.chunk_size,
		MIN_CHUNK_SIZE, &rq_regs->rq_regs_l.min_chunk_size,
		META_CHUNK_SIZE, &rq_regs->rq_regs_l.meta_chunk_size,
		MIN_META_CHUNK_SIZE, &rq_regs->rq_regs_l.min_meta_chunk_size,
		DPTE_GROUP_SIZE, &rq_regs->rq_regs_l.dpte_group_size,
		MPTE_GROUP_SIZE, &rq_regs->rq_regs_l.mpte_group_size,
		SWATH_HEIGHT, &rq_regs->rq_regs_l.swath_height,
		PTE_ROW_HEIGHT_LINEAR, &rq_regs->rq_regs_l.pte_row_height_linear);

	REG_GET_8(DCHUBP_REQ_SIZE_CONFIG_C,
		CHUNK_SIZE_C, &rq_regs->rq_regs_c.chunk_size,
		MIN_CHUNK_SIZE_C, &rq_regs->rq_regs_c.min_chunk_size,
		META_CHUNK_SIZE_C, &rq_regs->rq_regs_c.meta_chunk_size,
		MIN_META_CHUNK_SIZE_C, &rq_regs->rq_regs_c.min_meta_chunk_size,
		DPTE_GROUP_SIZE_C, &rq_regs->rq_regs_c.dpte_group_size,
		MPTE_GROUP_SIZE_C, &rq_regs->rq_regs_c.mpte_group_size,
		SWATH_HEIGHT_C, &rq_regs->rq_regs_c.swath_height,
		PTE_ROW_HEIGHT_LINEAR_C, &rq_regs->rq_regs_c.pte_row_height_linear);
}

static struct hubp_funcs dcn20_hubp_funcs = {
	.hubp_enable_tripleBuffer = hubp2_enable_triplebuffer,
	.hubp_is_triplebuffer_enabled = hubp2_is_triplebuffer_enabled,
	.hubp_program_surface_flip_and_addr = hubp2_program_surface_flip_and_addr,
	.hubp_program_surface_config = hubp2_program_surface_config,
	.hubp_is_flip_pending = hubp2_is_flip_pending,
	.hubp_setup = hubp2_setup,
	.hubp_setup_interdependent = hubp2_setup_interdependent,
	.hubp_set_vm_system_aperture_settings = hubp2_set_vm_system_aperture_settings,
	.set_blank = hubp2_set_blank,
	.dcc_control = hubp2_dcc_control,
	.hubp_update_dchub = hubp2_update_dchub,
	.mem_program_viewport = min_set_viewport,
	.set_cursor_attributes = hubp2_cursor_set_attributes,
	.set_cursor_position = hubp2_cursor_set_position,
	.hubp_clk_cntl = hubp2_clk_cntl,
	.hubp_vtg_sel = hubp2_vtg_sel,
	.dmdata_set_attributes = hubp2_dmdata_set_attributes,
	.dmdata_load = hubp2_dmdata_load,
	.dmdata_status_done = hubp2_dmdata_status_done,
	.hubp_read_state = hubp2_read_state,
	.hubp_clear_underflow = hubp2_clear_underflow,
	.hubp_set_flip_control_surface_gsl = hubp2_set_flip_control_surface_gsl,
	.hubp_init = hubp1_init,
};

bool hubp2_construct(
	struct dcn20_hubp *hubp2,
	struct dc_context *ctx,
	uint32_t inst,
	const struct dcn_hubp2_registers *hubp_regs,
	const struct dcn_hubp2_shift *hubp_shift,
	const struct dcn_hubp2_mask *hubp_mask)
{
	hubp2->base.funcs = &dcn20_hubp_funcs;
	hubp2->base.ctx = ctx;
	hubp2->hubp_regs = hubp_regs;
	hubp2->hubp_shift = hubp_shift;
	hubp2->hubp_mask = hubp_mask;
	hubp2->base.inst = inst;
	hubp2->base.opp_id = OPP_ID_INVALID;
	hubp2->base.mpcc_id = 0xf;

	return true;
}