1 /* 2 * Copyright 2019 Advanced Micro Devices, Inc. 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice shall be included in 12 * all copies or substantial portions of the Software. 13 * 14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR 18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 20 * OTHER DEALINGS IN THE SOFTWARE. 21 * 22 * Authors: AMD 23 * 24 */ 25 26 27 #include "dm_services.h" 28 #include "dc.h" 29 30 #include "dcn31/dcn31_init.h" 31 32 #include "resource.h" 33 #include "include/irq_service_interface.h" 34 #include "dcn31_resource.h" 35 36 #include "dcn20/dcn20_resource.h" 37 #include "dcn30/dcn30_resource.h" 38 39 #include "dcn10/dcn10_ipp.h" 40 #include "dcn30/dcn30_hubbub.h" 41 #include "dcn31/dcn31_hubbub.h" 42 #include "dcn30/dcn30_mpc.h" 43 #include "dcn31/dcn31_hubp.h" 44 #include "irq/dcn31/irq_service_dcn31.h" 45 #include "dcn30/dcn30_dpp.h" 46 #include "dcn31/dcn31_optc.h" 47 #include "dcn20/dcn20_hwseq.h" 48 #include "dcn30/dcn30_hwseq.h" 49 #include "dce110/dce110_hw_sequencer.h" 50 #include "dcn30/dcn30_opp.h" 51 #include "dcn20/dcn20_dsc.h" 52 #include "dcn30/dcn30_vpg.h" 53 #include "dcn30/dcn30_afmt.h" 54 #include "dcn30/dcn30_dio_stream_encoder.h" 55 #include "dcn31/dcn31_hpo_dp_stream_encoder.h" 56 #include "dcn31/dcn31_hpo_dp_link_encoder.h" 57 #include "dcn31/dcn31_apg.h" 58 #include "dcn31/dcn31_dio_link_encoder.h" 59 #include "dcn31/dcn31_vpg.h" 60 #include "dcn31/dcn31_afmt.h" 61 #include "dce/dce_clock_source.h" 62 #include "dce/dce_audio.h" 63 #include "dce/dce_hwseq.h" 64 #include "clk_mgr.h" 65 #include "virtual/virtual_stream_encoder.h" 66 #include "dce110/dce110_resource.h" 67 #include "dml/display_mode_vba.h" 68 #include "dcn31/dcn31_dccg.h" 69 #include "dcn10/dcn10_resource.h" 70 #include "dcn31_panel_cntl.h" 71 72 #include "dcn30/dcn30_dwb.h" 73 #include "dcn30/dcn30_mmhubbub.h" 74 75 // TODO: change include headers /amd/include/asic_reg after upstream 76 #include "yellow_carp_offset.h" 77 #include "dcn/dcn_3_1_2_offset.h" 78 #include "dcn/dcn_3_1_2_sh_mask.h" 79 #include "nbio/nbio_7_2_0_offset.h" 80 #include "dpcs/dpcs_4_2_0_offset.h" 81 #include "dpcs/dpcs_4_2_0_sh_mask.h" 82 #include "mmhub/mmhub_2_3_0_offset.h" 83 #include "mmhub/mmhub_2_3_0_sh_mask.h" 84 85 86 #define regDCHUBBUB_DEBUG_CTRL_0 0x04d6 87 #define regDCHUBBUB_DEBUG_CTRL_0_BASE_IDX 2 88 #define DCHUBBUB_DEBUG_CTRL_0__DET_DEPTH__SHIFT 0x10 89 #define DCHUBBUB_DEBUG_CTRL_0__DET_DEPTH_MASK 0x01FF0000L 90 91 #include "reg_helper.h" 92 #include "dce/dmub_abm.h" 93 #include "dce/dmub_psr.h" 94 #include "dce/dce_aux.h" 95 #include "dce/dce_i2c.h" 96 97 #include "dml/dcn30/display_mode_vba_30.h" 98 #include "vm_helper.h" 99 #include "dcn20/dcn20_vmid.h" 100 101 #include "link_enc_cfg.h" 102 103 #define DC_LOGGER_INIT(logger) 104 #define fixed16_to_double(x) (((double) x) / ((double) (1 << 16))) 105 #define fixed16_to_double_to_cpu(x) fixed16_to_double(le32_to_cpu(x)) 106 107 #define DCN3_1_DEFAULT_DET_SIZE 384 108 109 struct _vcs_dpi_ip_params_st dcn3_1_ip = { 110 .gpuvm_enable = 1, 111 .gpuvm_max_page_table_levels = 1, 112 .hostvm_enable = 1, 113 .hostvm_max_page_table_levels = 2, 114 .rob_buffer_size_kbytes = 64, 115 .det_buffer_size_kbytes = DCN3_1_DEFAULT_DET_SIZE, 116 .config_return_buffer_size_in_kbytes = 1792, 117 .compressed_buffer_segment_size_in_kbytes = 64, 118 .meta_fifo_size_in_kentries = 32, 119 .zero_size_buffer_entries = 512, 120 .compbuf_reserved_space_64b = 256, 121 .compbuf_reserved_space_zs = 64, 122 .dpp_output_buffer_pixels = 2560, 123 .opp_output_buffer_lines = 1, 124 .pixel_chunk_size_kbytes = 8, 125 .meta_chunk_size_kbytes = 2, 126 .min_meta_chunk_size_bytes = 256, 127 .writeback_chunk_size_kbytes = 8, 128 .ptoi_supported = false, 129 .num_dsc = 3, 130 .maximum_dsc_bits_per_component = 10, 131 .dsc422_native_support = false, 132 .is_line_buffer_bpp_fixed = true, 133 .line_buffer_fixed_bpp = 48, 134 .line_buffer_size_bits = 789504, 135 .max_line_buffer_lines = 12, 136 .writeback_interface_buffer_size_kbytes = 90, 137 .max_num_dpp = 4, 138 .max_num_otg = 4, 139 .max_num_hdmi_frl_outputs = 1, 140 .max_num_wb = 1, 141 .max_dchub_pscl_bw_pix_per_clk = 4, 142 .max_pscl_lb_bw_pix_per_clk = 2, 143 .max_lb_vscl_bw_pix_per_clk = 4, 144 .max_vscl_hscl_bw_pix_per_clk = 4, 145 .max_hscl_ratio = 6, 146 .max_vscl_ratio = 6, 147 .max_hscl_taps = 8, 148 .max_vscl_taps = 8, 149 .dpte_buffer_size_in_pte_reqs_luma = 64, 150 .dpte_buffer_size_in_pte_reqs_chroma = 34, 151 .dispclk_ramp_margin_percent = 1, 152 .max_inter_dcn_tile_repeaters = 8, 153 .cursor_buffer_size = 16, 154 .cursor_chunk_size = 2, 155 .writeback_line_buffer_buffer_size = 0, 156 .writeback_min_hscl_ratio = 1, 157 .writeback_min_vscl_ratio = 1, 158 .writeback_max_hscl_ratio = 1, 159 .writeback_max_vscl_ratio = 1, 160 .writeback_max_hscl_taps = 1, 161 .writeback_max_vscl_taps = 1, 162 .dppclk_delay_subtotal = 46, 163 .dppclk_delay_scl = 50, 164 .dppclk_delay_scl_lb_only = 16, 165 .dppclk_delay_cnvc_formatter = 27, 166 .dppclk_delay_cnvc_cursor = 6, 167 .dispclk_delay_subtotal = 119, 168 .dynamic_metadata_vm_enabled = false, 169 .odm_combine_4to1_supported = false, 170 .dcc_supported = true, 171 }; 172 173 struct _vcs_dpi_soc_bounding_box_st dcn3_1_soc = { 174 /*TODO: correct dispclk/dppclk voltage level determination*/ 175 .clock_limits = { 176 { 177 .state = 0, 178 .dispclk_mhz = 1200.0, 179 .dppclk_mhz = 1200.0, 180 .phyclk_mhz = 600.0, 181 .phyclk_d18_mhz = 667.0, 182 .dscclk_mhz = 186.0, 183 .dtbclk_mhz = 625.0, 184 }, 185 { 186 .state = 1, 187 .dispclk_mhz = 1200.0, 188 .dppclk_mhz = 1200.0, 189 .phyclk_mhz = 810.0, 190 .phyclk_d18_mhz = 667.0, 191 .dscclk_mhz = 209.0, 192 .dtbclk_mhz = 625.0, 193 }, 194 { 195 .state = 2, 196 .dispclk_mhz = 1200.0, 197 .dppclk_mhz = 1200.0, 198 .phyclk_mhz = 810.0, 199 .phyclk_d18_mhz = 667.0, 200 .dscclk_mhz = 209.0, 201 .dtbclk_mhz = 625.0, 202 }, 203 { 204 .state = 3, 205 .dispclk_mhz = 1200.0, 206 .dppclk_mhz = 1200.0, 207 .phyclk_mhz = 810.0, 208 .phyclk_d18_mhz = 667.0, 209 .dscclk_mhz = 371.0, 210 .dtbclk_mhz = 625.0, 211 }, 212 { 213 .state = 4, 214 .dispclk_mhz = 1200.0, 215 .dppclk_mhz = 1200.0, 216 .phyclk_mhz = 810.0, 217 .phyclk_d18_mhz = 667.0, 218 .dscclk_mhz = 417.0, 219 .dtbclk_mhz = 625.0, 220 }, 221 }, 222 .num_states = 5, 223 .sr_exit_time_us = 9.0, 224 .sr_enter_plus_exit_time_us = 11.0, 225 .sr_exit_z8_time_us = 402.0, 226 .sr_enter_plus_exit_z8_time_us = 520.0, 227 .writeback_latency_us = 12.0, 228 .dram_channel_width_bytes = 4, 229 .round_trip_ping_latency_dcfclk_cycles = 106, 230 .urgent_latency_pixel_data_only_us = 4.0, 231 .urgent_latency_pixel_mixed_with_vm_data_us = 4.0, 232 .urgent_latency_vm_data_only_us = 4.0, 233 .urgent_out_of_order_return_per_channel_pixel_only_bytes = 4096, 234 .urgent_out_of_order_return_per_channel_pixel_and_vm_bytes = 4096, 235 .urgent_out_of_order_return_per_channel_vm_only_bytes = 4096, 236 .pct_ideal_sdp_bw_after_urgent = 80.0, 237 .pct_ideal_dram_sdp_bw_after_urgent_pixel_only = 65.0, 238 .pct_ideal_dram_sdp_bw_after_urgent_pixel_and_vm = 60.0, 239 .pct_ideal_dram_sdp_bw_after_urgent_vm_only = 30.0, 240 .max_avg_sdp_bw_use_normal_percent = 60.0, 241 .max_avg_dram_bw_use_normal_percent = 60.0, 242 .fabric_datapath_to_dcn_data_return_bytes = 32, 243 .return_bus_width_bytes = 64, 244 .downspread_percent = 0.38, 245 .dcn_downspread_percent = 0.5, 246 .gpuvm_min_page_size_bytes = 4096, 247 .hostvm_min_page_size_bytes = 4096, 248 .do_urgent_latency_adjustment = false, 249 .urgent_latency_adjustment_fabric_clock_component_us = 0, 250 .urgent_latency_adjustment_fabric_clock_reference_mhz = 0, 251 }; 252 253 enum dcn31_clk_src_array_id { 254 DCN31_CLK_SRC_PLL0, 255 DCN31_CLK_SRC_PLL1, 256 DCN31_CLK_SRC_PLL2, 257 DCN31_CLK_SRC_PLL3, 258 DCN31_CLK_SRC_PLL4, 259 DCN30_CLK_SRC_TOTAL 260 }; 261 262 /* begin ********************* 263 * macros to expend register list macro defined in HW object header file 264 */ 265 266 /* DCN */ 267 /* TODO awful hack. fixup dcn20_dwb.h */ 268 #undef BASE_INNER 269 #define BASE_INNER(seg) DCN_BASE__INST0_SEG ## seg 270 271 #define BASE(seg) BASE_INNER(seg) 272 273 #define SR(reg_name)\ 274 .reg_name = BASE(reg ## reg_name ## _BASE_IDX) + \ 275 reg ## reg_name 276 277 #define SRI(reg_name, block, id)\ 278 .reg_name = BASE(reg ## block ## id ## _ ## reg_name ## _BASE_IDX) + \ 279 reg ## block ## id ## _ ## reg_name 280 281 #define SRI2(reg_name, block, id)\ 282 .reg_name = BASE(reg ## reg_name ## _BASE_IDX) + \ 283 reg ## reg_name 284 285 #define SRIR(var_name, reg_name, block, id)\ 286 .var_name = BASE(reg ## block ## id ## _ ## reg_name ## _BASE_IDX) + \ 287 reg ## block ## id ## _ ## reg_name 288 289 #define SRII(reg_name, block, id)\ 290 .reg_name[id] = BASE(reg ## block ## id ## _ ## reg_name ## _BASE_IDX) + \ 291 reg ## block ## id ## _ ## reg_name 292 293 #define SRII_MPC_RMU(reg_name, block, id)\ 294 .RMU##_##reg_name[id] = BASE(reg ## block ## id ## _ ## reg_name ## _BASE_IDX) + \ 295 reg ## block ## id ## _ ## reg_name 296 297 #define SRII_DWB(reg_name, temp_name, block, id)\ 298 .reg_name[id] = BASE(reg ## block ## id ## _ ## temp_name ## _BASE_IDX) + \ 299 reg ## block ## id ## _ ## temp_name 300 301 #define DCCG_SRII(reg_name, block, id)\ 302 .block ## _ ## reg_name[id] = BASE(reg ## block ## id ## _ ## reg_name ## _BASE_IDX) + \ 303 reg ## block ## id ## _ ## reg_name 304 305 #define VUPDATE_SRII(reg_name, block, id)\ 306 .reg_name[id] = BASE(reg ## reg_name ## _ ## block ## id ## _BASE_IDX) + \ 307 reg ## reg_name ## _ ## block ## id 308 309 /* NBIO */ 310 #define NBIO_BASE_INNER(seg) \ 311 NBIO_BASE__INST0_SEG ## seg 312 313 #define NBIO_BASE(seg) \ 314 NBIO_BASE_INNER(seg) 315 316 #define NBIO_SR(reg_name)\ 317 .reg_name = NBIO_BASE(regBIF_BX1_ ## reg_name ## _BASE_IDX) + \ 318 regBIF_BX1_ ## reg_name 319 320 /* MMHUB */ 321 #define MMHUB_BASE_INNER(seg) \ 322 MMHUB_BASE__INST0_SEG ## seg 323 324 #define MMHUB_BASE(seg) \ 325 MMHUB_BASE_INNER(seg) 326 327 #define MMHUB_SR(reg_name)\ 328 .reg_name = MMHUB_BASE(mm ## reg_name ## _BASE_IDX) + \ 329 mm ## reg_name 330 331 /* CLOCK */ 332 #define CLK_BASE_INNER(seg) \ 333 CLK_BASE__INST0_SEG ## seg 334 335 #define CLK_BASE(seg) \ 336 CLK_BASE_INNER(seg) 337 338 #define CLK_SRI(reg_name, block, inst)\ 339 .reg_name = CLK_BASE(reg ## block ## _ ## inst ## _ ## reg_name ## _BASE_IDX) + \ 340 reg ## block ## _ ## inst ## _ ## reg_name 341 342 343 static const struct bios_registers bios_regs = { 344 NBIO_SR(BIOS_SCRATCH_3), 345 NBIO_SR(BIOS_SCRATCH_6) 346 }; 347 348 #define clk_src_regs(index, pllid)\ 349 [index] = {\ 350 CS_COMMON_REG_LIST_DCN3_0(index, pllid),\ 351 } 352 353 static const struct dce110_clk_src_regs clk_src_regs[] = { 354 clk_src_regs(0, A), 355 clk_src_regs(1, B), 356 clk_src_regs(2, C), 357 clk_src_regs(3, D), 358 clk_src_regs(4, E) 359 }; 360 361 static const struct dce110_clk_src_shift cs_shift = { 362 CS_COMMON_MASK_SH_LIST_DCN2_0(__SHIFT) 363 }; 364 365 static const struct dce110_clk_src_mask cs_mask = { 366 CS_COMMON_MASK_SH_LIST_DCN2_0(_MASK) 367 }; 368 369 #define abm_regs(id)\ 370 [id] = {\ 371 ABM_DCN302_REG_LIST(id)\ 372 } 373 374 static const struct dce_abm_registers abm_regs[] = { 375 abm_regs(0), 376 abm_regs(1), 377 abm_regs(2), 378 abm_regs(3), 379 }; 380 381 static const struct dce_abm_shift abm_shift = { 382 ABM_MASK_SH_LIST_DCN30(__SHIFT) 383 }; 384 385 static const struct dce_abm_mask abm_mask = { 386 ABM_MASK_SH_LIST_DCN30(_MASK) 387 }; 388 389 #define audio_regs(id)\ 390 [id] = {\ 391 AUD_COMMON_REG_LIST(id)\ 392 } 393 394 static const struct dce_audio_registers audio_regs[] = { 395 audio_regs(0), 396 audio_regs(1), 397 audio_regs(2), 398 audio_regs(3), 399 audio_regs(4), 400 audio_regs(5), 401 audio_regs(6) 402 }; 403 404 #define DCE120_AUD_COMMON_MASK_SH_LIST(mask_sh)\ 405 SF(AZF0ENDPOINT0_AZALIA_F0_CODEC_ENDPOINT_INDEX, AZALIA_ENDPOINT_REG_INDEX, mask_sh),\ 406 SF(AZF0ENDPOINT0_AZALIA_F0_CODEC_ENDPOINT_DATA, AZALIA_ENDPOINT_REG_DATA, mask_sh),\ 407 AUD_COMMON_MASK_SH_LIST_BASE(mask_sh) 408 409 static const struct dce_audio_shift audio_shift = { 410 DCE120_AUD_COMMON_MASK_SH_LIST(__SHIFT) 411 }; 412 413 static const struct dce_audio_mask audio_mask = { 414 DCE120_AUD_COMMON_MASK_SH_LIST(_MASK) 415 }; 416 417 #define vpg_regs(id)\ 418 [id] = {\ 419 VPG_DCN31_REG_LIST(id)\ 420 } 421 422 static const struct dcn31_vpg_registers vpg_regs[] = { 423 vpg_regs(0), 424 vpg_regs(1), 425 vpg_regs(2), 426 vpg_regs(3), 427 vpg_regs(4), 428 vpg_regs(5), 429 vpg_regs(6), 430 vpg_regs(7), 431 vpg_regs(8), 432 vpg_regs(9), 433 }; 434 435 static const struct dcn31_vpg_shift vpg_shift = { 436 DCN31_VPG_MASK_SH_LIST(__SHIFT) 437 }; 438 439 static const struct dcn31_vpg_mask vpg_mask = { 440 DCN31_VPG_MASK_SH_LIST(_MASK) 441 }; 442 443 #define afmt_regs(id)\ 444 [id] = {\ 445 AFMT_DCN31_REG_LIST(id)\ 446 } 447 448 static const struct dcn31_afmt_registers afmt_regs[] = { 449 afmt_regs(0), 450 afmt_regs(1), 451 afmt_regs(2), 452 afmt_regs(3), 453 afmt_regs(4), 454 afmt_regs(5) 455 }; 456 457 static const struct dcn31_afmt_shift afmt_shift = { 458 DCN31_AFMT_MASK_SH_LIST(__SHIFT) 459 }; 460 461 static const struct dcn31_afmt_mask afmt_mask = { 462 DCN31_AFMT_MASK_SH_LIST(_MASK) 463 }; 464 465 #define apg_regs(id)\ 466 [id] = {\ 467 APG_DCN31_REG_LIST(id)\ 468 } 469 470 static const struct dcn31_apg_registers apg_regs[] = { 471 apg_regs(0), 472 apg_regs(1), 473 apg_regs(2), 474 apg_regs(3) 475 }; 476 477 static const struct dcn31_apg_shift apg_shift = { 478 DCN31_APG_MASK_SH_LIST(__SHIFT) 479 }; 480 481 static const struct dcn31_apg_mask apg_mask = { 482 DCN31_APG_MASK_SH_LIST(_MASK) 483 }; 484 485 #define stream_enc_regs(id)\ 486 [id] = {\ 487 SE_DCN3_REG_LIST(id)\ 488 } 489 490 static const struct dcn10_stream_enc_registers stream_enc_regs[] = { 491 stream_enc_regs(0), 492 stream_enc_regs(1), 493 stream_enc_regs(2), 494 stream_enc_regs(3), 495 stream_enc_regs(4) 496 }; 497 498 static const struct dcn10_stream_encoder_shift se_shift = { 499 SE_COMMON_MASK_SH_LIST_DCN30(__SHIFT) 500 }; 501 502 static const struct dcn10_stream_encoder_mask se_mask = { 503 SE_COMMON_MASK_SH_LIST_DCN30(_MASK) 504 }; 505 506 507 #define aux_regs(id)\ 508 [id] = {\ 509 DCN2_AUX_REG_LIST(id)\ 510 } 511 512 static const struct dcn10_link_enc_aux_registers link_enc_aux_regs[] = { 513 aux_regs(0), 514 aux_regs(1), 515 aux_regs(2), 516 aux_regs(3), 517 aux_regs(4) 518 }; 519 520 #define hpd_regs(id)\ 521 [id] = {\ 522 HPD_REG_LIST(id)\ 523 } 524 525 static const struct dcn10_link_enc_hpd_registers link_enc_hpd_regs[] = { 526 hpd_regs(0), 527 hpd_regs(1), 528 hpd_regs(2), 529 hpd_regs(3), 530 hpd_regs(4) 531 }; 532 533 #define link_regs(id, phyid)\ 534 [id] = {\ 535 LE_DCN31_REG_LIST(id), \ 536 UNIPHY_DCN2_REG_LIST(phyid), \ 537 DPCS_DCN31_REG_LIST(id), \ 538 } 539 540 static const struct dce110_aux_registers_shift aux_shift = { 541 DCN_AUX_MASK_SH_LIST(__SHIFT) 542 }; 543 544 static const struct dce110_aux_registers_mask aux_mask = { 545 DCN_AUX_MASK_SH_LIST(_MASK) 546 }; 547 548 static const struct dcn10_link_enc_registers link_enc_regs[] = { 549 link_regs(0, A), 550 link_regs(1, B), 551 link_regs(2, C), 552 link_regs(3, D), 553 link_regs(4, E) 554 }; 555 556 static const struct dcn10_link_enc_shift le_shift = { 557 LINK_ENCODER_MASK_SH_LIST_DCN31(__SHIFT), \ 558 DPCS_DCN31_MASK_SH_LIST(__SHIFT) 559 }; 560 561 static const struct dcn10_link_enc_mask le_mask = { 562 LINK_ENCODER_MASK_SH_LIST_DCN31(_MASK), \ 563 DPCS_DCN31_MASK_SH_LIST(_MASK) 564 }; 565 566 #define hpo_dp_stream_encoder_reg_list(id)\ 567 [id] = {\ 568 DCN3_1_HPO_DP_STREAM_ENC_REG_LIST(id)\ 569 } 570 571 static const struct dcn31_hpo_dp_stream_encoder_registers hpo_dp_stream_enc_regs[] = { 572 hpo_dp_stream_encoder_reg_list(0), 573 hpo_dp_stream_encoder_reg_list(1), 574 hpo_dp_stream_encoder_reg_list(2), 575 hpo_dp_stream_encoder_reg_list(3), 576 }; 577 578 static const struct dcn31_hpo_dp_stream_encoder_shift hpo_dp_se_shift = { 579 DCN3_1_HPO_DP_STREAM_ENC_MASK_SH_LIST(__SHIFT) 580 }; 581 582 static const struct dcn31_hpo_dp_stream_encoder_mask hpo_dp_se_mask = { 583 DCN3_1_HPO_DP_STREAM_ENC_MASK_SH_LIST(_MASK) 584 }; 585 586 #define hpo_dp_link_encoder_reg_list(id)\ 587 [id] = {\ 588 DCN3_1_HPO_DP_LINK_ENC_REG_LIST(id),\ 589 DCN3_1_RDPCSTX_REG_LIST(0),\ 590 DCN3_1_RDPCSTX_REG_LIST(1),\ 591 DCN3_1_RDPCSTX_REG_LIST(2),\ 592 DCN3_1_RDPCSTX_REG_LIST(3),\ 593 DCN3_1_RDPCSTX_REG_LIST(4)\ 594 } 595 596 static const struct dcn31_hpo_dp_link_encoder_registers hpo_dp_link_enc_regs[] = { 597 hpo_dp_link_encoder_reg_list(0), 598 hpo_dp_link_encoder_reg_list(1), 599 }; 600 601 static const struct dcn31_hpo_dp_link_encoder_shift hpo_dp_le_shift = { 602 DCN3_1_HPO_DP_LINK_ENC_MASK_SH_LIST(__SHIFT) 603 }; 604 605 static const struct dcn31_hpo_dp_link_encoder_mask hpo_dp_le_mask = { 606 DCN3_1_HPO_DP_LINK_ENC_MASK_SH_LIST(_MASK) 607 }; 608 609 #define dpp_regs(id)\ 610 [id] = {\ 611 DPP_REG_LIST_DCN30(id),\ 612 } 613 614 static const struct dcn3_dpp_registers dpp_regs[] = { 615 dpp_regs(0), 616 dpp_regs(1), 617 dpp_regs(2), 618 dpp_regs(3) 619 }; 620 621 static const struct dcn3_dpp_shift tf_shift = { 622 DPP_REG_LIST_SH_MASK_DCN30(__SHIFT) 623 }; 624 625 static const struct dcn3_dpp_mask tf_mask = { 626 DPP_REG_LIST_SH_MASK_DCN30(_MASK) 627 }; 628 629 #define opp_regs(id)\ 630 [id] = {\ 631 OPP_REG_LIST_DCN30(id),\ 632 } 633 634 static const struct dcn20_opp_registers opp_regs[] = { 635 opp_regs(0), 636 opp_regs(1), 637 opp_regs(2), 638 opp_regs(3) 639 }; 640 641 static const struct dcn20_opp_shift opp_shift = { 642 OPP_MASK_SH_LIST_DCN20(__SHIFT) 643 }; 644 645 static const struct dcn20_opp_mask opp_mask = { 646 OPP_MASK_SH_LIST_DCN20(_MASK) 647 }; 648 649 #define aux_engine_regs(id)\ 650 [id] = {\ 651 AUX_COMMON_REG_LIST0(id), \ 652 .AUXN_IMPCAL = 0, \ 653 .AUXP_IMPCAL = 0, \ 654 .AUX_RESET_MASK = DP_AUX0_AUX_CONTROL__AUX_RESET_MASK, \ 655 } 656 657 static const struct dce110_aux_registers aux_engine_regs[] = { 658 aux_engine_regs(0), 659 aux_engine_regs(1), 660 aux_engine_regs(2), 661 aux_engine_regs(3), 662 aux_engine_regs(4) 663 }; 664 665 #define dwbc_regs_dcn3(id)\ 666 [id] = {\ 667 DWBC_COMMON_REG_LIST_DCN30(id),\ 668 } 669 670 static const struct dcn30_dwbc_registers dwbc30_regs[] = { 671 dwbc_regs_dcn3(0), 672 }; 673 674 static const struct dcn30_dwbc_shift dwbc30_shift = { 675 DWBC_COMMON_MASK_SH_LIST_DCN30(__SHIFT) 676 }; 677 678 static const struct dcn30_dwbc_mask dwbc30_mask = { 679 DWBC_COMMON_MASK_SH_LIST_DCN30(_MASK) 680 }; 681 682 #define mcif_wb_regs_dcn3(id)\ 683 [id] = {\ 684 MCIF_WB_COMMON_REG_LIST_DCN30(id),\ 685 } 686 687 static const struct dcn30_mmhubbub_registers mcif_wb30_regs[] = { 688 mcif_wb_regs_dcn3(0) 689 }; 690 691 static const struct dcn30_mmhubbub_shift mcif_wb30_shift = { 692 MCIF_WB_COMMON_MASK_SH_LIST_DCN30(__SHIFT) 693 }; 694 695 static const struct dcn30_mmhubbub_mask mcif_wb30_mask = { 696 MCIF_WB_COMMON_MASK_SH_LIST_DCN30(_MASK) 697 }; 698 699 #define dsc_regsDCN20(id)\ 700 [id] = {\ 701 DSC_REG_LIST_DCN20(id)\ 702 } 703 704 static const struct dcn20_dsc_registers dsc_regs[] = { 705 dsc_regsDCN20(0), 706 dsc_regsDCN20(1), 707 dsc_regsDCN20(2) 708 }; 709 710 static const struct dcn20_dsc_shift dsc_shift = { 711 DSC_REG_LIST_SH_MASK_DCN20(__SHIFT) 712 }; 713 714 static const struct dcn20_dsc_mask dsc_mask = { 715 DSC_REG_LIST_SH_MASK_DCN20(_MASK) 716 }; 717 718 static const struct dcn30_mpc_registers mpc_regs = { 719 MPC_REG_LIST_DCN3_0(0), 720 MPC_REG_LIST_DCN3_0(1), 721 MPC_REG_LIST_DCN3_0(2), 722 MPC_REG_LIST_DCN3_0(3), 723 MPC_OUT_MUX_REG_LIST_DCN3_0(0), 724 MPC_OUT_MUX_REG_LIST_DCN3_0(1), 725 MPC_OUT_MUX_REG_LIST_DCN3_0(2), 726 MPC_OUT_MUX_REG_LIST_DCN3_0(3), 727 MPC_RMU_GLOBAL_REG_LIST_DCN3AG, 728 MPC_RMU_REG_LIST_DCN3AG(0), 729 MPC_RMU_REG_LIST_DCN3AG(1), 730 //MPC_RMU_REG_LIST_DCN3AG(2), 731 MPC_DWB_MUX_REG_LIST_DCN3_0(0), 732 }; 733 734 static const struct dcn30_mpc_shift mpc_shift = { 735 MPC_COMMON_MASK_SH_LIST_DCN30(__SHIFT) 736 }; 737 738 static const struct dcn30_mpc_mask mpc_mask = { 739 MPC_COMMON_MASK_SH_LIST_DCN30(_MASK) 740 }; 741 742 #define optc_regs(id)\ 743 [id] = {OPTC_COMMON_REG_LIST_DCN3_1(id)} 744 745 static const struct dcn_optc_registers optc_regs[] = { 746 optc_regs(0), 747 optc_regs(1), 748 optc_regs(2), 749 optc_regs(3) 750 }; 751 752 static const struct dcn_optc_shift optc_shift = { 753 OPTC_COMMON_MASK_SH_LIST_DCN3_1(__SHIFT) 754 }; 755 756 static const struct dcn_optc_mask optc_mask = { 757 OPTC_COMMON_MASK_SH_LIST_DCN3_1(_MASK) 758 }; 759 760 #define hubp_regs(id)\ 761 [id] = {\ 762 HUBP_REG_LIST_DCN30(id)\ 763 } 764 765 static const struct dcn_hubp2_registers hubp_regs[] = { 766 hubp_regs(0), 767 hubp_regs(1), 768 hubp_regs(2), 769 hubp_regs(3) 770 }; 771 772 773 static const struct dcn_hubp2_shift hubp_shift = { 774 HUBP_MASK_SH_LIST_DCN31(__SHIFT) 775 }; 776 777 static const struct dcn_hubp2_mask hubp_mask = { 778 HUBP_MASK_SH_LIST_DCN31(_MASK) 779 }; 780 static const struct dcn_hubbub_registers hubbub_reg = { 781 HUBBUB_REG_LIST_DCN31(0) 782 }; 783 784 static const struct dcn_hubbub_shift hubbub_shift = { 785 HUBBUB_MASK_SH_LIST_DCN31(__SHIFT) 786 }; 787 788 static const struct dcn_hubbub_mask hubbub_mask = { 789 HUBBUB_MASK_SH_LIST_DCN31(_MASK) 790 }; 791 792 static const struct dccg_registers dccg_regs = { 793 DCCG_REG_LIST_DCN31() 794 }; 795 796 static const struct dccg_shift dccg_shift = { 797 DCCG_MASK_SH_LIST_DCN31(__SHIFT) 798 }; 799 800 static const struct dccg_mask dccg_mask = { 801 DCCG_MASK_SH_LIST_DCN31(_MASK) 802 }; 803 804 805 #define SRII2(reg_name_pre, reg_name_post, id)\ 806 .reg_name_pre ## _ ## reg_name_post[id] = BASE(reg ## reg_name_pre \ 807 ## id ## _ ## reg_name_post ## _BASE_IDX) + \ 808 reg ## reg_name_pre ## id ## _ ## reg_name_post 809 810 811 #define HWSEQ_DCN31_REG_LIST()\ 812 SR(DCHUBBUB_GLOBAL_TIMER_CNTL), \ 813 SR(DCHUBBUB_ARB_HOSTVM_CNTL), \ 814 SR(DIO_MEM_PWR_CTRL), \ 815 SR(ODM_MEM_PWR_CTRL3), \ 816 SR(DMU_MEM_PWR_CNTL), \ 817 SR(MMHUBBUB_MEM_PWR_CNTL), \ 818 SR(DCCG_GATE_DISABLE_CNTL), \ 819 SR(DCCG_GATE_DISABLE_CNTL2), \ 820 SR(DCFCLK_CNTL),\ 821 SR(DC_MEM_GLOBAL_PWR_REQ_CNTL), \ 822 SRII(PIXEL_RATE_CNTL, OTG, 0), \ 823 SRII(PIXEL_RATE_CNTL, OTG, 1),\ 824 SRII(PIXEL_RATE_CNTL, OTG, 2),\ 825 SRII(PIXEL_RATE_CNTL, OTG, 3),\ 826 SRII(PHYPLL_PIXEL_RATE_CNTL, OTG, 0),\ 827 SRII(PHYPLL_PIXEL_RATE_CNTL, OTG, 1),\ 828 SRII(PHYPLL_PIXEL_RATE_CNTL, OTG, 2),\ 829 SRII(PHYPLL_PIXEL_RATE_CNTL, OTG, 3),\ 830 SR(MICROSECOND_TIME_BASE_DIV), \ 831 SR(MILLISECOND_TIME_BASE_DIV), \ 832 SR(DISPCLK_FREQ_CHANGE_CNTL), \ 833 SR(RBBMIF_TIMEOUT_DIS), \ 834 SR(RBBMIF_TIMEOUT_DIS_2), \ 835 SR(DCHUBBUB_CRC_CTRL), \ 836 SR(DPP_TOP0_DPP_CRC_CTRL), \ 837 SR(DPP_TOP0_DPP_CRC_VAL_B_A), \ 838 SR(DPP_TOP0_DPP_CRC_VAL_R_G), \ 839 SR(MPC_CRC_CTRL), \ 840 SR(MPC_CRC_RESULT_GB), \ 841 SR(MPC_CRC_RESULT_C), \ 842 SR(MPC_CRC_RESULT_AR), \ 843 SR(DOMAIN0_PG_CONFIG), \ 844 SR(DOMAIN1_PG_CONFIG), \ 845 SR(DOMAIN2_PG_CONFIG), \ 846 SR(DOMAIN3_PG_CONFIG), \ 847 SR(DOMAIN16_PG_CONFIG), \ 848 SR(DOMAIN17_PG_CONFIG), \ 849 SR(DOMAIN18_PG_CONFIG), \ 850 SR(DOMAIN0_PG_STATUS), \ 851 SR(DOMAIN1_PG_STATUS), \ 852 SR(DOMAIN2_PG_STATUS), \ 853 SR(DOMAIN3_PG_STATUS), \ 854 SR(DOMAIN16_PG_STATUS), \ 855 SR(DOMAIN17_PG_STATUS), \ 856 SR(DOMAIN18_PG_STATUS), \ 857 SR(D1VGA_CONTROL), \ 858 SR(D2VGA_CONTROL), \ 859 SR(D3VGA_CONTROL), \ 860 SR(D4VGA_CONTROL), \ 861 SR(D5VGA_CONTROL), \ 862 SR(D6VGA_CONTROL), \ 863 SR(DC_IP_REQUEST_CNTL), \ 864 SR(AZALIA_AUDIO_DTO), \ 865 SR(AZALIA_CONTROLLER_CLOCK_GATING) 866 867 static const struct dce_hwseq_registers hwseq_reg = { 868 HWSEQ_DCN31_REG_LIST() 869 }; 870 871 #define HWSEQ_DCN31_MASK_SH_LIST(mask_sh)\ 872 HWSEQ_DCN_MASK_SH_LIST(mask_sh), \ 873 HWS_SF(, DCHUBBUB_GLOBAL_TIMER_CNTL, DCHUBBUB_GLOBAL_TIMER_REFDIV, mask_sh), \ 874 HWS_SF(, DCHUBBUB_ARB_HOSTVM_CNTL, DISABLE_HOSTVM_FORCE_ALLOW_PSTATE, mask_sh), \ 875 HWS_SF(, DOMAIN0_PG_CONFIG, DOMAIN_POWER_FORCEON, mask_sh), \ 876 HWS_SF(, DOMAIN0_PG_CONFIG, DOMAIN_POWER_GATE, mask_sh), \ 877 HWS_SF(, DOMAIN1_PG_CONFIG, DOMAIN_POWER_FORCEON, mask_sh), \ 878 HWS_SF(, DOMAIN1_PG_CONFIG, DOMAIN_POWER_GATE, mask_sh), \ 879 HWS_SF(, DOMAIN2_PG_CONFIG, DOMAIN_POWER_FORCEON, mask_sh), \ 880 HWS_SF(, DOMAIN2_PG_CONFIG, DOMAIN_POWER_GATE, mask_sh), \ 881 HWS_SF(, DOMAIN3_PG_CONFIG, DOMAIN_POWER_FORCEON, mask_sh), \ 882 HWS_SF(, DOMAIN3_PG_CONFIG, DOMAIN_POWER_GATE, mask_sh), \ 883 HWS_SF(, DOMAIN16_PG_CONFIG, DOMAIN_POWER_FORCEON, mask_sh), \ 884 HWS_SF(, DOMAIN16_PG_CONFIG, DOMAIN_POWER_GATE, mask_sh), \ 885 HWS_SF(, DOMAIN17_PG_CONFIG, DOMAIN_POWER_FORCEON, mask_sh), \ 886 HWS_SF(, DOMAIN17_PG_CONFIG, DOMAIN_POWER_GATE, mask_sh), \ 887 HWS_SF(, DOMAIN18_PG_CONFIG, DOMAIN_POWER_FORCEON, mask_sh), \ 888 HWS_SF(, DOMAIN18_PG_CONFIG, DOMAIN_POWER_GATE, mask_sh), \ 889 HWS_SF(, DOMAIN0_PG_STATUS, DOMAIN_PGFSM_PWR_STATUS, mask_sh), \ 890 HWS_SF(, DOMAIN1_PG_STATUS, DOMAIN_PGFSM_PWR_STATUS, mask_sh), \ 891 HWS_SF(, DOMAIN2_PG_STATUS, DOMAIN_PGFSM_PWR_STATUS, mask_sh), \ 892 HWS_SF(, DOMAIN3_PG_STATUS, DOMAIN_PGFSM_PWR_STATUS, mask_sh), \ 893 HWS_SF(, DOMAIN16_PG_STATUS, DOMAIN_PGFSM_PWR_STATUS, mask_sh), \ 894 HWS_SF(, DOMAIN17_PG_STATUS, DOMAIN_PGFSM_PWR_STATUS, mask_sh), \ 895 HWS_SF(, DOMAIN18_PG_STATUS, DOMAIN_PGFSM_PWR_STATUS, mask_sh), \ 896 HWS_SF(, DC_IP_REQUEST_CNTL, IP_REQUEST_EN, mask_sh), \ 897 HWS_SF(, AZALIA_AUDIO_DTO, AZALIA_AUDIO_DTO_MODULE, mask_sh), \ 898 HWS_SF(, HPO_TOP_CLOCK_CONTROL, HPO_HDMISTREAMCLK_G_GATE_DIS, mask_sh), \ 899 HWS_SF(, DMU_MEM_PWR_CNTL, DMCU_ERAM_MEM_PWR_FORCE, mask_sh), \ 900 HWS_SF(, ODM_MEM_PWR_CTRL3, ODM_MEM_UNASSIGNED_PWR_MODE, mask_sh), \ 901 HWS_SF(, ODM_MEM_PWR_CTRL3, ODM_MEM_VBLANK_PWR_MODE, mask_sh), \ 902 HWS_SF(, MMHUBBUB_MEM_PWR_CNTL, VGA_MEM_PWR_FORCE, mask_sh) 903 904 static const struct dce_hwseq_shift hwseq_shift = { 905 HWSEQ_DCN31_MASK_SH_LIST(__SHIFT) 906 }; 907 908 static const struct dce_hwseq_mask hwseq_mask = { 909 HWSEQ_DCN31_MASK_SH_LIST(_MASK) 910 }; 911 #define vmid_regs(id)\ 912 [id] = {\ 913 DCN20_VMID_REG_LIST(id)\ 914 } 915 916 static const struct dcn_vmid_registers vmid_regs[] = { 917 vmid_regs(0), 918 vmid_regs(1), 919 vmid_regs(2), 920 vmid_regs(3), 921 vmid_regs(4), 922 vmid_regs(5), 923 vmid_regs(6), 924 vmid_regs(7), 925 vmid_regs(8), 926 vmid_regs(9), 927 vmid_regs(10), 928 vmid_regs(11), 929 vmid_regs(12), 930 vmid_regs(13), 931 vmid_regs(14), 932 vmid_regs(15) 933 }; 934 935 static const struct dcn20_vmid_shift vmid_shifts = { 936 DCN20_VMID_MASK_SH_LIST(__SHIFT) 937 }; 938 939 static const struct dcn20_vmid_mask vmid_masks = { 940 DCN20_VMID_MASK_SH_LIST(_MASK) 941 }; 942 943 static const struct resource_caps res_cap_dcn31 = { 944 .num_timing_generator = 4, 945 .num_opp = 4, 946 .num_video_plane = 4, 947 .num_audio = 5, 948 .num_stream_encoder = 5, 949 .num_dig_link_enc = 5, 950 .num_hpo_dp_stream_encoder = 4, 951 .num_hpo_dp_link_encoder = 2, 952 .num_pll = 5, 953 .num_dwb = 1, 954 .num_ddc = 5, 955 .num_vmid = 16, 956 .num_mpc_3dlut = 2, 957 .num_dsc = 3, 958 }; 959 960 static const struct dc_plane_cap plane_cap = { 961 .type = DC_PLANE_TYPE_DCN_UNIVERSAL, 962 .blends_with_above = true, 963 .blends_with_below = true, 964 .per_pixel_alpha = true, 965 966 .pixel_format_support = { 967 .argb8888 = true, 968 .nv12 = true, 969 .fp16 = true, 970 .p010 = false, 971 .ayuv = false, 972 }, 973 974 .max_upscale_factor = { 975 .argb8888 = 16000, 976 .nv12 = 16000, 977 .fp16 = 16000 978 }, 979 980 // 6:1 downscaling ratio: 1000/6 = 166.666 981 .max_downscale_factor = { 982 .argb8888 = 167, 983 .nv12 = 167, 984 .fp16 = 167 985 }, 986 64, 987 64 988 }; 989 990 static const struct dc_debug_options debug_defaults_drv = { 991 .disable_dmcu = true, 992 .force_abm_enable = false, 993 .timing_trace = false, 994 .clock_trace = true, 995 .disable_pplib_clock_request = false, 996 .pipe_split_policy = MPC_SPLIT_AVOID, 997 .force_single_disp_pipe_split = false, 998 .disable_dcc = DCC_ENABLE, 999 .vsr_support = true, 1000 .performance_trace = false, 1001 .max_downscale_src_width = 7680,/*upto 8K*/ 1002 .disable_pplib_wm_range = false, 1003 .scl_reset_length10 = true, 1004 .sanity_checks = false, 1005 .underflow_assert_delay_us = 0xFFFFFFFF, 1006 .dwb_fi_phase = -1, // -1 = disable, 1007 .dmub_command_table = true, 1008 .pstate_enabled = true, 1009 .use_max_lb = true, 1010 .enable_mem_low_power = { 1011 .bits = { 1012 .vga = true, 1013 .i2c = true, 1014 .dmcu = false, // This is previously known to cause hang on S3 cycles if enabled 1015 .dscl = true, 1016 .cm = false, // visible flicker on OLED eDPs 1017 .mpc = true, 1018 .optc = true, 1019 .vpg = true, 1020 .afmt = true, 1021 } 1022 }, 1023 .optimize_edp_link_rate = true, 1024 .enable_sw_cntl_psr = true, 1025 }; 1026 1027 static const struct dc_debug_options debug_defaults_diags = { 1028 .disable_dmcu = true, 1029 .force_abm_enable = false, 1030 .timing_trace = true, 1031 .clock_trace = true, 1032 .disable_dpp_power_gate = true, 1033 .disable_hubp_power_gate = true, 1034 .disable_clock_gate = true, 1035 .disable_pplib_clock_request = true, 1036 .disable_pplib_wm_range = true, 1037 .disable_stutter = false, 1038 .scl_reset_length10 = true, 1039 .dwb_fi_phase = -1, // -1 = disable 1040 .dmub_command_table = true, 1041 .enable_tri_buf = true, 1042 .use_max_lb = true 1043 }; 1044 1045 static void dcn31_dpp_destroy(struct dpp **dpp) 1046 { 1047 kfree(TO_DCN20_DPP(*dpp)); 1048 *dpp = NULL; 1049 } 1050 1051 static struct dpp *dcn31_dpp_create( 1052 struct dc_context *ctx, 1053 uint32_t inst) 1054 { 1055 struct dcn3_dpp *dpp = 1056 kzalloc(sizeof(struct dcn3_dpp), GFP_KERNEL); 1057 1058 if (!dpp) 1059 return NULL; 1060 1061 if (dpp3_construct(dpp, ctx, inst, 1062 &dpp_regs[inst], &tf_shift, &tf_mask)) 1063 return &dpp->base; 1064 1065 BREAK_TO_DEBUGGER(); 1066 kfree(dpp); 1067 return NULL; 1068 } 1069 1070 static struct output_pixel_processor *dcn31_opp_create( 1071 struct dc_context *ctx, uint32_t inst) 1072 { 1073 struct dcn20_opp *opp = 1074 kzalloc(sizeof(struct dcn20_opp), GFP_KERNEL); 1075 1076 if (!opp) { 1077 BREAK_TO_DEBUGGER(); 1078 return NULL; 1079 } 1080 1081 dcn20_opp_construct(opp, ctx, inst, 1082 &opp_regs[inst], &opp_shift, &opp_mask); 1083 return &opp->base; 1084 } 1085 1086 static struct dce_aux *dcn31_aux_engine_create( 1087 struct dc_context *ctx, 1088 uint32_t inst) 1089 { 1090 struct aux_engine_dce110 *aux_engine = 1091 kzalloc(sizeof(struct aux_engine_dce110), GFP_KERNEL); 1092 1093 if (!aux_engine) 1094 return NULL; 1095 1096 dce110_aux_engine_construct(aux_engine, ctx, inst, 1097 SW_AUX_TIMEOUT_PERIOD_MULTIPLIER * AUX_TIMEOUT_PERIOD, 1098 &aux_engine_regs[inst], 1099 &aux_mask, 1100 &aux_shift, 1101 ctx->dc->caps.extended_aux_timeout_support); 1102 1103 return &aux_engine->base; 1104 } 1105 #define i2c_inst_regs(id) { I2C_HW_ENGINE_COMMON_REG_LIST_DCN30(id) } 1106 1107 static const struct dce_i2c_registers i2c_hw_regs[] = { 1108 i2c_inst_regs(1), 1109 i2c_inst_regs(2), 1110 i2c_inst_regs(3), 1111 i2c_inst_regs(4), 1112 i2c_inst_regs(5), 1113 }; 1114 1115 static const struct dce_i2c_shift i2c_shifts = { 1116 I2C_COMMON_MASK_SH_LIST_DCN30(__SHIFT) 1117 }; 1118 1119 static const struct dce_i2c_mask i2c_masks = { 1120 I2C_COMMON_MASK_SH_LIST_DCN30(_MASK) 1121 }; 1122 1123 static struct dce_i2c_hw *dcn31_i2c_hw_create( 1124 struct dc_context *ctx, 1125 uint32_t inst) 1126 { 1127 struct dce_i2c_hw *dce_i2c_hw = 1128 kzalloc(sizeof(struct dce_i2c_hw), GFP_KERNEL); 1129 1130 if (!dce_i2c_hw) 1131 return NULL; 1132 1133 dcn2_i2c_hw_construct(dce_i2c_hw, ctx, inst, 1134 &i2c_hw_regs[inst], &i2c_shifts, &i2c_masks); 1135 1136 return dce_i2c_hw; 1137 } 1138 static struct mpc *dcn31_mpc_create( 1139 struct dc_context *ctx, 1140 int num_mpcc, 1141 int num_rmu) 1142 { 1143 struct dcn30_mpc *mpc30 = kzalloc(sizeof(struct dcn30_mpc), 1144 GFP_KERNEL); 1145 1146 if (!mpc30) 1147 return NULL; 1148 1149 dcn30_mpc_construct(mpc30, ctx, 1150 &mpc_regs, 1151 &mpc_shift, 1152 &mpc_mask, 1153 num_mpcc, 1154 num_rmu); 1155 1156 return &mpc30->base; 1157 } 1158 1159 static struct hubbub *dcn31_hubbub_create(struct dc_context *ctx) 1160 { 1161 int i; 1162 1163 struct dcn20_hubbub *hubbub3 = kzalloc(sizeof(struct dcn20_hubbub), 1164 GFP_KERNEL); 1165 1166 if (!hubbub3) 1167 return NULL; 1168 1169 hubbub31_construct(hubbub3, ctx, 1170 &hubbub_reg, 1171 &hubbub_shift, 1172 &hubbub_mask, 1173 dcn3_1_ip.det_buffer_size_kbytes, 1174 dcn3_1_ip.pixel_chunk_size_kbytes, 1175 dcn3_1_ip.config_return_buffer_size_in_kbytes); 1176 1177 1178 for (i = 0; i < res_cap_dcn31.num_vmid; i++) { 1179 struct dcn20_vmid *vmid = &hubbub3->vmid[i]; 1180 1181 vmid->ctx = ctx; 1182 1183 vmid->regs = &vmid_regs[i]; 1184 vmid->shifts = &vmid_shifts; 1185 vmid->masks = &vmid_masks; 1186 } 1187 1188 return &hubbub3->base; 1189 } 1190 1191 static struct timing_generator *dcn31_timing_generator_create( 1192 struct dc_context *ctx, 1193 uint32_t instance) 1194 { 1195 struct optc *tgn10 = 1196 kzalloc(sizeof(struct optc), GFP_KERNEL); 1197 1198 if (!tgn10) 1199 return NULL; 1200 1201 tgn10->base.inst = instance; 1202 tgn10->base.ctx = ctx; 1203 1204 tgn10->tg_regs = &optc_regs[instance]; 1205 tgn10->tg_shift = &optc_shift; 1206 tgn10->tg_mask = &optc_mask; 1207 1208 dcn31_timing_generator_init(tgn10); 1209 1210 return &tgn10->base; 1211 } 1212 1213 static const struct encoder_feature_support link_enc_feature = { 1214 .max_hdmi_deep_color = COLOR_DEPTH_121212, 1215 .max_hdmi_pixel_clock = 600000, 1216 .hdmi_ycbcr420_supported = true, 1217 .dp_ycbcr420_supported = true, 1218 .fec_supported = true, 1219 .flags.bits.IS_HBR2_CAPABLE = true, 1220 .flags.bits.IS_HBR3_CAPABLE = true, 1221 .flags.bits.IS_TPS3_CAPABLE = true, 1222 .flags.bits.IS_TPS4_CAPABLE = true 1223 }; 1224 1225 static struct link_encoder *dcn31_link_encoder_create( 1226 const struct encoder_init_data *enc_init_data) 1227 { 1228 struct dcn20_link_encoder *enc20 = 1229 kzalloc(sizeof(struct dcn20_link_encoder), GFP_KERNEL); 1230 1231 if (!enc20) 1232 return NULL; 1233 1234 dcn31_link_encoder_construct(enc20, 1235 enc_init_data, 1236 &link_enc_feature, 1237 &link_enc_regs[enc_init_data->transmitter], 1238 &link_enc_aux_regs[enc_init_data->channel - 1], 1239 &link_enc_hpd_regs[enc_init_data->hpd_source], 1240 &le_shift, 1241 &le_mask); 1242 1243 return &enc20->enc10.base; 1244 } 1245 1246 /* Create a minimal link encoder object not associated with a particular 1247 * physical connector. 1248 * resource_funcs.link_enc_create_minimal 1249 */ 1250 static struct link_encoder *dcn31_link_enc_create_minimal( 1251 struct dc_context *ctx, enum engine_id eng_id) 1252 { 1253 struct dcn20_link_encoder *enc20; 1254 1255 if ((eng_id - ENGINE_ID_DIGA) > ctx->dc->res_pool->res_cap->num_dig_link_enc) 1256 return NULL; 1257 1258 enc20 = kzalloc(sizeof(struct dcn20_link_encoder), GFP_KERNEL); 1259 if (!enc20) 1260 return NULL; 1261 1262 dcn31_link_encoder_construct_minimal( 1263 enc20, 1264 ctx, 1265 &link_enc_feature, 1266 &link_enc_regs[eng_id - ENGINE_ID_DIGA], 1267 eng_id); 1268 1269 return &enc20->enc10.base; 1270 } 1271 1272 struct panel_cntl *dcn31_panel_cntl_create(const struct panel_cntl_init_data *init_data) 1273 { 1274 struct dcn31_panel_cntl *panel_cntl = 1275 kzalloc(sizeof(struct dcn31_panel_cntl), GFP_KERNEL); 1276 1277 if (!panel_cntl) 1278 return NULL; 1279 1280 dcn31_panel_cntl_construct(panel_cntl, init_data); 1281 1282 return &panel_cntl->base; 1283 } 1284 1285 static void read_dce_straps( 1286 struct dc_context *ctx, 1287 struct resource_straps *straps) 1288 { 1289 generic_reg_get(ctx, regDC_PINSTRAPS + BASE(regDC_PINSTRAPS_BASE_IDX), 1290 FN(DC_PINSTRAPS, DC_PINSTRAPS_AUDIO), &straps->dc_pinstraps_audio); 1291 1292 } 1293 1294 static struct audio *dcn31_create_audio( 1295 struct dc_context *ctx, unsigned int inst) 1296 { 1297 return dce_audio_create(ctx, inst, 1298 &audio_regs[inst], &audio_shift, &audio_mask); 1299 } 1300 1301 static struct vpg *dcn31_vpg_create( 1302 struct dc_context *ctx, 1303 uint32_t inst) 1304 { 1305 struct dcn31_vpg *vpg31 = kzalloc(sizeof(struct dcn31_vpg), GFP_KERNEL); 1306 1307 if (!vpg31) 1308 return NULL; 1309 1310 vpg31_construct(vpg31, ctx, inst, 1311 &vpg_regs[inst], 1312 &vpg_shift, 1313 &vpg_mask); 1314 1315 // Will re-enable hw block when we enable stream 1316 // Check for enabled stream before powering down? 1317 vpg31_powerdown(&vpg31->base); 1318 1319 return &vpg31->base; 1320 } 1321 1322 static struct afmt *dcn31_afmt_create( 1323 struct dc_context *ctx, 1324 uint32_t inst) 1325 { 1326 struct dcn31_afmt *afmt31 = kzalloc(sizeof(struct dcn31_afmt), GFP_KERNEL); 1327 1328 if (!afmt31) 1329 return NULL; 1330 1331 afmt31_construct(afmt31, ctx, inst, 1332 &afmt_regs[inst], 1333 &afmt_shift, 1334 &afmt_mask); 1335 1336 // Light sleep by default, no need to power down here 1337 1338 return &afmt31->base; 1339 } 1340 1341 static struct apg *dcn31_apg_create( 1342 struct dc_context *ctx, 1343 uint32_t inst) 1344 { 1345 struct dcn31_apg *apg31 = kzalloc(sizeof(struct dcn31_apg), GFP_KERNEL); 1346 1347 if (!apg31) 1348 return NULL; 1349 1350 apg31_construct(apg31, ctx, inst, 1351 &apg_regs[inst], 1352 &apg_shift, 1353 &apg_mask); 1354 1355 return &apg31->base; 1356 } 1357 1358 static struct stream_encoder *dcn31_stream_encoder_create( 1359 enum engine_id eng_id, 1360 struct dc_context *ctx) 1361 { 1362 struct dcn10_stream_encoder *enc1; 1363 struct vpg *vpg; 1364 struct afmt *afmt; 1365 int vpg_inst; 1366 int afmt_inst; 1367 1368 /* Mapping of VPG, AFMT, DME register blocks to DIO block instance */ 1369 if (eng_id <= ENGINE_ID_DIGF) { 1370 vpg_inst = eng_id; 1371 afmt_inst = eng_id; 1372 } else 1373 return NULL; 1374 1375 enc1 = kzalloc(sizeof(struct dcn10_stream_encoder), GFP_KERNEL); 1376 vpg = dcn31_vpg_create(ctx, vpg_inst); 1377 afmt = dcn31_afmt_create(ctx, afmt_inst); 1378 1379 if (!enc1 || !vpg || !afmt) { 1380 kfree(enc1); 1381 kfree(vpg); 1382 kfree(afmt); 1383 return NULL; 1384 } 1385 1386 dcn30_dio_stream_encoder_construct(enc1, ctx, ctx->dc_bios, 1387 eng_id, vpg, afmt, 1388 &stream_enc_regs[eng_id], 1389 &se_shift, &se_mask); 1390 1391 return &enc1->base; 1392 } 1393 1394 static struct hpo_dp_stream_encoder *dcn31_hpo_dp_stream_encoder_create( 1395 enum engine_id eng_id, 1396 struct dc_context *ctx) 1397 { 1398 struct dcn31_hpo_dp_stream_encoder *hpo_dp_enc31; 1399 struct vpg *vpg; 1400 struct apg *apg; 1401 uint32_t hpo_dp_inst; 1402 uint32_t vpg_inst; 1403 uint32_t apg_inst; 1404 1405 ASSERT((eng_id >= ENGINE_ID_HPO_DP_0) && (eng_id <= ENGINE_ID_HPO_DP_3)); 1406 hpo_dp_inst = eng_id - ENGINE_ID_HPO_DP_0; 1407 1408 /* Mapping of VPG register blocks to HPO DP block instance: 1409 * VPG[6] -> HPO_DP[0] 1410 * VPG[7] -> HPO_DP[1] 1411 * VPG[8] -> HPO_DP[2] 1412 * VPG[9] -> HPO_DP[3] 1413 */ 1414 vpg_inst = hpo_dp_inst + 6; 1415 1416 /* Mapping of APG register blocks to HPO DP block instance: 1417 * APG[0] -> HPO_DP[0] 1418 * APG[1] -> HPO_DP[1] 1419 * APG[2] -> HPO_DP[2] 1420 * APG[3] -> HPO_DP[3] 1421 */ 1422 apg_inst = hpo_dp_inst; 1423 1424 /* allocate HPO stream encoder and create VPG sub-block */ 1425 hpo_dp_enc31 = kzalloc(sizeof(struct dcn31_hpo_dp_stream_encoder), GFP_KERNEL); 1426 vpg = dcn31_vpg_create(ctx, vpg_inst); 1427 apg = dcn31_apg_create(ctx, apg_inst); 1428 1429 if (!hpo_dp_enc31 || !vpg || !apg) { 1430 kfree(hpo_dp_enc31); 1431 kfree(vpg); 1432 kfree(apg); 1433 return NULL; 1434 } 1435 1436 dcn31_hpo_dp_stream_encoder_construct(hpo_dp_enc31, ctx, ctx->dc_bios, 1437 hpo_dp_inst, eng_id, vpg, apg, 1438 &hpo_dp_stream_enc_regs[hpo_dp_inst], 1439 &hpo_dp_se_shift, &hpo_dp_se_mask); 1440 1441 return &hpo_dp_enc31->base; 1442 } 1443 1444 static struct hpo_dp_link_encoder *dcn31_hpo_dp_link_encoder_create( 1445 uint8_t inst, 1446 struct dc_context *ctx) 1447 { 1448 struct dcn31_hpo_dp_link_encoder *hpo_dp_enc31; 1449 1450 /* allocate HPO link encoder */ 1451 hpo_dp_enc31 = kzalloc(sizeof(struct dcn31_hpo_dp_link_encoder), GFP_KERNEL); 1452 1453 hpo_dp_link_encoder31_construct(hpo_dp_enc31, ctx, inst, 1454 &hpo_dp_link_enc_regs[inst], 1455 &hpo_dp_le_shift, &hpo_dp_le_mask); 1456 1457 return &hpo_dp_enc31->base; 1458 } 1459 1460 static struct dce_hwseq *dcn31_hwseq_create( 1461 struct dc_context *ctx) 1462 { 1463 struct dce_hwseq *hws = kzalloc(sizeof(struct dce_hwseq), GFP_KERNEL); 1464 1465 if (hws) { 1466 hws->ctx = ctx; 1467 hws->regs = &hwseq_reg; 1468 hws->shifts = &hwseq_shift; 1469 hws->masks = &hwseq_mask; 1470 /* DCN3.1 FPGA Workaround 1471 * Need to enable HPO DP Stream Encoder before setting OTG master enable. 1472 * To do so, move calling function enable_stream_timing to only be done AFTER calling 1473 * function core_link_enable_stream 1474 */ 1475 if (IS_FPGA_MAXIMUS_DC(ctx->dce_environment)) 1476 hws->wa.dp_hpo_and_otg_sequence = true; 1477 } 1478 return hws; 1479 } 1480 static const struct resource_create_funcs res_create_funcs = { 1481 .read_dce_straps = read_dce_straps, 1482 .create_audio = dcn31_create_audio, 1483 .create_stream_encoder = dcn31_stream_encoder_create, 1484 .create_hpo_dp_stream_encoder = dcn31_hpo_dp_stream_encoder_create, 1485 .create_hpo_dp_link_encoder = dcn31_hpo_dp_link_encoder_create, 1486 .create_hwseq = dcn31_hwseq_create, 1487 }; 1488 1489 static const struct resource_create_funcs res_create_maximus_funcs = { 1490 .read_dce_straps = NULL, 1491 .create_audio = NULL, 1492 .create_stream_encoder = NULL, 1493 .create_hpo_dp_stream_encoder = dcn31_hpo_dp_stream_encoder_create, 1494 .create_hpo_dp_link_encoder = dcn31_hpo_dp_link_encoder_create, 1495 .create_hwseq = dcn31_hwseq_create, 1496 }; 1497 1498 static void dcn31_resource_destruct(struct dcn31_resource_pool *pool) 1499 { 1500 unsigned int i; 1501 1502 for (i = 0; i < pool->base.stream_enc_count; i++) { 1503 if (pool->base.stream_enc[i] != NULL) { 1504 if (pool->base.stream_enc[i]->vpg != NULL) { 1505 kfree(DCN30_VPG_FROM_VPG(pool->base.stream_enc[i]->vpg)); 1506 pool->base.stream_enc[i]->vpg = NULL; 1507 } 1508 if (pool->base.stream_enc[i]->afmt != NULL) { 1509 kfree(DCN30_AFMT_FROM_AFMT(pool->base.stream_enc[i]->afmt)); 1510 pool->base.stream_enc[i]->afmt = NULL; 1511 } 1512 kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i])); 1513 pool->base.stream_enc[i] = NULL; 1514 } 1515 } 1516 1517 for (i = 0; i < pool->base.hpo_dp_stream_enc_count; i++) { 1518 if (pool->base.hpo_dp_stream_enc[i] != NULL) { 1519 if (pool->base.hpo_dp_stream_enc[i]->vpg != NULL) { 1520 kfree(DCN30_VPG_FROM_VPG(pool->base.hpo_dp_stream_enc[i]->vpg)); 1521 pool->base.hpo_dp_stream_enc[i]->vpg = NULL; 1522 } 1523 if (pool->base.hpo_dp_stream_enc[i]->apg != NULL) { 1524 kfree(DCN31_APG_FROM_APG(pool->base.hpo_dp_stream_enc[i]->apg)); 1525 pool->base.hpo_dp_stream_enc[i]->apg = NULL; 1526 } 1527 kfree(DCN3_1_HPO_DP_STREAM_ENC_FROM_HPO_STREAM_ENC(pool->base.hpo_dp_stream_enc[i])); 1528 pool->base.hpo_dp_stream_enc[i] = NULL; 1529 } 1530 } 1531 1532 for (i = 0; i < pool->base.hpo_dp_link_enc_count; i++) { 1533 if (pool->base.hpo_dp_link_enc[i] != NULL) { 1534 kfree(DCN3_1_HPO_DP_LINK_ENC_FROM_HPO_LINK_ENC(pool->base.hpo_dp_link_enc[i])); 1535 pool->base.hpo_dp_link_enc[i] = NULL; 1536 } 1537 } 1538 1539 for (i = 0; i < pool->base.res_cap->num_dsc; i++) { 1540 if (pool->base.dscs[i] != NULL) 1541 dcn20_dsc_destroy(&pool->base.dscs[i]); 1542 } 1543 1544 if (pool->base.mpc != NULL) { 1545 kfree(TO_DCN20_MPC(pool->base.mpc)); 1546 pool->base.mpc = NULL; 1547 } 1548 if (pool->base.hubbub != NULL) { 1549 kfree(pool->base.hubbub); 1550 pool->base.hubbub = NULL; 1551 } 1552 for (i = 0; i < pool->base.pipe_count; i++) { 1553 if (pool->base.dpps[i] != NULL) 1554 dcn31_dpp_destroy(&pool->base.dpps[i]); 1555 1556 if (pool->base.ipps[i] != NULL) 1557 pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]); 1558 1559 if (pool->base.hubps[i] != NULL) { 1560 kfree(TO_DCN20_HUBP(pool->base.hubps[i])); 1561 pool->base.hubps[i] = NULL; 1562 } 1563 1564 if (pool->base.irqs != NULL) { 1565 dal_irq_service_destroy(&pool->base.irqs); 1566 } 1567 } 1568 1569 for (i = 0; i < pool->base.res_cap->num_ddc; i++) { 1570 if (pool->base.engines[i] != NULL) 1571 dce110_engine_destroy(&pool->base.engines[i]); 1572 if (pool->base.hw_i2cs[i] != NULL) { 1573 kfree(pool->base.hw_i2cs[i]); 1574 pool->base.hw_i2cs[i] = NULL; 1575 } 1576 if (pool->base.sw_i2cs[i] != NULL) { 1577 kfree(pool->base.sw_i2cs[i]); 1578 pool->base.sw_i2cs[i] = NULL; 1579 } 1580 } 1581 1582 for (i = 0; i < pool->base.res_cap->num_opp; i++) { 1583 if (pool->base.opps[i] != NULL) 1584 pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]); 1585 } 1586 1587 for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) { 1588 if (pool->base.timing_generators[i] != NULL) { 1589 kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i])); 1590 pool->base.timing_generators[i] = NULL; 1591 } 1592 } 1593 1594 for (i = 0; i < pool->base.res_cap->num_dwb; i++) { 1595 if (pool->base.dwbc[i] != NULL) { 1596 kfree(TO_DCN30_DWBC(pool->base.dwbc[i])); 1597 pool->base.dwbc[i] = NULL; 1598 } 1599 if (pool->base.mcif_wb[i] != NULL) { 1600 kfree(TO_DCN30_MMHUBBUB(pool->base.mcif_wb[i])); 1601 pool->base.mcif_wb[i] = NULL; 1602 } 1603 } 1604 1605 for (i = 0; i < pool->base.audio_count; i++) { 1606 if (pool->base.audios[i]) 1607 dce_aud_destroy(&pool->base.audios[i]); 1608 } 1609 1610 for (i = 0; i < pool->base.clk_src_count; i++) { 1611 if (pool->base.clock_sources[i] != NULL) { 1612 dcn20_clock_source_destroy(&pool->base.clock_sources[i]); 1613 pool->base.clock_sources[i] = NULL; 1614 } 1615 } 1616 1617 for (i = 0; i < pool->base.res_cap->num_mpc_3dlut; i++) { 1618 if (pool->base.mpc_lut[i] != NULL) { 1619 dc_3dlut_func_release(pool->base.mpc_lut[i]); 1620 pool->base.mpc_lut[i] = NULL; 1621 } 1622 if (pool->base.mpc_shaper[i] != NULL) { 1623 dc_transfer_func_release(pool->base.mpc_shaper[i]); 1624 pool->base.mpc_shaper[i] = NULL; 1625 } 1626 } 1627 1628 if (pool->base.dp_clock_source != NULL) { 1629 dcn20_clock_source_destroy(&pool->base.dp_clock_source); 1630 pool->base.dp_clock_source = NULL; 1631 } 1632 1633 for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) { 1634 if (pool->base.multiple_abms[i] != NULL) 1635 dce_abm_destroy(&pool->base.multiple_abms[i]); 1636 } 1637 1638 if (pool->base.psr != NULL) 1639 dmub_psr_destroy(&pool->base.psr); 1640 1641 if (pool->base.dccg != NULL) 1642 dcn_dccg_destroy(&pool->base.dccg); 1643 } 1644 1645 static struct hubp *dcn31_hubp_create( 1646 struct dc_context *ctx, 1647 uint32_t inst) 1648 { 1649 struct dcn20_hubp *hubp2 = 1650 kzalloc(sizeof(struct dcn20_hubp), GFP_KERNEL); 1651 1652 if (!hubp2) 1653 return NULL; 1654 1655 if (hubp31_construct(hubp2, ctx, inst, 1656 &hubp_regs[inst], &hubp_shift, &hubp_mask)) 1657 return &hubp2->base; 1658 1659 BREAK_TO_DEBUGGER(); 1660 kfree(hubp2); 1661 return NULL; 1662 } 1663 1664 static bool dcn31_dwbc_create(struct dc_context *ctx, struct resource_pool *pool) 1665 { 1666 int i; 1667 uint32_t pipe_count = pool->res_cap->num_dwb; 1668 1669 for (i = 0; i < pipe_count; i++) { 1670 struct dcn30_dwbc *dwbc30 = kzalloc(sizeof(struct dcn30_dwbc), 1671 GFP_KERNEL); 1672 1673 if (!dwbc30) { 1674 dm_error("DC: failed to create dwbc30!\n"); 1675 return false; 1676 } 1677 1678 dcn30_dwbc_construct(dwbc30, ctx, 1679 &dwbc30_regs[i], 1680 &dwbc30_shift, 1681 &dwbc30_mask, 1682 i); 1683 1684 pool->dwbc[i] = &dwbc30->base; 1685 } 1686 return true; 1687 } 1688 1689 static bool dcn31_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool) 1690 { 1691 int i; 1692 uint32_t pipe_count = pool->res_cap->num_dwb; 1693 1694 for (i = 0; i < pipe_count; i++) { 1695 struct dcn30_mmhubbub *mcif_wb30 = kzalloc(sizeof(struct dcn30_mmhubbub), 1696 GFP_KERNEL); 1697 1698 if (!mcif_wb30) { 1699 dm_error("DC: failed to create mcif_wb30!\n"); 1700 return false; 1701 } 1702 1703 dcn30_mmhubbub_construct(mcif_wb30, ctx, 1704 &mcif_wb30_regs[i], 1705 &mcif_wb30_shift, 1706 &mcif_wb30_mask, 1707 i); 1708 1709 pool->mcif_wb[i] = &mcif_wb30->base; 1710 } 1711 return true; 1712 } 1713 1714 static struct display_stream_compressor *dcn31_dsc_create( 1715 struct dc_context *ctx, uint32_t inst) 1716 { 1717 struct dcn20_dsc *dsc = 1718 kzalloc(sizeof(struct dcn20_dsc), GFP_KERNEL); 1719 1720 if (!dsc) { 1721 BREAK_TO_DEBUGGER(); 1722 return NULL; 1723 } 1724 1725 dsc2_construct(dsc, ctx, inst, &dsc_regs[inst], &dsc_shift, &dsc_mask); 1726 return &dsc->base; 1727 } 1728 1729 static void dcn31_destroy_resource_pool(struct resource_pool **pool) 1730 { 1731 struct dcn31_resource_pool *dcn31_pool = TO_DCN31_RES_POOL(*pool); 1732 1733 dcn31_resource_destruct(dcn31_pool); 1734 kfree(dcn31_pool); 1735 *pool = NULL; 1736 } 1737 1738 static struct clock_source *dcn31_clock_source_create( 1739 struct dc_context *ctx, 1740 struct dc_bios *bios, 1741 enum clock_source_id id, 1742 const struct dce110_clk_src_regs *regs, 1743 bool dp_clk_src) 1744 { 1745 struct dce110_clk_src *clk_src = 1746 kzalloc(sizeof(struct dce110_clk_src), GFP_KERNEL); 1747 1748 if (!clk_src) 1749 return NULL; 1750 1751 if (dcn3_clk_src_construct(clk_src, ctx, bios, id, 1752 regs, &cs_shift, &cs_mask)) { 1753 clk_src->base.dp_clk_src = dp_clk_src; 1754 return &clk_src->base; 1755 } 1756 1757 BREAK_TO_DEBUGGER(); 1758 return NULL; 1759 } 1760 1761 static bool is_dual_plane(enum surface_pixel_format format) 1762 { 1763 return format >= SURFACE_PIXEL_FORMAT_VIDEO_BEGIN || format == SURFACE_PIXEL_FORMAT_GRPH_RGBE_ALPHA; 1764 } 1765 1766 static int dcn31_populate_dml_pipes_from_context( 1767 struct dc *dc, struct dc_state *context, 1768 display_e2e_pipe_params_st *pipes, 1769 bool fast_validate) 1770 { 1771 int i, pipe_cnt; 1772 struct resource_context *res_ctx = &context->res_ctx; 1773 struct pipe_ctx *pipe; 1774 1775 dcn20_populate_dml_pipes_from_context(dc, context, pipes, fast_validate); 1776 1777 for (i = 0, pipe_cnt = 0; i < dc->res_pool->pipe_count; i++) { 1778 struct dc_crtc_timing *timing; 1779 1780 if (!res_ctx->pipe_ctx[i].stream) 1781 continue; 1782 pipe = &res_ctx->pipe_ctx[i]; 1783 timing = &pipe->stream->timing; 1784 1785 pipes[pipe_cnt].pipe.src.unbounded_req_mode = false; 1786 pipes[pipe_cnt].pipe.src.gpuvm = true; 1787 pipes[pipe_cnt].pipe.src.dcc_fraction_of_zs_req_luma = 0; 1788 pipes[pipe_cnt].pipe.src.dcc_fraction_of_zs_req_chroma = 0; 1789 pipes[pipe_cnt].pipe.dest.vfront_porch = timing->v_front_porch; 1790 pipes[pipe_cnt].pipe.src.dcc_rate = 3; 1791 pipes[pipe_cnt].dout.dsc_input_bpc = 0; 1792 1793 if (pipes[pipe_cnt].dout.dsc_enable) { 1794 switch (timing->display_color_depth) { 1795 case COLOR_DEPTH_888: 1796 pipes[pipe_cnt].dout.dsc_input_bpc = 8; 1797 break; 1798 case COLOR_DEPTH_101010: 1799 pipes[pipe_cnt].dout.dsc_input_bpc = 10; 1800 break; 1801 case COLOR_DEPTH_121212: 1802 pipes[pipe_cnt].dout.dsc_input_bpc = 12; 1803 break; 1804 default: 1805 ASSERT(0); 1806 break; 1807 } 1808 } 1809 1810 pipe_cnt++; 1811 } 1812 context->bw_ctx.dml.ip.det_buffer_size_kbytes = DCN3_1_DEFAULT_DET_SIZE; 1813 dc->config.enable_4to1MPC = false; 1814 if (pipe_cnt == 1 && pipe->plane_state && !dc->debug.disable_z9_mpc) { 1815 if (is_dual_plane(pipe->plane_state->format) 1816 && pipe->plane_state->src_rect.width <= 1920 && pipe->plane_state->src_rect.height <= 1080) { 1817 dc->config.enable_4to1MPC = true; 1818 } else if (!is_dual_plane(pipe->plane_state->format)) { 1819 context->bw_ctx.dml.ip.det_buffer_size_kbytes = 192; 1820 pipes[0].pipe.src.unbounded_req_mode = true; 1821 } 1822 } 1823 1824 return pipe_cnt; 1825 } 1826 1827 static void dcn31_update_soc_for_wm_a(struct dc *dc, struct dc_state *context) 1828 { 1829 if (dc->clk_mgr->bw_params->wm_table.entries[WM_A].valid) { 1830 context->bw_ctx.dml.soc.dram_clock_change_latency_us = dc->clk_mgr->bw_params->wm_table.entries[WM_A].pstate_latency_us; 1831 context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = dc->clk_mgr->bw_params->wm_table.entries[WM_A].sr_enter_plus_exit_time_us; 1832 context->bw_ctx.dml.soc.sr_exit_time_us = dc->clk_mgr->bw_params->wm_table.entries[WM_A].sr_exit_time_us; 1833 } 1834 } 1835 1836 static void dcn31_calculate_wm_and_dlg_fp( 1837 struct dc *dc, struct dc_state *context, 1838 display_e2e_pipe_params_st *pipes, 1839 int pipe_cnt, 1840 int vlevel) 1841 { 1842 int i, pipe_idx; 1843 double dcfclk = context->bw_ctx.dml.vba.DCFCLKState[vlevel][context->bw_ctx.dml.vba.maxMpcComb]; 1844 1845 if (context->bw_ctx.dml.soc.min_dcfclk > dcfclk) 1846 dcfclk = context->bw_ctx.dml.soc.min_dcfclk; 1847 1848 /* We don't recalculate clocks for 0 pipe configs, which can block 1849 * S0i3 as high clocks will block low power states 1850 * Override any clocks that can block S0i3 to min here 1851 */ 1852 if (pipe_cnt == 0) { 1853 context->bw_ctx.bw.dcn.clk.dcfclk_khz = dcfclk; // always should be vlevel 0 1854 return; 1855 } 1856 1857 pipes[0].clks_cfg.voltage = vlevel; 1858 pipes[0].clks_cfg.dcfclk_mhz = dcfclk; 1859 pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[vlevel].socclk_mhz; 1860 1861 #if 0 // TODO 1862 /* Set B: 1863 * TODO 1864 */ 1865 if (dc->clk_mgr->bw_params->wm_table.nv_entries[WM_B].valid) { 1866 if (vlevel == 0) { 1867 pipes[0].clks_cfg.voltage = 1; 1868 pipes[0].clks_cfg.dcfclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dcfclk_mhz; 1869 } 1870 context->bw_ctx.dml.soc.dram_clock_change_latency_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_B].dml_input.pstate_latency_us; 1871 context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_B].dml_input.sr_enter_plus_exit_time_us; 1872 context->bw_ctx.dml.soc.sr_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_B].dml_input.sr_exit_time_us; 1873 } 1874 context->bw_ctx.bw.dcn.watermarks.b.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1875 context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1876 context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1877 context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_enter_plus_exit_z8_ns = get_wm_z8_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1878 context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_exit_z8_ns = get_wm_z8_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1879 context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1880 context->bw_ctx.bw.dcn.watermarks.b.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1881 context->bw_ctx.bw.dcn.watermarks.b.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1882 context->bw_ctx.bw.dcn.watermarks.b.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1883 context->bw_ctx.bw.dcn.watermarks.b.urgent_latency_ns = get_urgent_latency(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1884 1885 pipes[0].clks_cfg.voltage = vlevel; 1886 pipes[0].clks_cfg.dcfclk_mhz = dcfclk; 1887 1888 /* Set C: 1889 * TODO 1890 */ 1891 if (dc->clk_mgr->bw_params->wm_table.nv_entries[WM_C].valid) { 1892 context->bw_ctx.dml.soc.dram_clock_change_latency_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_C].dml_input.pstate_latency_us; 1893 context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_C].dml_input.sr_enter_plus_exit_time_us; 1894 context->bw_ctx.dml.soc.sr_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_C].dml_input.sr_exit_time_us; 1895 } 1896 context->bw_ctx.bw.dcn.watermarks.c.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1897 context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1898 context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1899 context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_enter_plus_exit_z8_ns = get_wm_z8_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1900 context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_exit_z8_ns = get_wm_z8_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1901 context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1902 context->bw_ctx.bw.dcn.watermarks.c.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1903 context->bw_ctx.bw.dcn.watermarks.c.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1904 context->bw_ctx.bw.dcn.watermarks.c.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1905 context->bw_ctx.bw.dcn.watermarks.c.urgent_latency_ns = get_urgent_latency(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1906 1907 /* Set D: 1908 * TODO 1909 */ 1910 if (dc->clk_mgr->bw_params->wm_table.nv_entries[WM_D].valid) { 1911 context->bw_ctx.dml.soc.dram_clock_change_latency_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_D].dml_input.pstate_latency_us; 1912 context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_D].dml_input.sr_enter_plus_exit_time_us; 1913 context->bw_ctx.dml.soc.sr_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_D].dml_input.sr_exit_time_us; 1914 } 1915 context->bw_ctx.bw.dcn.watermarks.d.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1916 context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1917 context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1918 context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1919 context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_enter_plus_exit_z8_ns = get_wm_z8_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1920 context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_exit_z8_ns = get_wm_z8_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1921 context->bw_ctx.bw.dcn.watermarks.d.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1922 context->bw_ctx.bw.dcn.watermarks.d.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1923 context->bw_ctx.bw.dcn.watermarks.d.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1924 context->bw_ctx.bw.dcn.watermarks.d.urgent_latency_ns = get_urgent_latency(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1925 #endif 1926 1927 /* Set A: 1928 * All clocks min required 1929 * 1930 * Set A calculated last so that following calculations are based on Set A 1931 */ 1932 dc->res_pool->funcs->update_soc_for_wm_a(dc, context); 1933 context->bw_ctx.bw.dcn.watermarks.a.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1934 context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1935 context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1936 context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1937 context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_z8_ns = get_wm_z8_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1938 context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_exit_z8_ns = get_wm_z8_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1939 context->bw_ctx.bw.dcn.watermarks.a.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1940 context->bw_ctx.bw.dcn.watermarks.a.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1941 context->bw_ctx.bw.dcn.watermarks.a.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1942 context->bw_ctx.bw.dcn.watermarks.a.urgent_latency_ns = get_urgent_latency(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; 1943 /* TODO: remove: */ 1944 context->bw_ctx.bw.dcn.watermarks.b = context->bw_ctx.bw.dcn.watermarks.a; 1945 context->bw_ctx.bw.dcn.watermarks.c = context->bw_ctx.bw.dcn.watermarks.a; 1946 context->bw_ctx.bw.dcn.watermarks.d = context->bw_ctx.bw.dcn.watermarks.a; 1947 /* end remove*/ 1948 1949 for (i = 0, pipe_idx = 0; i < dc->res_pool->pipe_count; i++) { 1950 if (!context->res_ctx.pipe_ctx[i].stream) 1951 continue; 1952 1953 pipes[pipe_idx].clks_cfg.dispclk_mhz = get_dispclk_calculated(&context->bw_ctx.dml, pipes, pipe_cnt); 1954 pipes[pipe_idx].clks_cfg.dppclk_mhz = get_dppclk_calculated(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx); 1955 1956 if (dc->config.forced_clocks) { 1957 pipes[pipe_idx].clks_cfg.dispclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dispclk_mhz; 1958 pipes[pipe_idx].clks_cfg.dppclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dppclk_mhz; 1959 } 1960 if (dc->debug.min_disp_clk_khz > pipes[pipe_idx].clks_cfg.dispclk_mhz * 1000) 1961 pipes[pipe_idx].clks_cfg.dispclk_mhz = dc->debug.min_disp_clk_khz / 1000.0; 1962 if (dc->debug.min_dpp_clk_khz > pipes[pipe_idx].clks_cfg.dppclk_mhz * 1000) 1963 pipes[pipe_idx].clks_cfg.dppclk_mhz = dc->debug.min_dpp_clk_khz / 1000.0; 1964 1965 pipe_idx++; 1966 } 1967 1968 dcn20_calculate_dlg_params(dc, context, pipes, pipe_cnt, vlevel); 1969 } 1970 1971 static void dcn31_calculate_wm_and_dlg( 1972 struct dc *dc, struct dc_state *context, 1973 display_e2e_pipe_params_st *pipes, 1974 int pipe_cnt, 1975 int vlevel) 1976 { 1977 DC_FP_START(); 1978 dcn31_calculate_wm_and_dlg_fp(dc, context, pipes, pipe_cnt, vlevel); 1979 DC_FP_END(); 1980 } 1981 1982 bool dcn31_validate_bandwidth(struct dc *dc, 1983 struct dc_state *context, 1984 bool fast_validate) 1985 { 1986 bool out = false; 1987 1988 BW_VAL_TRACE_SETUP(); 1989 1990 int vlevel = 0; 1991 int pipe_cnt = 0; 1992 display_e2e_pipe_params_st *pipes = kzalloc(dc->res_pool->pipe_count * sizeof(display_e2e_pipe_params_st), GFP_KERNEL); 1993 DC_LOGGER_INIT(dc->ctx->logger); 1994 1995 BW_VAL_TRACE_COUNT(); 1996 1997 out = dcn30_internal_validate_bw(dc, context, pipes, &pipe_cnt, &vlevel, fast_validate); 1998 1999 // Disable fast_validate to set min dcfclk in alculate_wm_and_dlg 2000 if (pipe_cnt == 0) 2001 fast_validate = false; 2002 2003 if (!out) 2004 goto validate_fail; 2005 2006 BW_VAL_TRACE_END_VOLTAGE_LEVEL(); 2007 2008 if (fast_validate) { 2009 BW_VAL_TRACE_SKIP(fast); 2010 goto validate_out; 2011 } 2012 2013 dc->res_pool->funcs->calculate_wm_and_dlg(dc, context, pipes, pipe_cnt, vlevel); 2014 2015 BW_VAL_TRACE_END_WATERMARKS(); 2016 2017 goto validate_out; 2018 2019 validate_fail: 2020 DC_LOG_WARNING("Mode Validation Warning: %s failed validation.\n", 2021 dml_get_status_message(context->bw_ctx.dml.vba.ValidationStatus[context->bw_ctx.dml.vba.soc.num_states])); 2022 2023 BW_VAL_TRACE_SKIP(fail); 2024 out = false; 2025 2026 validate_out: 2027 kfree(pipes); 2028 2029 BW_VAL_TRACE_FINISH(); 2030 2031 return out; 2032 } 2033 2034 static struct dc_cap_funcs cap_funcs = { 2035 .get_dcc_compression_cap = dcn20_get_dcc_compression_cap 2036 }; 2037 2038 static void dcn31_update_bw_bounding_box(struct dc *dc, struct clk_bw_params *bw_params) 2039 { 2040 struct clk_limit_table *clk_table = &bw_params->clk_table; 2041 struct _vcs_dpi_voltage_scaling_st clock_limits[DC__VOLTAGE_STATES]; 2042 unsigned int i, closest_clk_lvl; 2043 int j; 2044 2045 // Default clock levels are used for diags, which may lead to overclocking. 2046 if (!IS_DIAG_DC(dc->ctx->dce_environment)) { 2047 int max_dispclk_mhz = 0, max_dppclk_mhz = 0; 2048 2049 dcn3_1_ip.max_num_otg = dc->res_pool->res_cap->num_timing_generator; 2050 dcn3_1_ip.max_num_dpp = dc->res_pool->pipe_count; 2051 dcn3_1_soc.num_chans = bw_params->num_channels; 2052 2053 ASSERT(clk_table->num_entries); 2054 2055 /* Prepass to find max clocks independent of voltage level. */ 2056 for (i = 0; i < clk_table->num_entries; ++i) { 2057 if (clk_table->entries[i].dispclk_mhz > max_dispclk_mhz) 2058 max_dispclk_mhz = clk_table->entries[i].dispclk_mhz; 2059 if (clk_table->entries[i].dppclk_mhz > max_dppclk_mhz) 2060 max_dppclk_mhz = clk_table->entries[i].dppclk_mhz; 2061 } 2062 2063 for (i = 0; i < clk_table->num_entries; i++) { 2064 /* loop backwards*/ 2065 for (closest_clk_lvl = 0, j = dcn3_1_soc.num_states - 1; j >= 0; j--) { 2066 if ((unsigned int) dcn3_1_soc.clock_limits[j].dcfclk_mhz <= clk_table->entries[i].dcfclk_mhz) { 2067 closest_clk_lvl = j; 2068 break; 2069 } 2070 } 2071 2072 clock_limits[i].state = i; 2073 2074 /* Clocks dependent on voltage level. */ 2075 clock_limits[i].dcfclk_mhz = clk_table->entries[i].dcfclk_mhz; 2076 clock_limits[i].fabricclk_mhz = clk_table->entries[i].fclk_mhz; 2077 clock_limits[i].socclk_mhz = clk_table->entries[i].socclk_mhz; 2078 clock_limits[i].dram_speed_mts = clk_table->entries[i].memclk_mhz * 2 * clk_table->entries[i].wck_ratio; 2079 2080 /* Clocks independent of voltage level. */ 2081 clock_limits[i].dispclk_mhz = max_dispclk_mhz ? max_dispclk_mhz : 2082 dcn3_1_soc.clock_limits[closest_clk_lvl].dispclk_mhz; 2083 2084 clock_limits[i].dppclk_mhz = max_dppclk_mhz ? max_dppclk_mhz : 2085 dcn3_1_soc.clock_limits[closest_clk_lvl].dppclk_mhz; 2086 2087 clock_limits[i].dram_bw_per_chan_gbps = dcn3_1_soc.clock_limits[closest_clk_lvl].dram_bw_per_chan_gbps; 2088 clock_limits[i].dscclk_mhz = dcn3_1_soc.clock_limits[closest_clk_lvl].dscclk_mhz; 2089 clock_limits[i].dtbclk_mhz = dcn3_1_soc.clock_limits[closest_clk_lvl].dtbclk_mhz; 2090 clock_limits[i].phyclk_d18_mhz = dcn3_1_soc.clock_limits[closest_clk_lvl].phyclk_d18_mhz; 2091 clock_limits[i].phyclk_mhz = dcn3_1_soc.clock_limits[closest_clk_lvl].phyclk_mhz; 2092 } 2093 for (i = 0; i < clk_table->num_entries; i++) 2094 dcn3_1_soc.clock_limits[i] = clock_limits[i]; 2095 if (clk_table->num_entries) { 2096 dcn3_1_soc.num_states = clk_table->num_entries; 2097 } 2098 } 2099 2100 dcn3_1_soc.dispclk_dppclk_vco_speed_mhz = dc->clk_mgr->dentist_vco_freq_khz / 1000.0; 2101 dc->dml.soc.dispclk_dppclk_vco_speed_mhz = dc->clk_mgr->dentist_vco_freq_khz / 1000.0; 2102 2103 if (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) 2104 dml_init_instance(&dc->dml, &dcn3_1_soc, &dcn3_1_ip, DML_PROJECT_DCN31); 2105 else 2106 dml_init_instance(&dc->dml, &dcn3_1_soc, &dcn3_1_ip, DML_PROJECT_DCN31_FPGA); 2107 } 2108 2109 static struct resource_funcs dcn31_res_pool_funcs = { 2110 .destroy = dcn31_destroy_resource_pool, 2111 .link_enc_create = dcn31_link_encoder_create, 2112 .link_enc_create_minimal = dcn31_link_enc_create_minimal, 2113 .link_encs_assign = link_enc_cfg_link_encs_assign, 2114 .link_enc_unassign = link_enc_cfg_link_enc_unassign, 2115 .panel_cntl_create = dcn31_panel_cntl_create, 2116 .validate_bandwidth = dcn31_validate_bandwidth, 2117 .calculate_wm_and_dlg = dcn31_calculate_wm_and_dlg, 2118 .update_soc_for_wm_a = dcn31_update_soc_for_wm_a, 2119 .populate_dml_pipes = dcn31_populate_dml_pipes_from_context, 2120 .acquire_idle_pipe_for_layer = dcn20_acquire_idle_pipe_for_layer, 2121 .add_stream_to_ctx = dcn30_add_stream_to_ctx, 2122 .add_dsc_to_stream_resource = dcn20_add_dsc_to_stream_resource, 2123 .remove_stream_from_ctx = dcn20_remove_stream_from_ctx, 2124 .populate_dml_writeback_from_context = dcn30_populate_dml_writeback_from_context, 2125 .set_mcif_arb_params = dcn30_set_mcif_arb_params, 2126 .find_first_free_match_stream_enc_for_link = dcn10_find_first_free_match_stream_enc_for_link, 2127 .acquire_post_bldn_3dlut = dcn30_acquire_post_bldn_3dlut, 2128 .release_post_bldn_3dlut = dcn30_release_post_bldn_3dlut, 2129 .update_bw_bounding_box = dcn31_update_bw_bounding_box, 2130 .patch_unknown_plane_state = dcn20_patch_unknown_plane_state, 2131 }; 2132 2133 static struct clock_source *dcn30_clock_source_create( 2134 struct dc_context *ctx, 2135 struct dc_bios *bios, 2136 enum clock_source_id id, 2137 const struct dce110_clk_src_regs *regs, 2138 bool dp_clk_src) 2139 { 2140 struct dce110_clk_src *clk_src = 2141 kzalloc(sizeof(struct dce110_clk_src), GFP_KERNEL); 2142 2143 if (!clk_src) 2144 return NULL; 2145 2146 if (dcn3_clk_src_construct(clk_src, ctx, bios, id, 2147 regs, &cs_shift, &cs_mask)) { 2148 clk_src->base.dp_clk_src = dp_clk_src; 2149 return &clk_src->base; 2150 } 2151 2152 BREAK_TO_DEBUGGER(); 2153 return NULL; 2154 } 2155 2156 static bool dcn31_resource_construct( 2157 uint8_t num_virtual_links, 2158 struct dc *dc, 2159 struct dcn31_resource_pool *pool) 2160 { 2161 int i; 2162 struct dc_context *ctx = dc->ctx; 2163 struct irq_service_init_data init_data; 2164 2165 DC_FP_START(); 2166 2167 ctx->dc_bios->regs = &bios_regs; 2168 2169 pool->base.res_cap = &res_cap_dcn31; 2170 2171 pool->base.funcs = &dcn31_res_pool_funcs; 2172 2173 /************************************************* 2174 * Resource + asic cap harcoding * 2175 *************************************************/ 2176 pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE; 2177 pool->base.pipe_count = pool->base.res_cap->num_timing_generator; 2178 pool->base.mpcc_count = pool->base.res_cap->num_timing_generator; 2179 dc->caps.max_downscale_ratio = 600; 2180 dc->caps.i2c_speed_in_khz = 100; 2181 dc->caps.i2c_speed_in_khz_hdcp = 5; /*1.4 w/a applied by default*/ 2182 dc->caps.max_cursor_size = 256; 2183 dc->caps.min_horizontal_blanking_period = 80; 2184 dc->caps.dmdata_alloc_size = 2048; 2185 2186 dc->caps.max_slave_planes = 1; 2187 dc->caps.max_slave_yuv_planes = 1; 2188 dc->caps.max_slave_rgb_planes = 1; 2189 dc->caps.post_blend_color_processing = true; 2190 dc->caps.force_dp_tps4_for_cp2520 = true; 2191 dc->caps.dp_hpo = true; 2192 dc->caps.extended_aux_timeout_support = true; 2193 dc->caps.dmcub_support = true; 2194 dc->caps.is_apu = true; 2195 2196 /* Color pipeline capabilities */ 2197 dc->caps.color.dpp.dcn_arch = 1; 2198 dc->caps.color.dpp.input_lut_shared = 0; 2199 dc->caps.color.dpp.icsc = 1; 2200 dc->caps.color.dpp.dgam_ram = 0; // must use gamma_corr 2201 dc->caps.color.dpp.dgam_rom_caps.srgb = 1; 2202 dc->caps.color.dpp.dgam_rom_caps.bt2020 = 1; 2203 dc->caps.color.dpp.dgam_rom_caps.gamma2_2 = 1; 2204 dc->caps.color.dpp.dgam_rom_caps.pq = 1; 2205 dc->caps.color.dpp.dgam_rom_caps.hlg = 1; 2206 dc->caps.color.dpp.post_csc = 1; 2207 dc->caps.color.dpp.gamma_corr = 1; 2208 dc->caps.color.dpp.dgam_rom_for_yuv = 0; 2209 2210 dc->caps.color.dpp.hw_3d_lut = 1; 2211 dc->caps.color.dpp.ogam_ram = 1; 2212 // no OGAM ROM on DCN301 2213 dc->caps.color.dpp.ogam_rom_caps.srgb = 0; 2214 dc->caps.color.dpp.ogam_rom_caps.bt2020 = 0; 2215 dc->caps.color.dpp.ogam_rom_caps.gamma2_2 = 0; 2216 dc->caps.color.dpp.ogam_rom_caps.pq = 0; 2217 dc->caps.color.dpp.ogam_rom_caps.hlg = 0; 2218 dc->caps.color.dpp.ocsc = 0; 2219 2220 dc->caps.color.mpc.gamut_remap = 1; 2221 dc->caps.color.mpc.num_3dluts = pool->base.res_cap->num_mpc_3dlut; //2 2222 dc->caps.color.mpc.ogam_ram = 1; 2223 dc->caps.color.mpc.ogam_rom_caps.srgb = 0; 2224 dc->caps.color.mpc.ogam_rom_caps.bt2020 = 0; 2225 dc->caps.color.mpc.ogam_rom_caps.gamma2_2 = 0; 2226 dc->caps.color.mpc.ogam_rom_caps.pq = 0; 2227 dc->caps.color.mpc.ogam_rom_caps.hlg = 0; 2228 dc->caps.color.mpc.ocsc = 1; 2229 2230 /* read VBIOS LTTPR caps */ 2231 { 2232 if (ctx->dc_bios->funcs->get_lttpr_caps) { 2233 enum bp_result bp_query_result; 2234 uint8_t is_vbios_lttpr_enable = 0; 2235 2236 bp_query_result = ctx->dc_bios->funcs->get_lttpr_caps(ctx->dc_bios, &is_vbios_lttpr_enable); 2237 dc->caps.vbios_lttpr_enable = (bp_query_result == BP_RESULT_OK) && !!is_vbios_lttpr_enable; 2238 } 2239 2240 /* interop bit is implicit */ 2241 { 2242 dc->caps.vbios_lttpr_aware = true; 2243 } 2244 } 2245 2246 if (dc->ctx->dce_environment == DCE_ENV_PRODUCTION_DRV) 2247 dc->debug = debug_defaults_drv; 2248 else if (dc->ctx->dce_environment == DCE_ENV_FPGA_MAXIMUS) { 2249 dc->debug = debug_defaults_diags; 2250 } else 2251 dc->debug = debug_defaults_diags; 2252 // Init the vm_helper 2253 if (dc->vm_helper) 2254 vm_helper_init(dc->vm_helper, 16); 2255 2256 /************************************************* 2257 * Create resources * 2258 *************************************************/ 2259 2260 /* Clock Sources for Pixel Clock*/ 2261 pool->base.clock_sources[DCN31_CLK_SRC_PLL0] = 2262 dcn30_clock_source_create(ctx, ctx->dc_bios, 2263 CLOCK_SOURCE_COMBO_PHY_PLL0, 2264 &clk_src_regs[0], false); 2265 pool->base.clock_sources[DCN31_CLK_SRC_PLL1] = 2266 dcn30_clock_source_create(ctx, ctx->dc_bios, 2267 CLOCK_SOURCE_COMBO_PHY_PLL1, 2268 &clk_src_regs[1], false); 2269 pool->base.clock_sources[DCN31_CLK_SRC_PLL2] = 2270 dcn30_clock_source_create(ctx, ctx->dc_bios, 2271 CLOCK_SOURCE_COMBO_PHY_PLL2, 2272 &clk_src_regs[2], false); 2273 pool->base.clock_sources[DCN31_CLK_SRC_PLL3] = 2274 dcn30_clock_source_create(ctx, ctx->dc_bios, 2275 CLOCK_SOURCE_COMBO_PHY_PLL3, 2276 &clk_src_regs[3], false); 2277 pool->base.clock_sources[DCN31_CLK_SRC_PLL4] = 2278 dcn30_clock_source_create(ctx, ctx->dc_bios, 2279 CLOCK_SOURCE_COMBO_PHY_PLL4, 2280 &clk_src_regs[4], false); 2281 2282 pool->base.clk_src_count = DCN30_CLK_SRC_TOTAL; 2283 2284 /* todo: not reuse phy_pll registers */ 2285 pool->base.dp_clock_source = 2286 dcn31_clock_source_create(ctx, ctx->dc_bios, 2287 CLOCK_SOURCE_ID_DP_DTO, 2288 &clk_src_regs[0], true); 2289 2290 for (i = 0; i < pool->base.clk_src_count; i++) { 2291 if (pool->base.clock_sources[i] == NULL) { 2292 dm_error("DC: failed to create clock sources!\n"); 2293 BREAK_TO_DEBUGGER(); 2294 goto create_fail; 2295 } 2296 } 2297 2298 /* TODO: DCCG */ 2299 pool->base.dccg = dccg31_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask); 2300 if (pool->base.dccg == NULL) { 2301 dm_error("DC: failed to create dccg!\n"); 2302 BREAK_TO_DEBUGGER(); 2303 goto create_fail; 2304 } 2305 2306 /* TODO: IRQ */ 2307 init_data.ctx = dc->ctx; 2308 pool->base.irqs = dal_irq_service_dcn31_create(&init_data); 2309 if (!pool->base.irqs) 2310 goto create_fail; 2311 2312 /* HUBBUB */ 2313 pool->base.hubbub = dcn31_hubbub_create(ctx); 2314 if (pool->base.hubbub == NULL) { 2315 BREAK_TO_DEBUGGER(); 2316 dm_error("DC: failed to create hubbub!\n"); 2317 goto create_fail; 2318 } 2319 2320 /* HUBPs, DPPs, OPPs and TGs */ 2321 for (i = 0; i < pool->base.pipe_count; i++) { 2322 pool->base.hubps[i] = dcn31_hubp_create(ctx, i); 2323 if (pool->base.hubps[i] == NULL) { 2324 BREAK_TO_DEBUGGER(); 2325 dm_error( 2326 "DC: failed to create hubps!\n"); 2327 goto create_fail; 2328 } 2329 2330 pool->base.dpps[i] = dcn31_dpp_create(ctx, i); 2331 if (pool->base.dpps[i] == NULL) { 2332 BREAK_TO_DEBUGGER(); 2333 dm_error( 2334 "DC: failed to create dpps!\n"); 2335 goto create_fail; 2336 } 2337 } 2338 2339 for (i = 0; i < pool->base.res_cap->num_opp; i++) { 2340 pool->base.opps[i] = dcn31_opp_create(ctx, i); 2341 if (pool->base.opps[i] == NULL) { 2342 BREAK_TO_DEBUGGER(); 2343 dm_error( 2344 "DC: failed to create output pixel processor!\n"); 2345 goto create_fail; 2346 } 2347 } 2348 2349 for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) { 2350 pool->base.timing_generators[i] = dcn31_timing_generator_create( 2351 ctx, i); 2352 if (pool->base.timing_generators[i] == NULL) { 2353 BREAK_TO_DEBUGGER(); 2354 dm_error("DC: failed to create tg!\n"); 2355 goto create_fail; 2356 } 2357 } 2358 pool->base.timing_generator_count = i; 2359 2360 /* PSR */ 2361 pool->base.psr = dmub_psr_create(ctx); 2362 if (pool->base.psr == NULL) { 2363 dm_error("DC: failed to create psr obj!\n"); 2364 BREAK_TO_DEBUGGER(); 2365 goto create_fail; 2366 } 2367 2368 /* ABM */ 2369 for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) { 2370 pool->base.multiple_abms[i] = dmub_abm_create(ctx, 2371 &abm_regs[i], 2372 &abm_shift, 2373 &abm_mask); 2374 if (pool->base.multiple_abms[i] == NULL) { 2375 dm_error("DC: failed to create abm for pipe %d!\n", i); 2376 BREAK_TO_DEBUGGER(); 2377 goto create_fail; 2378 } 2379 } 2380 2381 /* MPC and DSC */ 2382 pool->base.mpc = dcn31_mpc_create(ctx, pool->base.mpcc_count, pool->base.res_cap->num_mpc_3dlut); 2383 if (pool->base.mpc == NULL) { 2384 BREAK_TO_DEBUGGER(); 2385 dm_error("DC: failed to create mpc!\n"); 2386 goto create_fail; 2387 } 2388 2389 for (i = 0; i < pool->base.res_cap->num_dsc; i++) { 2390 pool->base.dscs[i] = dcn31_dsc_create(ctx, i); 2391 if (pool->base.dscs[i] == NULL) { 2392 BREAK_TO_DEBUGGER(); 2393 dm_error("DC: failed to create display stream compressor %d!\n", i); 2394 goto create_fail; 2395 } 2396 } 2397 2398 /* DWB and MMHUBBUB */ 2399 if (!dcn31_dwbc_create(ctx, &pool->base)) { 2400 BREAK_TO_DEBUGGER(); 2401 dm_error("DC: failed to create dwbc!\n"); 2402 goto create_fail; 2403 } 2404 2405 if (!dcn31_mmhubbub_create(ctx, &pool->base)) { 2406 BREAK_TO_DEBUGGER(); 2407 dm_error("DC: failed to create mcif_wb!\n"); 2408 goto create_fail; 2409 } 2410 2411 /* AUX and I2C */ 2412 for (i = 0; i < pool->base.res_cap->num_ddc; i++) { 2413 pool->base.engines[i] = dcn31_aux_engine_create(ctx, i); 2414 if (pool->base.engines[i] == NULL) { 2415 BREAK_TO_DEBUGGER(); 2416 dm_error( 2417 "DC:failed to create aux engine!!\n"); 2418 goto create_fail; 2419 } 2420 pool->base.hw_i2cs[i] = dcn31_i2c_hw_create(ctx, i); 2421 if (pool->base.hw_i2cs[i] == NULL) { 2422 BREAK_TO_DEBUGGER(); 2423 dm_error( 2424 "DC:failed to create hw i2c!!\n"); 2425 goto create_fail; 2426 } 2427 pool->base.sw_i2cs[i] = NULL; 2428 } 2429 2430 /* Audio, Stream Encoders including HPO and virtual, MPC 3D LUTs */ 2431 if (!resource_construct(num_virtual_links, dc, &pool->base, 2432 (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment) ? 2433 &res_create_funcs : &res_create_maximus_funcs))) 2434 goto create_fail; 2435 2436 /* HW Sequencer and Plane caps */ 2437 dcn31_hw_sequencer_construct(dc); 2438 2439 dc->caps.max_planes = pool->base.pipe_count; 2440 2441 for (i = 0; i < dc->caps.max_planes; ++i) 2442 dc->caps.planes[i] = plane_cap; 2443 2444 dc->cap_funcs = cap_funcs; 2445 2446 DC_FP_END(); 2447 2448 return true; 2449 2450 create_fail: 2451 2452 DC_FP_END(); 2453 dcn31_resource_destruct(pool); 2454 2455 return false; 2456 } 2457 2458 struct resource_pool *dcn31_create_resource_pool( 2459 const struct dc_init_data *init_data, 2460 struct dc *dc) 2461 { 2462 struct dcn31_resource_pool *pool = 2463 kzalloc(sizeof(struct dcn31_resource_pool), GFP_KERNEL); 2464 2465 if (!pool) 2466 return NULL; 2467 2468 if (dcn31_resource_construct(init_data->num_virtual_links, dc, pool)) 2469 return &pool->base; 2470 2471 BREAK_TO_DEBUGGER(); 2472 kfree(pool); 2473 return NULL; 2474 } 2475