1 /* 2 * Copyright © 2012 Intel Corporation 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice (including the next 12 * paragraph) shall be included in all copies or substantial portions of the 13 * Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 21 * IN THE SOFTWARE. 22 * 23 * Authors: 24 * Eugeni Dodonov <eugeni.dodonov@intel.com> 25 * 26 */ 27 28 #include <linux/string_helpers.h> 29 30 #include <drm/display/drm_scdc_helper.h> 31 #include <drm/drm_privacy_screen_consumer.h> 32 33 #include "i915_drv.h" 34 #include "intel_audio.h" 35 #include "intel_audio_regs.h" 36 #include "intel_backlight.h" 37 #include "intel_combo_phy.h" 38 #include "intel_combo_phy_regs.h" 39 #include "intel_connector.h" 40 #include "intel_crtc.h" 41 #include "intel_ddi.h" 42 #include "intel_ddi_buf_trans.h" 43 #include "intel_de.h" 44 #include "intel_display_power.h" 45 #include "intel_display_types.h" 46 #include "intel_dp.h" 47 #include "intel_dp_link_training.h" 48 #include "intel_dp_mst.h" 49 #include "intel_dpio_phy.h" 50 #include "intel_dsi.h" 51 #include "intel_fdi.h" 52 #include "intel_fifo_underrun.h" 53 #include "intel_gmbus.h" 54 #include "intel_hdcp.h" 55 #include "intel_hdmi.h" 56 #include "intel_hotplug.h" 57 #include "intel_lspcon.h" 58 #include "intel_pps.h" 59 #include "intel_psr.h" 60 #include "intel_quirks.h" 61 #include "intel_snps_phy.h" 62 #include "intel_sprite.h" 63 #include "intel_tc.h" 64 #include "intel_tc_phy_regs.h" 65 #include "intel_vdsc.h" 66 #include "intel_vrr.h" 67 #include "skl_scaler.h" 68 #include "skl_universal_plane.h" 69 70 static const u8 index_to_dp_signal_levels[] = { 71 [0] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0, 72 [1] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1, 73 [2] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2, 74 [3] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_3, 75 [4] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0, 76 [5] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1, 77 [6] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2, 78 [7] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0, 79 [8] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1, 80 [9] = DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0, 81 }; 82 83 static int intel_ddi_hdmi_level(struct intel_encoder *encoder, 84 const struct intel_ddi_buf_trans *trans) 85 { 86 int level; 87 88 level = intel_bios_hdmi_level_shift(encoder); 89 if (level < 0) 90 level = trans->hdmi_default_entry; 91 92 return level; 93 } 94 95 static bool has_buf_trans_select(struct drm_i915_private 
*i915) 96 { 97 return DISPLAY_VER(i915) < 10 && !IS_BROXTON(i915); 98 } 99 100 static bool has_iboost(struct drm_i915_private *i915) 101 { 102 return DISPLAY_VER(i915) == 9 && !IS_BROXTON(i915); 103 } 104 105 /* 106 * Starting with Haswell, DDI port buffers must be programmed with correct 107 * values in advance. This function programs the correct values for 108 * DP/eDP/FDI use cases. 109 */ 110 void hsw_prepare_dp_ddi_buffers(struct intel_encoder *encoder, 111 const struct intel_crtc_state *crtc_state) 112 { 113 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 114 u32 iboost_bit = 0; 115 int i, n_entries; 116 enum port port = encoder->port; 117 const struct intel_ddi_buf_trans *trans; 118 119 trans = encoder->get_buf_trans(encoder, crtc_state, &n_entries); 120 if (drm_WARN_ON_ONCE(&dev_priv->drm, !trans)) 121 return; 122 123 /* If we're boosting the current, set bit 31 of trans1 */ 124 if (has_iboost(dev_priv) && 125 intel_bios_encoder_dp_boost_level(encoder->devdata)) 126 iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE; 127 128 for (i = 0; i < n_entries; i++) { 129 intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, i), 130 trans->entries[i].hsw.trans1 | iboost_bit); 131 intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, i), 132 trans->entries[i].hsw.trans2); 133 } 134 } 135 136 /* 137 * Starting with Haswell, DDI port buffers must be programmed with correct 138 * values in advance. This function programs the correct values for 139 * HDMI/DVI use cases. 140 */ 141 static void hsw_prepare_hdmi_ddi_buffers(struct intel_encoder *encoder, 142 const struct intel_crtc_state *crtc_state) 143 { 144 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 145 int level = intel_ddi_level(encoder, crtc_state, 0); 146 u32 iboost_bit = 0; 147 int n_entries; 148 enum port port = encoder->port; 149 const struct intel_ddi_buf_trans *trans; 150 151 trans = encoder->get_buf_trans(encoder, crtc_state, &n_entries); 152 if (drm_WARN_ON_ONCE(&dev_priv->drm, !trans)) 153 return; 154 155 /* If we're boosting the current, set bit 31 of trans1 */ 156 if (has_iboost(dev_priv) && 157 intel_bios_encoder_hdmi_boost_level(encoder->devdata)) 158 iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE; 159 160 /* Entry 9 is for HDMI: */ 161 intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, 9), 162 trans->entries[level].hsw.trans1 | iboost_bit); 163 intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, 9), 164 trans->entries[level].hsw.trans2); 165 } 166 167 void intel_wait_ddi_buf_idle(struct drm_i915_private *dev_priv, 168 enum port port) 169 { 170 if (IS_BROXTON(dev_priv)) { 171 udelay(16); 172 return; 173 } 174 175 if (wait_for_us((intel_de_read(dev_priv, DDI_BUF_CTL(port)) & 176 DDI_BUF_IS_IDLE), 8)) 177 drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get idle\n", 178 port_name(port)); 179 } 180 181 static void intel_wait_ddi_buf_active(struct drm_i915_private *dev_priv, 182 enum port port) 183 { 184 int ret; 185 186 /* Wait > 518 usecs for DDI_BUF_CTL to be non idle */ 187 if (DISPLAY_VER(dev_priv) < 10) { 188 usleep_range(518, 1000); 189 return; 190 } 191 192 ret = _wait_for(!(intel_de_read(dev_priv, DDI_BUF_CTL(port)) & 193 DDI_BUF_IS_IDLE), IS_DG2(dev_priv) ? 
1200 : 500, 10, 10); 194 195 if (ret) 196 drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get active\n", 197 port_name(port)); 198 } 199 200 static u32 hsw_pll_to_ddi_pll_sel(const struct intel_shared_dpll *pll) 201 { 202 switch (pll->info->id) { 203 case DPLL_ID_WRPLL1: 204 return PORT_CLK_SEL_WRPLL1; 205 case DPLL_ID_WRPLL2: 206 return PORT_CLK_SEL_WRPLL2; 207 case DPLL_ID_SPLL: 208 return PORT_CLK_SEL_SPLL; 209 case DPLL_ID_LCPLL_810: 210 return PORT_CLK_SEL_LCPLL_810; 211 case DPLL_ID_LCPLL_1350: 212 return PORT_CLK_SEL_LCPLL_1350; 213 case DPLL_ID_LCPLL_2700: 214 return PORT_CLK_SEL_LCPLL_2700; 215 default: 216 MISSING_CASE(pll->info->id); 217 return PORT_CLK_SEL_NONE; 218 } 219 } 220 221 static u32 icl_pll_to_ddi_clk_sel(struct intel_encoder *encoder, 222 const struct intel_crtc_state *crtc_state) 223 { 224 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 225 int clock = crtc_state->port_clock; 226 const enum intel_dpll_id id = pll->info->id; 227 228 switch (id) { 229 default: 230 /* 231 * DPLL_ID_ICL_DPLL0 and DPLL_ID_ICL_DPLL1 should not be used 232 * here, so do warn if this get passed in 233 */ 234 MISSING_CASE(id); 235 return DDI_CLK_SEL_NONE; 236 case DPLL_ID_ICL_TBTPLL: 237 switch (clock) { 238 case 162000: 239 return DDI_CLK_SEL_TBT_162; 240 case 270000: 241 return DDI_CLK_SEL_TBT_270; 242 case 540000: 243 return DDI_CLK_SEL_TBT_540; 244 case 810000: 245 return DDI_CLK_SEL_TBT_810; 246 default: 247 MISSING_CASE(clock); 248 return DDI_CLK_SEL_NONE; 249 } 250 case DPLL_ID_ICL_MGPLL1: 251 case DPLL_ID_ICL_MGPLL2: 252 case DPLL_ID_ICL_MGPLL3: 253 case DPLL_ID_ICL_MGPLL4: 254 case DPLL_ID_TGL_MGPLL5: 255 case DPLL_ID_TGL_MGPLL6: 256 return DDI_CLK_SEL_MG; 257 } 258 } 259 260 static u32 ddi_buf_phy_link_rate(int port_clock) 261 { 262 switch (port_clock) { 263 case 162000: 264 return DDI_BUF_PHY_LINK_RATE(0); 265 case 216000: 266 return DDI_BUF_PHY_LINK_RATE(4); 267 case 243000: 268 return DDI_BUF_PHY_LINK_RATE(5); 269 case 270000: 270 return DDI_BUF_PHY_LINK_RATE(1); 271 case 324000: 272 return DDI_BUF_PHY_LINK_RATE(6); 273 case 432000: 274 return DDI_BUF_PHY_LINK_RATE(7); 275 case 540000: 276 return DDI_BUF_PHY_LINK_RATE(2); 277 case 810000: 278 return DDI_BUF_PHY_LINK_RATE(3); 279 default: 280 MISSING_CASE(port_clock); 281 return DDI_BUF_PHY_LINK_RATE(0); 282 } 283 } 284 285 static void intel_ddi_init_dp_buf_reg(struct intel_encoder *encoder, 286 const struct intel_crtc_state *crtc_state) 287 { 288 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 289 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 290 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 291 enum phy phy = intel_port_to_phy(i915, encoder->port); 292 293 /* DDI_BUF_CTL_ENABLE will be set by intel_ddi_prepare_link_retrain() later */ 294 intel_dp->DP = dig_port->saved_port_bits | 295 DDI_PORT_WIDTH(crtc_state->lane_count) | 296 DDI_BUF_TRANS_SELECT(0); 297 298 if (IS_ALDERLAKE_P(i915) && intel_phy_is_tc(i915, phy)) { 299 intel_dp->DP |= ddi_buf_phy_link_rate(crtc_state->port_clock); 300 if (!intel_tc_port_in_tbt_alt_mode(dig_port)) 301 intel_dp->DP |= DDI_BUF_CTL_TC_PHY_OWNERSHIP; 302 } 303 } 304 305 static int icl_calc_tbt_pll_link(struct drm_i915_private *dev_priv, 306 enum port port) 307 { 308 u32 val = intel_de_read(dev_priv, DDI_CLK_SEL(port)) & DDI_CLK_SEL_MASK; 309 310 switch (val) { 311 case DDI_CLK_SEL_NONE: 312 return 0; 313 case DDI_CLK_SEL_TBT_162: 314 return 162000; 315 case DDI_CLK_SEL_TBT_270: 316 return 270000; 317 case DDI_CLK_SEL_TBT_540: 318 
return 540000; 319 case DDI_CLK_SEL_TBT_810: 320 return 810000; 321 default: 322 MISSING_CASE(val); 323 return 0; 324 } 325 } 326 327 static void ddi_dotclock_get(struct intel_crtc_state *pipe_config) 328 { 329 /* CRT dotclock is determined via other means */ 330 if (pipe_config->has_pch_encoder) 331 return; 332 333 pipe_config->hw.adjusted_mode.crtc_clock = 334 intel_crtc_dotclock(pipe_config); 335 } 336 337 void intel_ddi_set_dp_msa(const struct intel_crtc_state *crtc_state, 338 const struct drm_connector_state *conn_state) 339 { 340 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 341 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 342 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 343 u32 temp; 344 345 if (!intel_crtc_has_dp_encoder(crtc_state)) 346 return; 347 348 drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder)); 349 350 temp = DP_MSA_MISC_SYNC_CLOCK; 351 352 switch (crtc_state->pipe_bpp) { 353 case 18: 354 temp |= DP_MSA_MISC_6_BPC; 355 break; 356 case 24: 357 temp |= DP_MSA_MISC_8_BPC; 358 break; 359 case 30: 360 temp |= DP_MSA_MISC_10_BPC; 361 break; 362 case 36: 363 temp |= DP_MSA_MISC_12_BPC; 364 break; 365 default: 366 MISSING_CASE(crtc_state->pipe_bpp); 367 break; 368 } 369 370 /* nonsense combination */ 371 drm_WARN_ON(&dev_priv->drm, crtc_state->limited_color_range && 372 crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB); 373 374 if (crtc_state->limited_color_range) 375 temp |= DP_MSA_MISC_COLOR_CEA_RGB; 376 377 /* 378 * As per DP 1.2 spec section 2.3.4.3 while sending 379 * YCBCR 444 signals we should program MSA MISC1/0 fields with 380 * colorspace information. 381 */ 382 if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444) 383 temp |= DP_MSA_MISC_COLOR_YCBCR_444_BT709; 384 385 /* 386 * As per DP 1.4a spec section 2.2.4.3 [MSA Field for Indication 387 * of Color Encoding Format and Content Color Gamut] while sending 388 * YCBCR 420, HDR BT.2020 signals we should program MSA MISC1 fields 389 * which indicate VSC SDP for the Pixel Encoding/Colorimetry Format. 390 */ 391 if (intel_dp_needs_vsc_sdp(crtc_state, conn_state)) 392 temp |= DP_MSA_MISC_COLOR_VSC_SDP; 393 394 intel_de_write(dev_priv, TRANS_MSA_MISC(cpu_transcoder), temp); 395 } 396 397 static u32 bdw_trans_port_sync_master_select(enum transcoder master_transcoder) 398 { 399 if (master_transcoder == TRANSCODER_EDP) 400 return 0; 401 else 402 return master_transcoder + 1; 403 } 404 405 static void 406 intel_ddi_config_transcoder_dp2(struct intel_encoder *encoder, 407 const struct intel_crtc_state *crtc_state) 408 { 409 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 410 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 411 u32 val = 0; 412 413 if (intel_dp_is_uhbr(crtc_state)) 414 val = TRANS_DP2_128B132B_CHANNEL_CODING; 415 416 intel_de_write(i915, TRANS_DP2_CTL(cpu_transcoder), val); 417 } 418 419 /* 420 * Returns the TRANS_DDI_FUNC_CTL value based on CRTC state. 421 * 422 * Only intended to be used by intel_ddi_enable_transcoder_func() and 423 * intel_ddi_config_transcoder_func(). 
424 */ 425 static u32 426 intel_ddi_transcoder_func_reg_val_get(struct intel_encoder *encoder, 427 const struct intel_crtc_state *crtc_state) 428 { 429 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 430 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 431 enum pipe pipe = crtc->pipe; 432 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 433 enum port port = encoder->port; 434 u32 temp; 435 436 /* Enable TRANS_DDI_FUNC_CTL for the pipe to work in HDMI mode */ 437 temp = TRANS_DDI_FUNC_ENABLE; 438 if (DISPLAY_VER(dev_priv) >= 12) 439 temp |= TGL_TRANS_DDI_SELECT_PORT(port); 440 else 441 temp |= TRANS_DDI_SELECT_PORT(port); 442 443 switch (crtc_state->pipe_bpp) { 444 default: 445 MISSING_CASE(crtc_state->pipe_bpp); 446 fallthrough; 447 case 18: 448 temp |= TRANS_DDI_BPC_6; 449 break; 450 case 24: 451 temp |= TRANS_DDI_BPC_8; 452 break; 453 case 30: 454 temp |= TRANS_DDI_BPC_10; 455 break; 456 case 36: 457 temp |= TRANS_DDI_BPC_12; 458 break; 459 } 460 461 if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PVSYNC) 462 temp |= TRANS_DDI_PVSYNC; 463 if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PHSYNC) 464 temp |= TRANS_DDI_PHSYNC; 465 466 if (cpu_transcoder == TRANSCODER_EDP) { 467 switch (pipe) { 468 default: 469 MISSING_CASE(pipe); 470 fallthrough; 471 case PIPE_A: 472 /* On Haswell, can only use the always-on power well for 473 * eDP when not using the panel fitter, and when not 474 * using motion blur mitigation (which we don't 475 * support). */ 476 if (crtc_state->pch_pfit.force_thru) 477 temp |= TRANS_DDI_EDP_INPUT_A_ONOFF; 478 else 479 temp |= TRANS_DDI_EDP_INPUT_A_ON; 480 break; 481 case PIPE_B: 482 temp |= TRANS_DDI_EDP_INPUT_B_ONOFF; 483 break; 484 case PIPE_C: 485 temp |= TRANS_DDI_EDP_INPUT_C_ONOFF; 486 break; 487 } 488 } 489 490 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 491 if (crtc_state->has_hdmi_sink) 492 temp |= TRANS_DDI_MODE_SELECT_HDMI; 493 else 494 temp |= TRANS_DDI_MODE_SELECT_DVI; 495 496 if (crtc_state->hdmi_scrambling) 497 temp |= TRANS_DDI_HDMI_SCRAMBLING; 498 if (crtc_state->hdmi_high_tmds_clock_ratio) 499 temp |= TRANS_DDI_HIGH_TMDS_CHAR_RATE; 500 } else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG)) { 501 temp |= TRANS_DDI_MODE_SELECT_FDI_OR_128B132B; 502 temp |= (crtc_state->fdi_lanes - 1) << 1; 503 } else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 504 if (intel_dp_is_uhbr(crtc_state)) 505 temp |= TRANS_DDI_MODE_SELECT_FDI_OR_128B132B; 506 else 507 temp |= TRANS_DDI_MODE_SELECT_DP_MST; 508 temp |= DDI_PORT_WIDTH(crtc_state->lane_count); 509 510 if (DISPLAY_VER(dev_priv) >= 12) { 511 enum transcoder master; 512 513 master = crtc_state->mst_master_transcoder; 514 drm_WARN_ON(&dev_priv->drm, 515 master == INVALID_TRANSCODER); 516 temp |= TRANS_DDI_MST_TRANSPORT_SELECT(master); 517 } 518 } else { 519 temp |= TRANS_DDI_MODE_SELECT_DP_SST; 520 temp |= DDI_PORT_WIDTH(crtc_state->lane_count); 521 } 522 523 if (IS_DISPLAY_VER(dev_priv, 8, 10) && 524 crtc_state->master_transcoder != INVALID_TRANSCODER) { 525 u8 master_select = 526 bdw_trans_port_sync_master_select(crtc_state->master_transcoder); 527 528 temp |= TRANS_DDI_PORT_SYNC_ENABLE | 529 TRANS_DDI_PORT_SYNC_MASTER_SELECT(master_select); 530 } 531 532 return temp; 533 } 534 535 void intel_ddi_enable_transcoder_func(struct intel_encoder *encoder, 536 const struct intel_crtc_state *crtc_state) 537 { 538 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 539 struct drm_i915_private *dev_priv = 
to_i915(crtc->base.dev); 540 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 541 542 if (DISPLAY_VER(dev_priv) >= 11) { 543 enum transcoder master_transcoder = crtc_state->master_transcoder; 544 u32 ctl2 = 0; 545 546 if (master_transcoder != INVALID_TRANSCODER) { 547 u8 master_select = 548 bdw_trans_port_sync_master_select(master_transcoder); 549 550 ctl2 |= PORT_SYNC_MODE_ENABLE | 551 PORT_SYNC_MODE_MASTER_SELECT(master_select); 552 } 553 554 intel_de_write(dev_priv, 555 TRANS_DDI_FUNC_CTL2(cpu_transcoder), ctl2); 556 } 557 558 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), 559 intel_ddi_transcoder_func_reg_val_get(encoder, 560 crtc_state)); 561 } 562 563 /* 564 * Same as intel_ddi_enable_transcoder_func(), but it does not set the enable 565 * bit. 566 */ 567 static void 568 intel_ddi_config_transcoder_func(struct intel_encoder *encoder, 569 const struct intel_crtc_state *crtc_state) 570 { 571 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 572 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 573 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 574 u32 ctl; 575 576 ctl = intel_ddi_transcoder_func_reg_val_get(encoder, crtc_state); 577 ctl &= ~TRANS_DDI_FUNC_ENABLE; 578 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl); 579 } 580 581 void intel_ddi_disable_transcoder_func(const struct intel_crtc_state *crtc_state) 582 { 583 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 584 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 585 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 586 u32 ctl; 587 588 if (DISPLAY_VER(dev_priv) >= 11) 589 intel_de_write(dev_priv, 590 TRANS_DDI_FUNC_CTL2(cpu_transcoder), 0); 591 592 ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 593 594 drm_WARN_ON(crtc->base.dev, ctl & TRANS_DDI_HDCP_SIGNALLING); 595 596 ctl &= ~TRANS_DDI_FUNC_ENABLE; 597 598 if (IS_DISPLAY_VER(dev_priv, 8, 10)) 599 ctl &= ~(TRANS_DDI_PORT_SYNC_ENABLE | 600 TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK); 601 602 if (DISPLAY_VER(dev_priv) >= 12) { 603 if (!intel_dp_mst_is_master_trans(crtc_state)) { 604 ctl &= ~(TGL_TRANS_DDI_PORT_MASK | 605 TRANS_DDI_MODE_SELECT_MASK); 606 } 607 } else { 608 ctl &= ~(TRANS_DDI_PORT_MASK | TRANS_DDI_MODE_SELECT_MASK); 609 } 610 611 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl); 612 613 if (intel_has_quirk(dev_priv, QUIRK_INCREASE_DDI_DISABLED_TIME) && 614 intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 615 drm_dbg_kms(&dev_priv->drm, 616 "Quirk Increase DDI disabled time\n"); 617 /* Quirk time at 100ms for reliable operation */ 618 msleep(100); 619 } 620 } 621 622 int intel_ddi_toggle_hdcp_bits(struct intel_encoder *intel_encoder, 623 enum transcoder cpu_transcoder, 624 bool enable, u32 hdcp_mask) 625 { 626 struct drm_device *dev = intel_encoder->base.dev; 627 struct drm_i915_private *dev_priv = to_i915(dev); 628 intel_wakeref_t wakeref; 629 int ret = 0; 630 u32 tmp; 631 632 wakeref = intel_display_power_get_if_enabled(dev_priv, 633 intel_encoder->power_domain); 634 if (drm_WARN_ON(dev, !wakeref)) 635 return -ENXIO; 636 637 tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 638 if (enable) 639 tmp |= hdcp_mask; 640 else 641 tmp &= ~hdcp_mask; 642 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), tmp); 643 intel_display_power_put(dev_priv, intel_encoder->power_domain, wakeref); 644 return ret; 645 } 646 647 bool intel_ddi_connector_get_hw_state(struct intel_connector 
*intel_connector) 648 { 649 struct drm_device *dev = intel_connector->base.dev; 650 struct drm_i915_private *dev_priv = to_i915(dev); 651 struct intel_encoder *encoder = intel_attached_encoder(intel_connector); 652 int type = intel_connector->base.connector_type; 653 enum port port = encoder->port; 654 enum transcoder cpu_transcoder; 655 intel_wakeref_t wakeref; 656 enum pipe pipe = 0; 657 u32 tmp; 658 bool ret; 659 660 wakeref = intel_display_power_get_if_enabled(dev_priv, 661 encoder->power_domain); 662 if (!wakeref) 663 return false; 664 665 if (!encoder->get_hw_state(encoder, &pipe)) { 666 ret = false; 667 goto out; 668 } 669 670 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) 671 cpu_transcoder = TRANSCODER_EDP; 672 else 673 cpu_transcoder = (enum transcoder) pipe; 674 675 tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 676 677 switch (tmp & TRANS_DDI_MODE_SELECT_MASK) { 678 case TRANS_DDI_MODE_SELECT_HDMI: 679 case TRANS_DDI_MODE_SELECT_DVI: 680 ret = type == DRM_MODE_CONNECTOR_HDMIA; 681 break; 682 683 case TRANS_DDI_MODE_SELECT_DP_SST: 684 ret = type == DRM_MODE_CONNECTOR_eDP || 685 type == DRM_MODE_CONNECTOR_DisplayPort; 686 break; 687 688 case TRANS_DDI_MODE_SELECT_DP_MST: 689 /* if the transcoder is in MST state then 690 * connector isn't connected */ 691 ret = false; 692 break; 693 694 case TRANS_DDI_MODE_SELECT_FDI_OR_128B132B: 695 if (HAS_DP20(dev_priv)) 696 /* 128b/132b */ 697 ret = false; 698 else 699 /* FDI */ 700 ret = type == DRM_MODE_CONNECTOR_VGA; 701 break; 702 703 default: 704 ret = false; 705 break; 706 } 707 708 out: 709 intel_display_power_put(dev_priv, encoder->power_domain, wakeref); 710 711 return ret; 712 } 713 714 static void intel_ddi_get_encoder_pipes(struct intel_encoder *encoder, 715 u8 *pipe_mask, bool *is_dp_mst) 716 { 717 struct drm_device *dev = encoder->base.dev; 718 struct drm_i915_private *dev_priv = to_i915(dev); 719 enum port port = encoder->port; 720 intel_wakeref_t wakeref; 721 enum pipe p; 722 u32 tmp; 723 u8 mst_pipe_mask; 724 725 *pipe_mask = 0; 726 *is_dp_mst = false; 727 728 wakeref = intel_display_power_get_if_enabled(dev_priv, 729 encoder->power_domain); 730 if (!wakeref) 731 return; 732 733 tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 734 if (!(tmp & DDI_BUF_CTL_ENABLE)) 735 goto out; 736 737 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) { 738 tmp = intel_de_read(dev_priv, 739 TRANS_DDI_FUNC_CTL(TRANSCODER_EDP)); 740 741 switch (tmp & TRANS_DDI_EDP_INPUT_MASK) { 742 default: 743 MISSING_CASE(tmp & TRANS_DDI_EDP_INPUT_MASK); 744 fallthrough; 745 case TRANS_DDI_EDP_INPUT_A_ON: 746 case TRANS_DDI_EDP_INPUT_A_ONOFF: 747 *pipe_mask = BIT(PIPE_A); 748 break; 749 case TRANS_DDI_EDP_INPUT_B_ONOFF: 750 *pipe_mask = BIT(PIPE_B); 751 break; 752 case TRANS_DDI_EDP_INPUT_C_ONOFF: 753 *pipe_mask = BIT(PIPE_C); 754 break; 755 } 756 757 goto out; 758 } 759 760 mst_pipe_mask = 0; 761 for_each_pipe(dev_priv, p) { 762 enum transcoder cpu_transcoder = (enum transcoder)p; 763 unsigned int port_mask, ddi_select; 764 intel_wakeref_t trans_wakeref; 765 766 trans_wakeref = intel_display_power_get_if_enabled(dev_priv, 767 POWER_DOMAIN_TRANSCODER(cpu_transcoder)); 768 if (!trans_wakeref) 769 continue; 770 771 if (DISPLAY_VER(dev_priv) >= 12) { 772 port_mask = TGL_TRANS_DDI_PORT_MASK; 773 ddi_select = TGL_TRANS_DDI_SELECT_PORT(port); 774 } else { 775 port_mask = TRANS_DDI_PORT_MASK; 776 ddi_select = TRANS_DDI_SELECT_PORT(port); 777 } 778 779 tmp = intel_de_read(dev_priv, 780 
TRANS_DDI_FUNC_CTL(cpu_transcoder)); 781 intel_display_power_put(dev_priv, POWER_DOMAIN_TRANSCODER(cpu_transcoder), 782 trans_wakeref); 783 784 if ((tmp & port_mask) != ddi_select) 785 continue; 786 787 if ((tmp & TRANS_DDI_MODE_SELECT_MASK) == TRANS_DDI_MODE_SELECT_DP_MST || 788 (HAS_DP20(dev_priv) && 789 (tmp & TRANS_DDI_MODE_SELECT_MASK) == TRANS_DDI_MODE_SELECT_FDI_OR_128B132B)) 790 mst_pipe_mask |= BIT(p); 791 792 *pipe_mask |= BIT(p); 793 } 794 795 if (!*pipe_mask) 796 drm_dbg_kms(&dev_priv->drm, 797 "No pipe for [ENCODER:%d:%s] found\n", 798 encoder->base.base.id, encoder->base.name); 799 800 if (!mst_pipe_mask && hweight8(*pipe_mask) > 1) { 801 drm_dbg_kms(&dev_priv->drm, 802 "Multiple pipes for [ENCODER:%d:%s] (pipe_mask %02x)\n", 803 encoder->base.base.id, encoder->base.name, 804 *pipe_mask); 805 *pipe_mask = BIT(ffs(*pipe_mask) - 1); 806 } 807 808 if (mst_pipe_mask && mst_pipe_mask != *pipe_mask) 809 drm_dbg_kms(&dev_priv->drm, 810 "Conflicting MST and non-MST state for [ENCODER:%d:%s] (pipe_mask %02x mst_pipe_mask %02x)\n", 811 encoder->base.base.id, encoder->base.name, 812 *pipe_mask, mst_pipe_mask); 813 else 814 *is_dp_mst = mst_pipe_mask; 815 816 out: 817 if (*pipe_mask && (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv))) { 818 tmp = intel_de_read(dev_priv, BXT_PHY_CTL(port)); 819 if ((tmp & (BXT_PHY_CMNLANE_POWERDOWN_ACK | 820 BXT_PHY_LANE_POWERDOWN_ACK | 821 BXT_PHY_LANE_ENABLED)) != BXT_PHY_LANE_ENABLED) 822 drm_err(&dev_priv->drm, 823 "[ENCODER:%d:%s] enabled but PHY powered down? (PHY_CTL %08x)\n", 824 encoder->base.base.id, encoder->base.name, tmp); 825 } 826 827 intel_display_power_put(dev_priv, encoder->power_domain, wakeref); 828 } 829 830 bool intel_ddi_get_hw_state(struct intel_encoder *encoder, 831 enum pipe *pipe) 832 { 833 u8 pipe_mask; 834 bool is_mst; 835 836 intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst); 837 838 if (is_mst || !pipe_mask) 839 return false; 840 841 *pipe = ffs(pipe_mask) - 1; 842 843 return true; 844 } 845 846 static enum intel_display_power_domain 847 intel_ddi_main_link_aux_domain(struct intel_digital_port *dig_port) 848 { 849 /* ICL+ HW requires corresponding AUX IOs to be powered up for PSR with 850 * DC states enabled at the same time, while for driver initiated AUX 851 * transfers we need the same AUX IOs to be powered but with DC states 852 * disabled. Accordingly use the AUX power domain here which leaves DC 853 * states enabled. 854 * However, for non-A AUX ports the corresponding non-EDP transcoders 855 * would have already enabled power well 2 and DC_OFF. This means we can 856 * acquire a wider POWER_DOMAIN_AUX_{B,C,D,F} reference instead of a 857 * specific AUX_IO reference without powering up any extra wells. 858 * Note that PSR is enabled only on Port A even though this function 859 * returns the correct domain for other ports too. 860 */ 861 return dig_port->aux_ch == AUX_CH_A ? POWER_DOMAIN_AUX_IO_A : 862 intel_aux_power_domain(dig_port); 863 } 864 865 static void intel_ddi_get_power_domains(struct intel_encoder *encoder, 866 struct intel_crtc_state *crtc_state) 867 { 868 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 869 struct intel_digital_port *dig_port; 870 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 871 872 /* 873 * TODO: Add support for MST encoders. Atm, the following should never 874 * happen since fake-MST encoders don't set their get_power_domains() 875 * hook. 
876 */ 877 if (drm_WARN_ON(&dev_priv->drm, 878 intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST))) 879 return; 880 881 dig_port = enc_to_dig_port(encoder); 882 883 if (!intel_tc_port_in_tbt_alt_mode(dig_port)) { 884 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 885 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 886 dig_port->ddi_io_power_domain); 887 } 888 889 /* 890 * AUX power is only needed for (e)DP mode, and for HDMI mode on TC 891 * ports. 892 */ 893 if (intel_crtc_has_dp_encoder(crtc_state) || 894 intel_phy_is_tc(dev_priv, phy)) { 895 drm_WARN_ON(&dev_priv->drm, dig_port->aux_wakeref); 896 dig_port->aux_wakeref = 897 intel_display_power_get(dev_priv, 898 intel_ddi_main_link_aux_domain(dig_port)); 899 } 900 } 901 902 void intel_ddi_enable_pipe_clock(struct intel_encoder *encoder, 903 const struct intel_crtc_state *crtc_state) 904 { 905 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 906 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 907 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 908 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 909 u32 val; 910 911 if (cpu_transcoder != TRANSCODER_EDP) { 912 if (DISPLAY_VER(dev_priv) >= 13) 913 val = TGL_TRANS_CLK_SEL_PORT(phy); 914 else if (DISPLAY_VER(dev_priv) >= 12) 915 val = TGL_TRANS_CLK_SEL_PORT(encoder->port); 916 else 917 val = TRANS_CLK_SEL_PORT(encoder->port); 918 919 intel_de_write(dev_priv, TRANS_CLK_SEL(cpu_transcoder), val); 920 } 921 } 922 923 void intel_ddi_disable_pipe_clock(const struct intel_crtc_state *crtc_state) 924 { 925 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); 926 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 927 928 if (cpu_transcoder != TRANSCODER_EDP) { 929 if (DISPLAY_VER(dev_priv) >= 12) 930 intel_de_write(dev_priv, 931 TRANS_CLK_SEL(cpu_transcoder), 932 TGL_TRANS_CLK_SEL_DISABLED); 933 else 934 intel_de_write(dev_priv, 935 TRANS_CLK_SEL(cpu_transcoder), 936 TRANS_CLK_SEL_DISABLED); 937 } 938 } 939 940 static void _skl_ddi_set_iboost(struct drm_i915_private *dev_priv, 941 enum port port, u8 iboost) 942 { 943 u32 tmp; 944 945 tmp = intel_de_read(dev_priv, DISPIO_CR_TX_BMU_CR0); 946 tmp &= ~(BALANCE_LEG_MASK(port) | BALANCE_LEG_DISABLE(port)); 947 if (iboost) 948 tmp |= iboost << BALANCE_LEG_SHIFT(port); 949 else 950 tmp |= BALANCE_LEG_DISABLE(port); 951 intel_de_write(dev_priv, DISPIO_CR_TX_BMU_CR0, tmp); 952 } 953 954 static void skl_ddi_set_iboost(struct intel_encoder *encoder, 955 const struct intel_crtc_state *crtc_state, 956 int level) 957 { 958 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 959 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 960 u8 iboost; 961 962 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 963 iboost = intel_bios_encoder_hdmi_boost_level(encoder->devdata); 964 else 965 iboost = intel_bios_encoder_dp_boost_level(encoder->devdata); 966 967 if (iboost == 0) { 968 const struct intel_ddi_buf_trans *trans; 969 int n_entries; 970 971 trans = encoder->get_buf_trans(encoder, crtc_state, &n_entries); 972 if (drm_WARN_ON_ONCE(&dev_priv->drm, !trans)) 973 return; 974 975 iboost = trans->entries[level].hsw.i_boost; 976 } 977 978 /* Make sure that the requested I_boost is valid */ 979 if (iboost && iboost != 0x1 && iboost != 0x3 && iboost != 0x7) { 980 drm_err(&dev_priv->drm, "Invalid I_boost value %u\n", iboost); 981 return; 982 } 983 984 _skl_ddi_set_iboost(dev_priv, encoder->port, iboost); 985 986 if (encoder->port == PORT_A && 
dig_port->max_lanes == 4) 987 _skl_ddi_set_iboost(dev_priv, PORT_E, iboost); 988 } 989 990 static u8 intel_ddi_dp_voltage_max(struct intel_dp *intel_dp, 991 const struct intel_crtc_state *crtc_state) 992 { 993 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 994 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 995 int n_entries; 996 997 encoder->get_buf_trans(encoder, crtc_state, &n_entries); 998 999 if (drm_WARN_ON(&dev_priv->drm, n_entries < 1)) 1000 n_entries = 1; 1001 if (drm_WARN_ON(&dev_priv->drm, 1002 n_entries > ARRAY_SIZE(index_to_dp_signal_levels))) 1003 n_entries = ARRAY_SIZE(index_to_dp_signal_levels); 1004 1005 return index_to_dp_signal_levels[n_entries - 1] & 1006 DP_TRAIN_VOLTAGE_SWING_MASK; 1007 } 1008 1009 /* 1010 * We assume that the full set of pre-emphasis values can be 1011 * used on all DDI platforms. Should that change we need to 1012 * rethink this code. 1013 */ 1014 static u8 intel_ddi_dp_preemph_max(struct intel_dp *intel_dp) 1015 { 1016 return DP_TRAIN_PRE_EMPH_LEVEL_3; 1017 } 1018 1019 static u32 icl_combo_phy_loadgen_select(const struct intel_crtc_state *crtc_state, 1020 int lane) 1021 { 1022 if (crtc_state->port_clock > 600000) 1023 return 0; 1024 1025 if (crtc_state->lane_count == 4) 1026 return lane >= 1 ? LOADGEN_SELECT : 0; 1027 else 1028 return lane == 1 || lane == 2 ? LOADGEN_SELECT : 0; 1029 } 1030 1031 static void icl_ddi_combo_vswing_program(struct intel_encoder *encoder, 1032 const struct intel_crtc_state *crtc_state) 1033 { 1034 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1035 const struct intel_ddi_buf_trans *trans; 1036 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 1037 int n_entries, ln; 1038 u32 val; 1039 1040 trans = encoder->get_buf_trans(encoder, crtc_state, &n_entries); 1041 if (drm_WARN_ON_ONCE(&dev_priv->drm, !trans)) 1042 return; 1043 1044 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) { 1045 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1046 1047 val = EDP4K2K_MODE_OVRD_EN | EDP4K2K_MODE_OVRD_OPTIMIZED; 1048 intel_dp->hobl_active = is_hobl_buf_trans(trans); 1049 intel_de_rmw(dev_priv, ICL_PORT_CL_DW10(phy), val, 1050 intel_dp->hobl_active ? val : 0); 1051 } 1052 1053 /* Set PORT_TX_DW5 */ 1054 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN(0, phy)); 1055 val &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK | 1056 TAP2_DISABLE | TAP3_DISABLE); 1057 val |= SCALING_MODE_SEL(0x2); 1058 val |= RTERM_SELECT(0x6); 1059 val |= TAP3_DISABLE; 1060 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 1061 1062 /* Program PORT_TX_DW2 */ 1063 for (ln = 0; ln < 4; ln++) { 1064 int level = intel_ddi_level(encoder, crtc_state, ln); 1065 1066 intel_de_rmw(dev_priv, ICL_PORT_TX_DW2_LN(ln, phy), 1067 SWING_SEL_UPPER_MASK | SWING_SEL_LOWER_MASK | RCOMP_SCALAR_MASK, 1068 SWING_SEL_UPPER(trans->entries[level].icl.dw2_swing_sel) | 1069 SWING_SEL_LOWER(trans->entries[level].icl.dw2_swing_sel) | 1070 RCOMP_SCALAR(0x98)); 1071 } 1072 1073 /* Program PORT_TX_DW4 */ 1074 /* We cannot write to GRP. It would overwrite individual loadgen. 
*/ 1075 for (ln = 0; ln < 4; ln++) { 1076 int level = intel_ddi_level(encoder, crtc_state, ln); 1077 1078 intel_de_rmw(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), 1079 POST_CURSOR_1_MASK | POST_CURSOR_2_MASK | CURSOR_COEFF_MASK, 1080 POST_CURSOR_1(trans->entries[level].icl.dw4_post_cursor_1) | 1081 POST_CURSOR_2(trans->entries[level].icl.dw4_post_cursor_2) | 1082 CURSOR_COEFF(trans->entries[level].icl.dw4_cursor_coeff)); 1083 } 1084 1085 /* Program PORT_TX_DW7 */ 1086 for (ln = 0; ln < 4; ln++) { 1087 int level = intel_ddi_level(encoder, crtc_state, ln); 1088 1089 intel_de_rmw(dev_priv, ICL_PORT_TX_DW7_LN(ln, phy), 1090 N_SCALAR_MASK, 1091 N_SCALAR(trans->entries[level].icl.dw7_n_scalar)); 1092 } 1093 } 1094 1095 static void icl_combo_phy_set_signal_levels(struct intel_encoder *encoder, 1096 const struct intel_crtc_state *crtc_state) 1097 { 1098 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1099 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 1100 u32 val; 1101 int ln; 1102 1103 /* 1104 * 1. If port type is eDP or DP, 1105 * set PORT_PCS_DW1 cmnkeeper_enable to 1b, 1106 * else clear to 0b. 1107 */ 1108 val = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_LN(0, phy)); 1109 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1110 val &= ~COMMON_KEEPER_EN; 1111 else 1112 val |= COMMON_KEEPER_EN; 1113 intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy), val); 1114 1115 /* 2. Program loadgen select */ 1116 /* 1117 * Program PORT_TX_DW4 depending on Bit rate and used lanes 1118 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1) 1119 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0) 1120 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0) 1121 */ 1122 for (ln = 0; ln < 4; ln++) { 1123 intel_de_rmw(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), 1124 LOADGEN_SELECT, 1125 icl_combo_phy_loadgen_select(crtc_state, ln)); 1126 } 1127 1128 /* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */ 1129 intel_de_rmw(dev_priv, ICL_PORT_CL_DW5(phy), 1130 0, SUS_CLOCK_CONFIG); 1131 1132 /* 4. Clear training enable to change swing values */ 1133 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN(0, phy)); 1134 val &= ~TX_TRAINING_EN; 1135 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 1136 1137 /* 5. Program swing and de-emphasis */ 1138 icl_ddi_combo_vswing_program(encoder, crtc_state); 1139 1140 /* 6. 
Set training enable to trigger update */ 1141 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN(0, phy)); 1142 val |= TX_TRAINING_EN; 1143 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 1144 } 1145 1146 static void icl_mg_phy_set_signal_levels(struct intel_encoder *encoder, 1147 const struct intel_crtc_state *crtc_state) 1148 { 1149 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1150 enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port); 1151 const struct intel_ddi_buf_trans *trans; 1152 int n_entries, ln; 1153 1154 if (intel_tc_port_in_tbt_alt_mode(enc_to_dig_port(encoder))) 1155 return; 1156 1157 trans = encoder->get_buf_trans(encoder, crtc_state, &n_entries); 1158 if (drm_WARN_ON_ONCE(&dev_priv->drm, !trans)) 1159 return; 1160 1161 for (ln = 0; ln < 2; ln++) { 1162 intel_de_rmw(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port), 1163 CRI_USE_FS32, 0); 1164 intel_de_rmw(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port), 1165 CRI_USE_FS32, 0); 1166 } 1167 1168 /* Program MG_TX_SWINGCTRL with values from vswing table */ 1169 for (ln = 0; ln < 2; ln++) { 1170 int level; 1171 1172 level = intel_ddi_level(encoder, crtc_state, 2*ln+0); 1173 1174 intel_de_rmw(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port), 1175 CRI_TXDEEMPH_OVERRIDE_17_12_MASK, 1176 CRI_TXDEEMPH_OVERRIDE_17_12(trans->entries[level].mg.cri_txdeemph_override_17_12)); 1177 1178 level = intel_ddi_level(encoder, crtc_state, 2*ln+1); 1179 1180 intel_de_rmw(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port), 1181 CRI_TXDEEMPH_OVERRIDE_17_12_MASK, 1182 CRI_TXDEEMPH_OVERRIDE_17_12(trans->entries[level].mg.cri_txdeemph_override_17_12)); 1183 } 1184 1185 /* Program MG_TX_DRVCTRL with values from vswing table */ 1186 for (ln = 0; ln < 2; ln++) { 1187 int level; 1188 1189 level = intel_ddi_level(encoder, crtc_state, 2*ln+0); 1190 1191 intel_de_rmw(dev_priv, MG_TX1_DRVCTRL(ln, tc_port), 1192 CRI_TXDEEMPH_OVERRIDE_11_6_MASK | 1193 CRI_TXDEEMPH_OVERRIDE_5_0_MASK, 1194 CRI_TXDEEMPH_OVERRIDE_11_6(trans->entries[level].mg.cri_txdeemph_override_11_6) | 1195 CRI_TXDEEMPH_OVERRIDE_5_0(trans->entries[level].mg.cri_txdeemph_override_5_0) | 1196 CRI_TXDEEMPH_OVERRIDE_EN); 1197 1198 level = intel_ddi_level(encoder, crtc_state, 2*ln+1); 1199 1200 intel_de_rmw(dev_priv, MG_TX2_DRVCTRL(ln, tc_port), 1201 CRI_TXDEEMPH_OVERRIDE_11_6_MASK | 1202 CRI_TXDEEMPH_OVERRIDE_5_0_MASK, 1203 CRI_TXDEEMPH_OVERRIDE_11_6(trans->entries[level].mg.cri_txdeemph_override_11_6) | 1204 CRI_TXDEEMPH_OVERRIDE_5_0(trans->entries[level].mg.cri_txdeemph_override_5_0) | 1205 CRI_TXDEEMPH_OVERRIDE_EN); 1206 1207 /* FIXME: Program CRI_LOADGEN_SEL after the spec is updated */ 1208 } 1209 1210 /* 1211 * Program MG_CLKHUB<LN, port being used> with value from frequency table 1212 * In case of Legacy mode on MG PHY, both TX1 and TX2 enabled so use the 1213 * values from table for which TX1 and TX2 enabled. 1214 */ 1215 for (ln = 0; ln < 2; ln++) { 1216 intel_de_rmw(dev_priv, MG_CLKHUB(ln, tc_port), 1217 CFG_LOW_RATE_LKREN_EN, 1218 crtc_state->port_clock < 300000 ? CFG_LOW_RATE_LKREN_EN : 0); 1219 } 1220 1221 /* Program the MG_TX_DCC<LN, port being used> based on the link frequency */ 1222 for (ln = 0; ln < 2; ln++) { 1223 intel_de_rmw(dev_priv, MG_TX1_DCC(ln, tc_port), 1224 CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK | 1225 CFG_AMI_CK_DIV_OVERRIDE_EN, 1226 crtc_state->port_clock > 500000 ? 
1227 CFG_AMI_CK_DIV_OVERRIDE_VAL(1) | 1228 CFG_AMI_CK_DIV_OVERRIDE_EN : 0); 1229 1230 intel_de_rmw(dev_priv, MG_TX2_DCC(ln, tc_port), 1231 CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK | 1232 CFG_AMI_CK_DIV_OVERRIDE_EN, 1233 crtc_state->port_clock > 500000 ? 1234 CFG_AMI_CK_DIV_OVERRIDE_VAL(1) | 1235 CFG_AMI_CK_DIV_OVERRIDE_EN : 0); 1236 } 1237 1238 /* Program MG_TX_PISO_READLOAD with values from vswing table */ 1239 for (ln = 0; ln < 2; ln++) { 1240 intel_de_rmw(dev_priv, MG_TX1_PISO_READLOAD(ln, tc_port), 1241 0, CRI_CALCINIT); 1242 intel_de_rmw(dev_priv, MG_TX2_PISO_READLOAD(ln, tc_port), 1243 0, CRI_CALCINIT); 1244 } 1245 } 1246 1247 static void tgl_dkl_phy_set_signal_levels(struct intel_encoder *encoder, 1248 const struct intel_crtc_state *crtc_state) 1249 { 1250 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1251 enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port); 1252 const struct intel_ddi_buf_trans *trans; 1253 int n_entries, ln; 1254 1255 if (intel_tc_port_in_tbt_alt_mode(enc_to_dig_port(encoder))) 1256 return; 1257 1258 trans = encoder->get_buf_trans(encoder, crtc_state, &n_entries); 1259 if (drm_WARN_ON_ONCE(&dev_priv->drm, !trans)) 1260 return; 1261 1262 for (ln = 0; ln < 2; ln++) { 1263 int level; 1264 1265 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 1266 HIP_INDEX_VAL(tc_port, ln)); 1267 1268 intel_de_write(dev_priv, DKL_TX_PMD_LANE_SUS(tc_port), 0); 1269 1270 level = intel_ddi_level(encoder, crtc_state, 2*ln+0); 1271 1272 intel_de_rmw(dev_priv, DKL_TX_DPCNTL0(tc_port), 1273 DKL_TX_PRESHOOT_COEFF_MASK | 1274 DKL_TX_DE_EMPAHSIS_COEFF_MASK | 1275 DKL_TX_VSWING_CONTROL_MASK, 1276 DKL_TX_PRESHOOT_COEFF(trans->entries[level].dkl.preshoot) | 1277 DKL_TX_DE_EMPHASIS_COEFF(trans->entries[level].dkl.de_emphasis) | 1278 DKL_TX_VSWING_CONTROL(trans->entries[level].dkl.vswing)); 1279 1280 level = intel_ddi_level(encoder, crtc_state, 2*ln+1); 1281 1282 intel_de_rmw(dev_priv, DKL_TX_DPCNTL1(tc_port), 1283 DKL_TX_PRESHOOT_COEFF_MASK | 1284 DKL_TX_DE_EMPAHSIS_COEFF_MASK | 1285 DKL_TX_VSWING_CONTROL_MASK, 1286 DKL_TX_PRESHOOT_COEFF(trans->entries[level].dkl.preshoot) | 1287 DKL_TX_DE_EMPHASIS_COEFF(trans->entries[level].dkl.de_emphasis) | 1288 DKL_TX_VSWING_CONTROL(trans->entries[level].dkl.vswing)); 1289 1290 intel_de_rmw(dev_priv, DKL_TX_DPCNTL2(tc_port), 1291 DKL_TX_DP20BITMODE, 0); 1292 1293 if (IS_ALDERLAKE_P(dev_priv)) { 1294 u32 val; 1295 1296 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 1297 if (ln == 0) { 1298 val = DKL_TX_DPCNTL2_CFG_LOADGENSELECT_TX1(0); 1299 val |= DKL_TX_DPCNTL2_CFG_LOADGENSELECT_TX2(2); 1300 } else { 1301 val = DKL_TX_DPCNTL2_CFG_LOADGENSELECT_TX1(3); 1302 val |= DKL_TX_DPCNTL2_CFG_LOADGENSELECT_TX2(3); 1303 } 1304 } else { 1305 val = DKL_TX_DPCNTL2_CFG_LOADGENSELECT_TX1(0); 1306 val |= DKL_TX_DPCNTL2_CFG_LOADGENSELECT_TX2(0); 1307 } 1308 1309 intel_de_rmw(dev_priv, DKL_TX_DPCNTL2(tc_port), 1310 DKL_TX_DPCNTL2_CFG_LOADGENSELECT_TX1_MASK | 1311 DKL_TX_DPCNTL2_CFG_LOADGENSELECT_TX2_MASK, 1312 val); 1313 } 1314 } 1315 } 1316 1317 static int translate_signal_level(struct intel_dp *intel_dp, 1318 u8 signal_levels) 1319 { 1320 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 1321 int i; 1322 1323 for (i = 0; i < ARRAY_SIZE(index_to_dp_signal_levels); i++) { 1324 if (index_to_dp_signal_levels[i] == signal_levels) 1325 return i; 1326 } 1327 1328 drm_WARN(&i915->drm, 1, 1329 "Unsupported voltage swing/pre-emphasis level: 0x%x\n", 1330 signal_levels); 1331 1332 return 0; 1333 } 1334 1335 static int intel_ddi_dp_level(struct 
intel_dp *intel_dp, 1336 const struct intel_crtc_state *crtc_state, 1337 int lane) 1338 { 1339 u8 train_set = intel_dp->train_set[lane]; 1340 1341 if (intel_dp_is_uhbr(crtc_state)) { 1342 return train_set & DP_TX_FFE_PRESET_VALUE_MASK; 1343 } else { 1344 u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK | 1345 DP_TRAIN_PRE_EMPHASIS_MASK); 1346 1347 return translate_signal_level(intel_dp, signal_levels); 1348 } 1349 } 1350 1351 int intel_ddi_level(struct intel_encoder *encoder, 1352 const struct intel_crtc_state *crtc_state, 1353 int lane) 1354 { 1355 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1356 const struct intel_ddi_buf_trans *trans; 1357 int level, n_entries; 1358 1359 trans = encoder->get_buf_trans(encoder, crtc_state, &n_entries); 1360 if (drm_WARN_ON_ONCE(&i915->drm, !trans)) 1361 return 0; 1362 1363 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1364 level = intel_ddi_hdmi_level(encoder, trans); 1365 else 1366 level = intel_ddi_dp_level(enc_to_intel_dp(encoder), crtc_state, 1367 lane); 1368 1369 if (drm_WARN_ON_ONCE(&i915->drm, level >= n_entries)) 1370 level = n_entries - 1; 1371 1372 return level; 1373 } 1374 1375 static void 1376 hsw_set_signal_levels(struct intel_encoder *encoder, 1377 const struct intel_crtc_state *crtc_state) 1378 { 1379 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1380 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1381 int level = intel_ddi_level(encoder, crtc_state, 0); 1382 enum port port = encoder->port; 1383 u32 signal_levels; 1384 1385 if (has_iboost(dev_priv)) 1386 skl_ddi_set_iboost(encoder, crtc_state, level); 1387 1388 /* HDMI ignores the rest */ 1389 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1390 return; 1391 1392 signal_levels = DDI_BUF_TRANS_SELECT(level); 1393 1394 drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n", 1395 signal_levels); 1396 1397 intel_dp->DP &= ~DDI_BUF_EMP_MASK; 1398 intel_dp->DP |= signal_levels; 1399 1400 intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP); 1401 intel_de_posting_read(dev_priv, DDI_BUF_CTL(port)); 1402 } 1403 1404 static void _icl_ddi_enable_clock(struct drm_i915_private *i915, i915_reg_t reg, 1405 u32 clk_sel_mask, u32 clk_sel, u32 clk_off) 1406 { 1407 mutex_lock(&i915->display.dpll.lock); 1408 1409 intel_de_rmw(i915, reg, clk_sel_mask, clk_sel); 1410 1411 /* 1412 * "This step and the step before must be 1413 * done with separate register writes." 
1414 */ 1415 intel_de_rmw(i915, reg, clk_off, 0); 1416 1417 mutex_unlock(&i915->display.dpll.lock); 1418 } 1419 1420 static void _icl_ddi_disable_clock(struct drm_i915_private *i915, i915_reg_t reg, 1421 u32 clk_off) 1422 { 1423 mutex_lock(&i915->display.dpll.lock); 1424 1425 intel_de_rmw(i915, reg, 0, clk_off); 1426 1427 mutex_unlock(&i915->display.dpll.lock); 1428 } 1429 1430 static bool _icl_ddi_is_clock_enabled(struct drm_i915_private *i915, i915_reg_t reg, 1431 u32 clk_off) 1432 { 1433 return !(intel_de_read(i915, reg) & clk_off); 1434 } 1435 1436 static struct intel_shared_dpll * 1437 _icl_ddi_get_pll(struct drm_i915_private *i915, i915_reg_t reg, 1438 u32 clk_sel_mask, u32 clk_sel_shift) 1439 { 1440 enum intel_dpll_id id; 1441 1442 id = (intel_de_read(i915, reg) & clk_sel_mask) >> clk_sel_shift; 1443 1444 return intel_get_shared_dpll_by_id(i915, id); 1445 } 1446 1447 static void adls_ddi_enable_clock(struct intel_encoder *encoder, 1448 const struct intel_crtc_state *crtc_state) 1449 { 1450 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1451 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1452 enum phy phy = intel_port_to_phy(i915, encoder->port); 1453 1454 if (drm_WARN_ON(&i915->drm, !pll)) 1455 return; 1456 1457 _icl_ddi_enable_clock(i915, ADLS_DPCLKA_CFGCR(phy), 1458 ADLS_DPCLKA_CFGCR_DDI_CLK_SEL_MASK(phy), 1459 pll->info->id << ADLS_DPCLKA_CFGCR_DDI_SHIFT(phy), 1460 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1461 } 1462 1463 static void adls_ddi_disable_clock(struct intel_encoder *encoder) 1464 { 1465 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1466 enum phy phy = intel_port_to_phy(i915, encoder->port); 1467 1468 _icl_ddi_disable_clock(i915, ADLS_DPCLKA_CFGCR(phy), 1469 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1470 } 1471 1472 static bool adls_ddi_is_clock_enabled(struct intel_encoder *encoder) 1473 { 1474 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1475 enum phy phy = intel_port_to_phy(i915, encoder->port); 1476 1477 return _icl_ddi_is_clock_enabled(i915, ADLS_DPCLKA_CFGCR(phy), 1478 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1479 } 1480 1481 static struct intel_shared_dpll *adls_ddi_get_pll(struct intel_encoder *encoder) 1482 { 1483 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1484 enum phy phy = intel_port_to_phy(i915, encoder->port); 1485 1486 return _icl_ddi_get_pll(i915, ADLS_DPCLKA_CFGCR(phy), 1487 ADLS_DPCLKA_CFGCR_DDI_CLK_SEL_MASK(phy), 1488 ADLS_DPCLKA_CFGCR_DDI_SHIFT(phy)); 1489 } 1490 1491 static void rkl_ddi_enable_clock(struct intel_encoder *encoder, 1492 const struct intel_crtc_state *crtc_state) 1493 { 1494 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1495 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1496 enum phy phy = intel_port_to_phy(i915, encoder->port); 1497 1498 if (drm_WARN_ON(&i915->drm, !pll)) 1499 return; 1500 1501 _icl_ddi_enable_clock(i915, ICL_DPCLKA_CFGCR0, 1502 RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy), 1503 RKL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy), 1504 RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1505 } 1506 1507 static void rkl_ddi_disable_clock(struct intel_encoder *encoder) 1508 { 1509 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1510 enum phy phy = intel_port_to_phy(i915, encoder->port); 1511 1512 _icl_ddi_disable_clock(i915, ICL_DPCLKA_CFGCR0, 1513 RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1514 } 1515 1516 static bool rkl_ddi_is_clock_enabled(struct intel_encoder *encoder) 1517 { 1518 struct drm_i915_private *i915 = 
to_i915(encoder->base.dev); 1519 enum phy phy = intel_port_to_phy(i915, encoder->port); 1520 1521 return _icl_ddi_is_clock_enabled(i915, ICL_DPCLKA_CFGCR0, 1522 RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1523 } 1524 1525 static struct intel_shared_dpll *rkl_ddi_get_pll(struct intel_encoder *encoder) 1526 { 1527 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1528 enum phy phy = intel_port_to_phy(i915, encoder->port); 1529 1530 return _icl_ddi_get_pll(i915, ICL_DPCLKA_CFGCR0, 1531 RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy), 1532 RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(phy)); 1533 } 1534 1535 static void dg1_ddi_enable_clock(struct intel_encoder *encoder, 1536 const struct intel_crtc_state *crtc_state) 1537 { 1538 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1539 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1540 enum phy phy = intel_port_to_phy(i915, encoder->port); 1541 1542 if (drm_WARN_ON(&i915->drm, !pll)) 1543 return; 1544 1545 /* 1546 * If we fail this, something went very wrong: first 2 PLLs should be 1547 * used by first 2 phys and last 2 PLLs by last phys 1548 */ 1549 if (drm_WARN_ON(&i915->drm, 1550 (pll->info->id < DPLL_ID_DG1_DPLL2 && phy >= PHY_C) || 1551 (pll->info->id >= DPLL_ID_DG1_DPLL2 && phy < PHY_C))) 1552 return; 1553 1554 _icl_ddi_enable_clock(i915, DG1_DPCLKA_CFGCR0(phy), 1555 DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy), 1556 DG1_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy), 1557 DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1558 } 1559 1560 static void dg1_ddi_disable_clock(struct intel_encoder *encoder) 1561 { 1562 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1563 enum phy phy = intel_port_to_phy(i915, encoder->port); 1564 1565 _icl_ddi_disable_clock(i915, DG1_DPCLKA_CFGCR0(phy), 1566 DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1567 } 1568 1569 static bool dg1_ddi_is_clock_enabled(struct intel_encoder *encoder) 1570 { 1571 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1572 enum phy phy = intel_port_to_phy(i915, encoder->port); 1573 1574 return _icl_ddi_is_clock_enabled(i915, DG1_DPCLKA_CFGCR0(phy), 1575 DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1576 } 1577 1578 static struct intel_shared_dpll *dg1_ddi_get_pll(struct intel_encoder *encoder) 1579 { 1580 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1581 enum phy phy = intel_port_to_phy(i915, encoder->port); 1582 enum intel_dpll_id id; 1583 u32 val; 1584 1585 val = intel_de_read(i915, DG1_DPCLKA_CFGCR0(phy)); 1586 val &= DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy); 1587 val >>= DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(phy); 1588 id = val; 1589 1590 /* 1591 * _DG1_DPCLKA0_CFGCR0 maps between DPLL 0 and 1 with one bit for phy A 1592 * and B while _DG1_DPCLKA1_CFGCR0 maps between DPLL 2 and 3 with one 1593 * bit for phy C and D. 
1594 */ 1595 if (phy >= PHY_C) 1596 id += DPLL_ID_DG1_DPLL2; 1597 1598 return intel_get_shared_dpll_by_id(i915, id); 1599 } 1600 1601 static void icl_ddi_combo_enable_clock(struct intel_encoder *encoder, 1602 const struct intel_crtc_state *crtc_state) 1603 { 1604 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1605 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1606 enum phy phy = intel_port_to_phy(i915, encoder->port); 1607 1608 if (drm_WARN_ON(&i915->drm, !pll)) 1609 return; 1610 1611 _icl_ddi_enable_clock(i915, ICL_DPCLKA_CFGCR0, 1612 ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy), 1613 ICL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy), 1614 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1615 } 1616 1617 static void icl_ddi_combo_disable_clock(struct intel_encoder *encoder) 1618 { 1619 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1620 enum phy phy = intel_port_to_phy(i915, encoder->port); 1621 1622 _icl_ddi_disable_clock(i915, ICL_DPCLKA_CFGCR0, 1623 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1624 } 1625 1626 static bool icl_ddi_combo_is_clock_enabled(struct intel_encoder *encoder) 1627 { 1628 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1629 enum phy phy = intel_port_to_phy(i915, encoder->port); 1630 1631 return _icl_ddi_is_clock_enabled(i915, ICL_DPCLKA_CFGCR0, 1632 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1633 } 1634 1635 struct intel_shared_dpll *icl_ddi_combo_get_pll(struct intel_encoder *encoder) 1636 { 1637 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1638 enum phy phy = intel_port_to_phy(i915, encoder->port); 1639 1640 return _icl_ddi_get_pll(i915, ICL_DPCLKA_CFGCR0, 1641 ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy), 1642 ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(phy)); 1643 } 1644 1645 static void jsl_ddi_tc_enable_clock(struct intel_encoder *encoder, 1646 const struct intel_crtc_state *crtc_state) 1647 { 1648 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1649 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1650 enum port port = encoder->port; 1651 1652 if (drm_WARN_ON(&i915->drm, !pll)) 1653 return; 1654 1655 /* 1656 * "For DDIC and DDID, program DDI_CLK_SEL to map the MG clock to the port. 1657 * MG does not exist, but the programming is required to ungate DDIC and DDID." 
1658 */ 1659 intel_de_write(i915, DDI_CLK_SEL(port), DDI_CLK_SEL_MG); 1660 1661 icl_ddi_combo_enable_clock(encoder, crtc_state); 1662 } 1663 1664 static void jsl_ddi_tc_disable_clock(struct intel_encoder *encoder) 1665 { 1666 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1667 enum port port = encoder->port; 1668 1669 icl_ddi_combo_disable_clock(encoder); 1670 1671 intel_de_write(i915, DDI_CLK_SEL(port), DDI_CLK_SEL_NONE); 1672 } 1673 1674 static bool jsl_ddi_tc_is_clock_enabled(struct intel_encoder *encoder) 1675 { 1676 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1677 enum port port = encoder->port; 1678 u32 tmp; 1679 1680 tmp = intel_de_read(i915, DDI_CLK_SEL(port)); 1681 1682 if ((tmp & DDI_CLK_SEL_MASK) == DDI_CLK_SEL_NONE) 1683 return false; 1684 1685 return icl_ddi_combo_is_clock_enabled(encoder); 1686 } 1687 1688 static void icl_ddi_tc_enable_clock(struct intel_encoder *encoder, 1689 const struct intel_crtc_state *crtc_state) 1690 { 1691 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1692 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1693 enum tc_port tc_port = intel_port_to_tc(i915, encoder->port); 1694 enum port port = encoder->port; 1695 1696 if (drm_WARN_ON(&i915->drm, !pll)) 1697 return; 1698 1699 intel_de_write(i915, DDI_CLK_SEL(port), 1700 icl_pll_to_ddi_clk_sel(encoder, crtc_state)); 1701 1702 mutex_lock(&i915->display.dpll.lock); 1703 1704 intel_de_rmw(i915, ICL_DPCLKA_CFGCR0, 1705 ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port), 0); 1706 1707 mutex_unlock(&i915->display.dpll.lock); 1708 } 1709 1710 static void icl_ddi_tc_disable_clock(struct intel_encoder *encoder) 1711 { 1712 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1713 enum tc_port tc_port = intel_port_to_tc(i915, encoder->port); 1714 enum port port = encoder->port; 1715 1716 mutex_lock(&i915->display.dpll.lock); 1717 1718 intel_de_rmw(i915, ICL_DPCLKA_CFGCR0, 1719 0, ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port)); 1720 1721 mutex_unlock(&i915->display.dpll.lock); 1722 1723 intel_de_write(i915, DDI_CLK_SEL(port), DDI_CLK_SEL_NONE); 1724 } 1725 1726 static bool icl_ddi_tc_is_clock_enabled(struct intel_encoder *encoder) 1727 { 1728 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1729 enum tc_port tc_port = intel_port_to_tc(i915, encoder->port); 1730 enum port port = encoder->port; 1731 u32 tmp; 1732 1733 tmp = intel_de_read(i915, DDI_CLK_SEL(port)); 1734 1735 if ((tmp & DDI_CLK_SEL_MASK) == DDI_CLK_SEL_NONE) 1736 return false; 1737 1738 tmp = intel_de_read(i915, ICL_DPCLKA_CFGCR0); 1739 1740 return !(tmp & ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port)); 1741 } 1742 1743 static struct intel_shared_dpll *icl_ddi_tc_get_pll(struct intel_encoder *encoder) 1744 { 1745 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1746 enum tc_port tc_port = intel_port_to_tc(i915, encoder->port); 1747 enum port port = encoder->port; 1748 enum intel_dpll_id id; 1749 u32 tmp; 1750 1751 tmp = intel_de_read(i915, DDI_CLK_SEL(port)); 1752 1753 switch (tmp & DDI_CLK_SEL_MASK) { 1754 case DDI_CLK_SEL_TBT_162: 1755 case DDI_CLK_SEL_TBT_270: 1756 case DDI_CLK_SEL_TBT_540: 1757 case DDI_CLK_SEL_TBT_810: 1758 id = DPLL_ID_ICL_TBTPLL; 1759 break; 1760 case DDI_CLK_SEL_MG: 1761 id = icl_tc_port_to_pll_id(tc_port); 1762 break; 1763 default: 1764 MISSING_CASE(tmp); 1765 fallthrough; 1766 case DDI_CLK_SEL_NONE: 1767 return NULL; 1768 } 1769 1770 return intel_get_shared_dpll_by_id(i915, id); 1771 } 1772 1773 static struct intel_shared_dpll *bxt_ddi_get_pll(struct intel_encoder 
*encoder) 1774 { 1775 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1776 enum intel_dpll_id id; 1777 1778 switch (encoder->port) { 1779 case PORT_A: 1780 id = DPLL_ID_SKL_DPLL0; 1781 break; 1782 case PORT_B: 1783 id = DPLL_ID_SKL_DPLL1; 1784 break; 1785 case PORT_C: 1786 id = DPLL_ID_SKL_DPLL2; 1787 break; 1788 default: 1789 MISSING_CASE(encoder->port); 1790 return NULL; 1791 } 1792 1793 return intel_get_shared_dpll_by_id(i915, id); 1794 } 1795 1796 static void skl_ddi_enable_clock(struct intel_encoder *encoder, 1797 const struct intel_crtc_state *crtc_state) 1798 { 1799 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1800 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1801 enum port port = encoder->port; 1802 1803 if (drm_WARN_ON(&i915->drm, !pll)) 1804 return; 1805 1806 mutex_lock(&i915->display.dpll.lock); 1807 1808 intel_de_rmw(i915, DPLL_CTRL2, 1809 DPLL_CTRL2_DDI_CLK_OFF(port) | 1810 DPLL_CTRL2_DDI_CLK_SEL_MASK(port), 1811 DPLL_CTRL2_DDI_CLK_SEL(pll->info->id, port) | 1812 DPLL_CTRL2_DDI_SEL_OVERRIDE(port)); 1813 1814 mutex_unlock(&i915->display.dpll.lock); 1815 } 1816 1817 static void skl_ddi_disable_clock(struct intel_encoder *encoder) 1818 { 1819 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1820 enum port port = encoder->port; 1821 1822 mutex_lock(&i915->display.dpll.lock); 1823 1824 intel_de_rmw(i915, DPLL_CTRL2, 1825 0, DPLL_CTRL2_DDI_CLK_OFF(port)); 1826 1827 mutex_unlock(&i915->display.dpll.lock); 1828 } 1829 1830 static bool skl_ddi_is_clock_enabled(struct intel_encoder *encoder) 1831 { 1832 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1833 enum port port = encoder->port; 1834 1835 /* 1836 * FIXME Not sure if the override affects both 1837 * the PLL selection and the CLK_OFF bit. 1838 */ 1839 return !(intel_de_read(i915, DPLL_CTRL2) & DPLL_CTRL2_DDI_CLK_OFF(port)); 1840 } 1841 1842 static struct intel_shared_dpll *skl_ddi_get_pll(struct intel_encoder *encoder) 1843 { 1844 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1845 enum port port = encoder->port; 1846 enum intel_dpll_id id; 1847 u32 tmp; 1848 1849 tmp = intel_de_read(i915, DPLL_CTRL2); 1850 1851 /* 1852 * FIXME Not sure if the override affects both 1853 * the PLL selection and the CLK_OFF bit. 
1854 */ 1855 if ((tmp & DPLL_CTRL2_DDI_SEL_OVERRIDE(port)) == 0) 1856 return NULL; 1857 1858 id = (tmp & DPLL_CTRL2_DDI_CLK_SEL_MASK(port)) >> 1859 DPLL_CTRL2_DDI_CLK_SEL_SHIFT(port); 1860 1861 return intel_get_shared_dpll_by_id(i915, id); 1862 } 1863 1864 void hsw_ddi_enable_clock(struct intel_encoder *encoder, 1865 const struct intel_crtc_state *crtc_state) 1866 { 1867 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1868 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1869 enum port port = encoder->port; 1870 1871 if (drm_WARN_ON(&i915->drm, !pll)) 1872 return; 1873 1874 intel_de_write(i915, PORT_CLK_SEL(port), hsw_pll_to_ddi_pll_sel(pll)); 1875 } 1876 1877 void hsw_ddi_disable_clock(struct intel_encoder *encoder) 1878 { 1879 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1880 enum port port = encoder->port; 1881 1882 intel_de_write(i915, PORT_CLK_SEL(port), PORT_CLK_SEL_NONE); 1883 } 1884 1885 bool hsw_ddi_is_clock_enabled(struct intel_encoder *encoder) 1886 { 1887 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1888 enum port port = encoder->port; 1889 1890 return intel_de_read(i915, PORT_CLK_SEL(port)) != PORT_CLK_SEL_NONE; 1891 } 1892 1893 static struct intel_shared_dpll *hsw_ddi_get_pll(struct intel_encoder *encoder) 1894 { 1895 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1896 enum port port = encoder->port; 1897 enum intel_dpll_id id; 1898 u32 tmp; 1899 1900 tmp = intel_de_read(i915, PORT_CLK_SEL(port)); 1901 1902 switch (tmp & PORT_CLK_SEL_MASK) { 1903 case PORT_CLK_SEL_WRPLL1: 1904 id = DPLL_ID_WRPLL1; 1905 break; 1906 case PORT_CLK_SEL_WRPLL2: 1907 id = DPLL_ID_WRPLL2; 1908 break; 1909 case PORT_CLK_SEL_SPLL: 1910 id = DPLL_ID_SPLL; 1911 break; 1912 case PORT_CLK_SEL_LCPLL_810: 1913 id = DPLL_ID_LCPLL_810; 1914 break; 1915 case PORT_CLK_SEL_LCPLL_1350: 1916 id = DPLL_ID_LCPLL_1350; 1917 break; 1918 case PORT_CLK_SEL_LCPLL_2700: 1919 id = DPLL_ID_LCPLL_2700; 1920 break; 1921 default: 1922 MISSING_CASE(tmp); 1923 fallthrough; 1924 case PORT_CLK_SEL_NONE: 1925 return NULL; 1926 } 1927 1928 return intel_get_shared_dpll_by_id(i915, id); 1929 } 1930 1931 void intel_ddi_enable_clock(struct intel_encoder *encoder, 1932 const struct intel_crtc_state *crtc_state) 1933 { 1934 if (encoder->enable_clock) 1935 encoder->enable_clock(encoder, crtc_state); 1936 } 1937 1938 void intel_ddi_disable_clock(struct intel_encoder *encoder) 1939 { 1940 if (encoder->disable_clock) 1941 encoder->disable_clock(encoder); 1942 } 1943 1944 void intel_ddi_sanitize_encoder_pll_mapping(struct intel_encoder *encoder) 1945 { 1946 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1947 u32 port_mask; 1948 bool ddi_clk_needed; 1949 1950 /* 1951 * In case of DP MST, we sanitize the primary encoder only, not the 1952 * virtual ones. 1953 */ 1954 if (encoder->type == INTEL_OUTPUT_DP_MST) 1955 return; 1956 1957 if (!encoder->base.crtc && intel_encoder_is_dp(encoder)) { 1958 u8 pipe_mask; 1959 bool is_mst; 1960 1961 intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst); 1962 /* 1963 * In the unlikely case that BIOS enables DP in MST mode, just 1964 * warn since our MST HW readout is incomplete. 
1965 */ 1966 if (drm_WARN_ON(&i915->drm, is_mst)) 1967 return; 1968 } 1969 1970 port_mask = BIT(encoder->port); 1971 ddi_clk_needed = encoder->base.crtc; 1972 1973 if (encoder->type == INTEL_OUTPUT_DSI) { 1974 struct intel_encoder *other_encoder; 1975 1976 port_mask = intel_dsi_encoder_ports(encoder); 1977 /* 1978 * Sanity check that we haven't incorrectly registered another 1979 * encoder using any of the ports of this DSI encoder. 1980 */ 1981 for_each_intel_encoder(&i915->drm, other_encoder) { 1982 if (other_encoder == encoder) 1983 continue; 1984 1985 if (drm_WARN_ON(&i915->drm, 1986 port_mask & BIT(other_encoder->port))) 1987 return; 1988 } 1989 /* 1990 * For DSI we keep the ddi clocks gated 1991 * except during enable/disable sequence. 1992 */ 1993 ddi_clk_needed = false; 1994 } 1995 1996 if (ddi_clk_needed || !encoder->is_clock_enabled || 1997 !encoder->is_clock_enabled(encoder)) 1998 return; 1999 2000 drm_notice(&i915->drm, 2001 "[ENCODER:%d:%s] is disabled/in DSI mode with an ungated DDI clock, gate it\n", 2002 encoder->base.base.id, encoder->base.name); 2003 2004 encoder->disable_clock(encoder); 2005 } 2006 2007 static void 2008 icl_program_mg_dp_mode(struct intel_digital_port *dig_port, 2009 const struct intel_crtc_state *crtc_state) 2010 { 2011 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 2012 enum tc_port tc_port = intel_port_to_tc(dev_priv, dig_port->base.port); 2013 enum phy phy = intel_port_to_phy(dev_priv, dig_port->base.port); 2014 u32 ln0, ln1, pin_assignment; 2015 u8 width; 2016 2017 if (!intel_phy_is_tc(dev_priv, phy) || 2018 intel_tc_port_in_tbt_alt_mode(dig_port)) 2019 return; 2020 2021 if (DISPLAY_VER(dev_priv) >= 12) { 2022 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2023 HIP_INDEX_VAL(tc_port, 0x0)); 2024 ln0 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port)); 2025 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2026 HIP_INDEX_VAL(tc_port, 0x1)); 2027 ln1 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port)); 2028 } else { 2029 ln0 = intel_de_read(dev_priv, MG_DP_MODE(0, tc_port)); 2030 ln1 = intel_de_read(dev_priv, MG_DP_MODE(1, tc_port)); 2031 } 2032 2033 ln0 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE); 2034 ln1 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE); 2035 2036 /* DPPATC */ 2037 pin_assignment = intel_tc_port_get_pin_assignment_mask(dig_port); 2038 width = crtc_state->lane_count; 2039 2040 switch (pin_assignment) { 2041 case 0x0: 2042 drm_WARN_ON(&dev_priv->drm, 2043 !intel_tc_port_in_legacy_mode(dig_port)); 2044 if (width == 1) { 2045 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 2046 } else { 2047 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2048 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 2049 } 2050 break; 2051 case 0x1: 2052 if (width == 4) { 2053 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2054 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 2055 } 2056 break; 2057 case 0x2: 2058 if (width == 2) { 2059 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2060 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 2061 } 2062 break; 2063 case 0x3: 2064 case 0x5: 2065 if (width == 1) { 2066 ln0 |= MG_DP_MODE_CFG_DP_X1_MODE; 2067 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 2068 } else { 2069 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2070 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 2071 } 2072 break; 2073 case 0x4: 2074 case 0x6: 2075 if (width == 1) { 2076 ln0 |= MG_DP_MODE_CFG_DP_X1_MODE; 2077 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 2078 } else { 2079 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2080 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 2081 } 2082 break; 2083 default: 2084 MISSING_CASE(pin_assignment); 2085 } 2086 2087 if 
(DISPLAY_VER(dev_priv) >= 12) { 2088 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2089 HIP_INDEX_VAL(tc_port, 0x0)); 2090 intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln0); 2091 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2092 HIP_INDEX_VAL(tc_port, 0x1)); 2093 intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln1); 2094 } else { 2095 intel_de_write(dev_priv, MG_DP_MODE(0, tc_port), ln0); 2096 intel_de_write(dev_priv, MG_DP_MODE(1, tc_port), ln1); 2097 } 2098 } 2099 2100 static enum transcoder 2101 tgl_dp_tp_transcoder(const struct intel_crtc_state *crtc_state) 2102 { 2103 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) 2104 return crtc_state->mst_master_transcoder; 2105 else 2106 return crtc_state->cpu_transcoder; 2107 } 2108 2109 i915_reg_t dp_tp_ctl_reg(struct intel_encoder *encoder, 2110 const struct intel_crtc_state *crtc_state) 2111 { 2112 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2113 2114 if (DISPLAY_VER(dev_priv) >= 12) 2115 return TGL_DP_TP_CTL(tgl_dp_tp_transcoder(crtc_state)); 2116 else 2117 return DP_TP_CTL(encoder->port); 2118 } 2119 2120 i915_reg_t dp_tp_status_reg(struct intel_encoder *encoder, 2121 const struct intel_crtc_state *crtc_state) 2122 { 2123 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2124 2125 if (DISPLAY_VER(dev_priv) >= 12) 2126 return TGL_DP_TP_STATUS(tgl_dp_tp_transcoder(crtc_state)); 2127 else 2128 return DP_TP_STATUS(encoder->port); 2129 } 2130 2131 static void intel_dp_sink_set_msa_timing_par_ignore_state(struct intel_dp *intel_dp, 2132 const struct intel_crtc_state *crtc_state, 2133 bool enable) 2134 { 2135 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 2136 2137 if (!crtc_state->vrr.enable) 2138 return; 2139 2140 if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_DOWNSPREAD_CTRL, 2141 enable ? 
DP_MSA_TIMING_PAR_IGNORE_EN : 0) <= 0) 2142 drm_dbg_kms(&i915->drm, 2143 "Failed to %s MSA_TIMING_PAR_IGNORE in the sink\n", 2144 str_enable_disable(enable)); 2145 } 2146 2147 static void intel_dp_sink_set_fec_ready(struct intel_dp *intel_dp, 2148 const struct intel_crtc_state *crtc_state) 2149 { 2150 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 2151 2152 if (!crtc_state->fec_enable) 2153 return; 2154 2155 if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_FEC_CONFIGURATION, DP_FEC_READY) <= 0) 2156 drm_dbg_kms(&i915->drm, 2157 "Failed to set FEC_READY in the sink\n"); 2158 } 2159 2160 static void intel_ddi_enable_fec(struct intel_encoder *encoder, 2161 const struct intel_crtc_state *crtc_state) 2162 { 2163 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2164 struct intel_dp *intel_dp; 2165 u32 val; 2166 2167 if (!crtc_state->fec_enable) 2168 return; 2169 2170 intel_dp = enc_to_intel_dp(encoder); 2171 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 2172 val |= DP_TP_CTL_FEC_ENABLE; 2173 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 2174 } 2175 2176 static void intel_ddi_disable_fec_state(struct intel_encoder *encoder, 2177 const struct intel_crtc_state *crtc_state) 2178 { 2179 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2180 struct intel_dp *intel_dp; 2181 u32 val; 2182 2183 if (!crtc_state->fec_enable) 2184 return; 2185 2186 intel_dp = enc_to_intel_dp(encoder); 2187 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 2188 val &= ~DP_TP_CTL_FEC_ENABLE; 2189 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 2190 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 2191 } 2192 2193 static void intel_ddi_power_up_lanes(struct intel_encoder *encoder, 2194 const struct intel_crtc_state *crtc_state) 2195 { 2196 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2197 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2198 enum phy phy = intel_port_to_phy(i915, encoder->port); 2199 2200 if (intel_phy_is_combo(i915, phy)) { 2201 bool lane_reversal = 2202 dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL; 2203 2204 intel_combo_phy_power_up_lanes(i915, phy, false, 2205 crtc_state->lane_count, 2206 lane_reversal); 2207 } 2208 } 2209 2210 /* Splitter enable for eDP MSO is limited to certain pipes. 
*/ 2211 static u8 intel_ddi_splitter_pipe_mask(struct drm_i915_private *i915) 2212 { 2213 if (IS_ALDERLAKE_P(i915)) 2214 return BIT(PIPE_A) | BIT(PIPE_B); 2215 else 2216 return BIT(PIPE_A); 2217 } 2218 2219 static void intel_ddi_mso_get_config(struct intel_encoder *encoder, 2220 struct intel_crtc_state *pipe_config) 2221 { 2222 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc); 2223 struct drm_i915_private *i915 = to_i915(crtc->base.dev); 2224 enum pipe pipe = crtc->pipe; 2225 u32 dss1; 2226 2227 if (!HAS_MSO(i915)) 2228 return; 2229 2230 dss1 = intel_de_read(i915, ICL_PIPE_DSS_CTL1(pipe)); 2231 2232 pipe_config->splitter.enable = dss1 & SPLITTER_ENABLE; 2233 if (!pipe_config->splitter.enable) 2234 return; 2235 2236 if (drm_WARN_ON(&i915->drm, !(intel_ddi_splitter_pipe_mask(i915) & BIT(pipe)))) { 2237 pipe_config->splitter.enable = false; 2238 return; 2239 } 2240 2241 switch (dss1 & SPLITTER_CONFIGURATION_MASK) { 2242 default: 2243 drm_WARN(&i915->drm, true, 2244 "Invalid splitter configuration, dss1=0x%08x\n", dss1); 2245 fallthrough; 2246 case SPLITTER_CONFIGURATION_2_SEGMENT: 2247 pipe_config->splitter.link_count = 2; 2248 break; 2249 case SPLITTER_CONFIGURATION_4_SEGMENT: 2250 pipe_config->splitter.link_count = 4; 2251 break; 2252 } 2253 2254 pipe_config->splitter.pixel_overlap = REG_FIELD_GET(OVERLAP_PIXELS_MASK, dss1); 2255 } 2256 2257 static void intel_ddi_mso_configure(const struct intel_crtc_state *crtc_state) 2258 { 2259 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2260 struct drm_i915_private *i915 = to_i915(crtc->base.dev); 2261 enum pipe pipe = crtc->pipe; 2262 u32 dss1 = 0; 2263 2264 if (!HAS_MSO(i915)) 2265 return; 2266 2267 if (crtc_state->splitter.enable) { 2268 dss1 |= SPLITTER_ENABLE; 2269 dss1 |= OVERLAP_PIXELS(crtc_state->splitter.pixel_overlap); 2270 if (crtc_state->splitter.link_count == 2) 2271 dss1 |= SPLITTER_CONFIGURATION_2_SEGMENT; 2272 else 2273 dss1 |= SPLITTER_CONFIGURATION_4_SEGMENT; 2274 } 2275 2276 intel_de_rmw(i915, ICL_PIPE_DSS_CTL1(pipe), 2277 SPLITTER_ENABLE | SPLITTER_CONFIGURATION_MASK | 2278 OVERLAP_PIXELS_MASK, dss1); 2279 } 2280 2281 static void tgl_ddi_pre_enable_dp(struct intel_atomic_state *state, 2282 struct intel_encoder *encoder, 2283 const struct intel_crtc_state *crtc_state, 2284 const struct drm_connector_state *conn_state) 2285 { 2286 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 2287 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2288 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2289 bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST); 2290 2291 intel_dp_set_link_params(intel_dp, 2292 crtc_state->port_clock, 2293 crtc_state->lane_count); 2294 2295 /* 2296 * We only configure what the register value will be here. Actual 2297 * enabling happens during link training farther down. 2298 */ 2299 intel_ddi_init_dp_buf_reg(encoder, crtc_state); 2300 2301 /* 2302 * 1. Enable Power Wells 2303 * 2304 * This was handled at the beginning of intel_atomic_commit_tail(), 2305 * before we called down into this function. 2306 */ 2307 2308 /* 2. Enable Panel Power if PPS is required */ 2309 intel_pps_on(intel_dp); 2310 2311 /* 2312 * 3. For non-TBT Type-C ports, set FIA lane count 2313 * (DFLEXDPSP.DPX4TXLATC) 2314 * 2315 * This was done before tgl_ddi_pre_enable_dp by 2316 * hsw_crtc_enable()->intel_encoders_pre_pll_enable(). 2317 */ 2318 2319 /* 2320 * 4. Enable the port PLL. 
2321 * 2322 * The PLL enabling itself was already done before this function by 2323 * hsw_crtc_enable()->intel_enable_shared_dpll(). We need only 2324 * configure the PLL to port mapping here. 2325 */ 2326 intel_ddi_enable_clock(encoder, crtc_state); 2327 2328 /* 5. If IO power is controlled through PWR_WELL_CTL, Enable IO Power */ 2329 if (!intel_tc_port_in_tbt_alt_mode(dig_port)) { 2330 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 2331 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 2332 dig_port->ddi_io_power_domain); 2333 } 2334 2335 /* 6. Program DP_MODE */ 2336 icl_program_mg_dp_mode(dig_port, crtc_state); 2337 2338 /* 2339 * 7. The rest of the below are substeps under the bspec's "Enable and 2340 * Train Display Port" step. Note that steps that are specific to 2341 * MST will be handled by intel_mst_pre_enable_dp() before/after it 2342 * calls into this function. Also intel_mst_pre_enable_dp() only calls 2343 * us when active_mst_links==0, so any steps designated for "single 2344 * stream or multi-stream master transcoder" can just be performed 2345 * unconditionally here. 2346 */ 2347 2348 /* 2349 * 7.a Configure Transcoder Clock Select to direct the Port clock to the 2350 * Transcoder. 2351 */ 2352 intel_ddi_enable_pipe_clock(encoder, crtc_state); 2353 2354 if (HAS_DP20(dev_priv)) 2355 intel_ddi_config_transcoder_dp2(encoder, crtc_state); 2356 2357 /* 2358 * 7.b Configure TRANS_DDI_FUNC_CTL DDI Select, DDI Mode Select & MST 2359 * Transport Select 2360 */ 2361 intel_ddi_config_transcoder_func(encoder, crtc_state); 2362 2363 /* 2364 * 7.c Configure & enable DP_TP_CTL with link training pattern 1 2365 * selected 2366 * 2367 * This will be handled by the intel_dp_start_link_train() farther 2368 * down this function. 2369 */ 2370 2371 /* 7.e Configure voltage swing and related IO settings */ 2372 encoder->set_signal_levels(encoder, crtc_state); 2373 2374 /* 2375 * 7.f Combo PHY: Configure PORT_CL_DW10 Static Power Down to power up 2376 * the used lanes of the DDI. 2377 */ 2378 intel_ddi_power_up_lanes(encoder, crtc_state); 2379 2380 /* 2381 * 7.g Program CoG/MSO configuration bits in DSS_CTL1 if selected. 
2382 */ 2383 intel_ddi_mso_configure(crtc_state); 2384 2385 if (!is_mst) 2386 intel_dp_set_power(intel_dp, DP_SET_POWER_D0); 2387 2388 intel_dp_configure_protocol_converter(intel_dp, crtc_state); 2389 intel_dp_sink_set_decompression_state(intel_dp, crtc_state, true); 2390 /* 2391 * DDI FEC: "anticipates enabling FEC encoding sets the FEC_READY bit 2392 * in the FEC_CONFIGURATION register to 1 before initiating link 2393 * training 2394 */ 2395 intel_dp_sink_set_fec_ready(intel_dp, crtc_state); 2396 2397 intel_dp_check_frl_training(intel_dp); 2398 intel_dp_pcon_dsc_configure(intel_dp, crtc_state); 2399 2400 /* 2401 * 7.i Follow DisplayPort specification training sequence (see notes for 2402 * failure handling) 2403 * 7.j If DisplayPort multi-stream - Set DP_TP_CTL link training to Idle 2404 * Pattern, wait for 5 idle patterns (DP_TP_STATUS Min_Idles_Sent) 2405 * (timeout after 800 us) 2406 */ 2407 intel_dp_start_link_train(intel_dp, crtc_state); 2408 2409 /* 7.k Set DP_TP_CTL link training to Normal */ 2410 if (!is_trans_port_sync_mode(crtc_state)) 2411 intel_dp_stop_link_train(intel_dp, crtc_state); 2412 2413 /* 7.l Configure and enable FEC if needed */ 2414 intel_ddi_enable_fec(encoder, crtc_state); 2415 2416 intel_dsc_dp_pps_write(encoder, crtc_state); 2417 } 2418 2419 static void hsw_ddi_pre_enable_dp(struct intel_atomic_state *state, 2420 struct intel_encoder *encoder, 2421 const struct intel_crtc_state *crtc_state, 2422 const struct drm_connector_state *conn_state) 2423 { 2424 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 2425 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2426 enum port port = encoder->port; 2427 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2428 bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST); 2429 2430 if (DISPLAY_VER(dev_priv) < 11) 2431 drm_WARN_ON(&dev_priv->drm, 2432 is_mst && (port == PORT_A || port == PORT_E)); 2433 else 2434 drm_WARN_ON(&dev_priv->drm, is_mst && port == PORT_A); 2435 2436 intel_dp_set_link_params(intel_dp, 2437 crtc_state->port_clock, 2438 crtc_state->lane_count); 2439 2440 /* 2441 * We only configure what the register value will be here. Actual 2442 * enabling happens during link training farther down. 
2443 */ 2444 intel_ddi_init_dp_buf_reg(encoder, crtc_state); 2445 2446 intel_pps_on(intel_dp); 2447 2448 intel_ddi_enable_clock(encoder, crtc_state); 2449 2450 if (!intel_tc_port_in_tbt_alt_mode(dig_port)) { 2451 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 2452 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 2453 dig_port->ddi_io_power_domain); 2454 } 2455 2456 icl_program_mg_dp_mode(dig_port, crtc_state); 2457 2458 if (has_buf_trans_select(dev_priv)) 2459 hsw_prepare_dp_ddi_buffers(encoder, crtc_state); 2460 2461 encoder->set_signal_levels(encoder, crtc_state); 2462 2463 intel_ddi_power_up_lanes(encoder, crtc_state); 2464 2465 if (!is_mst) 2466 intel_dp_set_power(intel_dp, DP_SET_POWER_D0); 2467 intel_dp_configure_protocol_converter(intel_dp, crtc_state); 2468 intel_dp_sink_set_decompression_state(intel_dp, crtc_state, 2469 true); 2470 intel_dp_sink_set_fec_ready(intel_dp, crtc_state); 2471 intel_dp_start_link_train(intel_dp, crtc_state); 2472 if ((port != PORT_A || DISPLAY_VER(dev_priv) >= 9) && 2473 !is_trans_port_sync_mode(crtc_state)) 2474 intel_dp_stop_link_train(intel_dp, crtc_state); 2475 2476 intel_ddi_enable_fec(encoder, crtc_state); 2477 2478 if (!is_mst) 2479 intel_ddi_enable_pipe_clock(encoder, crtc_state); 2480 2481 intel_dsc_dp_pps_write(encoder, crtc_state); 2482 } 2483 2484 static void intel_ddi_pre_enable_dp(struct intel_atomic_state *state, 2485 struct intel_encoder *encoder, 2486 const struct intel_crtc_state *crtc_state, 2487 const struct drm_connector_state *conn_state) 2488 { 2489 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2490 2491 if (DISPLAY_VER(dev_priv) >= 12) 2492 tgl_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state); 2493 else 2494 hsw_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state); 2495 2496 /* MST will call a setting of MSA after an allocating of Virtual Channel 2497 * from MST encoder pre_enable callback. 2498 */ 2499 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) 2500 intel_ddi_set_dp_msa(crtc_state, conn_state); 2501 } 2502 2503 static void intel_ddi_pre_enable_hdmi(struct intel_atomic_state *state, 2504 struct intel_encoder *encoder, 2505 const struct intel_crtc_state *crtc_state, 2506 const struct drm_connector_state *conn_state) 2507 { 2508 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2509 struct intel_hdmi *intel_hdmi = &dig_port->hdmi; 2510 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2511 2512 intel_dp_dual_mode_set_tmds_output(intel_hdmi, true); 2513 intel_ddi_enable_clock(encoder, crtc_state); 2514 2515 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 2516 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 2517 dig_port->ddi_io_power_domain); 2518 2519 icl_program_mg_dp_mode(dig_port, crtc_state); 2520 2521 intel_ddi_enable_pipe_clock(encoder, crtc_state); 2522 2523 dig_port->set_infoframes(encoder, 2524 crtc_state->has_infoframe, 2525 crtc_state, conn_state); 2526 } 2527 2528 static void intel_ddi_pre_enable(struct intel_atomic_state *state, 2529 struct intel_encoder *encoder, 2530 const struct intel_crtc_state *crtc_state, 2531 const struct drm_connector_state *conn_state) 2532 { 2533 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2534 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 2535 enum pipe pipe = crtc->pipe; 2536 2537 /* 2538 * When called from DP MST code: 2539 * - conn_state will be NULL 2540 * - encoder will be the main encoder (ie. 
mst->primary) 2541 * - the main connector associated with this port 2542 * won't be active or linked to a crtc 2543 * - crtc_state will be the state of the first stream to 2544 * be activated on this port, and it may not be the same 2545 * stream that will be deactivated last, but each stream 2546 * should have a state that is identical when it comes to 2547 * the DP link parameters 2548 */ 2549 2550 drm_WARN_ON(&dev_priv->drm, crtc_state->has_pch_encoder); 2551 2552 intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true); 2553 2554 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 2555 intel_ddi_pre_enable_hdmi(state, encoder, crtc_state, 2556 conn_state); 2557 } else { 2558 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2559 2560 intel_ddi_pre_enable_dp(state, encoder, crtc_state, 2561 conn_state); 2562 2563 /* FIXME precompute everything properly */ 2564 /* FIXME how do we turn infoframes off again? */ 2565 if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink) 2566 dig_port->set_infoframes(encoder, 2567 crtc_state->has_infoframe, 2568 crtc_state, conn_state); 2569 } 2570 } 2571 2572 static void intel_disable_ddi_buf(struct intel_encoder *encoder, 2573 const struct intel_crtc_state *crtc_state) 2574 { 2575 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2576 enum port port = encoder->port; 2577 bool wait = false; 2578 u32 val; 2579 2580 val = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 2581 if (val & DDI_BUF_CTL_ENABLE) { 2582 val &= ~DDI_BUF_CTL_ENABLE; 2583 intel_de_write(dev_priv, DDI_BUF_CTL(port), val); 2584 wait = true; 2585 } 2586 2587 if (intel_crtc_has_dp_encoder(crtc_state)) { 2588 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 2589 val &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 2590 val |= DP_TP_CTL_LINK_TRAIN_PAT1; 2591 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 2592 } 2593 2594 /* Disable FEC in DP Sink */ 2595 intel_ddi_disable_fec_state(encoder, crtc_state); 2596 2597 if (wait) 2598 intel_wait_ddi_buf_idle(dev_priv, port); 2599 } 2600 2601 static void intel_ddi_post_disable_dp(struct intel_atomic_state *state, 2602 struct intel_encoder *encoder, 2603 const struct intel_crtc_state *old_crtc_state, 2604 const struct drm_connector_state *old_conn_state) 2605 { 2606 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2607 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2608 struct intel_dp *intel_dp = &dig_port->dp; 2609 bool is_mst = intel_crtc_has_type(old_crtc_state, 2610 INTEL_OUTPUT_DP_MST); 2611 2612 if (!is_mst) 2613 intel_dp_set_infoframes(encoder, false, 2614 old_crtc_state, old_conn_state); 2615 2616 /* 2617 * Power down sink before disabling the port, otherwise we end 2618 * up getting interrupts from the sink on detecting link loss.
2619 */ 2620 intel_dp_set_power(intel_dp, DP_SET_POWER_D3); 2621 2622 if (DISPLAY_VER(dev_priv) >= 12) { 2623 if (is_mst) { 2624 enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder; 2625 u32 val; 2626 2627 val = intel_de_read(dev_priv, 2628 TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2629 val &= ~(TGL_TRANS_DDI_PORT_MASK | 2630 TRANS_DDI_MODE_SELECT_MASK); 2631 intel_de_write(dev_priv, 2632 TRANS_DDI_FUNC_CTL(cpu_transcoder), 2633 val); 2634 } 2635 } else { 2636 if (!is_mst) 2637 intel_ddi_disable_pipe_clock(old_crtc_state); 2638 } 2639 2640 intel_disable_ddi_buf(encoder, old_crtc_state); 2641 2642 /* 2643 * From TGL spec: "If single stream or multi-stream master transcoder: 2644 * Configure Transcoder Clock select to direct no clock to the 2645 * transcoder" 2646 */ 2647 if (DISPLAY_VER(dev_priv) >= 12) 2648 intel_ddi_disable_pipe_clock(old_crtc_state); 2649 2650 intel_pps_vdd_on(intel_dp); 2651 intel_pps_off(intel_dp); 2652 2653 if (!intel_tc_port_in_tbt_alt_mode(dig_port)) 2654 intel_display_power_put(dev_priv, 2655 dig_port->ddi_io_power_domain, 2656 fetch_and_zero(&dig_port->ddi_io_wakeref)); 2657 2658 intel_ddi_disable_clock(encoder); 2659 } 2660 2661 static void intel_ddi_post_disable_hdmi(struct intel_atomic_state *state, 2662 struct intel_encoder *encoder, 2663 const struct intel_crtc_state *old_crtc_state, 2664 const struct drm_connector_state *old_conn_state) 2665 { 2666 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2667 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2668 struct intel_hdmi *intel_hdmi = &dig_port->hdmi; 2669 2670 dig_port->set_infoframes(encoder, false, 2671 old_crtc_state, old_conn_state); 2672 2673 if (DISPLAY_VER(dev_priv) < 12) 2674 intel_ddi_disable_pipe_clock(old_crtc_state); 2675 2676 intel_disable_ddi_buf(encoder, old_crtc_state); 2677 2678 if (DISPLAY_VER(dev_priv) >= 12) 2679 intel_ddi_disable_pipe_clock(old_crtc_state); 2680 2681 intel_display_power_put(dev_priv, 2682 dig_port->ddi_io_power_domain, 2683 fetch_and_zero(&dig_port->ddi_io_wakeref)); 2684 2685 intel_ddi_disable_clock(encoder); 2686 2687 intel_dp_dual_mode_set_tmds_output(intel_hdmi, false); 2688 } 2689 2690 static void intel_ddi_post_disable(struct intel_atomic_state *state, 2691 struct intel_encoder *encoder, 2692 const struct intel_crtc_state *old_crtc_state, 2693 const struct drm_connector_state *old_conn_state) 2694 { 2695 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2696 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2697 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2698 bool is_tc_port = intel_phy_is_tc(dev_priv, phy); 2699 struct intel_crtc *slave_crtc; 2700 2701 if (!intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_DP_MST)) { 2702 intel_crtc_vblank_off(old_crtc_state); 2703 2704 intel_disable_transcoder(old_crtc_state); 2705 2706 intel_vrr_disable(old_crtc_state); 2707 2708 intel_ddi_disable_transcoder_func(old_crtc_state); 2709 2710 intel_dsc_disable(old_crtc_state); 2711 2712 if (DISPLAY_VER(dev_priv) >= 9) 2713 skl_scaler_disable(old_crtc_state); 2714 else 2715 ilk_pfit_disable(old_crtc_state); 2716 } 2717 2718 for_each_intel_crtc_in_pipe_mask(&dev_priv->drm, slave_crtc, 2719 intel_crtc_bigjoiner_slave_pipes(old_crtc_state)) { 2720 const struct intel_crtc_state *old_slave_crtc_state = 2721 intel_atomic_get_old_crtc_state(state, slave_crtc); 2722 2723 intel_crtc_vblank_off(old_slave_crtc_state); 2724 2725 intel_dsc_disable(old_slave_crtc_state); 2726 
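/* Bigjoiner slave pipes only exist on platforms with SKL+ scalers, so no ilk_pfit_disable() fallback is needed for them, unlike the master pipe handling above. */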
skl_scaler_disable(old_slave_crtc_state); 2727 } 2728 2729 /* 2730 * When called from DP MST code: 2731 * - old_conn_state will be NULL 2732 * - encoder will be the main encoder (ie. mst->primary) 2733 * - the main connector associated with this port 2734 * won't be active or linked to a crtc 2735 * - old_crtc_state will be the state of the last stream to 2736 * be deactivated on this port, and it may not be the same 2737 * stream that was activated last, but each stream 2738 * should have a state that is identical when it comes to 2739 * the DP link parameters 2740 */ 2741 2742 if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI)) 2743 intel_ddi_post_disable_hdmi(state, encoder, old_crtc_state, 2744 old_conn_state); 2745 else 2746 intel_ddi_post_disable_dp(state, encoder, old_crtc_state, 2747 old_conn_state); 2748 2749 if (intel_crtc_has_dp_encoder(old_crtc_state) || is_tc_port) 2750 intel_display_power_put(dev_priv, 2751 intel_ddi_main_link_aux_domain(dig_port), 2752 fetch_and_zero(&dig_port->aux_wakeref)); 2753 2754 if (is_tc_port) 2755 intel_tc_port_put_link(dig_port); 2756 } 2757 2758 static void trans_port_sync_stop_link_train(struct intel_atomic_state *state, 2759 struct intel_encoder *encoder, 2760 const struct intel_crtc_state *crtc_state) 2761 { 2762 const struct drm_connector_state *conn_state; 2763 struct drm_connector *conn; 2764 int i; 2765 2766 if (!crtc_state->sync_mode_slaves_mask) 2767 return; 2768 2769 for_each_new_connector_in_state(&state->base, conn, conn_state, i) { 2770 struct intel_encoder *slave_encoder = 2771 to_intel_encoder(conn_state->best_encoder); 2772 struct intel_crtc *slave_crtc = to_intel_crtc(conn_state->crtc); 2773 const struct intel_crtc_state *slave_crtc_state; 2774 2775 if (!slave_crtc) 2776 continue; 2777 2778 slave_crtc_state = 2779 intel_atomic_get_new_crtc_state(state, slave_crtc); 2780 2781 if (slave_crtc_state->master_transcoder != 2782 crtc_state->cpu_transcoder) 2783 continue; 2784 2785 intel_dp_stop_link_train(enc_to_intel_dp(slave_encoder), 2786 slave_crtc_state); 2787 } 2788 2789 usleep_range(200, 400); 2790 2791 intel_dp_stop_link_train(enc_to_intel_dp(encoder), 2792 crtc_state); 2793 } 2794 2795 static void intel_enable_ddi_dp(struct intel_atomic_state *state, 2796 struct intel_encoder *encoder, 2797 const struct intel_crtc_state *crtc_state, 2798 const struct drm_connector_state *conn_state) 2799 { 2800 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2801 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 2802 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2803 enum port port = encoder->port; 2804 2805 if (port == PORT_A && DISPLAY_VER(dev_priv) < 9) 2806 intel_dp_stop_link_train(intel_dp, crtc_state); 2807 2808 drm_connector_update_privacy_screen(conn_state); 2809 intel_edp_backlight_on(crtc_state, conn_state); 2810 2811 if (!dig_port->lspcon.active || dig_port->dp.has_hdmi_sink) 2812 intel_dp_set_infoframes(encoder, true, crtc_state, conn_state); 2813 2814 intel_audio_codec_enable(encoder, crtc_state, conn_state); 2815 2816 trans_port_sync_stop_link_train(state, encoder, crtc_state); 2817 } 2818 2819 static i915_reg_t 2820 gen9_chicken_trans_reg_by_port(struct drm_i915_private *dev_priv, 2821 enum port port) 2822 { 2823 static const enum transcoder trans[] = { 2824 [PORT_A] = TRANSCODER_EDP, 2825 [PORT_B] = TRANSCODER_A, 2826 [PORT_C] = TRANSCODER_B, 2827 [PORT_D] = TRANSCODER_C, 2828 [PORT_E] = TRANSCODER_A, 2829 }; 2830 2831 drm_WARN_ON(&dev_priv->drm, DISPLAY_VER(dev_priv) < 9); 2832 2833
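/* Unexpected ports should never happen; clamp to PORT_A rather than indexing past the lookup table above. */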
if (drm_WARN_ON(&dev_priv->drm, port < PORT_A || port > PORT_E)) 2834 port = PORT_A; 2835 2836 return CHICKEN_TRANS(trans[port]); 2837 } 2838 2839 static void intel_enable_ddi_hdmi(struct intel_atomic_state *state, 2840 struct intel_encoder *encoder, 2841 const struct intel_crtc_state *crtc_state, 2842 const struct drm_connector_state *conn_state) 2843 { 2844 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2845 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2846 struct drm_connector *connector = conn_state->connector; 2847 enum port port = encoder->port; 2848 enum phy phy = intel_port_to_phy(dev_priv, port); 2849 u32 buf_ctl; 2850 2851 if (!intel_hdmi_handle_sink_scrambling(encoder, connector, 2852 crtc_state->hdmi_high_tmds_clock_ratio, 2853 crtc_state->hdmi_scrambling)) 2854 drm_dbg_kms(&dev_priv->drm, 2855 "[CONNECTOR:%d:%s] Failed to configure sink scrambling/TMDS bit clock ratio\n", 2856 connector->base.id, connector->name); 2857 2858 if (has_buf_trans_select(dev_priv)) 2859 hsw_prepare_hdmi_ddi_buffers(encoder, crtc_state); 2860 2861 encoder->set_signal_levels(encoder, crtc_state); 2862 2863 /* Display WA #1143: skl,kbl,cfl */ 2864 if (DISPLAY_VER(dev_priv) == 9 && !IS_BROXTON(dev_priv)) { 2865 /* 2866 * For some reason these chicken bits have been 2867 * stuffed into a transcoder register, even though 2868 * the bits affect a specific DDI port rather than 2869 * a specific transcoder. 2870 */ 2871 i915_reg_t reg = gen9_chicken_trans_reg_by_port(dev_priv, port); 2872 u32 val; 2873 2874 val = intel_de_read(dev_priv, reg); 2875 2876 if (port == PORT_E) 2877 val |= DDIE_TRAINING_OVERRIDE_ENABLE | 2878 DDIE_TRAINING_OVERRIDE_VALUE; 2879 else 2880 val |= DDI_TRAINING_OVERRIDE_ENABLE | 2881 DDI_TRAINING_OVERRIDE_VALUE; 2882 2883 intel_de_write(dev_priv, reg, val); 2884 intel_de_posting_read(dev_priv, reg); 2885 2886 udelay(1); 2887 2888 if (port == PORT_E) 2889 val &= ~(DDIE_TRAINING_OVERRIDE_ENABLE | 2890 DDIE_TRAINING_OVERRIDE_VALUE); 2891 else 2892 val &= ~(DDI_TRAINING_OVERRIDE_ENABLE | 2893 DDI_TRAINING_OVERRIDE_VALUE); 2894 2895 intel_de_write(dev_priv, reg, val); 2896 } 2897 2898 intel_ddi_power_up_lanes(encoder, crtc_state); 2899 2900 /* In HDMI/DVI mode, the port width and swing/emphasis values 2901 * are ignored so nothing special needs to be done besides 2902 * enabling the port. 2903 * 2904 * On ADL_P the PHY link rate and lane count must be programmed but 2905 * these are both 0 for HDMI.
2906 */ 2907 buf_ctl = dig_port->saved_port_bits | DDI_BUF_CTL_ENABLE; 2908 if (IS_ALDERLAKE_P(dev_priv) && intel_phy_is_tc(dev_priv, phy)) { 2909 drm_WARN_ON(&dev_priv->drm, !intel_tc_port_in_legacy_mode(dig_port)); 2910 buf_ctl |= DDI_BUF_CTL_TC_PHY_OWNERSHIP; 2911 } 2912 intel_de_write(dev_priv, DDI_BUF_CTL(port), buf_ctl); 2913 2914 intel_audio_codec_enable(encoder, crtc_state, conn_state); 2915 } 2916 2917 static void intel_enable_ddi(struct intel_atomic_state *state, 2918 struct intel_encoder *encoder, 2919 const struct intel_crtc_state *crtc_state, 2920 const struct drm_connector_state *conn_state) 2921 { 2922 drm_WARN_ON(state->base.dev, crtc_state->has_pch_encoder); 2923 2924 if (!intel_crtc_is_bigjoiner_slave(crtc_state)) 2925 intel_ddi_enable_transcoder_func(encoder, crtc_state); 2926 2927 intel_vrr_enable(encoder, crtc_state); 2928 2929 intel_enable_transcoder(crtc_state); 2930 2931 intel_crtc_vblank_on(crtc_state); 2932 2933 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2934 intel_enable_ddi_hdmi(state, encoder, crtc_state, conn_state); 2935 else 2936 intel_enable_ddi_dp(state, encoder, crtc_state, conn_state); 2937 2938 /* Enable hdcp if it's desired */ 2939 if (conn_state->content_protection == 2940 DRM_MODE_CONTENT_PROTECTION_DESIRED) 2941 intel_hdcp_enable(to_intel_connector(conn_state->connector), 2942 crtc_state, 2943 (u8)conn_state->hdcp_content_type); 2944 } 2945 2946 static void intel_disable_ddi_dp(struct intel_atomic_state *state, 2947 struct intel_encoder *encoder, 2948 const struct intel_crtc_state *old_crtc_state, 2949 const struct drm_connector_state *old_conn_state) 2950 { 2951 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 2952 2953 intel_dp->link_trained = false; 2954 2955 intel_audio_codec_disable(encoder, old_crtc_state, old_conn_state); 2956 2957 intel_psr_disable(intel_dp, old_crtc_state); 2958 intel_edp_backlight_off(old_conn_state); 2959 /* Disable the decompression in DP Sink */ 2960 intel_dp_sink_set_decompression_state(intel_dp, old_crtc_state, 2961 false); 2962 /* Disable Ignore_MSA bit in DP Sink */ 2963 intel_dp_sink_set_msa_timing_par_ignore_state(intel_dp, old_crtc_state, 2964 false); 2965 } 2966 2967 static void intel_disable_ddi_hdmi(struct intel_atomic_state *state, 2968 struct intel_encoder *encoder, 2969 const struct intel_crtc_state *old_crtc_state, 2970 const struct drm_connector_state *old_conn_state) 2971 { 2972 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2973 struct drm_connector *connector = old_conn_state->connector; 2974 2975 intel_audio_codec_disable(encoder, old_crtc_state, old_conn_state); 2976 2977 if (!intel_hdmi_handle_sink_scrambling(encoder, connector, 2978 false, false)) 2979 drm_dbg_kms(&i915->drm, 2980 "[CONNECTOR:%d:%s] Failed to reset sink scrambling/TMDS bit clock ratio\n", 2981 connector->base.id, connector->name); 2982 } 2983 2984 static void intel_disable_ddi(struct intel_atomic_state *state, 2985 struct intel_encoder *encoder, 2986 const struct intel_crtc_state *old_crtc_state, 2987 const struct drm_connector_state *old_conn_state) 2988 { 2989 intel_hdcp_disable(to_intel_connector(old_conn_state->connector)); 2990 2991 if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI)) 2992 intel_disable_ddi_hdmi(state, encoder, old_crtc_state, 2993 old_conn_state); 2994 else 2995 intel_disable_ddi_dp(state, encoder, old_crtc_state, 2996 old_conn_state); 2997 } 2998 2999 static void intel_ddi_update_pipe_dp(struct intel_atomic_state *state, 3000 struct intel_encoder *encoder, 3001 const 
struct intel_crtc_state *crtc_state, 3002 const struct drm_connector_state *conn_state) 3003 { 3004 intel_ddi_set_dp_msa(crtc_state, conn_state); 3005 3006 intel_dp_set_infoframes(encoder, true, crtc_state, conn_state); 3007 3008 intel_backlight_update(state, encoder, crtc_state, conn_state); 3009 drm_connector_update_privacy_screen(conn_state); 3010 } 3011 3012 void intel_ddi_update_pipe(struct intel_atomic_state *state, 3013 struct intel_encoder *encoder, 3014 const struct intel_crtc_state *crtc_state, 3015 const struct drm_connector_state *conn_state) 3016 { 3017 3018 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) && 3019 !intel_encoder_is_mst(encoder)) 3020 intel_ddi_update_pipe_dp(state, encoder, crtc_state, 3021 conn_state); 3022 3023 intel_hdcp_update_pipe(state, encoder, crtc_state, conn_state); 3024 } 3025 3026 static void 3027 intel_ddi_update_prepare(struct intel_atomic_state *state, 3028 struct intel_encoder *encoder, 3029 struct intel_crtc *crtc) 3030 { 3031 struct drm_i915_private *i915 = to_i915(state->base.dev); 3032 struct intel_crtc_state *crtc_state = 3033 crtc ? intel_atomic_get_new_crtc_state(state, crtc) : NULL; 3034 int required_lanes = crtc_state ? crtc_state->lane_count : 1; 3035 3036 drm_WARN_ON(state->base.dev, crtc && crtc->active); 3037 3038 intel_tc_port_get_link(enc_to_dig_port(encoder), 3039 required_lanes); 3040 if (crtc_state && crtc_state->hw.active) { 3041 struct intel_crtc *slave_crtc; 3042 3043 intel_update_active_dpll(state, crtc, encoder); 3044 3045 for_each_intel_crtc_in_pipe_mask(&i915->drm, slave_crtc, 3046 intel_crtc_bigjoiner_slave_pipes(crtc_state)) 3047 intel_update_active_dpll(state, slave_crtc, encoder); 3048 } 3049 } 3050 3051 static void 3052 intel_ddi_update_complete(struct intel_atomic_state *state, 3053 struct intel_encoder *encoder, 3054 struct intel_crtc *crtc) 3055 { 3056 intel_tc_port_put_link(enc_to_dig_port(encoder)); 3057 } 3058 3059 static void 3060 intel_ddi_pre_pll_enable(struct intel_atomic_state *state, 3061 struct intel_encoder *encoder, 3062 const struct intel_crtc_state *crtc_state, 3063 const struct drm_connector_state *conn_state) 3064 { 3065 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3066 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3067 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3068 bool is_tc_port = intel_phy_is_tc(dev_priv, phy); 3069 3070 if (is_tc_port) 3071 intel_tc_port_get_link(dig_port, crtc_state->lane_count); 3072 3073 if (intel_crtc_has_dp_encoder(crtc_state) || is_tc_port) { 3074 drm_WARN_ON(&dev_priv->drm, dig_port->aux_wakeref); 3075 dig_port->aux_wakeref = 3076 intel_display_power_get(dev_priv, 3077 intel_ddi_main_link_aux_domain(dig_port)); 3078 } 3079 3080 if (is_tc_port && !intel_tc_port_in_tbt_alt_mode(dig_port)) 3081 /* 3082 * Program the lane count for static/dynamic connections on 3083 * Type-C ports. Skip this step for TBT. 
3084 */ 3085 intel_tc_port_set_fia_lane_count(dig_port, crtc_state->lane_count); 3086 else if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) 3087 bxt_ddi_phy_set_lane_optim_mask(encoder, 3088 crtc_state->lane_lat_optim_mask); 3089 } 3090 3091 static void adlp_tbt_to_dp_alt_switch_wa(struct intel_encoder *encoder) 3092 { 3093 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3094 enum tc_port tc_port = intel_port_to_tc(i915, encoder->port); 3095 int ln; 3096 3097 for (ln = 0; ln < 2; ln++) { 3098 intel_de_write(i915, HIP_INDEX_REG(tc_port), HIP_INDEX_VAL(tc_port, ln)); 3099 intel_de_rmw(i915, DKL_PCS_DW5(tc_port), DKL_PCS_DW5_CORE_SOFTRESET, 0); 3100 } 3101 } 3102 3103 static void intel_ddi_prepare_link_retrain(struct intel_dp *intel_dp, 3104 const struct intel_crtc_state *crtc_state) 3105 { 3106 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp); 3107 struct intel_encoder *encoder = &dig_port->base; 3108 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3109 enum port port = encoder->port; 3110 u32 dp_tp_ctl, ddi_buf_ctl; 3111 bool wait = false; 3112 3113 dp_tp_ctl = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3114 3115 if (dp_tp_ctl & DP_TP_CTL_ENABLE) { 3116 ddi_buf_ctl = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 3117 if (ddi_buf_ctl & DDI_BUF_CTL_ENABLE) { 3118 intel_de_write(dev_priv, DDI_BUF_CTL(port), 3119 ddi_buf_ctl & ~DDI_BUF_CTL_ENABLE); 3120 wait = true; 3121 } 3122 3123 dp_tp_ctl &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 3124 dp_tp_ctl |= DP_TP_CTL_LINK_TRAIN_PAT1; 3125 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl); 3126 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3127 3128 if (wait) 3129 intel_wait_ddi_buf_idle(dev_priv, port); 3130 } 3131 3132 dp_tp_ctl = DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_PAT1; 3133 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 3134 dp_tp_ctl |= DP_TP_CTL_MODE_MST; 3135 } else { 3136 dp_tp_ctl |= DP_TP_CTL_MODE_SST; 3137 if (drm_dp_enhanced_frame_cap(intel_dp->dpcd)) 3138 dp_tp_ctl |= DP_TP_CTL_ENHANCED_FRAME_ENABLE; 3139 } 3140 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl); 3141 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3142 3143 if (IS_ALDERLAKE_P(dev_priv) && 3144 (intel_tc_port_in_dp_alt_mode(dig_port) || intel_tc_port_in_legacy_mode(dig_port))) 3145 adlp_tbt_to_dp_alt_switch_wa(encoder); 3146 3147 intel_dp->DP |= DDI_BUF_CTL_ENABLE; 3148 intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP); 3149 intel_de_posting_read(dev_priv, DDI_BUF_CTL(port)); 3150 3151 intel_wait_ddi_buf_active(dev_priv, port); 3152 } 3153 3154 static void intel_ddi_set_link_train(struct intel_dp *intel_dp, 3155 const struct intel_crtc_state *crtc_state, 3156 u8 dp_train_pat) 3157 { 3158 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3159 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3160 u32 temp; 3161 3162 temp = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3163 3164 temp &= ~DP_TP_CTL_LINK_TRAIN_MASK; 3165 switch (intel_dp_training_pattern_symbol(dp_train_pat)) { 3166 case DP_TRAINING_PATTERN_DISABLE: 3167 temp |= DP_TP_CTL_LINK_TRAIN_NORMAL; 3168 break; 3169 case DP_TRAINING_PATTERN_1: 3170 temp |= DP_TP_CTL_LINK_TRAIN_PAT1; 3171 break; 3172 case DP_TRAINING_PATTERN_2: 3173 temp |= DP_TP_CTL_LINK_TRAIN_PAT2; 3174 break; 3175 case DP_TRAINING_PATTERN_3: 3176 temp |= DP_TP_CTL_LINK_TRAIN_PAT3; 3177 break; 3178 case DP_TRAINING_PATTERN_4: 
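/* TPS4: added in DP 1.4, used for HBR3-capable links */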
3179 temp |= DP_TP_CTL_LINK_TRAIN_PAT4; 3180 break; 3181 } 3182 3183 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), temp); 3184 } 3185 3186 static void intel_ddi_set_idle_link_train(struct intel_dp *intel_dp, 3187 const struct intel_crtc_state *crtc_state) 3188 { 3189 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3190 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3191 enum port port = encoder->port; 3192 u32 val; 3193 3194 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3195 val &= ~DP_TP_CTL_LINK_TRAIN_MASK; 3196 val |= DP_TP_CTL_LINK_TRAIN_IDLE; 3197 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 3198 3199 /* 3200 * Until TGL on PORT_A we can have only eDP in SST mode. There the only 3201 * reason we need to set idle transmission mode is to work around a HW 3202 * issue where we enable the pipe while not in idle link-training mode. 3203 * In this case there is requirement to wait for a minimum number of 3204 * idle patterns to be sent. 3205 */ 3206 if (port == PORT_A && DISPLAY_VER(dev_priv) < 12) 3207 return; 3208 3209 if (intel_de_wait_for_set(dev_priv, 3210 dp_tp_status_reg(encoder, crtc_state), 3211 DP_TP_STATUS_IDLE_DONE, 1)) 3212 drm_err(&dev_priv->drm, 3213 "Timed out waiting for DP idle patterns\n"); 3214 } 3215 3216 static bool intel_ddi_is_audio_enabled(struct drm_i915_private *dev_priv, 3217 enum transcoder cpu_transcoder) 3218 { 3219 if (cpu_transcoder == TRANSCODER_EDP) 3220 return false; 3221 3222 if (!intel_display_power_is_enabled(dev_priv, POWER_DOMAIN_AUDIO_MMIO)) 3223 return false; 3224 3225 return intel_de_read(dev_priv, HSW_AUD_PIN_ELD_CP_VLD) & 3226 AUDIO_OUTPUT_ENABLE(cpu_transcoder); 3227 } 3228 3229 void intel_ddi_compute_min_voltage_level(struct drm_i915_private *dev_priv, 3230 struct intel_crtc_state *crtc_state) 3231 { 3232 if (DISPLAY_VER(dev_priv) >= 12 && crtc_state->port_clock > 594000) 3233 crtc_state->min_voltage_level = 2; 3234 else if (IS_JSL_EHL(dev_priv) && crtc_state->port_clock > 594000) 3235 crtc_state->min_voltage_level = 3; 3236 else if (DISPLAY_VER(dev_priv) >= 11 && crtc_state->port_clock > 594000) 3237 crtc_state->min_voltage_level = 1; 3238 } 3239 3240 static enum transcoder bdw_transcoder_master_readout(struct drm_i915_private *dev_priv, 3241 enum transcoder cpu_transcoder) 3242 { 3243 u32 master_select; 3244 3245 if (DISPLAY_VER(dev_priv) >= 11) { 3246 u32 ctl2 = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL2(cpu_transcoder)); 3247 3248 if ((ctl2 & PORT_SYNC_MODE_ENABLE) == 0) 3249 return INVALID_TRANSCODER; 3250 3251 master_select = REG_FIELD_GET(PORT_SYNC_MODE_MASTER_SELECT_MASK, ctl2); 3252 } else { 3253 u32 ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 3254 3255 if ((ctl & TRANS_DDI_PORT_SYNC_ENABLE) == 0) 3256 return INVALID_TRANSCODER; 3257 3258 master_select = REG_FIELD_GET(TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK, ctl); 3259 } 3260 3261 if (master_select == 0) 3262 return TRANSCODER_EDP; 3263 else 3264 return master_select - 1; 3265 } 3266 3267 static void bdw_get_trans_port_sync_config(struct intel_crtc_state *crtc_state) 3268 { 3269 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); 3270 u32 transcoders = BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | 3271 BIT(TRANSCODER_C) | BIT(TRANSCODER_D); 3272 enum transcoder cpu_transcoder; 3273 3274 crtc_state->master_transcoder = 3275 bdw_transcoder_master_readout(dev_priv, crtc_state->cpu_transcoder); 3276 3277 for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, 
transcoders) { 3278 enum intel_display_power_domain power_domain; 3279 intel_wakeref_t trans_wakeref; 3280 3281 power_domain = POWER_DOMAIN_TRANSCODER(cpu_transcoder); 3282 trans_wakeref = intel_display_power_get_if_enabled(dev_priv, 3283 power_domain); 3284 3285 if (!trans_wakeref) 3286 continue; 3287 3288 if (bdw_transcoder_master_readout(dev_priv, cpu_transcoder) == 3289 crtc_state->cpu_transcoder) 3290 crtc_state->sync_mode_slaves_mask |= BIT(cpu_transcoder); 3291 3292 intel_display_power_put(dev_priv, power_domain, trans_wakeref); 3293 } 3294 3295 drm_WARN_ON(&dev_priv->drm, 3296 crtc_state->master_transcoder != INVALID_TRANSCODER && 3297 crtc_state->sync_mode_slaves_mask); 3298 } 3299 3300 static void intel_ddi_read_func_ctl(struct intel_encoder *encoder, 3301 struct intel_crtc_state *pipe_config) 3302 { 3303 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3304 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc); 3305 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder; 3306 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3307 u32 temp, flags = 0; 3308 3309 temp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 3310 if (temp & TRANS_DDI_PHSYNC) 3311 flags |= DRM_MODE_FLAG_PHSYNC; 3312 else 3313 flags |= DRM_MODE_FLAG_NHSYNC; 3314 if (temp & TRANS_DDI_PVSYNC) 3315 flags |= DRM_MODE_FLAG_PVSYNC; 3316 else 3317 flags |= DRM_MODE_FLAG_NVSYNC; 3318 3319 pipe_config->hw.adjusted_mode.flags |= flags; 3320 3321 switch (temp & TRANS_DDI_BPC_MASK) { 3322 case TRANS_DDI_BPC_6: 3323 pipe_config->pipe_bpp = 18; 3324 break; 3325 case TRANS_DDI_BPC_8: 3326 pipe_config->pipe_bpp = 24; 3327 break; 3328 case TRANS_DDI_BPC_10: 3329 pipe_config->pipe_bpp = 30; 3330 break; 3331 case TRANS_DDI_BPC_12: 3332 pipe_config->pipe_bpp = 36; 3333 break; 3334 default: 3335 break; 3336 } 3337 3338 switch (temp & TRANS_DDI_MODE_SELECT_MASK) { 3339 case TRANS_DDI_MODE_SELECT_HDMI: 3340 pipe_config->has_hdmi_sink = true; 3341 3342 pipe_config->infoframes.enable |= 3343 intel_hdmi_infoframes_enabled(encoder, pipe_config); 3344 3345 if (pipe_config->infoframes.enable) 3346 pipe_config->has_infoframe = true; 3347 3348 if (temp & TRANS_DDI_HDMI_SCRAMBLING) 3349 pipe_config->hdmi_scrambling = true; 3350 if (temp & TRANS_DDI_HIGH_TMDS_CHAR_RATE) 3351 pipe_config->hdmi_high_tmds_clock_ratio = true; 3352 fallthrough; 3353 case TRANS_DDI_MODE_SELECT_DVI: 3354 pipe_config->output_types |= BIT(INTEL_OUTPUT_HDMI); 3355 pipe_config->lane_count = 4; 3356 break; 3357 case TRANS_DDI_MODE_SELECT_DP_SST: 3358 if (encoder->type == INTEL_OUTPUT_EDP) 3359 pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP); 3360 else 3361 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP); 3362 pipe_config->lane_count = 3363 ((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1; 3364 3365 intel_cpu_transcoder_get_m1_n1(crtc, cpu_transcoder, 3366 &pipe_config->dp_m_n); 3367 intel_cpu_transcoder_get_m2_n2(crtc, cpu_transcoder, 3368 &pipe_config->dp_m2_n2); 3369 3370 if (DISPLAY_VER(dev_priv) >= 11) { 3371 i915_reg_t dp_tp_ctl = dp_tp_ctl_reg(encoder, pipe_config); 3372 3373 pipe_config->fec_enable = 3374 intel_de_read(dev_priv, dp_tp_ctl) & DP_TP_CTL_FEC_ENABLE; 3375 3376 drm_dbg_kms(&dev_priv->drm, 3377 "[ENCODER:%d:%s] Fec status: %u\n", 3378 encoder->base.base.id, encoder->base.name, 3379 pipe_config->fec_enable); 3380 } 3381 3382 if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink) 3383 pipe_config->infoframes.enable |= 3384 intel_lspcon_infoframes_enabled(encoder, pipe_config); 
3385 else 3386 pipe_config->infoframes.enable |= 3387 intel_hdmi_infoframes_enabled(encoder, pipe_config); 3388 break; 3389 case TRANS_DDI_MODE_SELECT_FDI_OR_128B132B: 3390 if (!HAS_DP20(dev_priv)) { 3391 /* FDI */ 3392 pipe_config->output_types |= BIT(INTEL_OUTPUT_ANALOG); 3393 break; 3394 } 3395 fallthrough; /* 128b/132b */ 3396 case TRANS_DDI_MODE_SELECT_DP_MST: 3397 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP_MST); 3398 pipe_config->lane_count = 3399 ((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1; 3400 3401 if (DISPLAY_VER(dev_priv) >= 12) 3402 pipe_config->mst_master_transcoder = 3403 REG_FIELD_GET(TRANS_DDI_MST_TRANSPORT_SELECT_MASK, temp); 3404 3405 intel_cpu_transcoder_get_m1_n1(crtc, cpu_transcoder, 3406 &pipe_config->dp_m_n); 3407 3408 pipe_config->infoframes.enable |= 3409 intel_hdmi_infoframes_enabled(encoder, pipe_config); 3410 break; 3411 default: 3412 break; 3413 } 3414 } 3415 3416 static void intel_ddi_get_config(struct intel_encoder *encoder, 3417 struct intel_crtc_state *pipe_config) 3418 { 3419 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3420 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder; 3421 3422 /* XXX: DSI transcoder paranoia */ 3423 if (drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder))) 3424 return; 3425 3426 intel_ddi_read_func_ctl(encoder, pipe_config); 3427 3428 intel_ddi_mso_get_config(encoder, pipe_config); 3429 3430 pipe_config->has_audio = 3431 intel_ddi_is_audio_enabled(dev_priv, cpu_transcoder); 3432 3433 if (encoder->type == INTEL_OUTPUT_EDP) 3434 intel_edp_fixup_vbt_bpp(encoder, pipe_config->pipe_bpp); 3435 3436 ddi_dotclock_get(pipe_config); 3437 3438 if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) 3439 pipe_config->lane_lat_optim_mask = 3440 bxt_ddi_phy_get_lane_lat_optim_mask(encoder); 3441 3442 intel_ddi_compute_min_voltage_level(dev_priv, pipe_config); 3443 3444 intel_hdmi_read_gcp_infoframe(encoder, pipe_config); 3445 3446 intel_read_infoframe(encoder, pipe_config, 3447 HDMI_INFOFRAME_TYPE_AVI, 3448 &pipe_config->infoframes.avi); 3449 intel_read_infoframe(encoder, pipe_config, 3450 HDMI_INFOFRAME_TYPE_SPD, 3451 &pipe_config->infoframes.spd); 3452 intel_read_infoframe(encoder, pipe_config, 3453 HDMI_INFOFRAME_TYPE_VENDOR, 3454 &pipe_config->infoframes.hdmi); 3455 intel_read_infoframe(encoder, pipe_config, 3456 HDMI_INFOFRAME_TYPE_DRM, 3457 &pipe_config->infoframes.drm); 3458 3459 if (DISPLAY_VER(dev_priv) >= 8) 3460 bdw_get_trans_port_sync_config(pipe_config); 3461 3462 intel_read_dp_sdp(encoder, pipe_config, HDMI_PACKET_TYPE_GAMUT_METADATA); 3463 intel_read_dp_sdp(encoder, pipe_config, DP_SDP_VSC); 3464 3465 intel_psr_get_config(encoder, pipe_config); 3466 } 3467 3468 void intel_ddi_get_clock(struct intel_encoder *encoder, 3469 struct intel_crtc_state *crtc_state, 3470 struct intel_shared_dpll *pll) 3471 { 3472 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3473 enum icl_port_dpll_id port_dpll_id = ICL_PORT_DPLL_DEFAULT; 3474 struct icl_port_dpll *port_dpll = &crtc_state->icl_port_dplls[port_dpll_id]; 3475 bool pll_active; 3476 3477 if (drm_WARN_ON(&i915->drm, !pll)) 3478 return; 3479 3480 port_dpll->pll = pll; 3481 pll_active = intel_dpll_get_hw_state(i915, pll, &port_dpll->hw_state); 3482 drm_WARN_ON(&i915->drm, !pll_active); 3483 3484 icl_set_active_port_dpll(crtc_state, port_dpll_id); 3485 3486 crtc_state->port_clock = intel_dpll_get_freq(i915, crtc_state->shared_dpll, 3487 &crtc_state->dpll_hw_state); 3488 } 3489 3490 static void dg2_ddi_get_config(struct 
intel_encoder *encoder, 3491 struct intel_crtc_state *crtc_state) 3492 { 3493 intel_mpllb_readout_hw_state(encoder, &crtc_state->mpllb_state); 3494 crtc_state->port_clock = intel_mpllb_calc_port_clock(encoder, &crtc_state->mpllb_state); 3495 3496 intel_ddi_get_config(encoder, crtc_state); 3497 } 3498 3499 static void adls_ddi_get_config(struct intel_encoder *encoder, 3500 struct intel_crtc_state *crtc_state) 3501 { 3502 intel_ddi_get_clock(encoder, crtc_state, adls_ddi_get_pll(encoder)); 3503 intel_ddi_get_config(encoder, crtc_state); 3504 } 3505 3506 static void rkl_ddi_get_config(struct intel_encoder *encoder, 3507 struct intel_crtc_state *crtc_state) 3508 { 3509 intel_ddi_get_clock(encoder, crtc_state, rkl_ddi_get_pll(encoder)); 3510 intel_ddi_get_config(encoder, crtc_state); 3511 } 3512 3513 static void dg1_ddi_get_config(struct intel_encoder *encoder, 3514 struct intel_crtc_state *crtc_state) 3515 { 3516 intel_ddi_get_clock(encoder, crtc_state, dg1_ddi_get_pll(encoder)); 3517 intel_ddi_get_config(encoder, crtc_state); 3518 } 3519 3520 static void icl_ddi_combo_get_config(struct intel_encoder *encoder, 3521 struct intel_crtc_state *crtc_state) 3522 { 3523 intel_ddi_get_clock(encoder, crtc_state, icl_ddi_combo_get_pll(encoder)); 3524 intel_ddi_get_config(encoder, crtc_state); 3525 } 3526 3527 static void icl_ddi_tc_get_clock(struct intel_encoder *encoder, 3528 struct intel_crtc_state *crtc_state, 3529 struct intel_shared_dpll *pll) 3530 { 3531 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3532 enum icl_port_dpll_id port_dpll_id; 3533 struct icl_port_dpll *port_dpll; 3534 bool pll_active; 3535 3536 if (drm_WARN_ON(&i915->drm, !pll)) 3537 return; 3538 3539 if (intel_get_shared_dpll_id(i915, pll) == DPLL_ID_ICL_TBTPLL) 3540 port_dpll_id = ICL_PORT_DPLL_DEFAULT; 3541 else 3542 port_dpll_id = ICL_PORT_DPLL_MG_PHY; 3543 3544 port_dpll = &crtc_state->icl_port_dplls[port_dpll_id]; 3545 3546 port_dpll->pll = pll; 3547 pll_active = intel_dpll_get_hw_state(i915, pll, &port_dpll->hw_state); 3548 drm_WARN_ON(&i915->drm, !pll_active); 3549 3550 icl_set_active_port_dpll(crtc_state, port_dpll_id); 3551 3552 if (intel_get_shared_dpll_id(i915, crtc_state->shared_dpll) == DPLL_ID_ICL_TBTPLL) 3553 crtc_state->port_clock = icl_calc_tbt_pll_link(i915, encoder->port); 3554 else 3555 crtc_state->port_clock = intel_dpll_get_freq(i915, crtc_state->shared_dpll, 3556 &crtc_state->dpll_hw_state); 3557 } 3558 3559 static void icl_ddi_tc_get_config(struct intel_encoder *encoder, 3560 struct intel_crtc_state *crtc_state) 3561 { 3562 icl_ddi_tc_get_clock(encoder, crtc_state, icl_ddi_tc_get_pll(encoder)); 3563 intel_ddi_get_config(encoder, crtc_state); 3564 } 3565 3566 static void bxt_ddi_get_config(struct intel_encoder *encoder, 3567 struct intel_crtc_state *crtc_state) 3568 { 3569 intel_ddi_get_clock(encoder, crtc_state, bxt_ddi_get_pll(encoder)); 3570 intel_ddi_get_config(encoder, crtc_state); 3571 } 3572 3573 static void skl_ddi_get_config(struct intel_encoder *encoder, 3574 struct intel_crtc_state *crtc_state) 3575 { 3576 intel_ddi_get_clock(encoder, crtc_state, skl_ddi_get_pll(encoder)); 3577 intel_ddi_get_config(encoder, crtc_state); 3578 } 3579 3580 void hsw_ddi_get_config(struct intel_encoder *encoder, 3581 struct intel_crtc_state *crtc_state) 3582 { 3583 intel_ddi_get_clock(encoder, crtc_state, hsw_ddi_get_pll(encoder)); 3584 intel_ddi_get_config(encoder, crtc_state); 3585 } 3586 3587 static void intel_ddi_sync_state(struct intel_encoder *encoder, 3588 const struct intel_crtc_state *crtc_state) 
3589 { 3590 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3591 enum phy phy = intel_port_to_phy(i915, encoder->port); 3592 3593 if (intel_phy_is_tc(i915, phy)) 3594 intel_tc_port_sanitize(enc_to_dig_port(encoder)); 3595 3596 if (crtc_state && intel_crtc_has_dp_encoder(crtc_state)) 3597 intel_dp_sync_state(encoder, crtc_state); 3598 } 3599 3600 static bool intel_ddi_initial_fastset_check(struct intel_encoder *encoder, 3601 struct intel_crtc_state *crtc_state) 3602 { 3603 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3604 enum phy phy = intel_port_to_phy(i915, encoder->port); 3605 bool fastset = true; 3606 3607 if (intel_phy_is_tc(i915, phy)) { 3608 drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] Forcing full modeset to compute TC port DPLLs\n", 3609 encoder->base.base.id, encoder->base.name); 3610 crtc_state->uapi.mode_changed = true; 3611 fastset = false; 3612 } 3613 3614 if (intel_crtc_has_dp_encoder(crtc_state) && 3615 !intel_dp_initial_fastset_check(encoder, crtc_state)) 3616 fastset = false; 3617 3618 return fastset; 3619 } 3620 3621 static enum intel_output_type 3622 intel_ddi_compute_output_type(struct intel_encoder *encoder, 3623 struct intel_crtc_state *crtc_state, 3624 struct drm_connector_state *conn_state) 3625 { 3626 switch (conn_state->connector->connector_type) { 3627 case DRM_MODE_CONNECTOR_HDMIA: 3628 return INTEL_OUTPUT_HDMI; 3629 case DRM_MODE_CONNECTOR_eDP: 3630 return INTEL_OUTPUT_EDP; 3631 case DRM_MODE_CONNECTOR_DisplayPort: 3632 return INTEL_OUTPUT_DP; 3633 default: 3634 MISSING_CASE(conn_state->connector->connector_type); 3635 return INTEL_OUTPUT_UNUSED; 3636 } 3637 } 3638 3639 static int intel_ddi_compute_config(struct intel_encoder *encoder, 3640 struct intel_crtc_state *pipe_config, 3641 struct drm_connector_state *conn_state) 3642 { 3643 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc); 3644 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3645 enum port port = encoder->port; 3646 int ret; 3647 3648 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) 3649 pipe_config->cpu_transcoder = TRANSCODER_EDP; 3650 3651 if (intel_crtc_has_type(pipe_config, INTEL_OUTPUT_HDMI)) { 3652 ret = intel_hdmi_compute_config(encoder, pipe_config, conn_state); 3653 } else { 3654 ret = intel_dp_compute_config(encoder, pipe_config, conn_state); 3655 } 3656 3657 if (ret) 3658 return ret; 3659 3660 if (IS_HASWELL(dev_priv) && crtc->pipe == PIPE_A && 3661 pipe_config->cpu_transcoder == TRANSCODER_EDP) 3662 pipe_config->pch_pfit.force_thru = 3663 pipe_config->pch_pfit.enabled || 3664 pipe_config->crc_enabled; 3665 3666 if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) 3667 pipe_config->lane_lat_optim_mask = 3668 bxt_ddi_phy_calc_lane_lat_optim_mask(pipe_config->lane_count); 3669 3670 intel_ddi_compute_min_voltage_level(dev_priv, pipe_config); 3671 3672 return 0; 3673 } 3674 3675 static bool mode_equal(const struct drm_display_mode *mode1, 3676 const struct drm_display_mode *mode2) 3677 { 3678 return drm_mode_match(mode1, mode2, 3679 DRM_MODE_MATCH_TIMINGS | 3680 DRM_MODE_MATCH_FLAGS | 3681 DRM_MODE_MATCH_3D_FLAGS) && 3682 mode1->clock == mode2->clock; /* we want an exact match */ 3683 } 3684 3685 static bool m_n_equal(const struct intel_link_m_n *m_n_1, 3686 const struct intel_link_m_n *m_n_2) 3687 { 3688 return m_n_1->tu == m_n_2->tu && 3689 m_n_1->data_m == m_n_2->data_m && 3690 m_n_1->data_n == m_n_2->data_n && 3691 m_n_1->link_m == m_n_2->link_m && 3692 m_n_1->link_n == m_n_2->link_n; 3693 } 3694 3695 static bool 
crtcs_port_sync_compatible(const struct intel_crtc_state *crtc_state1, 3696 const struct intel_crtc_state *crtc_state2) 3697 { 3698 return crtc_state1->hw.active && crtc_state2->hw.active && 3699 crtc_state1->output_types == crtc_state2->output_types && 3700 crtc_state1->output_format == crtc_state2->output_format && 3701 crtc_state1->lane_count == crtc_state2->lane_count && 3702 crtc_state1->port_clock == crtc_state2->port_clock && 3703 mode_equal(&crtc_state1->hw.adjusted_mode, 3704 &crtc_state2->hw.adjusted_mode) && 3705 m_n_equal(&crtc_state1->dp_m_n, &crtc_state2->dp_m_n); 3706 } 3707 3708 static u8 3709 intel_ddi_port_sync_transcoders(const struct intel_crtc_state *ref_crtc_state, 3710 int tile_group_id) 3711 { 3712 struct drm_connector *connector; 3713 const struct drm_connector_state *conn_state; 3714 struct drm_i915_private *dev_priv = to_i915(ref_crtc_state->uapi.crtc->dev); 3715 struct intel_atomic_state *state = 3716 to_intel_atomic_state(ref_crtc_state->uapi.state); 3717 u8 transcoders = 0; 3718 int i; 3719 3720 /* 3721 * We don't enable port sync on BDW due to missing w/as and 3722 * due to not having adjusted the modeset sequence appropriately. 3723 */ 3724 if (DISPLAY_VER(dev_priv) < 9) 3725 return 0; 3726 3727 if (!intel_crtc_has_type(ref_crtc_state, INTEL_OUTPUT_DP)) 3728 return 0; 3729 3730 for_each_new_connector_in_state(&state->base, connector, conn_state, i) { 3731 struct intel_crtc *crtc = to_intel_crtc(conn_state->crtc); 3732 const struct intel_crtc_state *crtc_state; 3733 3734 if (!crtc) 3735 continue; 3736 3737 if (!connector->has_tile || 3738 connector->tile_group->id != 3739 tile_group_id) 3740 continue; 3741 crtc_state = intel_atomic_get_new_crtc_state(state, 3742 crtc); 3743 if (!crtcs_port_sync_compatible(ref_crtc_state, 3744 crtc_state)) 3745 continue; 3746 transcoders |= BIT(crtc_state->cpu_transcoder); 3747 } 3748 3749 return transcoders; 3750 } 3751 3752 static int intel_ddi_compute_config_late(struct intel_encoder *encoder, 3753 struct intel_crtc_state *crtc_state, 3754 struct drm_connector_state *conn_state) 3755 { 3756 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3757 struct drm_connector *connector = conn_state->connector; 3758 u8 port_sync_transcoders = 0; 3759 3760 drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] [CRTC:%d:%s]\n", 3761 encoder->base.base.id, encoder->base.name, 3762 crtc_state->uapi.crtc->base.id, crtc_state->uapi.crtc->name); 3763 3764 if (connector->has_tile) 3765 port_sync_transcoders = intel_ddi_port_sync_transcoders(crtc_state, 3766 connector->tile_group->id); 3767 3768 /* 3769 * EDP transcoders cannot be enslaved; 3770 * always make them a master when present. 3771 */ 3772 if (port_sync_transcoders & BIT(TRANSCODER_EDP)) 3773 crtc_state->master_transcoder = TRANSCODER_EDP; 3774 else 3775 crtc_state->master_transcoder = ffs(port_sync_transcoders) - 1; 3776 3777 if (crtc_state->master_transcoder == crtc_state->cpu_transcoder) { 3778 crtc_state->master_transcoder = INVALID_TRANSCODER; 3779 crtc_state->sync_mode_slaves_mask = 3780 port_sync_transcoders & ~BIT(crtc_state->cpu_transcoder); 3781 } 3782 3783 return 0; 3784 } 3785 3786 static void intel_ddi_encoder_destroy(struct drm_encoder *encoder) 3787 { 3788 struct drm_i915_private *i915 = to_i915(encoder->dev); 3789 struct intel_digital_port *dig_port = enc_to_dig_port(to_intel_encoder(encoder)); 3790 enum phy phy = intel_port_to_phy(i915, dig_port->base.port); 3791 3792 intel_dp_encoder_flush_work(encoder); 3793 if (intel_phy_is_tc(i915, phy)) 3794
intel_tc_port_flush_work(dig_port); 3795 intel_display_power_flush_work(i915); 3796 3797 drm_encoder_cleanup(encoder); 3798 kfree(dig_port->hdcp_port_data.streams); 3799 kfree(dig_port); 3800 } 3801 3802 static void intel_ddi_encoder_reset(struct drm_encoder *encoder) 3803 { 3804 struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder)); 3805 3806 intel_dp->reset_link_params = true; 3807 3808 intel_pps_encoder_reset(intel_dp); 3809 } 3810 3811 static const struct drm_encoder_funcs intel_ddi_funcs = { 3812 .reset = intel_ddi_encoder_reset, 3813 .destroy = intel_ddi_encoder_destroy, 3814 }; 3815 3816 static struct intel_connector * 3817 intel_ddi_init_dp_connector(struct intel_digital_port *dig_port) 3818 { 3819 struct intel_connector *connector; 3820 enum port port = dig_port->base.port; 3821 3822 connector = intel_connector_alloc(); 3823 if (!connector) 3824 return NULL; 3825 3826 dig_port->dp.output_reg = DDI_BUF_CTL(port); 3827 dig_port->dp.prepare_link_retrain = intel_ddi_prepare_link_retrain; 3828 dig_port->dp.set_link_train = intel_ddi_set_link_train; 3829 dig_port->dp.set_idle_link_train = intel_ddi_set_idle_link_train; 3830 3831 dig_port->dp.voltage_max = intel_ddi_dp_voltage_max; 3832 dig_port->dp.preemph_max = intel_ddi_dp_preemph_max; 3833 3834 if (!intel_dp_init_connector(dig_port, connector)) { 3835 kfree(connector); 3836 return NULL; 3837 } 3838 3839 if (dig_port->base.type == INTEL_OUTPUT_EDP) { 3840 struct drm_device *dev = dig_port->base.base.dev; 3841 struct drm_privacy_screen *privacy_screen; 3842 3843 privacy_screen = drm_privacy_screen_get(dev->dev, NULL); 3844 if (!IS_ERR(privacy_screen)) { 3845 drm_connector_attach_privacy_screen_provider(&connector->base, 3846 privacy_screen); 3847 } else if (PTR_ERR(privacy_screen) != -ENODEV) { 3848 drm_warn(dev, "Error getting privacy-screen\n"); 3849 } 3850 } 3851 3852 return connector; 3853 } 3854 3855 static int modeset_pipe(struct drm_crtc *crtc, 3856 struct drm_modeset_acquire_ctx *ctx) 3857 { 3858 struct drm_atomic_state *state; 3859 struct drm_crtc_state *crtc_state; 3860 int ret; 3861 3862 state = drm_atomic_state_alloc(crtc->dev); 3863 if (!state) 3864 return -ENOMEM; 3865 3866 state->acquire_ctx = ctx; 3867 3868 crtc_state = drm_atomic_get_crtc_state(state, crtc); 3869 if (IS_ERR(crtc_state)) { 3870 ret = PTR_ERR(crtc_state); 3871 goto out; 3872 } 3873 3874 crtc_state->connectors_changed = true; 3875 3876 ret = drm_atomic_commit(state); 3877 out: 3878 drm_atomic_state_put(state); 3879 3880 return ret; 3881 } 3882 3883 static int intel_hdmi_reset_link(struct intel_encoder *encoder, 3884 struct drm_modeset_acquire_ctx *ctx) 3885 { 3886 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3887 struct intel_hdmi *hdmi = enc_to_intel_hdmi(encoder); 3888 struct intel_connector *connector = hdmi->attached_connector; 3889 struct i2c_adapter *adapter = 3890 intel_gmbus_get_adapter(dev_priv, hdmi->ddc_bus); 3891 struct drm_connector_state *conn_state; 3892 struct intel_crtc_state *crtc_state; 3893 struct intel_crtc *crtc; 3894 u8 config; 3895 int ret; 3896 3897 if (!connector || connector->base.status != connector_status_connected) 3898 return 0; 3899 3900 ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex, 3901 ctx); 3902 if (ret) 3903 return ret; 3904 3905 conn_state = connector->base.state; 3906 3907 crtc = to_intel_crtc(conn_state->crtc); 3908 if (!crtc) 3909 return 0; 3910 3911 ret = drm_modeset_lock(&crtc->base.mutex, ctx); 3912 if (ret) 3913 return ret; 3914 3915 crtc_state = 
to_intel_crtc_state(crtc->base.state); 3916 3917 drm_WARN_ON(&dev_priv->drm, 3918 !intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)); 3919 3920 if (!crtc_state->hw.active) 3921 return 0; 3922 3923 if (!crtc_state->hdmi_high_tmds_clock_ratio && 3924 !crtc_state->hdmi_scrambling) 3925 return 0; 3926 3927 if (conn_state->commit && 3928 !try_wait_for_completion(&conn_state->commit->hw_done)) 3929 return 0; 3930 3931 ret = drm_scdc_readb(adapter, SCDC_TMDS_CONFIG, &config); 3932 if (ret < 0) { 3933 drm_err(&dev_priv->drm, "Failed to read TMDS config: %d\n", 3934 ret); 3935 return 0; 3936 } 3937 3938 if (!!(config & SCDC_TMDS_BIT_CLOCK_RATIO_BY_40) == 3939 crtc_state->hdmi_high_tmds_clock_ratio && 3940 !!(config & SCDC_SCRAMBLING_ENABLE) == 3941 crtc_state->hdmi_scrambling) 3942 return 0; 3943 3944 /* 3945 * HDMI 2.0 says that one should not send scrambled data 3946 * prior to configuring the sink scrambling, and that 3947 * TMDS clock/data transmission should be suspended when 3948 * changing the TMDS clock rate in the sink. So let's 3949 * just do a full modeset here, even though some sinks 3950 * would be perfectly happy if we were to just reconfigure 3951 * the SCDC settings on the fly. 3952 */ 3953 return modeset_pipe(&crtc->base, ctx); 3954 } 3955 3956 static enum intel_hotplug_state 3957 intel_ddi_hotplug(struct intel_encoder *encoder, 3958 struct intel_connector *connector) 3959 { 3960 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3961 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3962 struct intel_dp *intel_dp = &dig_port->dp; 3963 enum phy phy = intel_port_to_phy(i915, encoder->port); 3964 bool is_tc = intel_phy_is_tc(i915, phy); 3965 struct drm_modeset_acquire_ctx ctx; 3966 enum intel_hotplug_state state; 3967 int ret; 3968 3969 if (intel_dp->compliance.test_active && 3970 intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) { 3971 intel_dp_phy_test(encoder); 3972 /* just do the PHY test and nothing else */ 3973 return INTEL_HOTPLUG_UNCHANGED; 3974 } 3975 3976 state = intel_encoder_hotplug(encoder, connector); 3977 3978 drm_modeset_acquire_init(&ctx, 0); 3979 3980 for (;;) { 3981 if (connector->base.connector_type == DRM_MODE_CONNECTOR_HDMIA) 3982 ret = intel_hdmi_reset_link(encoder, &ctx); 3983 else 3984 ret = intel_dp_retrain_link(encoder, &ctx); 3985 3986 if (ret == -EDEADLK) { 3987 drm_modeset_backoff(&ctx); 3988 continue; 3989 } 3990 3991 break; 3992 } 3993 3994 drm_modeset_drop_locks(&ctx); 3995 drm_modeset_acquire_fini(&ctx); 3996 drm_WARN(encoder->base.dev, ret, 3997 "Acquiring modeset locks failed with %i\n", ret); 3998 3999 /* 4000 * Unpowered type-c dongles can take some time to boot and become 4001 * responsive, so give those dongles some time to power up and then 4002 * retry the probe. 4003 * 4004 * On many platforms the HDMI live state signal is known to be 4005 * unreliable, so we can't use it to detect if a sink is connected or 4006 * not. Instead we detect if it's connected based on whether we can 4007 * read the EDID or not. That in turn has a problem during disconnect, 4008 * since the HPD interrupt may be raised before the DDC lines get 4009 * disconnected (due to how the required length of DDC vs. HPD 4010 * connector pins are specified) and so we'll still be able to get a 4011 * valid EDID. To solve this, schedule another detection cycle if this 4012 * time around we didn't detect any change in the sink's connection 4013 * status.
4014 * 4015 * Type-c connectors which get their HPD signal deasserted then 4016 * reasserted, without unplugging/replugging the sink from the 4017 * connector, introduce a delay until the AUX channel communication 4018 * becomes functional. Retry the detection for 5 seconds on type-c 4019 * connectors to account for this delay. 4020 */ 4021 if (state == INTEL_HOTPLUG_UNCHANGED && 4022 connector->hotplug_retries < (is_tc ? 5 : 1) && 4023 !dig_port->dp.is_mst) 4024 state = INTEL_HOTPLUG_RETRY; 4025 4026 return state; 4027 } 4028 4029 static bool lpt_digital_port_connected(struct intel_encoder *encoder) 4030 { 4031 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4032 u32 bit = dev_priv->display.hotplug.pch_hpd[encoder->hpd_pin]; 4033 4034 return intel_de_read(dev_priv, SDEISR) & bit; 4035 } 4036 4037 static bool hsw_digital_port_connected(struct intel_encoder *encoder) 4038 { 4039 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4040 u32 bit = dev_priv->display.hotplug.hpd[encoder->hpd_pin]; 4041 4042 return intel_de_read(dev_priv, DEISR) & bit; 4043 } 4044 4045 static bool bdw_digital_port_connected(struct intel_encoder *encoder) 4046 { 4047 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4048 u32 bit = dev_priv->display.hotplug.hpd[encoder->hpd_pin]; 4049 4050 return intel_de_read(dev_priv, GEN8_DE_PORT_ISR) & bit; 4051 } 4052 4053 static struct intel_connector * 4054 intel_ddi_init_hdmi_connector(struct intel_digital_port *dig_port) 4055 { 4056 struct intel_connector *connector; 4057 enum port port = dig_port->base.port; 4058 4059 connector = intel_connector_alloc(); 4060 if (!connector) 4061 return NULL; 4062 4063 dig_port->hdmi.hdmi_reg = DDI_BUF_CTL(port); 4064 intel_hdmi_init_connector(dig_port, connector); 4065 4066 return connector; 4067 } 4068 4069 static bool intel_ddi_a_force_4_lanes(struct intel_digital_port *dig_port) 4070 { 4071 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 4072 4073 if (dig_port->base.port != PORT_A) 4074 return false; 4075 4076 if (dig_port->saved_port_bits & DDI_A_4_LANES) 4077 return false; 4078 4079 /* Broxton/Geminilake: Bspec says that DDI_A_4_LANES is the only 4080 * supported configuration 4081 */ 4082 if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) 4083 return true; 4084 4085 return false; 4086 } 4087 4088 static int 4089 intel_ddi_max_lanes(struct intel_digital_port *dig_port) 4090 { 4091 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 4092 enum port port = dig_port->base.port; 4093 int max_lanes = 4; 4094 4095 if (DISPLAY_VER(dev_priv) >= 11) 4096 return max_lanes; 4097 4098 if (port == PORT_A || port == PORT_E) { 4099 if (intel_de_read(dev_priv, DDI_BUF_CTL(PORT_A)) & DDI_A_4_LANES) 4100 max_lanes = port == PORT_A ? 4 : 0; 4101 else 4102 /* Both A and E share 2 lanes */ 4103 max_lanes = 2; 4104 } 4105 4106 /* 4107 * Some BIOS might fail to set this bit on port A if eDP 4108 * wasn't lit up at boot. Force this bit set when needed 4109 * so we use the proper lane count for our calculations. 
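 * (The "when needed" decision is made by intel_ddi_a_force_4_lanes() above:
 * per Bspec, DDI A on Broxton/Geminilake only supports the 4-lane
 * configuration, so the bit is forced on there.)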
4110 */ 4111 if (intel_ddi_a_force_4_lanes(dig_port)) { 4112 drm_dbg_kms(&dev_priv->drm, 4113 "Forcing DDI_A_4_LANES for port A\n"); 4114 dig_port->saved_port_bits |= DDI_A_4_LANES; 4115 max_lanes = 4; 4116 } 4117 4118 return max_lanes; 4119 } 4120 4121 static bool hti_uses_phy(struct drm_i915_private *i915, enum phy phy) 4122 { 4123 return i915->hti_state & HDPORT_ENABLED && 4124 i915->hti_state & HDPORT_DDI_USED(phy); 4125 } 4126 4127 static enum hpd_pin xelpd_hpd_pin(struct drm_i915_private *dev_priv, 4128 enum port port) 4129 { 4130 if (port >= PORT_D_XELPD) 4131 return HPD_PORT_D + port - PORT_D_XELPD; 4132 else if (port >= PORT_TC1) 4133 return HPD_PORT_TC1 + port - PORT_TC1; 4134 else 4135 return HPD_PORT_A + port - PORT_A; 4136 } 4137 4138 static enum hpd_pin dg1_hpd_pin(struct drm_i915_private *dev_priv, 4139 enum port port) 4140 { 4141 if (port >= PORT_TC1) 4142 return HPD_PORT_C + port - PORT_TC1; 4143 else 4144 return HPD_PORT_A + port - PORT_A; 4145 } 4146 4147 static enum hpd_pin tgl_hpd_pin(struct drm_i915_private *dev_priv, 4148 enum port port) 4149 { 4150 if (port >= PORT_TC1) 4151 return HPD_PORT_TC1 + port - PORT_TC1; 4152 else 4153 return HPD_PORT_A + port - PORT_A; 4154 } 4155 4156 static enum hpd_pin rkl_hpd_pin(struct drm_i915_private *dev_priv, 4157 enum port port) 4158 { 4159 if (HAS_PCH_TGP(dev_priv)) 4160 return tgl_hpd_pin(dev_priv, port); 4161 4162 if (port >= PORT_TC1) 4163 return HPD_PORT_C + port - PORT_TC1; 4164 else 4165 return HPD_PORT_A + port - PORT_A; 4166 } 4167 4168 static enum hpd_pin icl_hpd_pin(struct drm_i915_private *dev_priv, 4169 enum port port) 4170 { 4171 if (port >= PORT_C) 4172 return HPD_PORT_TC1 + port - PORT_C; 4173 else 4174 return HPD_PORT_A + port - PORT_A; 4175 } 4176 4177 static enum hpd_pin ehl_hpd_pin(struct drm_i915_private *dev_priv, 4178 enum port port) 4179 { 4180 if (port == PORT_D) 4181 return HPD_PORT_A; 4182 4183 if (HAS_PCH_TGP(dev_priv)) 4184 return icl_hpd_pin(dev_priv, port); 4185 4186 return HPD_PORT_A + port - PORT_A; 4187 } 4188 4189 static enum hpd_pin skl_hpd_pin(struct drm_i915_private *dev_priv, enum port port) 4190 { 4191 if (HAS_PCH_TGP(dev_priv)) 4192 return icl_hpd_pin(dev_priv, port); 4193 4194 return HPD_PORT_A + port - PORT_A; 4195 } 4196 4197 static bool intel_ddi_is_tc(struct drm_i915_private *i915, enum port port) 4198 { 4199 if (DISPLAY_VER(i915) >= 12) 4200 return port >= PORT_TC1; 4201 else if (DISPLAY_VER(i915) >= 11) 4202 return port >= PORT_C; 4203 else 4204 return false; 4205 } 4206 4207 static void intel_ddi_encoder_suspend(struct intel_encoder *encoder) 4208 { 4209 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 4210 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 4211 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp); 4212 enum phy phy = intel_port_to_phy(i915, encoder->port); 4213 4214 intel_dp_encoder_suspend(encoder); 4215 4216 if (!intel_phy_is_tc(i915, phy)) 4217 return; 4218 4219 intel_tc_port_flush_work(dig_port); 4220 } 4221 4222 static void intel_ddi_encoder_shutdown(struct intel_encoder *encoder) 4223 { 4224 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 4225 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 4226 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp); 4227 enum phy phy = intel_port_to_phy(i915, encoder->port); 4228 4229 intel_dp_encoder_shutdown(encoder); 4230 intel_hdmi_encoder_shutdown(encoder); 4231 4232 if (!intel_phy_is_tc(i915, phy)) 4233 return; 4234 4235 intel_tc_port_flush_work(dig_port); 4236 } 4237 4238 #define 
port_tc_name(port) ((port) - PORT_TC1 + '1') 4239 #define tc_port_name(tc_port) ((tc_port) - TC_PORT_1 + '1') 4240 4241 void intel_ddi_init(struct drm_i915_private *dev_priv, enum port port) 4242 { 4243 struct intel_digital_port *dig_port; 4244 struct intel_encoder *encoder; 4245 const struct intel_bios_encoder_data *devdata; 4246 bool init_hdmi, init_dp; 4247 enum phy phy = intel_port_to_phy(dev_priv, port); 4248 4249 /* 4250 * On platforms with HTI (aka HDPORT), if it's enabled at boot it may 4251 * have taken over some of the PHYs and made them unavailable to the 4252 * driver. In that case we should skip initializing the corresponding 4253 * outputs. 4254 */ 4255 if (hti_uses_phy(dev_priv, phy)) { 4256 drm_dbg_kms(&dev_priv->drm, "PORT %c / PHY %c reserved by HTI\n", 4257 port_name(port), phy_name(phy)); 4258 return; 4259 } 4260 4261 devdata = intel_bios_encoder_data_lookup(dev_priv, port); 4262 if (!devdata) { 4263 drm_dbg_kms(&dev_priv->drm, 4264 "VBT says port %c is not present\n", 4265 port_name(port)); 4266 return; 4267 } 4268 4269 init_hdmi = intel_bios_encoder_supports_dvi(devdata) || 4270 intel_bios_encoder_supports_hdmi(devdata); 4271 init_dp = intel_bios_encoder_supports_dp(devdata); 4272 4273 if (intel_bios_is_lspcon_present(dev_priv, port)) { 4274 /* 4275 * Lspcon device needs to be driven with DP connector 4276 * with special detection sequence. So make sure DP 4277 * is initialized before lspcon. 4278 */ 4279 init_dp = true; 4280 init_hdmi = false; 4281 drm_dbg_kms(&dev_priv->drm, "VBT says port %c has lspcon\n", 4282 port_name(port)); 4283 } 4284 4285 if (!init_dp && !init_hdmi) { 4286 drm_dbg_kms(&dev_priv->drm, 4287 "VBT says port %c is not DVI/HDMI/DP compatible, respect it\n", 4288 port_name(port)); 4289 return; 4290 } 4291 4292 if (intel_phy_is_snps(dev_priv, phy) && 4293 dev_priv->snps_phy_failed_calibration & BIT(phy)) { 4294 drm_dbg_kms(&dev_priv->drm, 4295 "SNPS PHY %c failed to calibrate, proceeding anyway\n", 4296 phy_name(phy)); 4297 } 4298 4299 dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL); 4300 if (!dig_port) 4301 return; 4302 4303 encoder = &dig_port->base; 4304 encoder->devdata = devdata; 4305 4306 if (DISPLAY_VER(dev_priv) >= 13 && port >= PORT_D_XELPD) { 4307 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 4308 DRM_MODE_ENCODER_TMDS, 4309 "DDI %c/PHY %c", 4310 port_name(port - PORT_D_XELPD + PORT_D), 4311 phy_name(phy)); 4312 } else if (DISPLAY_VER(dev_priv) >= 12) { 4313 enum tc_port tc_port = intel_port_to_tc(dev_priv, port); 4314 4315 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 4316 DRM_MODE_ENCODER_TMDS, 4317 "DDI %s%c/PHY %s%c", 4318 port >= PORT_TC1 ? "TC" : "", 4319 port >= PORT_TC1 ? port_tc_name(port) : port_name(port), 4320 tc_port != TC_PORT_NONE ? "TC" : "", 4321 tc_port != TC_PORT_NONE ? tc_port_name(tc_port) : phy_name(phy)); 4322 } else if (DISPLAY_VER(dev_priv) >= 11) { 4323 enum tc_port tc_port = intel_port_to_tc(dev_priv, port); 4324 4325 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 4326 DRM_MODE_ENCODER_TMDS, 4327 "DDI %c%s/PHY %s%c", 4328 port_name(port), 4329 port >= PORT_C ? " (TC)" : "", 4330 tc_port != TC_PORT_NONE ? "TC" : "", 4331 tc_port != TC_PORT_NONE ? 
tc_port_name(tc_port) : phy_name(phy)); 4332 } else { 4333 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 4334 DRM_MODE_ENCODER_TMDS, 4335 "DDI %c/PHY %c", port_name(port), phy_name(phy)); 4336 } 4337 4338 mutex_init(&dig_port->hdcp_mutex); 4339 dig_port->num_hdcp_streams = 0; 4340 4341 encoder->hotplug = intel_ddi_hotplug; 4342 encoder->compute_output_type = intel_ddi_compute_output_type; 4343 encoder->compute_config = intel_ddi_compute_config; 4344 encoder->compute_config_late = intel_ddi_compute_config_late; 4345 encoder->enable = intel_enable_ddi; 4346 encoder->pre_pll_enable = intel_ddi_pre_pll_enable; 4347 encoder->pre_enable = intel_ddi_pre_enable; 4348 encoder->disable = intel_disable_ddi; 4349 encoder->post_disable = intel_ddi_post_disable; 4350 encoder->update_pipe = intel_ddi_update_pipe; 4351 encoder->get_hw_state = intel_ddi_get_hw_state; 4352 encoder->sync_state = intel_ddi_sync_state; 4353 encoder->initial_fastset_check = intel_ddi_initial_fastset_check; 4354 encoder->suspend = intel_ddi_encoder_suspend; 4355 encoder->shutdown = intel_ddi_encoder_shutdown; 4356 encoder->get_power_domains = intel_ddi_get_power_domains; 4357 4358 encoder->type = INTEL_OUTPUT_DDI; 4359 encoder->power_domain = intel_display_power_ddi_lanes_domain(dev_priv, port); 4360 encoder->port = port; 4361 encoder->cloneable = 0; 4362 encoder->pipe_mask = ~0; 4363 4364 if (IS_DG2(dev_priv)) { 4365 encoder->enable_clock = intel_mpllb_enable; 4366 encoder->disable_clock = intel_mpllb_disable; 4367 encoder->get_config = dg2_ddi_get_config; 4368 } else if (IS_ALDERLAKE_S(dev_priv)) { 4369 encoder->enable_clock = adls_ddi_enable_clock; 4370 encoder->disable_clock = adls_ddi_disable_clock; 4371 encoder->is_clock_enabled = adls_ddi_is_clock_enabled; 4372 encoder->get_config = adls_ddi_get_config; 4373 } else if (IS_ROCKETLAKE(dev_priv)) { 4374 encoder->enable_clock = rkl_ddi_enable_clock; 4375 encoder->disable_clock = rkl_ddi_disable_clock; 4376 encoder->is_clock_enabled = rkl_ddi_is_clock_enabled; 4377 encoder->get_config = rkl_ddi_get_config; 4378 } else if (IS_DG1(dev_priv)) { 4379 encoder->enable_clock = dg1_ddi_enable_clock; 4380 encoder->disable_clock = dg1_ddi_disable_clock; 4381 encoder->is_clock_enabled = dg1_ddi_is_clock_enabled; 4382 encoder->get_config = dg1_ddi_get_config; 4383 } else if (IS_JSL_EHL(dev_priv)) { 4384 if (intel_ddi_is_tc(dev_priv, port)) { 4385 encoder->enable_clock = jsl_ddi_tc_enable_clock; 4386 encoder->disable_clock = jsl_ddi_tc_disable_clock; 4387 encoder->is_clock_enabled = jsl_ddi_tc_is_clock_enabled; 4388 encoder->get_config = icl_ddi_combo_get_config; 4389 } else { 4390 encoder->enable_clock = icl_ddi_combo_enable_clock; 4391 encoder->disable_clock = icl_ddi_combo_disable_clock; 4392 encoder->is_clock_enabled = icl_ddi_combo_is_clock_enabled; 4393 encoder->get_config = icl_ddi_combo_get_config; 4394 } 4395 } else if (DISPLAY_VER(dev_priv) >= 11) { 4396 if (intel_ddi_is_tc(dev_priv, port)) { 4397 encoder->enable_clock = icl_ddi_tc_enable_clock; 4398 encoder->disable_clock = icl_ddi_tc_disable_clock; 4399 encoder->is_clock_enabled = icl_ddi_tc_is_clock_enabled; 4400 encoder->get_config = icl_ddi_tc_get_config; 4401 } else { 4402 encoder->enable_clock = icl_ddi_combo_enable_clock; 4403 encoder->disable_clock = icl_ddi_combo_disable_clock; 4404 encoder->is_clock_enabled = icl_ddi_combo_is_clock_enabled; 4405 encoder->get_config = icl_ddi_combo_get_config; 4406 } 4407 } else if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) { 4408 /* BXT/GLK have fixed 
PLL->port mapping */ 4409 encoder->get_config = bxt_ddi_get_config; 4410 } else if (DISPLAY_VER(dev_priv) == 9) { 4411 encoder->enable_clock = skl_ddi_enable_clock; 4412 encoder->disable_clock = skl_ddi_disable_clock; 4413 encoder->is_clock_enabled = skl_ddi_is_clock_enabled; 4414 encoder->get_config = skl_ddi_get_config; 4415 } else if (IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv)) { 4416 encoder->enable_clock = hsw_ddi_enable_clock; 4417 encoder->disable_clock = hsw_ddi_disable_clock; 4418 encoder->is_clock_enabled = hsw_ddi_is_clock_enabled; 4419 encoder->get_config = hsw_ddi_get_config; 4420 } 4421 4422 if (IS_DG2(dev_priv)) { 4423 encoder->set_signal_levels = intel_snps_phy_set_signal_levels; 4424 } else if (DISPLAY_VER(dev_priv) >= 12) { 4425 if (intel_phy_is_combo(dev_priv, phy)) 4426 encoder->set_signal_levels = icl_combo_phy_set_signal_levels; 4427 else 4428 encoder->set_signal_levels = tgl_dkl_phy_set_signal_levels; 4429 } else if (DISPLAY_VER(dev_priv) >= 11) { 4430 if (intel_phy_is_combo(dev_priv, phy)) 4431 encoder->set_signal_levels = icl_combo_phy_set_signal_levels; 4432 else 4433 encoder->set_signal_levels = icl_mg_phy_set_signal_levels; 4434 } else if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) { 4435 encoder->set_signal_levels = bxt_ddi_phy_set_signal_levels; 4436 } else { 4437 encoder->set_signal_levels = hsw_set_signal_levels; 4438 } 4439 4440 intel_ddi_buf_trans_init(encoder); 4441 4442 if (DISPLAY_VER(dev_priv) >= 13) 4443 encoder->hpd_pin = xelpd_hpd_pin(dev_priv, port); 4444 else if (IS_DG1(dev_priv)) 4445 encoder->hpd_pin = dg1_hpd_pin(dev_priv, port); 4446 else if (IS_ROCKETLAKE(dev_priv)) 4447 encoder->hpd_pin = rkl_hpd_pin(dev_priv, port); 4448 else if (DISPLAY_VER(dev_priv) >= 12) 4449 encoder->hpd_pin = tgl_hpd_pin(dev_priv, port); 4450 else if (IS_JSL_EHL(dev_priv)) 4451 encoder->hpd_pin = ehl_hpd_pin(dev_priv, port); 4452 else if (DISPLAY_VER(dev_priv) == 11) 4453 encoder->hpd_pin = icl_hpd_pin(dev_priv, port); 4454 else if (DISPLAY_VER(dev_priv) == 9 && !IS_BROXTON(dev_priv)) 4455 encoder->hpd_pin = skl_hpd_pin(dev_priv, port); 4456 else 4457 encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port); 4458 4459 if (DISPLAY_VER(dev_priv) >= 11) 4460 dig_port->saved_port_bits = 4461 intel_de_read(dev_priv, DDI_BUF_CTL(port)) 4462 & DDI_BUF_PORT_REVERSAL; 4463 else 4464 dig_port->saved_port_bits = 4465 intel_de_read(dev_priv, DDI_BUF_CTL(port)) 4466 & (DDI_BUF_PORT_REVERSAL | DDI_A_4_LANES); 4467 4468 if (intel_bios_is_lane_reversal_needed(dev_priv, port)) 4469 dig_port->saved_port_bits |= DDI_BUF_PORT_REVERSAL; 4470 4471 dig_port->dp.output_reg = INVALID_MMIO_REG; 4472 dig_port->max_lanes = intel_ddi_max_lanes(dig_port); 4473 dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port); 4474 4475 if (intel_phy_is_tc(dev_priv, phy)) { 4476 bool is_legacy = 4477 !intel_bios_encoder_supports_typec_usb(devdata) && 4478 !intel_bios_encoder_supports_tbt(devdata); 4479 4480 intel_tc_port_init(dig_port, is_legacy); 4481 4482 encoder->update_prepare = intel_ddi_update_prepare; 4483 encoder->update_complete = intel_ddi_update_complete; 4484 } 4485 4486 drm_WARN_ON(&dev_priv->drm, port > PORT_I); 4487 dig_port->ddi_io_power_domain = intel_display_power_ddi_io_domain(dev_priv, port); 4488 4489 if (init_dp) { 4490 if (!intel_ddi_init_dp_connector(dig_port)) 4491 goto err; 4492 4493 dig_port->hpd_pulse = intel_dp_hpd_pulse; 4494 4495 if (dig_port->dp.mso_link_count) 4496 encoder->pipe_mask = intel_ddi_splitter_pipe_mask(dev_priv); 4497 } 4498 4499 /* In theory we don't 
need the encoder->type check, but leave it just in 4500 * case we have some really bad VBTs... */ 4501 if (encoder->type != INTEL_OUTPUT_EDP && init_hdmi) { 4502 if (!intel_ddi_init_hdmi_connector(dig_port)) 4503 goto err; 4504 } 4505 4506 if (DISPLAY_VER(dev_priv) >= 11) { 4507 if (intel_phy_is_tc(dev_priv, phy)) 4508 dig_port->connected = intel_tc_port_connected; 4509 else 4510 dig_port->connected = lpt_digital_port_connected; 4511 } else if (DISPLAY_VER(dev_priv) >= 8) { 4512 if (port == PORT_A || IS_GEMINILAKE(dev_priv) || 4513 IS_BROXTON(dev_priv)) 4514 dig_port->connected = bdw_digital_port_connected; 4515 else 4516 dig_port->connected = lpt_digital_port_connected; 4517 } else { 4518 if (port == PORT_A) 4519 dig_port->connected = hsw_digital_port_connected; 4520 else 4521 dig_port->connected = lpt_digital_port_connected; 4522 } 4523 4524 intel_infoframe_init(dig_port); 4525 4526 return; 4527 4528 err: 4529 drm_encoder_cleanup(&encoder->base); 4530 kfree(dig_port); 4531 } 4532