1 /* 2 * Copyright © 2012 Intel Corporation 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice (including the next 12 * paragraph) shall be included in all copies or substantial portions of the 13 * Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 21 * IN THE SOFTWARE. 22 * 23 * Authors: 24 * Eugeni Dodonov <eugeni.dodonov@intel.com> 25 * 26 */ 27 28 #include <drm/drm_scdc_helper.h> 29 30 #include "i915_drv.h" 31 #include "intel_audio.h" 32 #include "intel_combo_phy.h" 33 #include "intel_connector.h" 34 #include "intel_ddi.h" 35 #include "intel_ddi_buf_trans.h" 36 #include "intel_display_types.h" 37 #include "intel_dp.h" 38 #include "intel_dp_link_training.h" 39 #include "intel_dp_mst.h" 40 #include "intel_dpio_phy.h" 41 #include "intel_dsi.h" 42 #include "intel_fdi.h" 43 #include "intel_fifo_underrun.h" 44 #include "intel_gmbus.h" 45 #include "intel_hdcp.h" 46 #include "intel_hdmi.h" 47 #include "intel_hotplug.h" 48 #include "intel_lspcon.h" 49 #include "intel_panel.h" 50 #include "intel_pps.h" 51 #include "intel_psr.h" 52 #include "intel_sprite.h" 53 #include "intel_tc.h" 54 #include "intel_vdsc.h" 55 #include "intel_vrr.h" 56 #include "skl_scaler.h" 57 #include "skl_universal_plane.h" 58 59 static const u8 index_to_dp_signal_levels[] = { 60 [0] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0, 61 [1] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1, 62 [2] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2, 63 [3] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_3, 64 [4] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0, 65 [5] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1, 66 [6] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2, 67 [7] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0, 68 [8] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1, 69 [9] = DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0, 70 }; 71 72 static int intel_ddi_hdmi_level(struct intel_encoder *encoder, 73 const struct intel_crtc_state *crtc_state) 74 { 75 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 76 int n_entries, level, default_entry; 77 78 n_entries = intel_ddi_hdmi_num_entries(encoder, crtc_state, &default_entry); 79 if (n_entries == 0) 80 return 0; 81 level = intel_bios_hdmi_level_shift(encoder); 82 if (level < 0) 83 level = default_entry; 84 85 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 86 level = n_entries - 1; 87 88 return level; 89 } 90 91 /* 92 * Starting with Haswell, DDI port buffers must be programmed with correct 93 * values in advance. 
This function programs the correct values for 94 * DP/eDP/FDI use cases. 95 */ 96 void intel_prepare_dp_ddi_buffers(struct intel_encoder *encoder, 97 const struct intel_crtc_state *crtc_state) 98 { 99 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 100 u32 iboost_bit = 0; 101 int i, n_entries; 102 enum port port = encoder->port; 103 const struct ddi_buf_trans *ddi_translations; 104 105 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG)) 106 ddi_translations = intel_ddi_get_buf_trans_fdi(dev_priv, 107 &n_entries); 108 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 109 ddi_translations = intel_ddi_get_buf_trans_edp(encoder, 110 &n_entries); 111 else 112 ddi_translations = intel_ddi_get_buf_trans_dp(encoder, 113 &n_entries); 114 115 /* If we're boosting the current, set bit 31 of trans1 */ 116 if (IS_GEN9_BC(dev_priv) && intel_bios_encoder_dp_boost_level(encoder->devdata)) 117 iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE; 118 119 for (i = 0; i < n_entries; i++) { 120 intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, i), 121 ddi_translations[i].trans1 | iboost_bit); 122 intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, i), 123 ddi_translations[i].trans2); 124 } 125 } 126 127 /* 128 * Starting with Haswell, DDI port buffers must be programmed with correct 129 * values in advance. This function programs the correct values for 130 * HDMI/DVI use cases. 131 */ 132 static void intel_prepare_hdmi_ddi_buffers(struct intel_encoder *encoder, 133 int level) 134 { 135 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 136 u32 iboost_bit = 0; 137 int n_entries; 138 enum port port = encoder->port; 139 const struct ddi_buf_trans *ddi_translations; 140 141 ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries); 142 143 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 144 return; 145 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 146 level = n_entries - 1; 147 148 /* If we're boosting the current, set bit 31 of trans1 */ 149 if (IS_GEN9_BC(dev_priv) && intel_bios_encoder_hdmi_boost_level(encoder->devdata)) 150 iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE; 151 152 /* Entry 9 is for HDMI: */ 153 intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, 9), 154 ddi_translations[level].trans1 | iboost_bit); 155 intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, 9), 156 ddi_translations[level].trans2); 157 } 158 159 void intel_wait_ddi_buf_idle(struct drm_i915_private *dev_priv, 160 enum port port) 161 { 162 if (IS_BROXTON(dev_priv)) { 163 udelay(16); 164 return; 165 } 166 167 if (wait_for_us((intel_de_read(dev_priv, DDI_BUF_CTL(port)) & 168 DDI_BUF_IS_IDLE), 8)) 169 drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get idle\n", 170 port_name(port)); 171 } 172 173 static void intel_wait_ddi_buf_active(struct drm_i915_private *dev_priv, 174 enum port port) 175 { 176 /* Wait > 518 usecs for DDI_BUF_CTL to be non idle */ 177 if (DISPLAY_VER(dev_priv) < 10 && !IS_GEMINILAKE(dev_priv)) { 178 usleep_range(518, 1000); 179 return; 180 } 181 182 if (wait_for_us(!(intel_de_read(dev_priv, DDI_BUF_CTL(port)) & 183 DDI_BUF_IS_IDLE), 500)) 184 drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get active\n", 185 port_name(port)); 186 } 187 188 static u32 hsw_pll_to_ddi_pll_sel(const struct intel_shared_dpll *pll) 189 { 190 switch (pll->info->id) { 191 case DPLL_ID_WRPLL1: 192 return PORT_CLK_SEL_WRPLL1; 193 case DPLL_ID_WRPLL2: 194 return PORT_CLK_SEL_WRPLL2; 195 case DPLL_ID_SPLL: 196 return PORT_CLK_SEL_SPLL; 197 case DPLL_ID_LCPLL_810: 198 return 
PORT_CLK_SEL_LCPLL_810; 199 case DPLL_ID_LCPLL_1350: 200 return PORT_CLK_SEL_LCPLL_1350; 201 case DPLL_ID_LCPLL_2700: 202 return PORT_CLK_SEL_LCPLL_2700; 203 default: 204 MISSING_CASE(pll->info->id); 205 return PORT_CLK_SEL_NONE; 206 } 207 } 208 209 static u32 icl_pll_to_ddi_clk_sel(struct intel_encoder *encoder, 210 const struct intel_crtc_state *crtc_state) 211 { 212 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 213 int clock = crtc_state->port_clock; 214 const enum intel_dpll_id id = pll->info->id; 215 216 switch (id) { 217 default: 218 /* 219 * DPLL_ID_ICL_DPLL0 and DPLL_ID_ICL_DPLL1 should not be used 220 * here, so do warn if this get passed in 221 */ 222 MISSING_CASE(id); 223 return DDI_CLK_SEL_NONE; 224 case DPLL_ID_ICL_TBTPLL: 225 switch (clock) { 226 case 162000: 227 return DDI_CLK_SEL_TBT_162; 228 case 270000: 229 return DDI_CLK_SEL_TBT_270; 230 case 540000: 231 return DDI_CLK_SEL_TBT_540; 232 case 810000: 233 return DDI_CLK_SEL_TBT_810; 234 default: 235 MISSING_CASE(clock); 236 return DDI_CLK_SEL_NONE; 237 } 238 case DPLL_ID_ICL_MGPLL1: 239 case DPLL_ID_ICL_MGPLL2: 240 case DPLL_ID_ICL_MGPLL3: 241 case DPLL_ID_ICL_MGPLL4: 242 case DPLL_ID_TGL_MGPLL5: 243 case DPLL_ID_TGL_MGPLL6: 244 return DDI_CLK_SEL_MG; 245 } 246 } 247 248 static void intel_ddi_init_dp_buf_reg(struct intel_encoder *encoder, 249 const struct intel_crtc_state *crtc_state) 250 { 251 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 252 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 253 254 intel_dp->DP = dig_port->saved_port_bits | 255 DDI_BUF_CTL_ENABLE | DDI_BUF_TRANS_SELECT(0); 256 intel_dp->DP |= DDI_PORT_WIDTH(crtc_state->lane_count); 257 } 258 259 static int icl_calc_tbt_pll_link(struct drm_i915_private *dev_priv, 260 enum port port) 261 { 262 u32 val = intel_de_read(dev_priv, DDI_CLK_SEL(port)) & DDI_CLK_SEL_MASK; 263 264 switch (val) { 265 case DDI_CLK_SEL_NONE: 266 return 0; 267 case DDI_CLK_SEL_TBT_162: 268 return 162000; 269 case DDI_CLK_SEL_TBT_270: 270 return 270000; 271 case DDI_CLK_SEL_TBT_540: 272 return 540000; 273 case DDI_CLK_SEL_TBT_810: 274 return 810000; 275 default: 276 MISSING_CASE(val); 277 return 0; 278 } 279 } 280 281 static void ddi_dotclock_get(struct intel_crtc_state *pipe_config) 282 { 283 int dotclock; 284 285 if (pipe_config->has_pch_encoder) 286 dotclock = intel_dotclock_calculate(pipe_config->port_clock, 287 &pipe_config->fdi_m_n); 288 else if (intel_crtc_has_dp_encoder(pipe_config)) 289 dotclock = intel_dotclock_calculate(pipe_config->port_clock, 290 &pipe_config->dp_m_n); 291 else if (pipe_config->has_hdmi_sink && pipe_config->pipe_bpp > 24) 292 dotclock = pipe_config->port_clock * 24 / pipe_config->pipe_bpp; 293 else 294 dotclock = pipe_config->port_clock; 295 296 if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 && 297 !intel_crtc_has_dp_encoder(pipe_config)) 298 dotclock *= 2; 299 300 if (pipe_config->pixel_multiplier) 301 dotclock /= pipe_config->pixel_multiplier; 302 303 pipe_config->hw.adjusted_mode.crtc_clock = dotclock; 304 } 305 306 void intel_ddi_set_dp_msa(const struct intel_crtc_state *crtc_state, 307 const struct drm_connector_state *conn_state) 308 { 309 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 310 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 311 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 312 u32 temp; 313 314 if (!intel_crtc_has_dp_encoder(crtc_state)) 315 return; 316 317 drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder)); 318 319 temp = 
DP_MSA_MISC_SYNC_CLOCK; 320 321 switch (crtc_state->pipe_bpp) { 322 case 18: 323 temp |= DP_MSA_MISC_6_BPC; 324 break; 325 case 24: 326 temp |= DP_MSA_MISC_8_BPC; 327 break; 328 case 30: 329 temp |= DP_MSA_MISC_10_BPC; 330 break; 331 case 36: 332 temp |= DP_MSA_MISC_12_BPC; 333 break; 334 default: 335 MISSING_CASE(crtc_state->pipe_bpp); 336 break; 337 } 338 339 /* nonsense combination */ 340 drm_WARN_ON(&dev_priv->drm, crtc_state->limited_color_range && 341 crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB); 342 343 if (crtc_state->limited_color_range) 344 temp |= DP_MSA_MISC_COLOR_CEA_RGB; 345 346 /* 347 * As per DP 1.2 spec section 2.3.4.3 while sending 348 * YCBCR 444 signals we should program MSA MISC1/0 fields with 349 * colorspace information. 350 */ 351 if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444) 352 temp |= DP_MSA_MISC_COLOR_YCBCR_444_BT709; 353 354 /* 355 * As per DP 1.4a spec section 2.2.4.3 [MSA Field for Indication 356 * of Color Encoding Format and Content Color Gamut] while sending 357 * YCBCR 420, HDR BT.2020 signals we should program MSA MISC1 fields 358 * which indicate VSC SDP for the Pixel Encoding/Colorimetry Format. 359 */ 360 if (intel_dp_needs_vsc_sdp(crtc_state, conn_state)) 361 temp |= DP_MSA_MISC_COLOR_VSC_SDP; 362 363 intel_de_write(dev_priv, TRANS_MSA_MISC(cpu_transcoder), temp); 364 } 365 366 static u32 bdw_trans_port_sync_master_select(enum transcoder master_transcoder) 367 { 368 if (master_transcoder == TRANSCODER_EDP) 369 return 0; 370 else 371 return master_transcoder + 1; 372 } 373 374 /* 375 * Returns the TRANS_DDI_FUNC_CTL value based on CRTC state. 376 * 377 * Only intended to be used by intel_ddi_enable_transcoder_func() and 378 * intel_ddi_config_transcoder_func(). 379 */ 380 static u32 381 intel_ddi_transcoder_func_reg_val_get(struct intel_encoder *encoder, 382 const struct intel_crtc_state *crtc_state) 383 { 384 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 385 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 386 enum pipe pipe = crtc->pipe; 387 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 388 enum port port = encoder->port; 389 u32 temp; 390 391 /* Enable TRANS_DDI_FUNC_CTL for the pipe to work in HDMI mode */ 392 temp = TRANS_DDI_FUNC_ENABLE; 393 if (DISPLAY_VER(dev_priv) >= 12) 394 temp |= TGL_TRANS_DDI_SELECT_PORT(port); 395 else 396 temp |= TRANS_DDI_SELECT_PORT(port); 397 398 switch (crtc_state->pipe_bpp) { 399 case 18: 400 temp |= TRANS_DDI_BPC_6; 401 break; 402 case 24: 403 temp |= TRANS_DDI_BPC_8; 404 break; 405 case 30: 406 temp |= TRANS_DDI_BPC_10; 407 break; 408 case 36: 409 temp |= TRANS_DDI_BPC_12; 410 break; 411 default: 412 BUG(); 413 } 414 415 if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PVSYNC) 416 temp |= TRANS_DDI_PVSYNC; 417 if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PHSYNC) 418 temp |= TRANS_DDI_PHSYNC; 419 420 if (cpu_transcoder == TRANSCODER_EDP) { 421 switch (pipe) { 422 case PIPE_A: 423 /* On Haswell, can only use the always-on power well for 424 * eDP when not using the panel fitter, and when not 425 * using motion blur mitigation (which we don't 426 * support). 
*/ 427 if (crtc_state->pch_pfit.force_thru) 428 temp |= TRANS_DDI_EDP_INPUT_A_ONOFF; 429 else 430 temp |= TRANS_DDI_EDP_INPUT_A_ON; 431 break; 432 case PIPE_B: 433 temp |= TRANS_DDI_EDP_INPUT_B_ONOFF; 434 break; 435 case PIPE_C: 436 temp |= TRANS_DDI_EDP_INPUT_C_ONOFF; 437 break; 438 default: 439 BUG(); 440 break; 441 } 442 } 443 444 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 445 if (crtc_state->has_hdmi_sink) 446 temp |= TRANS_DDI_MODE_SELECT_HDMI; 447 else 448 temp |= TRANS_DDI_MODE_SELECT_DVI; 449 450 if (crtc_state->hdmi_scrambling) 451 temp |= TRANS_DDI_HDMI_SCRAMBLING; 452 if (crtc_state->hdmi_high_tmds_clock_ratio) 453 temp |= TRANS_DDI_HIGH_TMDS_CHAR_RATE; 454 } else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG)) { 455 temp |= TRANS_DDI_MODE_SELECT_FDI; 456 temp |= (crtc_state->fdi_lanes - 1) << 1; 457 } else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 458 temp |= TRANS_DDI_MODE_SELECT_DP_MST; 459 temp |= DDI_PORT_WIDTH(crtc_state->lane_count); 460 461 if (DISPLAY_VER(dev_priv) >= 12) { 462 enum transcoder master; 463 464 master = crtc_state->mst_master_transcoder; 465 drm_WARN_ON(&dev_priv->drm, 466 master == INVALID_TRANSCODER); 467 temp |= TRANS_DDI_MST_TRANSPORT_SELECT(master); 468 } 469 } else { 470 temp |= TRANS_DDI_MODE_SELECT_DP_SST; 471 temp |= DDI_PORT_WIDTH(crtc_state->lane_count); 472 } 473 474 if (IS_DISPLAY_RANGE(dev_priv, 8, 10) && 475 crtc_state->master_transcoder != INVALID_TRANSCODER) { 476 u8 master_select = 477 bdw_trans_port_sync_master_select(crtc_state->master_transcoder); 478 479 temp |= TRANS_DDI_PORT_SYNC_ENABLE | 480 TRANS_DDI_PORT_SYNC_MASTER_SELECT(master_select); 481 } 482 483 return temp; 484 } 485 486 void intel_ddi_enable_transcoder_func(struct intel_encoder *encoder, 487 const struct intel_crtc_state *crtc_state) 488 { 489 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 490 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 491 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 492 493 if (DISPLAY_VER(dev_priv) >= 11) { 494 enum transcoder master_transcoder = crtc_state->master_transcoder; 495 u32 ctl2 = 0; 496 497 if (master_transcoder != INVALID_TRANSCODER) { 498 u8 master_select = 499 bdw_trans_port_sync_master_select(master_transcoder); 500 501 ctl2 |= PORT_SYNC_MODE_ENABLE | 502 PORT_SYNC_MODE_MASTER_SELECT(master_select); 503 } 504 505 intel_de_write(dev_priv, 506 TRANS_DDI_FUNC_CTL2(cpu_transcoder), ctl2); 507 } 508 509 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), 510 intel_ddi_transcoder_func_reg_val_get(encoder, 511 crtc_state)); 512 } 513 514 /* 515 * Same as intel_ddi_enable_transcoder_func(), but it does not set the enable 516 * bit. 
517 */ 518 static void 519 intel_ddi_config_transcoder_func(struct intel_encoder *encoder, 520 const struct intel_crtc_state *crtc_state) 521 { 522 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 523 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 524 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 525 u32 ctl; 526 527 ctl = intel_ddi_transcoder_func_reg_val_get(encoder, crtc_state); 528 ctl &= ~TRANS_DDI_FUNC_ENABLE; 529 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl); 530 } 531 532 void intel_ddi_disable_transcoder_func(const struct intel_crtc_state *crtc_state) 533 { 534 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 535 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 536 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 537 u32 ctl; 538 539 if (DISPLAY_VER(dev_priv) >= 11) 540 intel_de_write(dev_priv, 541 TRANS_DDI_FUNC_CTL2(cpu_transcoder), 0); 542 543 ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 544 545 drm_WARN_ON(crtc->base.dev, ctl & TRANS_DDI_HDCP_SIGNALLING); 546 547 ctl &= ~TRANS_DDI_FUNC_ENABLE; 548 549 if (IS_DISPLAY_RANGE(dev_priv, 8, 10)) 550 ctl &= ~(TRANS_DDI_PORT_SYNC_ENABLE | 551 TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK); 552 553 if (DISPLAY_VER(dev_priv) >= 12) { 554 if (!intel_dp_mst_is_master_trans(crtc_state)) { 555 ctl &= ~(TGL_TRANS_DDI_PORT_MASK | 556 TRANS_DDI_MODE_SELECT_MASK); 557 } 558 } else { 559 ctl &= ~(TRANS_DDI_PORT_MASK | TRANS_DDI_MODE_SELECT_MASK); 560 } 561 562 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl); 563 564 if (dev_priv->quirks & QUIRK_INCREASE_DDI_DISABLED_TIME && 565 intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 566 drm_dbg_kms(&dev_priv->drm, 567 "Quirk Increase DDI disabled time\n"); 568 /* Quirk time at 100ms for reliable operation */ 569 msleep(100); 570 } 571 } 572 573 int intel_ddi_toggle_hdcp_bits(struct intel_encoder *intel_encoder, 574 enum transcoder cpu_transcoder, 575 bool enable, u32 hdcp_mask) 576 { 577 struct drm_device *dev = intel_encoder->base.dev; 578 struct drm_i915_private *dev_priv = to_i915(dev); 579 intel_wakeref_t wakeref; 580 int ret = 0; 581 u32 tmp; 582 583 wakeref = intel_display_power_get_if_enabled(dev_priv, 584 intel_encoder->power_domain); 585 if (drm_WARN_ON(dev, !wakeref)) 586 return -ENXIO; 587 588 tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 589 if (enable) 590 tmp |= hdcp_mask; 591 else 592 tmp &= ~hdcp_mask; 593 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), tmp); 594 intel_display_power_put(dev_priv, intel_encoder->power_domain, wakeref); 595 return ret; 596 } 597 598 bool intel_ddi_connector_get_hw_state(struct intel_connector *intel_connector) 599 { 600 struct drm_device *dev = intel_connector->base.dev; 601 struct drm_i915_private *dev_priv = to_i915(dev); 602 struct intel_encoder *encoder = intel_attached_encoder(intel_connector); 603 int type = intel_connector->base.connector_type; 604 enum port port = encoder->port; 605 enum transcoder cpu_transcoder; 606 intel_wakeref_t wakeref; 607 enum pipe pipe = 0; 608 u32 tmp; 609 bool ret; 610 611 wakeref = intel_display_power_get_if_enabled(dev_priv, 612 encoder->power_domain); 613 if (!wakeref) 614 return false; 615 616 if (!encoder->get_hw_state(encoder, &pipe)) { 617 ret = false; 618 goto out; 619 } 620 621 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) 622 cpu_transcoder = TRANSCODER_EDP; 623 else 624 cpu_transcoder = (enum transcoder) pipe; 625 626 
tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 627 628 switch (tmp & TRANS_DDI_MODE_SELECT_MASK) { 629 case TRANS_DDI_MODE_SELECT_HDMI: 630 case TRANS_DDI_MODE_SELECT_DVI: 631 ret = type == DRM_MODE_CONNECTOR_HDMIA; 632 break; 633 634 case TRANS_DDI_MODE_SELECT_DP_SST: 635 ret = type == DRM_MODE_CONNECTOR_eDP || 636 type == DRM_MODE_CONNECTOR_DisplayPort; 637 break; 638 639 case TRANS_DDI_MODE_SELECT_DP_MST: 640 /* if the transcoder is in MST state then 641 * connector isn't connected */ 642 ret = false; 643 break; 644 645 case TRANS_DDI_MODE_SELECT_FDI: 646 ret = type == DRM_MODE_CONNECTOR_VGA; 647 break; 648 649 default: 650 ret = false; 651 break; 652 } 653 654 out: 655 intel_display_power_put(dev_priv, encoder->power_domain, wakeref); 656 657 return ret; 658 } 659 660 static void intel_ddi_get_encoder_pipes(struct intel_encoder *encoder, 661 u8 *pipe_mask, bool *is_dp_mst) 662 { 663 struct drm_device *dev = encoder->base.dev; 664 struct drm_i915_private *dev_priv = to_i915(dev); 665 enum port port = encoder->port; 666 intel_wakeref_t wakeref; 667 enum pipe p; 668 u32 tmp; 669 u8 mst_pipe_mask; 670 671 *pipe_mask = 0; 672 *is_dp_mst = false; 673 674 wakeref = intel_display_power_get_if_enabled(dev_priv, 675 encoder->power_domain); 676 if (!wakeref) 677 return; 678 679 tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 680 if (!(tmp & DDI_BUF_CTL_ENABLE)) 681 goto out; 682 683 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) { 684 tmp = intel_de_read(dev_priv, 685 TRANS_DDI_FUNC_CTL(TRANSCODER_EDP)); 686 687 switch (tmp & TRANS_DDI_EDP_INPUT_MASK) { 688 default: 689 MISSING_CASE(tmp & TRANS_DDI_EDP_INPUT_MASK); 690 fallthrough; 691 case TRANS_DDI_EDP_INPUT_A_ON: 692 case TRANS_DDI_EDP_INPUT_A_ONOFF: 693 *pipe_mask = BIT(PIPE_A); 694 break; 695 case TRANS_DDI_EDP_INPUT_B_ONOFF: 696 *pipe_mask = BIT(PIPE_B); 697 break; 698 case TRANS_DDI_EDP_INPUT_C_ONOFF: 699 *pipe_mask = BIT(PIPE_C); 700 break; 701 } 702 703 goto out; 704 } 705 706 mst_pipe_mask = 0; 707 for_each_pipe(dev_priv, p) { 708 enum transcoder cpu_transcoder = (enum transcoder)p; 709 unsigned int port_mask, ddi_select; 710 intel_wakeref_t trans_wakeref; 711 712 trans_wakeref = intel_display_power_get_if_enabled(dev_priv, 713 POWER_DOMAIN_TRANSCODER(cpu_transcoder)); 714 if (!trans_wakeref) 715 continue; 716 717 if (DISPLAY_VER(dev_priv) >= 12) { 718 port_mask = TGL_TRANS_DDI_PORT_MASK; 719 ddi_select = TGL_TRANS_DDI_SELECT_PORT(port); 720 } else { 721 port_mask = TRANS_DDI_PORT_MASK; 722 ddi_select = TRANS_DDI_SELECT_PORT(port); 723 } 724 725 tmp = intel_de_read(dev_priv, 726 TRANS_DDI_FUNC_CTL(cpu_transcoder)); 727 intel_display_power_put(dev_priv, POWER_DOMAIN_TRANSCODER(cpu_transcoder), 728 trans_wakeref); 729 730 if ((tmp & port_mask) != ddi_select) 731 continue; 732 733 if ((tmp & TRANS_DDI_MODE_SELECT_MASK) == 734 TRANS_DDI_MODE_SELECT_DP_MST) 735 mst_pipe_mask |= BIT(p); 736 737 *pipe_mask |= BIT(p); 738 } 739 740 if (!*pipe_mask) 741 drm_dbg_kms(&dev_priv->drm, 742 "No pipe for [ENCODER:%d:%s] found\n", 743 encoder->base.base.id, encoder->base.name); 744 745 if (!mst_pipe_mask && hweight8(*pipe_mask) > 1) { 746 drm_dbg_kms(&dev_priv->drm, 747 "Multiple pipes for [ENCODER:%d:%s] (pipe_mask %02x)\n", 748 encoder->base.base.id, encoder->base.name, 749 *pipe_mask); 750 *pipe_mask = BIT(ffs(*pipe_mask) - 1); 751 } 752 753 if (mst_pipe_mask && mst_pipe_mask != *pipe_mask) 754 drm_dbg_kms(&dev_priv->drm, 755 "Conflicting MST and non-MST state for [ENCODER:%d:%s] (pipe_mask %02x mst_pipe_mask 
%02x)\n", 756 encoder->base.base.id, encoder->base.name, 757 *pipe_mask, mst_pipe_mask); 758 else 759 *is_dp_mst = mst_pipe_mask; 760 761 out: 762 if (*pipe_mask && IS_GEN9_LP(dev_priv)) { 763 tmp = intel_de_read(dev_priv, BXT_PHY_CTL(port)); 764 if ((tmp & (BXT_PHY_CMNLANE_POWERDOWN_ACK | 765 BXT_PHY_LANE_POWERDOWN_ACK | 766 BXT_PHY_LANE_ENABLED)) != BXT_PHY_LANE_ENABLED) 767 drm_err(&dev_priv->drm, 768 "[ENCODER:%d:%s] enabled but PHY powered down? (PHY_CTL %08x)\n", 769 encoder->base.base.id, encoder->base.name, tmp); 770 } 771 772 intel_display_power_put(dev_priv, encoder->power_domain, wakeref); 773 } 774 775 bool intel_ddi_get_hw_state(struct intel_encoder *encoder, 776 enum pipe *pipe) 777 { 778 u8 pipe_mask; 779 bool is_mst; 780 781 intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst); 782 783 if (is_mst || !pipe_mask) 784 return false; 785 786 *pipe = ffs(pipe_mask) - 1; 787 788 return true; 789 } 790 791 static enum intel_display_power_domain 792 intel_ddi_main_link_aux_domain(struct intel_digital_port *dig_port) 793 { 794 /* CNL+ HW requires corresponding AUX IOs to be powered up for PSR with 795 * DC states enabled at the same time, while for driver initiated AUX 796 * transfers we need the same AUX IOs to be powered but with DC states 797 * disabled. Accordingly use the AUX power domain here which leaves DC 798 * states enabled. 799 * However, for non-A AUX ports the corresponding non-EDP transcoders 800 * would have already enabled power well 2 and DC_OFF. This means we can 801 * acquire a wider POWER_DOMAIN_AUX_{B,C,D,F} reference instead of a 802 * specific AUX_IO reference without powering up any extra wells. 803 * Note that PSR is enabled only on Port A even though this function 804 * returns the correct domain for other ports too. 805 */ 806 return dig_port->aux_ch == AUX_CH_A ? POWER_DOMAIN_AUX_IO_A : 807 intel_aux_power_domain(dig_port); 808 } 809 810 static void intel_ddi_get_power_domains(struct intel_encoder *encoder, 811 struct intel_crtc_state *crtc_state) 812 { 813 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 814 struct intel_digital_port *dig_port; 815 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 816 817 /* 818 * TODO: Add support for MST encoders. Atm, the following should never 819 * happen since fake-MST encoders don't set their get_power_domains() 820 * hook. 821 */ 822 if (drm_WARN_ON(&dev_priv->drm, 823 intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST))) 824 return; 825 826 dig_port = enc_to_dig_port(encoder); 827 828 if (!intel_phy_is_tc(dev_priv, phy) || 829 dig_port->tc_mode != TC_PORT_TBT_ALT) { 830 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 831 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 832 dig_port->ddi_io_power_domain); 833 } 834 835 /* 836 * AUX power is only needed for (e)DP mode, and for HDMI mode on TC 837 * ports. 
838 */ 839 if (intel_crtc_has_dp_encoder(crtc_state) || 840 intel_phy_is_tc(dev_priv, phy)) { 841 drm_WARN_ON(&dev_priv->drm, dig_port->aux_wakeref); 842 dig_port->aux_wakeref = 843 intel_display_power_get(dev_priv, 844 intel_ddi_main_link_aux_domain(dig_port)); 845 } 846 } 847 848 void intel_ddi_enable_pipe_clock(struct intel_encoder *encoder, 849 const struct intel_crtc_state *crtc_state) 850 { 851 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 852 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 853 enum port port = encoder->port; 854 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 855 856 if (cpu_transcoder != TRANSCODER_EDP) { 857 if (DISPLAY_VER(dev_priv) >= 12) 858 intel_de_write(dev_priv, 859 TRANS_CLK_SEL(cpu_transcoder), 860 TGL_TRANS_CLK_SEL_PORT(port)); 861 else 862 intel_de_write(dev_priv, 863 TRANS_CLK_SEL(cpu_transcoder), 864 TRANS_CLK_SEL_PORT(port)); 865 } 866 } 867 868 void intel_ddi_disable_pipe_clock(const struct intel_crtc_state *crtc_state) 869 { 870 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); 871 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 872 873 if (cpu_transcoder != TRANSCODER_EDP) { 874 if (DISPLAY_VER(dev_priv) >= 12) 875 intel_de_write(dev_priv, 876 TRANS_CLK_SEL(cpu_transcoder), 877 TGL_TRANS_CLK_SEL_DISABLED); 878 else 879 intel_de_write(dev_priv, 880 TRANS_CLK_SEL(cpu_transcoder), 881 TRANS_CLK_SEL_DISABLED); 882 } 883 } 884 885 static void _skl_ddi_set_iboost(struct drm_i915_private *dev_priv, 886 enum port port, u8 iboost) 887 { 888 u32 tmp; 889 890 tmp = intel_de_read(dev_priv, DISPIO_CR_TX_BMU_CR0); 891 tmp &= ~(BALANCE_LEG_MASK(port) | BALANCE_LEG_DISABLE(port)); 892 if (iboost) 893 tmp |= iboost << BALANCE_LEG_SHIFT(port); 894 else 895 tmp |= BALANCE_LEG_DISABLE(port); 896 intel_de_write(dev_priv, DISPIO_CR_TX_BMU_CR0, tmp); 897 } 898 899 static void skl_ddi_set_iboost(struct intel_encoder *encoder, 900 const struct intel_crtc_state *crtc_state, 901 int level) 902 { 903 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 904 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 905 u8 iboost; 906 907 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 908 iboost = intel_bios_encoder_hdmi_boost_level(encoder->devdata); 909 else 910 iboost = intel_bios_encoder_dp_boost_level(encoder->devdata); 911 912 if (iboost == 0) { 913 const struct ddi_buf_trans *ddi_translations; 914 int n_entries; 915 916 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 917 ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries); 918 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 919 ddi_translations = intel_ddi_get_buf_trans_edp(encoder, &n_entries); 920 else 921 ddi_translations = intel_ddi_get_buf_trans_dp(encoder, &n_entries); 922 923 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 924 return; 925 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 926 level = n_entries - 1; 927 928 iboost = ddi_translations[level].i_boost; 929 } 930 931 /* Make sure that the requested I_boost is valid */ 932 if (iboost && iboost != 0x1 && iboost != 0x3 && iboost != 0x7) { 933 drm_err(&dev_priv->drm, "Invalid I_boost value %u\n", iboost); 934 return; 935 } 936 937 _skl_ddi_set_iboost(dev_priv, encoder->port, iboost); 938 939 if (encoder->port == PORT_A && dig_port->max_lanes == 4) 940 _skl_ddi_set_iboost(dev_priv, PORT_E, iboost); 941 } 942 943 static void bxt_ddi_vswing_sequence(struct intel_encoder *encoder, 944 const struct 
intel_crtc_state *crtc_state, 945 int level) 946 { 947 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 948 const struct bxt_ddi_buf_trans *ddi_translations; 949 enum port port = encoder->port; 950 int n_entries; 951 952 ddi_translations = bxt_get_buf_trans(encoder, crtc_state, &n_entries); 953 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 954 return; 955 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 956 level = n_entries - 1; 957 958 bxt_ddi_phy_set_signal_level(dev_priv, port, 959 ddi_translations[level].margin, 960 ddi_translations[level].scale, 961 ddi_translations[level].enable, 962 ddi_translations[level].deemphasis); 963 } 964 965 static u8 intel_ddi_dp_voltage_max(struct intel_dp *intel_dp, 966 const struct intel_crtc_state *crtc_state) 967 { 968 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 969 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 970 enum port port = encoder->port; 971 enum phy phy = intel_port_to_phy(dev_priv, port); 972 int n_entries; 973 974 if (DISPLAY_VER(dev_priv) >= 12) { 975 if (intel_phy_is_combo(dev_priv, phy)) 976 tgl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 977 else 978 tgl_get_dkl_buf_trans(encoder, crtc_state, &n_entries); 979 } else if (IS_DISPLAY_VER(dev_priv, 11)) { 980 if (IS_PLATFORM(dev_priv, INTEL_JASPERLAKE)) 981 jsl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 982 else if (IS_PLATFORM(dev_priv, INTEL_ELKHARTLAKE)) 983 ehl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 984 else if (intel_phy_is_combo(dev_priv, phy)) 985 icl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 986 else 987 icl_get_mg_buf_trans(encoder, crtc_state, &n_entries); 988 } else if (IS_CANNONLAKE(dev_priv)) { 989 cnl_get_buf_trans(encoder, crtc_state, &n_entries); 990 } else if (IS_GEN9_LP(dev_priv)) { 991 bxt_get_buf_trans(encoder, crtc_state, &n_entries); 992 } else { 993 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 994 intel_ddi_get_buf_trans_edp(encoder, &n_entries); 995 else 996 intel_ddi_get_buf_trans_dp(encoder, &n_entries); 997 } 998 999 if (drm_WARN_ON(&dev_priv->drm, n_entries < 1)) 1000 n_entries = 1; 1001 if (drm_WARN_ON(&dev_priv->drm, 1002 n_entries > ARRAY_SIZE(index_to_dp_signal_levels))) 1003 n_entries = ARRAY_SIZE(index_to_dp_signal_levels); 1004 1005 return index_to_dp_signal_levels[n_entries - 1] & 1006 DP_TRAIN_VOLTAGE_SWING_MASK; 1007 } 1008 1009 /* 1010 * We assume that the full set of pre-emphasis values can be 1011 * used on all DDI platforms. Should that change we need to 1012 * rethink this code. 1013 */ 1014 static u8 intel_ddi_dp_preemph_max(struct intel_dp *intel_dp) 1015 { 1016 return DP_TRAIN_PRE_EMPH_LEVEL_3; 1017 } 1018 1019 static void cnl_ddi_vswing_program(struct intel_encoder *encoder, 1020 const struct intel_crtc_state *crtc_state, 1021 int level) 1022 { 1023 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1024 const struct cnl_ddi_buf_trans *ddi_translations; 1025 enum port port = encoder->port; 1026 int n_entries, ln; 1027 u32 val; 1028 1029 ddi_translations = cnl_get_buf_trans(encoder, crtc_state, &n_entries); 1030 1031 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 1032 return; 1033 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 1034 level = n_entries - 1; 1035 1036 /* Set PORT_TX_DW5 Scaling Mode Sel to 010b. 
*/ 1037 val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port)); 1038 val &= ~SCALING_MODE_SEL_MASK; 1039 val |= SCALING_MODE_SEL(2); 1040 intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val); 1041 1042 /* Program PORT_TX_DW2 */ 1043 val = intel_de_read(dev_priv, CNL_PORT_TX_DW2_LN0(port)); 1044 val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK | 1045 RCOMP_SCALAR_MASK); 1046 val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel); 1047 val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel); 1048 /* Rcomp scalar is fixed as 0x98 for every table entry */ 1049 val |= RCOMP_SCALAR(0x98); 1050 intel_de_write(dev_priv, CNL_PORT_TX_DW2_GRP(port), val); 1051 1052 /* Program PORT_TX_DW4 */ 1053 /* We cannot write to GRP. It would overrite individual loadgen */ 1054 for (ln = 0; ln < 4; ln++) { 1055 val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port)); 1056 val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK | 1057 CURSOR_COEFF_MASK); 1058 val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1); 1059 val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2); 1060 val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff); 1061 intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val); 1062 } 1063 1064 /* Program PORT_TX_DW5 */ 1065 /* All DW5 values are fixed for every table entry */ 1066 val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port)); 1067 val &= ~RTERM_SELECT_MASK; 1068 val |= RTERM_SELECT(6); 1069 val |= TAP3_DISABLE; 1070 intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val); 1071 1072 /* Program PORT_TX_DW7 */ 1073 val = intel_de_read(dev_priv, CNL_PORT_TX_DW7_LN0(port)); 1074 val &= ~N_SCALAR_MASK; 1075 val |= N_SCALAR(ddi_translations[level].dw7_n_scalar); 1076 intel_de_write(dev_priv, CNL_PORT_TX_DW7_GRP(port), val); 1077 } 1078 1079 static void cnl_ddi_vswing_sequence(struct intel_encoder *encoder, 1080 const struct intel_crtc_state *crtc_state, 1081 int level) 1082 { 1083 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1084 enum port port = encoder->port; 1085 int width, rate, ln; 1086 u32 val; 1087 1088 width = crtc_state->lane_count; 1089 rate = crtc_state->port_clock; 1090 1091 /* 1092 * 1. If port type is eDP or DP, 1093 * set PORT_PCS_DW1 cmnkeeper_enable to 1b, 1094 * else clear to 0b. 1095 */ 1096 val = intel_de_read(dev_priv, CNL_PORT_PCS_DW1_LN0(port)); 1097 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1098 val &= ~COMMON_KEEPER_EN; 1099 else 1100 val |= COMMON_KEEPER_EN; 1101 intel_de_write(dev_priv, CNL_PORT_PCS_DW1_GRP(port), val); 1102 1103 /* 2. Program loadgen select */ 1104 /* 1105 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes 1106 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1) 1107 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0) 1108 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0) 1109 */ 1110 for (ln = 0; ln <= 3; ln++) { 1111 val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port)); 1112 val &= ~LOADGEN_SELECT; 1113 1114 if ((rate <= 600000 && width == 4 && ln >= 1) || 1115 (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) { 1116 val |= LOADGEN_SELECT; 1117 } 1118 intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val); 1119 } 1120 1121 /* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */ 1122 val = intel_de_read(dev_priv, CNL_PORT_CL1CM_DW5); 1123 val |= SUS_CLOCK_CONFIG; 1124 intel_de_write(dev_priv, CNL_PORT_CL1CM_DW5, val); 1125 1126 /* 4. 
Clear training enable to change swing values */ 1127 val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port)); 1128 val &= ~TX_TRAINING_EN; 1129 intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val); 1130 1131 /* 5. Program swing and de-emphasis */ 1132 cnl_ddi_vswing_program(encoder, crtc_state, level); 1133 1134 /* 6. Set training enable to trigger update */ 1135 val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port)); 1136 val |= TX_TRAINING_EN; 1137 intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val); 1138 } 1139 1140 static void icl_ddi_combo_vswing_program(struct intel_encoder *encoder, 1141 const struct intel_crtc_state *crtc_state, 1142 int level) 1143 { 1144 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1145 const struct cnl_ddi_buf_trans *ddi_translations; 1146 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 1147 int n_entries, ln; 1148 u32 val; 1149 1150 if (DISPLAY_VER(dev_priv) >= 12) 1151 ddi_translations = tgl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 1152 else if (IS_PLATFORM(dev_priv, INTEL_JASPERLAKE)) 1153 ddi_translations = jsl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 1154 else if (IS_PLATFORM(dev_priv, INTEL_ELKHARTLAKE)) 1155 ddi_translations = ehl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 1156 else 1157 ddi_translations = icl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 1158 1159 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 1160 return; 1161 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 1162 level = n_entries - 1; 1163 1164 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) { 1165 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1166 1167 val = EDP4K2K_MODE_OVRD_EN | EDP4K2K_MODE_OVRD_OPTIMIZED; 1168 intel_dp->hobl_active = is_hobl_buf_trans(ddi_translations); 1169 intel_de_rmw(dev_priv, ICL_PORT_CL_DW10(phy), val, 1170 intel_dp->hobl_active ? val : 0); 1171 } 1172 1173 /* Set PORT_TX_DW5 */ 1174 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy)); 1175 val &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK | 1176 TAP2_DISABLE | TAP3_DISABLE); 1177 val |= SCALING_MODE_SEL(0x2); 1178 val |= RTERM_SELECT(0x6); 1179 val |= TAP3_DISABLE; 1180 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 1181 1182 /* Program PORT_TX_DW2 */ 1183 val = intel_de_read(dev_priv, ICL_PORT_TX_DW2_LN0(phy)); 1184 val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK | 1185 RCOMP_SCALAR_MASK); 1186 val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel); 1187 val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel); 1188 /* Program Rcomp scalar for every table entry */ 1189 val |= RCOMP_SCALAR(0x98); 1190 intel_de_write(dev_priv, ICL_PORT_TX_DW2_GRP(phy), val); 1191 1192 /* Program PORT_TX_DW4 */ 1193 /* We cannot write to GRP. It would overwrite individual loadgen. 
*/ 1194 for (ln = 0; ln <= 3; ln++) { 1195 val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy)); 1196 val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK | 1197 CURSOR_COEFF_MASK); 1198 val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1); 1199 val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2); 1200 val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff); 1201 intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val); 1202 } 1203 1204 /* Program PORT_TX_DW7 */ 1205 val = intel_de_read(dev_priv, ICL_PORT_TX_DW7_LN0(phy)); 1206 val &= ~N_SCALAR_MASK; 1207 val |= N_SCALAR(ddi_translations[level].dw7_n_scalar); 1208 intel_de_write(dev_priv, ICL_PORT_TX_DW7_GRP(phy), val); 1209 } 1210 1211 static void icl_combo_phy_ddi_vswing_sequence(struct intel_encoder *encoder, 1212 const struct intel_crtc_state *crtc_state, 1213 int level) 1214 { 1215 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1216 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 1217 int width, rate, ln; 1218 u32 val; 1219 1220 width = crtc_state->lane_count; 1221 rate = crtc_state->port_clock; 1222 1223 /* 1224 * 1. If port type is eDP or DP, 1225 * set PORT_PCS_DW1 cmnkeeper_enable to 1b, 1226 * else clear to 0b. 1227 */ 1228 val = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_LN0(phy)); 1229 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1230 val &= ~COMMON_KEEPER_EN; 1231 else 1232 val |= COMMON_KEEPER_EN; 1233 intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy), val); 1234 1235 /* 2. Program loadgen select */ 1236 /* 1237 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes 1238 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1) 1239 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0) 1240 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0) 1241 */ 1242 for (ln = 0; ln <= 3; ln++) { 1243 val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy)); 1244 val &= ~LOADGEN_SELECT; 1245 1246 if ((rate <= 600000 && width == 4 && ln >= 1) || 1247 (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) { 1248 val |= LOADGEN_SELECT; 1249 } 1250 intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val); 1251 } 1252 1253 /* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */ 1254 val = intel_de_read(dev_priv, ICL_PORT_CL_DW5(phy)); 1255 val |= SUS_CLOCK_CONFIG; 1256 intel_de_write(dev_priv, ICL_PORT_CL_DW5(phy), val); 1257 1258 /* 4. Clear training enable to change swing values */ 1259 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy)); 1260 val &= ~TX_TRAINING_EN; 1261 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 1262 1263 /* 5. Program swing and de-emphasis */ 1264 icl_ddi_combo_vswing_program(encoder, crtc_state, level); 1265 1266 /* 6. 
Set training enable to trigger update */ 1267 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy)); 1268 val |= TX_TRAINING_EN; 1269 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 1270 } 1271 1272 static void icl_mg_phy_ddi_vswing_sequence(struct intel_encoder *encoder, 1273 const struct intel_crtc_state *crtc_state, 1274 int level) 1275 { 1276 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1277 enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port); 1278 const struct icl_mg_phy_ddi_buf_trans *ddi_translations; 1279 int n_entries, ln; 1280 u32 val; 1281 1282 if (enc_to_dig_port(encoder)->tc_mode == TC_PORT_TBT_ALT) 1283 return; 1284 1285 ddi_translations = icl_get_mg_buf_trans(encoder, crtc_state, &n_entries); 1286 1287 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 1288 return; 1289 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 1290 level = n_entries - 1; 1291 1292 /* Set MG_TX_LINK_PARAMS cri_use_fs32 to 0. */ 1293 for (ln = 0; ln < 2; ln++) { 1294 val = intel_de_read(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port)); 1295 val &= ~CRI_USE_FS32; 1296 intel_de_write(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port), val); 1297 1298 val = intel_de_read(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port)); 1299 val &= ~CRI_USE_FS32; 1300 intel_de_write(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port), val); 1301 } 1302 1303 /* Program MG_TX_SWINGCTRL with values from vswing table */ 1304 for (ln = 0; ln < 2; ln++) { 1305 val = intel_de_read(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port)); 1306 val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK; 1307 val |= CRI_TXDEEMPH_OVERRIDE_17_12( 1308 ddi_translations[level].cri_txdeemph_override_17_12); 1309 intel_de_write(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port), val); 1310 1311 val = intel_de_read(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port)); 1312 val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK; 1313 val |= CRI_TXDEEMPH_OVERRIDE_17_12( 1314 ddi_translations[level].cri_txdeemph_override_17_12); 1315 intel_de_write(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port), val); 1316 } 1317 1318 /* Program MG_TX_DRVCTRL with values from vswing table */ 1319 for (ln = 0; ln < 2; ln++) { 1320 val = intel_de_read(dev_priv, MG_TX1_DRVCTRL(ln, tc_port)); 1321 val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK | 1322 CRI_TXDEEMPH_OVERRIDE_5_0_MASK); 1323 val |= CRI_TXDEEMPH_OVERRIDE_5_0( 1324 ddi_translations[level].cri_txdeemph_override_5_0) | 1325 CRI_TXDEEMPH_OVERRIDE_11_6( 1326 ddi_translations[level].cri_txdeemph_override_11_6) | 1327 CRI_TXDEEMPH_OVERRIDE_EN; 1328 intel_de_write(dev_priv, MG_TX1_DRVCTRL(ln, tc_port), val); 1329 1330 val = intel_de_read(dev_priv, MG_TX2_DRVCTRL(ln, tc_port)); 1331 val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK | 1332 CRI_TXDEEMPH_OVERRIDE_5_0_MASK); 1333 val |= CRI_TXDEEMPH_OVERRIDE_5_0( 1334 ddi_translations[level].cri_txdeemph_override_5_0) | 1335 CRI_TXDEEMPH_OVERRIDE_11_6( 1336 ddi_translations[level].cri_txdeemph_override_11_6) | 1337 CRI_TXDEEMPH_OVERRIDE_EN; 1338 intel_de_write(dev_priv, MG_TX2_DRVCTRL(ln, tc_port), val); 1339 1340 /* FIXME: Program CRI_LOADGEN_SEL after the spec is updated */ 1341 } 1342 1343 /* 1344 * Program MG_CLKHUB<LN, port being used> with value from frequency table 1345 * In case of Legacy mode on MG PHY, both TX1 and TX2 enabled so use the 1346 * values from table for which TX1 and TX2 enabled. 
1347 */ 1348 for (ln = 0; ln < 2; ln++) { 1349 val = intel_de_read(dev_priv, MG_CLKHUB(ln, tc_port)); 1350 if (crtc_state->port_clock < 300000) 1351 val |= CFG_LOW_RATE_LKREN_EN; 1352 else 1353 val &= ~CFG_LOW_RATE_LKREN_EN; 1354 intel_de_write(dev_priv, MG_CLKHUB(ln, tc_port), val); 1355 } 1356 1357 /* Program the MG_TX_DCC<LN, port being used> based on the link frequency */ 1358 for (ln = 0; ln < 2; ln++) { 1359 val = intel_de_read(dev_priv, MG_TX1_DCC(ln, tc_port)); 1360 val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK; 1361 if (crtc_state->port_clock <= 500000) { 1362 val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN; 1363 } else { 1364 val |= CFG_AMI_CK_DIV_OVERRIDE_EN | 1365 CFG_AMI_CK_DIV_OVERRIDE_VAL(1); 1366 } 1367 intel_de_write(dev_priv, MG_TX1_DCC(ln, tc_port), val); 1368 1369 val = intel_de_read(dev_priv, MG_TX2_DCC(ln, tc_port)); 1370 val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK; 1371 if (crtc_state->port_clock <= 500000) { 1372 val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN; 1373 } else { 1374 val |= CFG_AMI_CK_DIV_OVERRIDE_EN | 1375 CFG_AMI_CK_DIV_OVERRIDE_VAL(1); 1376 } 1377 intel_de_write(dev_priv, MG_TX2_DCC(ln, tc_port), val); 1378 } 1379 1380 /* Program MG_TX_PISO_READLOAD with values from vswing table */ 1381 for (ln = 0; ln < 2; ln++) { 1382 val = intel_de_read(dev_priv, 1383 MG_TX1_PISO_READLOAD(ln, tc_port)); 1384 val |= CRI_CALCINIT; 1385 intel_de_write(dev_priv, MG_TX1_PISO_READLOAD(ln, tc_port), 1386 val); 1387 1388 val = intel_de_read(dev_priv, 1389 MG_TX2_PISO_READLOAD(ln, tc_port)); 1390 val |= CRI_CALCINIT; 1391 intel_de_write(dev_priv, MG_TX2_PISO_READLOAD(ln, tc_port), 1392 val); 1393 } 1394 } 1395 1396 static void icl_ddi_vswing_sequence(struct intel_encoder *encoder, 1397 const struct intel_crtc_state *crtc_state, 1398 int level) 1399 { 1400 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1401 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 1402 1403 if (intel_phy_is_combo(dev_priv, phy)) 1404 icl_combo_phy_ddi_vswing_sequence(encoder, crtc_state, level); 1405 else 1406 icl_mg_phy_ddi_vswing_sequence(encoder, crtc_state, level); 1407 } 1408 1409 static void 1410 tgl_dkl_phy_ddi_vswing_sequence(struct intel_encoder *encoder, 1411 const struct intel_crtc_state *crtc_state, 1412 int level) 1413 { 1414 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1415 enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port); 1416 const struct tgl_dkl_phy_ddi_buf_trans *ddi_translations; 1417 u32 val, dpcnt_mask, dpcnt_val; 1418 int n_entries, ln; 1419 1420 if (enc_to_dig_port(encoder)->tc_mode == TC_PORT_TBT_ALT) 1421 return; 1422 1423 ddi_translations = tgl_get_dkl_buf_trans(encoder, crtc_state, &n_entries); 1424 1425 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 1426 return; 1427 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 1428 level = n_entries - 1; 1429 1430 dpcnt_mask = (DKL_TX_PRESHOOT_COEFF_MASK | 1431 DKL_TX_DE_EMPAHSIS_COEFF_MASK | 1432 DKL_TX_VSWING_CONTROL_MASK); 1433 dpcnt_val = DKL_TX_VSWING_CONTROL(ddi_translations[level].dkl_vswing_control); 1434 dpcnt_val |= DKL_TX_DE_EMPHASIS_COEFF(ddi_translations[level].dkl_de_emphasis_control); 1435 dpcnt_val |= DKL_TX_PRESHOOT_COEFF(ddi_translations[level].dkl_preshoot_control); 1436 1437 for (ln = 0; ln < 2; ln++) { 1438 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 1439 HIP_INDEX_VAL(tc_port, ln)); 1440 1441 intel_de_write(dev_priv, DKL_TX_PMD_LANE_SUS(tc_port), 0); 1442 1443 /* All the registers are RMW */ 1444 val = intel_de_read(dev_priv, DKL_TX_DPCNTL0(tc_port)); 1445 val 
&= ~dpcnt_mask; 1446 val |= dpcnt_val; 1447 intel_de_write(dev_priv, DKL_TX_DPCNTL0(tc_port), val); 1448 1449 val = intel_de_read(dev_priv, DKL_TX_DPCNTL1(tc_port)); 1450 val &= ~dpcnt_mask; 1451 val |= dpcnt_val; 1452 intel_de_write(dev_priv, DKL_TX_DPCNTL1(tc_port), val); 1453 1454 val = intel_de_read(dev_priv, DKL_TX_DPCNTL2(tc_port)); 1455 val &= ~DKL_TX_DP20BITMODE; 1456 intel_de_write(dev_priv, DKL_TX_DPCNTL2(tc_port), val); 1457 } 1458 } 1459 1460 static void tgl_ddi_vswing_sequence(struct intel_encoder *encoder, 1461 const struct intel_crtc_state *crtc_state, 1462 int level) 1463 { 1464 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1465 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 1466 1467 if (intel_phy_is_combo(dev_priv, phy)) 1468 icl_combo_phy_ddi_vswing_sequence(encoder, crtc_state, level); 1469 else 1470 tgl_dkl_phy_ddi_vswing_sequence(encoder, crtc_state, level); 1471 } 1472 1473 static int translate_signal_level(struct intel_dp *intel_dp, 1474 u8 signal_levels) 1475 { 1476 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 1477 int i; 1478 1479 for (i = 0; i < ARRAY_SIZE(index_to_dp_signal_levels); i++) { 1480 if (index_to_dp_signal_levels[i] == signal_levels) 1481 return i; 1482 } 1483 1484 drm_WARN(&i915->drm, 1, 1485 "Unsupported voltage swing/pre-emphasis level: 0x%x\n", 1486 signal_levels); 1487 1488 return 0; 1489 } 1490 1491 static int intel_ddi_dp_level(struct intel_dp *intel_dp) 1492 { 1493 u8 train_set = intel_dp->train_set[0]; 1494 u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK | 1495 DP_TRAIN_PRE_EMPHASIS_MASK); 1496 1497 return translate_signal_level(intel_dp, signal_levels); 1498 } 1499 1500 static void 1501 tgl_set_signal_levels(struct intel_dp *intel_dp, 1502 const struct intel_crtc_state *crtc_state) 1503 { 1504 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 1505 int level = intel_ddi_dp_level(intel_dp); 1506 1507 tgl_ddi_vswing_sequence(encoder, crtc_state, level); 1508 } 1509 1510 static void 1511 icl_set_signal_levels(struct intel_dp *intel_dp, 1512 const struct intel_crtc_state *crtc_state) 1513 { 1514 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 1515 int level = intel_ddi_dp_level(intel_dp); 1516 1517 icl_ddi_vswing_sequence(encoder, crtc_state, level); 1518 } 1519 1520 static void 1521 cnl_set_signal_levels(struct intel_dp *intel_dp, 1522 const struct intel_crtc_state *crtc_state) 1523 { 1524 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 1525 int level = intel_ddi_dp_level(intel_dp); 1526 1527 cnl_ddi_vswing_sequence(encoder, crtc_state, level); 1528 } 1529 1530 static void 1531 bxt_set_signal_levels(struct intel_dp *intel_dp, 1532 const struct intel_crtc_state *crtc_state) 1533 { 1534 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 1535 int level = intel_ddi_dp_level(intel_dp); 1536 1537 bxt_ddi_vswing_sequence(encoder, crtc_state, level); 1538 } 1539 1540 static void 1541 hsw_set_signal_levels(struct intel_dp *intel_dp, 1542 const struct intel_crtc_state *crtc_state) 1543 { 1544 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 1545 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1546 int level = intel_ddi_dp_level(intel_dp); 1547 enum port port = encoder->port; 1548 u32 signal_levels; 1549 1550 signal_levels = DDI_BUF_TRANS_SELECT(level); 1551 1552 drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n", 1553 signal_levels); 1554 1555 intel_dp->DP &= ~DDI_BUF_EMP_MASK; 1556 intel_dp->DP |= 
signal_levels; 1557 1558 if (IS_GEN9_BC(dev_priv)) 1559 skl_ddi_set_iboost(encoder, crtc_state, level); 1560 1561 intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP); 1562 intel_de_posting_read(dev_priv, DDI_BUF_CTL(port)); 1563 } 1564 1565 static void _cnl_ddi_enable_clock(struct drm_i915_private *i915, i915_reg_t reg, 1566 u32 clk_sel_mask, u32 clk_sel, u32 clk_off) 1567 { 1568 mutex_lock(&i915->dpll.lock); 1569 1570 intel_de_rmw(i915, reg, clk_sel_mask, clk_sel); 1571 1572 /* 1573 * "This step and the step before must be 1574 * done with separate register writes." 1575 */ 1576 intel_de_rmw(i915, reg, clk_off, 0); 1577 1578 mutex_unlock(&i915->dpll.lock); 1579 } 1580 1581 static void _cnl_ddi_disable_clock(struct drm_i915_private *i915, i915_reg_t reg, 1582 u32 clk_off) 1583 { 1584 mutex_lock(&i915->dpll.lock); 1585 1586 intel_de_rmw(i915, reg, 0, clk_off); 1587 1588 mutex_unlock(&i915->dpll.lock); 1589 } 1590 1591 static bool _cnl_ddi_is_clock_enabled(struct drm_i915_private *i915, i915_reg_t reg, 1592 u32 clk_off) 1593 { 1594 return !(intel_de_read(i915, reg) & clk_off); 1595 } 1596 1597 static struct intel_shared_dpll * 1598 _cnl_ddi_get_pll(struct drm_i915_private *i915, i915_reg_t reg, 1599 u32 clk_sel_mask, u32 clk_sel_shift) 1600 { 1601 enum intel_dpll_id id; 1602 1603 id = (intel_de_read(i915, reg) & clk_sel_mask) >> clk_sel_shift; 1604 1605 return intel_get_shared_dpll_by_id(i915, id); 1606 } 1607 1608 static void adls_ddi_enable_clock(struct intel_encoder *encoder, 1609 const struct intel_crtc_state *crtc_state) 1610 { 1611 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1612 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1613 enum phy phy = intel_port_to_phy(i915, encoder->port); 1614 1615 if (drm_WARN_ON(&i915->drm, !pll)) 1616 return; 1617 1618 _cnl_ddi_enable_clock(i915, ADLS_DPCLKA_CFGCR(phy), 1619 ADLS_DPCLKA_CFGCR_DDI_CLK_SEL_MASK(phy), 1620 pll->info->id << ADLS_DPCLKA_CFGCR_DDI_SHIFT(phy), 1621 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1622 } 1623 1624 static void adls_ddi_disable_clock(struct intel_encoder *encoder) 1625 { 1626 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1627 enum phy phy = intel_port_to_phy(i915, encoder->port); 1628 1629 _cnl_ddi_disable_clock(i915, ADLS_DPCLKA_CFGCR(phy), 1630 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1631 } 1632 1633 static bool adls_ddi_is_clock_enabled(struct intel_encoder *encoder) 1634 { 1635 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1636 enum phy phy = intel_port_to_phy(i915, encoder->port); 1637 1638 return _cnl_ddi_is_clock_enabled(i915, ADLS_DPCLKA_CFGCR(phy), 1639 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1640 } 1641 1642 static struct intel_shared_dpll *adls_ddi_get_pll(struct intel_encoder *encoder) 1643 { 1644 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1645 enum phy phy = intel_port_to_phy(i915, encoder->port); 1646 1647 return _cnl_ddi_get_pll(i915, ADLS_DPCLKA_CFGCR(phy), 1648 ADLS_DPCLKA_CFGCR_DDI_CLK_SEL_MASK(phy), 1649 ADLS_DPCLKA_CFGCR_DDI_SHIFT(phy)); 1650 } 1651 1652 static void rkl_ddi_enable_clock(struct intel_encoder *encoder, 1653 const struct intel_crtc_state *crtc_state) 1654 { 1655 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1656 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1657 enum phy phy = intel_port_to_phy(i915, encoder->port); 1658 1659 if (drm_WARN_ON(&i915->drm, !pll)) 1660 return; 1661 1662 _cnl_ddi_enable_clock(i915, ICL_DPCLKA_CFGCR0, 1663 RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy), 1664 
RKL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy), 1665 RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1666 } 1667 1668 static void rkl_ddi_disable_clock(struct intel_encoder *encoder) 1669 { 1670 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1671 enum phy phy = intel_port_to_phy(i915, encoder->port); 1672 1673 _cnl_ddi_disable_clock(i915, ICL_DPCLKA_CFGCR0, 1674 RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1675 } 1676 1677 static bool rkl_ddi_is_clock_enabled(struct intel_encoder *encoder) 1678 { 1679 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1680 enum phy phy = intel_port_to_phy(i915, encoder->port); 1681 1682 return _cnl_ddi_is_clock_enabled(i915, ICL_DPCLKA_CFGCR0, 1683 RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1684 } 1685 1686 static struct intel_shared_dpll *rkl_ddi_get_pll(struct intel_encoder *encoder) 1687 { 1688 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1689 enum phy phy = intel_port_to_phy(i915, encoder->port); 1690 1691 return _cnl_ddi_get_pll(i915, ICL_DPCLKA_CFGCR0, 1692 RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy), 1693 RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(phy)); 1694 } 1695 1696 static void dg1_ddi_enable_clock(struct intel_encoder *encoder, 1697 const struct intel_crtc_state *crtc_state) 1698 { 1699 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1700 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1701 enum phy phy = intel_port_to_phy(i915, encoder->port); 1702 1703 if (drm_WARN_ON(&i915->drm, !pll)) 1704 return; 1705 1706 /* 1707 * If we fail this, something went very wrong: first 2 PLLs should be 1708 * used by first 2 phys and last 2 PLLs by last phys 1709 */ 1710 if (drm_WARN_ON(&i915->drm, 1711 (pll->info->id < DPLL_ID_DG1_DPLL2 && phy >= PHY_C) || 1712 (pll->info->id >= DPLL_ID_DG1_DPLL2 && phy < PHY_C))) 1713 return; 1714 1715 _cnl_ddi_enable_clock(i915, DG1_DPCLKA_CFGCR0(phy), 1716 DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy), 1717 DG1_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy), 1718 DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1719 } 1720 1721 static void dg1_ddi_disable_clock(struct intel_encoder *encoder) 1722 { 1723 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1724 enum phy phy = intel_port_to_phy(i915, encoder->port); 1725 1726 _cnl_ddi_disable_clock(i915, DG1_DPCLKA_CFGCR0(phy), 1727 DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1728 } 1729 1730 static bool dg1_ddi_is_clock_enabled(struct intel_encoder *encoder) 1731 { 1732 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1733 enum phy phy = intel_port_to_phy(i915, encoder->port); 1734 1735 return _cnl_ddi_is_clock_enabled(i915, DG1_DPCLKA_CFGCR0(phy), 1736 DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1737 } 1738 1739 static struct intel_shared_dpll *dg1_ddi_get_pll(struct intel_encoder *encoder) 1740 { 1741 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1742 enum phy phy = intel_port_to_phy(i915, encoder->port); 1743 1744 return _cnl_ddi_get_pll(i915, DG1_DPCLKA_CFGCR0(phy), 1745 DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy), 1746 DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(phy)); 1747 } 1748 1749 static void icl_ddi_combo_enable_clock(struct intel_encoder *encoder, 1750 const struct intel_crtc_state *crtc_state) 1751 { 1752 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1753 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1754 enum phy phy = intel_port_to_phy(i915, encoder->port); 1755 1756 if (drm_WARN_ON(&i915->drm, !pll)) 1757 return; 1758 1759 _cnl_ddi_enable_clock(i915, ICL_DPCLKA_CFGCR0, 1760 
ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy), 1761 ICL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy), 1762 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1763 } 1764 1765 static void icl_ddi_combo_disable_clock(struct intel_encoder *encoder) 1766 { 1767 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1768 enum phy phy = intel_port_to_phy(i915, encoder->port); 1769 1770 _cnl_ddi_disable_clock(i915, ICL_DPCLKA_CFGCR0, 1771 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1772 } 1773 1774 static bool icl_ddi_combo_is_clock_enabled(struct intel_encoder *encoder) 1775 { 1776 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1777 enum phy phy = intel_port_to_phy(i915, encoder->port); 1778 1779 return _cnl_ddi_is_clock_enabled(i915, ICL_DPCLKA_CFGCR0, 1780 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1781 } 1782 1783 struct intel_shared_dpll *icl_ddi_combo_get_pll(struct intel_encoder *encoder) 1784 { 1785 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1786 enum phy phy = intel_port_to_phy(i915, encoder->port); 1787 1788 return _cnl_ddi_get_pll(i915, ICL_DPCLKA_CFGCR0, 1789 ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy), 1790 ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(phy)); 1791 } 1792 1793 static void jsl_ddi_tc_enable_clock(struct intel_encoder *encoder, 1794 const struct intel_crtc_state *crtc_state) 1795 { 1796 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1797 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1798 enum port port = encoder->port; 1799 1800 if (drm_WARN_ON(&i915->drm, !pll)) 1801 return; 1802 1803 /* 1804 * "For DDIC and DDID, program DDI_CLK_SEL to map the MG clock to the port. 1805 * MG does not exist, but the programming is required to ungate DDIC and DDID." 1806 */ 1807 intel_de_write(i915, DDI_CLK_SEL(port), DDI_CLK_SEL_MG); 1808 1809 icl_ddi_combo_enable_clock(encoder, crtc_state); 1810 } 1811 1812 static void jsl_ddi_tc_disable_clock(struct intel_encoder *encoder) 1813 { 1814 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1815 enum port port = encoder->port; 1816 1817 icl_ddi_combo_disable_clock(encoder); 1818 1819 intel_de_write(i915, DDI_CLK_SEL(port), DDI_CLK_SEL_NONE); 1820 } 1821 1822 static bool jsl_ddi_tc_is_clock_enabled(struct intel_encoder *encoder) 1823 { 1824 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1825 enum port port = encoder->port; 1826 u32 tmp; 1827 1828 tmp = intel_de_read(i915, DDI_CLK_SEL(port)); 1829 1830 if ((tmp & DDI_CLK_SEL_MASK) == DDI_CLK_SEL_NONE) 1831 return false; 1832 1833 return icl_ddi_combo_is_clock_enabled(encoder); 1834 } 1835 1836 static void icl_ddi_tc_enable_clock(struct intel_encoder *encoder, 1837 const struct intel_crtc_state *crtc_state) 1838 { 1839 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1840 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1841 enum tc_port tc_port = intel_port_to_tc(i915, encoder->port); 1842 enum port port = encoder->port; 1843 1844 if (drm_WARN_ON(&i915->drm, !pll)) 1845 return; 1846 1847 intel_de_write(i915, DDI_CLK_SEL(port), 1848 icl_pll_to_ddi_clk_sel(encoder, crtc_state)); 1849 1850 mutex_lock(&i915->dpll.lock); 1851 1852 intel_de_rmw(i915, ICL_DPCLKA_CFGCR0, 1853 ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port), 0); 1854 1855 mutex_unlock(&i915->dpll.lock); 1856 } 1857 1858 static void icl_ddi_tc_disable_clock(struct intel_encoder *encoder) 1859 { 1860 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1861 enum tc_port tc_port = intel_port_to_tc(i915, encoder->port); 1862 enum port port = encoder->port; 1863 1864 
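/* Gate the Type-C port clock (TC_CLK_OFF) before deselecting the DDI clock. */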
mutex_lock(&i915->dpll.lock); 1865 1866 intel_de_rmw(i915, ICL_DPCLKA_CFGCR0, 1867 0, ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port)); 1868 1869 mutex_unlock(&i915->dpll.lock); 1870 1871 intel_de_write(i915, DDI_CLK_SEL(port), DDI_CLK_SEL_NONE); 1872 } 1873 1874 static bool icl_ddi_tc_is_clock_enabled(struct intel_encoder *encoder) 1875 { 1876 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1877 enum tc_port tc_port = intel_port_to_tc(i915, encoder->port); 1878 enum port port = encoder->port; 1879 u32 tmp; 1880 1881 tmp = intel_de_read(i915, DDI_CLK_SEL(port)); 1882 1883 if ((tmp & DDI_CLK_SEL_MASK) == DDI_CLK_SEL_NONE) 1884 return false; 1885 1886 tmp = intel_de_read(i915, ICL_DPCLKA_CFGCR0); 1887 1888 return !(tmp & ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port)); 1889 } 1890 1891 static struct intel_shared_dpll *icl_ddi_tc_get_pll(struct intel_encoder *encoder) 1892 { 1893 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1894 enum tc_port tc_port = intel_port_to_tc(i915, encoder->port); 1895 enum port port = encoder->port; 1896 enum intel_dpll_id id; 1897 u32 tmp; 1898 1899 tmp = intel_de_read(i915, DDI_CLK_SEL(port)); 1900 1901 switch (tmp & DDI_CLK_SEL_MASK) { 1902 case DDI_CLK_SEL_TBT_162: 1903 case DDI_CLK_SEL_TBT_270: 1904 case DDI_CLK_SEL_TBT_540: 1905 case DDI_CLK_SEL_TBT_810: 1906 id = DPLL_ID_ICL_TBTPLL; 1907 break; 1908 case DDI_CLK_SEL_MG: 1909 id = icl_tc_port_to_pll_id(tc_port); 1910 break; 1911 default: 1912 MISSING_CASE(tmp); 1913 fallthrough; 1914 case DDI_CLK_SEL_NONE: 1915 return NULL; 1916 } 1917 1918 return intel_get_shared_dpll_by_id(i915, id); 1919 } 1920 1921 static void cnl_ddi_enable_clock(struct intel_encoder *encoder, 1922 const struct intel_crtc_state *crtc_state) 1923 { 1924 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1925 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1926 enum port port = encoder->port; 1927 1928 if (drm_WARN_ON(&i915->drm, !pll)) 1929 return; 1930 1931 _cnl_ddi_enable_clock(i915, DPCLKA_CFGCR0, 1932 DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(port), 1933 DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, port), 1934 DPCLKA_CFGCR0_DDI_CLK_OFF(port)); 1935 } 1936 1937 static void cnl_ddi_disable_clock(struct intel_encoder *encoder) 1938 { 1939 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1940 enum port port = encoder->port; 1941 1942 _cnl_ddi_disable_clock(i915, DPCLKA_CFGCR0, 1943 DPCLKA_CFGCR0_DDI_CLK_OFF(port)); 1944 } 1945 1946 static bool cnl_ddi_is_clock_enabled(struct intel_encoder *encoder) 1947 { 1948 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1949 enum port port = encoder->port; 1950 1951 return _cnl_ddi_is_clock_enabled(i915, DPCLKA_CFGCR0, 1952 DPCLKA_CFGCR0_DDI_CLK_OFF(port)); 1953 } 1954 1955 static struct intel_shared_dpll *cnl_ddi_get_pll(struct intel_encoder *encoder) 1956 { 1957 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1958 enum port port = encoder->port; 1959 1960 return _cnl_ddi_get_pll(i915, DPCLKA_CFGCR0, 1961 DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(port), 1962 DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(port)); 1963 } 1964 1965 static struct intel_shared_dpll *bxt_ddi_get_pll(struct intel_encoder *encoder) 1966 { 1967 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1968 enum intel_dpll_id id; 1969 1970 switch (encoder->port) { 1971 case PORT_A: 1972 id = DPLL_ID_SKL_DPLL0; 1973 break; 1974 case PORT_B: 1975 id = DPLL_ID_SKL_DPLL1; 1976 break; 1977 case PORT_C: 1978 id = DPLL_ID_SKL_DPLL2; 1979 break; 1980 default: 1981 MISSING_CASE(encoder->port); 1982 return 
NULL; 1983 } 1984 1985 return intel_get_shared_dpll_by_id(i915, id); 1986 } 1987 1988 static void skl_ddi_enable_clock(struct intel_encoder *encoder, 1989 const struct intel_crtc_state *crtc_state) 1990 { 1991 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1992 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1993 enum port port = encoder->port; 1994 1995 if (drm_WARN_ON(&i915->drm, !pll)) 1996 return; 1997 1998 mutex_lock(&i915->dpll.lock); 1999 2000 intel_de_rmw(i915, DPLL_CTRL2, 2001 DPLL_CTRL2_DDI_CLK_OFF(port) | 2002 DPLL_CTRL2_DDI_CLK_SEL_MASK(port), 2003 DPLL_CTRL2_DDI_CLK_SEL(pll->info->id, port) | 2004 DPLL_CTRL2_DDI_SEL_OVERRIDE(port)); 2005 2006 mutex_unlock(&i915->dpll.lock); 2007 } 2008 2009 static void skl_ddi_disable_clock(struct intel_encoder *encoder) 2010 { 2011 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2012 enum port port = encoder->port; 2013 2014 mutex_lock(&i915->dpll.lock); 2015 2016 intel_de_rmw(i915, DPLL_CTRL2, 2017 0, DPLL_CTRL2_DDI_CLK_OFF(port)); 2018 2019 mutex_unlock(&i915->dpll.lock); 2020 } 2021 2022 static bool skl_ddi_is_clock_enabled(struct intel_encoder *encoder) 2023 { 2024 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2025 enum port port = encoder->port; 2026 2027 /* 2028 * FIXME Not sure if the override affects both 2029 * the PLL selection and the CLK_OFF bit. 2030 */ 2031 return !(intel_de_read(i915, DPLL_CTRL2) & DPLL_CTRL2_DDI_CLK_OFF(port)); 2032 } 2033 2034 static struct intel_shared_dpll *skl_ddi_get_pll(struct intel_encoder *encoder) 2035 { 2036 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2037 enum port port = encoder->port; 2038 enum intel_dpll_id id; 2039 u32 tmp; 2040 2041 tmp = intel_de_read(i915, DPLL_CTRL2); 2042 2043 /* 2044 * FIXME Not sure if the override affects both 2045 * the PLL selection and the CLK_OFF bit. 
2046 */ 2047 if ((tmp & DPLL_CTRL2_DDI_SEL_OVERRIDE(port)) == 0) 2048 return NULL; 2049 2050 id = (tmp & DPLL_CTRL2_DDI_CLK_SEL_MASK(port)) >> 2051 DPLL_CTRL2_DDI_CLK_SEL_SHIFT(port); 2052 2053 return intel_get_shared_dpll_by_id(i915, id); 2054 } 2055 2056 void hsw_ddi_enable_clock(struct intel_encoder *encoder, 2057 const struct intel_crtc_state *crtc_state) 2058 { 2059 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2060 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 2061 enum port port = encoder->port; 2062 2063 if (drm_WARN_ON(&i915->drm, !pll)) 2064 return; 2065 2066 intel_de_write(i915, PORT_CLK_SEL(port), hsw_pll_to_ddi_pll_sel(pll)); 2067 } 2068 2069 void hsw_ddi_disable_clock(struct intel_encoder *encoder) 2070 { 2071 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2072 enum port port = encoder->port; 2073 2074 intel_de_write(i915, PORT_CLK_SEL(port), PORT_CLK_SEL_NONE); 2075 } 2076 2077 bool hsw_ddi_is_clock_enabled(struct intel_encoder *encoder) 2078 { 2079 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2080 enum port port = encoder->port; 2081 2082 return intel_de_read(i915, PORT_CLK_SEL(port)) != PORT_CLK_SEL_NONE; 2083 } 2084 2085 static struct intel_shared_dpll *hsw_ddi_get_pll(struct intel_encoder *encoder) 2086 { 2087 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2088 enum port port = encoder->port; 2089 enum intel_dpll_id id; 2090 u32 tmp; 2091 2092 tmp = intel_de_read(i915, PORT_CLK_SEL(port)); 2093 2094 switch (tmp & PORT_CLK_SEL_MASK) { 2095 case PORT_CLK_SEL_WRPLL1: 2096 id = DPLL_ID_WRPLL1; 2097 break; 2098 case PORT_CLK_SEL_WRPLL2: 2099 id = DPLL_ID_WRPLL2; 2100 break; 2101 case PORT_CLK_SEL_SPLL: 2102 id = DPLL_ID_SPLL; 2103 break; 2104 case PORT_CLK_SEL_LCPLL_810: 2105 id = DPLL_ID_LCPLL_810; 2106 break; 2107 case PORT_CLK_SEL_LCPLL_1350: 2108 id = DPLL_ID_LCPLL_1350; 2109 break; 2110 case PORT_CLK_SEL_LCPLL_2700: 2111 id = DPLL_ID_LCPLL_2700; 2112 break; 2113 default: 2114 MISSING_CASE(tmp); 2115 fallthrough; 2116 case PORT_CLK_SEL_NONE: 2117 return NULL; 2118 } 2119 2120 return intel_get_shared_dpll_by_id(i915, id); 2121 } 2122 2123 void intel_ddi_enable_clock(struct intel_encoder *encoder, 2124 const struct intel_crtc_state *crtc_state) 2125 { 2126 if (encoder->enable_clock) 2127 encoder->enable_clock(encoder, crtc_state); 2128 } 2129 2130 static void intel_ddi_disable_clock(struct intel_encoder *encoder) 2131 { 2132 if (encoder->disable_clock) 2133 encoder->disable_clock(encoder); 2134 } 2135 2136 void intel_ddi_sanitize_encoder_pll_mapping(struct intel_encoder *encoder) 2137 { 2138 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2139 u32 port_mask; 2140 bool ddi_clk_needed; 2141 2142 /* 2143 * In case of DP MST, we sanitize the primary encoder only, not the 2144 * virtual ones. 2145 */ 2146 if (encoder->type == INTEL_OUTPUT_DP_MST) 2147 return; 2148 2149 if (!encoder->base.crtc && intel_encoder_is_dp(encoder)) { 2150 u8 pipe_mask; 2151 bool is_mst; 2152 2153 intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst); 2154 /* 2155 * In the unlikely case that BIOS enables DP in MST mode, just 2156 * warn since our MST HW readout is incomplete. 
2157 */ 2158 if (drm_WARN_ON(&i915->drm, is_mst)) 2159 return; 2160 } 2161 2162 port_mask = BIT(encoder->port); 2163 ddi_clk_needed = encoder->base.crtc; 2164 2165 if (encoder->type == INTEL_OUTPUT_DSI) { 2166 struct intel_encoder *other_encoder; 2167 2168 port_mask = intel_dsi_encoder_ports(encoder); 2169 /* 2170 * Sanity check that we haven't incorrectly registered another 2171 * encoder using any of the ports of this DSI encoder. 2172 */ 2173 for_each_intel_encoder(&i915->drm, other_encoder) { 2174 if (other_encoder == encoder) 2175 continue; 2176 2177 if (drm_WARN_ON(&i915->drm, 2178 port_mask & BIT(other_encoder->port))) 2179 return; 2180 } 2181 /* 2182 * For DSI we keep the ddi clocks gated 2183 * except during enable/disable sequence. 2184 */ 2185 ddi_clk_needed = false; 2186 } 2187 2188 if (ddi_clk_needed || !encoder->disable_clock || 2189 !encoder->is_clock_enabled(encoder)) 2190 return; 2191 2192 drm_notice(&i915->drm, 2193 "[ENCODER:%d:%s] is disabled/in DSI mode with an ungated DDI clock, gate it\n", 2194 encoder->base.base.id, encoder->base.name); 2195 2196 encoder->disable_clock(encoder); 2197 } 2198 2199 static void 2200 icl_program_mg_dp_mode(struct intel_digital_port *dig_port, 2201 const struct intel_crtc_state *crtc_state) 2202 { 2203 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 2204 enum tc_port tc_port = intel_port_to_tc(dev_priv, dig_port->base.port); 2205 enum phy phy = intel_port_to_phy(dev_priv, dig_port->base.port); 2206 u32 ln0, ln1, pin_assignment; 2207 u8 width; 2208 2209 if (!intel_phy_is_tc(dev_priv, phy) || 2210 dig_port->tc_mode == TC_PORT_TBT_ALT) 2211 return; 2212 2213 if (DISPLAY_VER(dev_priv) >= 12) { 2214 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2215 HIP_INDEX_VAL(tc_port, 0x0)); 2216 ln0 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port)); 2217 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2218 HIP_INDEX_VAL(tc_port, 0x1)); 2219 ln1 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port)); 2220 } else { 2221 ln0 = intel_de_read(dev_priv, MG_DP_MODE(0, tc_port)); 2222 ln1 = intel_de_read(dev_priv, MG_DP_MODE(1, tc_port)); 2223 } 2224 2225 ln0 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE); 2226 ln1 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE); 2227 2228 /* DPPATC */ 2229 pin_assignment = intel_tc_port_get_pin_assignment_mask(dig_port); 2230 width = crtc_state->lane_count; 2231 2232 switch (pin_assignment) { 2233 case 0x0: 2234 drm_WARN_ON(&dev_priv->drm, 2235 dig_port->tc_mode != TC_PORT_LEGACY); 2236 if (width == 1) { 2237 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 2238 } else { 2239 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2240 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 2241 } 2242 break; 2243 case 0x1: 2244 if (width == 4) { 2245 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2246 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 2247 } 2248 break; 2249 case 0x2: 2250 if (width == 2) { 2251 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2252 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 2253 } 2254 break; 2255 case 0x3: 2256 case 0x5: 2257 if (width == 1) { 2258 ln0 |= MG_DP_MODE_CFG_DP_X1_MODE; 2259 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 2260 } else { 2261 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2262 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 2263 } 2264 break; 2265 case 0x4: 2266 case 0x6: 2267 if (width == 1) { 2268 ln0 |= MG_DP_MODE_CFG_DP_X1_MODE; 2269 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 2270 } else { 2271 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2272 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 2273 } 2274 break; 2275 default: 2276 MISSING_CASE(pin_assignment); 2277 } 2278 2279 if (DISPLAY_VER(dev_priv) >= 12) 
{ 2280 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2281 HIP_INDEX_VAL(tc_port, 0x0)); 2282 intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln0); 2283 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2284 HIP_INDEX_VAL(tc_port, 0x1)); 2285 intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln1); 2286 } else { 2287 intel_de_write(dev_priv, MG_DP_MODE(0, tc_port), ln0); 2288 intel_de_write(dev_priv, MG_DP_MODE(1, tc_port), ln1); 2289 } 2290 } 2291 2292 static enum transcoder 2293 tgl_dp_tp_transcoder(const struct intel_crtc_state *crtc_state) 2294 { 2295 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) 2296 return crtc_state->mst_master_transcoder; 2297 else 2298 return crtc_state->cpu_transcoder; 2299 } 2300 2301 i915_reg_t dp_tp_ctl_reg(struct intel_encoder *encoder, 2302 const struct intel_crtc_state *crtc_state) 2303 { 2304 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2305 2306 if (DISPLAY_VER(dev_priv) >= 12) 2307 return TGL_DP_TP_CTL(tgl_dp_tp_transcoder(crtc_state)); 2308 else 2309 return DP_TP_CTL(encoder->port); 2310 } 2311 2312 i915_reg_t dp_tp_status_reg(struct intel_encoder *encoder, 2313 const struct intel_crtc_state *crtc_state) 2314 { 2315 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2316 2317 if (DISPLAY_VER(dev_priv) >= 12) 2318 return TGL_DP_TP_STATUS(tgl_dp_tp_transcoder(crtc_state)); 2319 else 2320 return DP_TP_STATUS(encoder->port); 2321 } 2322 2323 static void intel_dp_sink_set_msa_timing_par_ignore_state(struct intel_dp *intel_dp, 2324 const struct intel_crtc_state *crtc_state, 2325 bool enable) 2326 { 2327 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 2328 2329 if (!crtc_state->vrr.enable) 2330 return; 2331 2332 if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_DOWNSPREAD_CTRL, 2333 enable ? DP_MSA_TIMING_PAR_IGNORE_EN : 0) <= 0) 2334 drm_dbg_kms(&i915->drm, 2335 "Failed to set MSA_TIMING_PAR_IGNORE %s in the sink\n", 2336 enable ? 
"enable" : "disable"); 2337 } 2338 2339 static void intel_dp_sink_set_fec_ready(struct intel_dp *intel_dp, 2340 const struct intel_crtc_state *crtc_state) 2341 { 2342 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 2343 2344 if (!crtc_state->fec_enable) 2345 return; 2346 2347 if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_FEC_CONFIGURATION, DP_FEC_READY) <= 0) 2348 drm_dbg_kms(&i915->drm, 2349 "Failed to set FEC_READY in the sink\n"); 2350 } 2351 2352 static void intel_ddi_enable_fec(struct intel_encoder *encoder, 2353 const struct intel_crtc_state *crtc_state) 2354 { 2355 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2356 struct intel_dp *intel_dp; 2357 u32 val; 2358 2359 if (!crtc_state->fec_enable) 2360 return; 2361 2362 intel_dp = enc_to_intel_dp(encoder); 2363 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 2364 val |= DP_TP_CTL_FEC_ENABLE; 2365 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 2366 } 2367 2368 static void intel_ddi_disable_fec_state(struct intel_encoder *encoder, 2369 const struct intel_crtc_state *crtc_state) 2370 { 2371 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2372 struct intel_dp *intel_dp; 2373 u32 val; 2374 2375 if (!crtc_state->fec_enable) 2376 return; 2377 2378 intel_dp = enc_to_intel_dp(encoder); 2379 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 2380 val &= ~DP_TP_CTL_FEC_ENABLE; 2381 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 2382 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 2383 } 2384 2385 static void intel_ddi_power_up_lanes(struct intel_encoder *encoder, 2386 const struct intel_crtc_state *crtc_state) 2387 { 2388 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2389 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2390 enum phy phy = intel_port_to_phy(i915, encoder->port); 2391 2392 if (intel_phy_is_combo(i915, phy)) { 2393 bool lane_reversal = 2394 dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL; 2395 2396 intel_combo_phy_power_up_lanes(i915, phy, false, 2397 crtc_state->lane_count, 2398 lane_reversal); 2399 } 2400 } 2401 2402 static void intel_ddi_mso_get_config(struct intel_encoder *encoder, 2403 struct intel_crtc_state *pipe_config) 2404 { 2405 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc); 2406 struct drm_i915_private *i915 = to_i915(crtc->base.dev); 2407 enum pipe pipe = crtc->pipe; 2408 u32 dss1; 2409 2410 if (!HAS_MSO(i915)) 2411 return; 2412 2413 dss1 = intel_de_read(i915, ICL_PIPE_DSS_CTL1(pipe)); 2414 2415 pipe_config->splitter.enable = dss1 & SPLITTER_ENABLE; 2416 if (!pipe_config->splitter.enable) 2417 return; 2418 2419 /* Splitter enable is supported for pipe A only. 
*/ 2420 if (drm_WARN_ON(&i915->drm, pipe != PIPE_A)) { 2421 pipe_config->splitter.enable = false; 2422 return; 2423 } 2424 2425 switch (dss1 & SPLITTER_CONFIGURATION_MASK) { 2426 default: 2427 drm_WARN(&i915->drm, true, 2428 "Invalid splitter configuration, dss1=0x%08x\n", dss1); 2429 fallthrough; 2430 case SPLITTER_CONFIGURATION_2_SEGMENT: 2431 pipe_config->splitter.link_count = 2; 2432 break; 2433 case SPLITTER_CONFIGURATION_4_SEGMENT: 2434 pipe_config->splitter.link_count = 4; 2435 break; 2436 } 2437 2438 pipe_config->splitter.pixel_overlap = REG_FIELD_GET(OVERLAP_PIXELS_MASK, dss1); 2439 } 2440 2441 static void intel_ddi_mso_configure(const struct intel_crtc_state *crtc_state) 2442 { 2443 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2444 struct drm_i915_private *i915 = to_i915(crtc->base.dev); 2445 enum pipe pipe = crtc->pipe; 2446 u32 dss1 = 0; 2447 2448 if (!HAS_MSO(i915)) 2449 return; 2450 2451 if (crtc_state->splitter.enable) { 2452 /* Splitter enable is supported for pipe A only. */ 2453 if (drm_WARN_ON(&i915->drm, pipe != PIPE_A)) 2454 return; 2455 2456 dss1 |= SPLITTER_ENABLE; 2457 dss1 |= OVERLAP_PIXELS(crtc_state->splitter.pixel_overlap); 2458 if (crtc_state->splitter.link_count == 2) 2459 dss1 |= SPLITTER_CONFIGURATION_2_SEGMENT; 2460 else 2461 dss1 |= SPLITTER_CONFIGURATION_4_SEGMENT; 2462 } 2463 2464 intel_de_rmw(i915, ICL_PIPE_DSS_CTL1(pipe), 2465 SPLITTER_ENABLE | SPLITTER_CONFIGURATION_MASK | 2466 OVERLAP_PIXELS_MASK, dss1); 2467 } 2468 2469 static void tgl_ddi_pre_enable_dp(struct intel_atomic_state *state, 2470 struct intel_encoder *encoder, 2471 const struct intel_crtc_state *crtc_state, 2472 const struct drm_connector_state *conn_state) 2473 { 2474 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 2475 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2476 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2477 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2478 bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST); 2479 int level = intel_ddi_dp_level(intel_dp); 2480 2481 intel_dp_set_link_params(intel_dp, 2482 crtc_state->port_clock, 2483 crtc_state->lane_count); 2484 2485 /* 2486 * 1. Enable Power Wells 2487 * 2488 * This was handled at the beginning of intel_atomic_commit_tail(), 2489 * before we called down into this function. 2490 */ 2491 2492 /* 2. Enable Panel Power if PPS is required */ 2493 intel_pps_on(intel_dp); 2494 2495 /* 2496 * 3. For non-TBT Type-C ports, set FIA lane count 2497 * (DFLEXDPSP.DPX4TXLATC) 2498 * 2499 * This was done before tgl_ddi_pre_enable_dp by 2500 * hsw_crtc_enable()->intel_encoders_pre_pll_enable(). 2501 */ 2502 2503 /* 2504 * 4. Enable the port PLL. 2505 * 2506 * The PLL enabling itself was already done before this function by 2507 * hsw_crtc_enable()->intel_enable_shared_dpll(). We need only 2508 * configure the PLL to port mapping here. 2509 */ 2510 intel_ddi_enable_clock(encoder, crtc_state); 2511 2512 /* 5. If IO power is controlled through PWR_WELL_CTL, Enable IO Power */ 2513 if (!intel_phy_is_tc(dev_priv, phy) || 2514 dig_port->tc_mode != TC_PORT_TBT_ALT) { 2515 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 2516 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 2517 dig_port->ddi_io_power_domain); 2518 } 2519 2520 /* 6. Program DP_MODE */ 2521 icl_program_mg_dp_mode(dig_port, crtc_state); 2522 2523 /* 2524 * 7. The rest of the below are substeps under the bspec's "Enable and 2525 * Train Display Port" step. 
Note that steps that are specific to 2526 * MST will be handled by intel_mst_pre_enable_dp() before/after it 2527 * calls into this function. Also intel_mst_pre_enable_dp() only calls 2528 * us when active_mst_links==0, so any steps designated for "single 2529 * stream or multi-stream master transcoder" can just be performed 2530 * unconditionally here. 2531 */ 2532 2533 /* 2534 * 7.a Configure Transcoder Clock Select to direct the Port clock to the 2535 * Transcoder. 2536 */ 2537 intel_ddi_enable_pipe_clock(encoder, crtc_state); 2538 2539 /* 2540 * 7.b Configure TRANS_DDI_FUNC_CTL DDI Select, DDI Mode Select & MST 2541 * Transport Select 2542 */ 2543 intel_ddi_config_transcoder_func(encoder, crtc_state); 2544 2545 /* 2546 * 7.c Configure & enable DP_TP_CTL with link training pattern 1 2547 * selected 2548 * 2549 * This will be handled by the intel_dp_start_link_train() farther 2550 * down this function. 2551 */ 2552 2553 /* 7.e Configure voltage swing and related IO settings */ 2554 tgl_ddi_vswing_sequence(encoder, crtc_state, level); 2555 2556 /* 2557 * 7.f Combo PHY: Configure PORT_CL_DW10 Static Power Down to power up 2558 * the used lanes of the DDI. 2559 */ 2560 intel_ddi_power_up_lanes(encoder, crtc_state); 2561 2562 /* 2563 * 7.g Program CoG/MSO configuration bits in DSS_CTL1 if selected. 2564 */ 2565 intel_ddi_mso_configure(crtc_state); 2566 2567 /* 2568 * 7.g Configure and enable DDI_BUF_CTL 2569 * 7.h Wait for DDI_BUF_CTL DDI Idle Status = 0b (Not Idle), timeout 2570 * after 500 us. 2571 * 2572 * We only configure what the register value will be here. Actual 2573 * enabling happens during link training farther down. 2574 */ 2575 intel_ddi_init_dp_buf_reg(encoder, crtc_state); 2576 2577 if (!is_mst) 2578 intel_dp_set_power(intel_dp, DP_SET_POWER_D0); 2579 2580 intel_dp_configure_protocol_converter(intel_dp, crtc_state); 2581 intel_dp_sink_set_decompression_state(intel_dp, crtc_state, true); 2582 /* 2583 * DDI FEC: "anticipates enabling FEC encoding sets the FEC_READY bit 2584 * in the FEC_CONFIGURATION register to 1 before initiating link 2585 * training 2586 */ 2587 intel_dp_sink_set_fec_ready(intel_dp, crtc_state); 2588 2589 intel_dp_check_frl_training(intel_dp); 2590 intel_dp_pcon_dsc_configure(intel_dp, crtc_state); 2591 2592 /* 2593 * 7.i Follow DisplayPort specification training sequence (see notes for 2594 * failure handling) 2595 * 7.j If DisplayPort multi-stream - Set DP_TP_CTL link training to Idle 2596 * Pattern, wait for 5 idle patterns (DP_TP_STATUS Min_Idles_Sent) 2597 * (timeout after 800 us) 2598 */ 2599 intel_dp_start_link_train(intel_dp, crtc_state); 2600 2601 /* 7.k Set DP_TP_CTL link training to Normal */ 2602 if (!is_trans_port_sync_mode(crtc_state)) 2603 intel_dp_stop_link_train(intel_dp, crtc_state); 2604 2605 /* 7.l Configure and enable FEC if needed */ 2606 intel_ddi_enable_fec(encoder, crtc_state); 2607 if (!crtc_state->bigjoiner) 2608 intel_dsc_enable(encoder, crtc_state); 2609 } 2610 2611 static void hsw_ddi_pre_enable_dp(struct intel_atomic_state *state, 2612 struct intel_encoder *encoder, 2613 const struct intel_crtc_state *crtc_state, 2614 const struct drm_connector_state *conn_state) 2615 { 2616 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 2617 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2618 enum port port = encoder->port; 2619 enum phy phy = intel_port_to_phy(dev_priv, port); 2620 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2621 bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST); 
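/* DDI buffer translation level derived from the DP voltage swing and pre-emphasis in use */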
2622 int level = intel_ddi_dp_level(intel_dp); 2623 2624 if (DISPLAY_VER(dev_priv) < 11) 2625 drm_WARN_ON(&dev_priv->drm, 2626 is_mst && (port == PORT_A || port == PORT_E)); 2627 else 2628 drm_WARN_ON(&dev_priv->drm, is_mst && port == PORT_A); 2629 2630 intel_dp_set_link_params(intel_dp, 2631 crtc_state->port_clock, 2632 crtc_state->lane_count); 2633 2634 intel_pps_on(intel_dp); 2635 2636 intel_ddi_enable_clock(encoder, crtc_state); 2637 2638 if (!intel_phy_is_tc(dev_priv, phy) || 2639 dig_port->tc_mode != TC_PORT_TBT_ALT) { 2640 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 2641 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 2642 dig_port->ddi_io_power_domain); 2643 } 2644 2645 icl_program_mg_dp_mode(dig_port, crtc_state); 2646 2647 if (DISPLAY_VER(dev_priv) >= 11) 2648 icl_ddi_vswing_sequence(encoder, crtc_state, level); 2649 else if (IS_CANNONLAKE(dev_priv)) 2650 cnl_ddi_vswing_sequence(encoder, crtc_state, level); 2651 else if (IS_GEN9_LP(dev_priv)) 2652 bxt_ddi_vswing_sequence(encoder, crtc_state, level); 2653 else 2654 intel_prepare_dp_ddi_buffers(encoder, crtc_state); 2655 2656 intel_ddi_power_up_lanes(encoder, crtc_state); 2657 2658 intel_ddi_init_dp_buf_reg(encoder, crtc_state); 2659 if (!is_mst) 2660 intel_dp_set_power(intel_dp, DP_SET_POWER_D0); 2661 intel_dp_configure_protocol_converter(intel_dp, crtc_state); 2662 intel_dp_sink_set_decompression_state(intel_dp, crtc_state, 2663 true); 2664 intel_dp_sink_set_fec_ready(intel_dp, crtc_state); 2665 intel_dp_start_link_train(intel_dp, crtc_state); 2666 if ((port != PORT_A || DISPLAY_VER(dev_priv) >= 9) && 2667 !is_trans_port_sync_mode(crtc_state)) 2668 intel_dp_stop_link_train(intel_dp, crtc_state); 2669 2670 intel_ddi_enable_fec(encoder, crtc_state); 2671 2672 if (!is_mst) 2673 intel_ddi_enable_pipe_clock(encoder, crtc_state); 2674 2675 if (!crtc_state->bigjoiner) 2676 intel_dsc_enable(encoder, crtc_state); 2677 } 2678 2679 static void intel_ddi_pre_enable_dp(struct intel_atomic_state *state, 2680 struct intel_encoder *encoder, 2681 const struct intel_crtc_state *crtc_state, 2682 const struct drm_connector_state *conn_state) 2683 { 2684 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2685 2686 if (DISPLAY_VER(dev_priv) >= 12) 2687 tgl_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state); 2688 else 2689 hsw_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state); 2690 2691 /* MST will call a setting of MSA after an allocating of Virtual Channel 2692 * from MST encoder pre_enable callback. 
2693 */ 2694 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 2695 intel_ddi_set_dp_msa(crtc_state, conn_state); 2696 2697 intel_dp_set_m_n(crtc_state, M1_N1); 2698 } 2699 } 2700 2701 static void intel_ddi_pre_enable_hdmi(struct intel_atomic_state *state, 2702 struct intel_encoder *encoder, 2703 const struct intel_crtc_state *crtc_state, 2704 const struct drm_connector_state *conn_state) 2705 { 2706 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2707 struct intel_hdmi *intel_hdmi = &dig_port->hdmi; 2708 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2709 2710 intel_dp_dual_mode_set_tmds_output(intel_hdmi, true); 2711 intel_ddi_enable_clock(encoder, crtc_state); 2712 2713 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 2714 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 2715 dig_port->ddi_io_power_domain); 2716 2717 icl_program_mg_dp_mode(dig_port, crtc_state); 2718 2719 intel_ddi_enable_pipe_clock(encoder, crtc_state); 2720 2721 dig_port->set_infoframes(encoder, 2722 crtc_state->has_infoframe, 2723 crtc_state, conn_state); 2724 } 2725 2726 static void intel_ddi_pre_enable(struct intel_atomic_state *state, 2727 struct intel_encoder *encoder, 2728 const struct intel_crtc_state *crtc_state, 2729 const struct drm_connector_state *conn_state) 2730 { 2731 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2732 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 2733 enum pipe pipe = crtc->pipe; 2734 2735 /* 2736 * When called from DP MST code: 2737 * - conn_state will be NULL 2738 * - encoder will be the main encoder (ie. mst->primary) 2739 * - the main connector associated with this port 2740 * won't be active or linked to a crtc 2741 * - crtc_state will be the state of the first stream to 2742 * be activated on this port, and it may not be the same 2743 * stream that will be deactivated last, but each stream 2744 * should have a state that is identical when it comes to 2745 * the DP link parameteres 2746 */ 2747 2748 drm_WARN_ON(&dev_priv->drm, crtc_state->has_pch_encoder); 2749 2750 intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true); 2751 2752 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 2753 intel_ddi_pre_enable_hdmi(state, encoder, crtc_state, 2754 conn_state); 2755 } else { 2756 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2757 2758 intel_ddi_pre_enable_dp(state, encoder, crtc_state, 2759 conn_state); 2760 2761 /* FIXME precompute everything properly */ 2762 /* FIXME how do we turn infoframes off again? 
*/ 2763 if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink) 2764 dig_port->set_infoframes(encoder, 2765 crtc_state->has_infoframe, 2766 crtc_state, conn_state); 2767 } 2768 } 2769 2770 static void intel_disable_ddi_buf(struct intel_encoder *encoder, 2771 const struct intel_crtc_state *crtc_state) 2772 { 2773 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2774 enum port port = encoder->port; 2775 bool wait = false; 2776 u32 val; 2777 2778 val = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 2779 if (val & DDI_BUF_CTL_ENABLE) { 2780 val &= ~DDI_BUF_CTL_ENABLE; 2781 intel_de_write(dev_priv, DDI_BUF_CTL(port), val); 2782 wait = true; 2783 } 2784 2785 if (intel_crtc_has_dp_encoder(crtc_state)) { 2786 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 2787 val &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 2788 val |= DP_TP_CTL_LINK_TRAIN_PAT1; 2789 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 2790 } 2791 2792 /* Disable FEC in DP Sink */ 2793 intel_ddi_disable_fec_state(encoder, crtc_state); 2794 2795 if (wait) 2796 intel_wait_ddi_buf_idle(dev_priv, port); 2797 } 2798 2799 static void intel_ddi_post_disable_dp(struct intel_atomic_state *state, 2800 struct intel_encoder *encoder, 2801 const struct intel_crtc_state *old_crtc_state, 2802 const struct drm_connector_state *old_conn_state) 2803 { 2804 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2805 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2806 struct intel_dp *intel_dp = &dig_port->dp; 2807 bool is_mst = intel_crtc_has_type(old_crtc_state, 2808 INTEL_OUTPUT_DP_MST); 2809 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2810 2811 if (!is_mst) 2812 intel_dp_set_infoframes(encoder, false, 2813 old_crtc_state, old_conn_state); 2814 2815 /* 2816 * Power down sink before disabling the port, otherwise we end 2817 * up getting interrupts from the sink on detecting link loss. 
2818 */ 2819 intel_dp_set_power(intel_dp, DP_SET_POWER_D3); 2820 2821 if (DISPLAY_VER(dev_priv) >= 12) { 2822 if (is_mst) { 2823 enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder; 2824 u32 val; 2825 2826 val = intel_de_read(dev_priv, 2827 TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2828 val &= ~(TGL_TRANS_DDI_PORT_MASK | 2829 TRANS_DDI_MODE_SELECT_MASK); 2830 intel_de_write(dev_priv, 2831 TRANS_DDI_FUNC_CTL(cpu_transcoder), 2832 val); 2833 } 2834 } else { 2835 if (!is_mst) 2836 intel_ddi_disable_pipe_clock(old_crtc_state); 2837 } 2838 2839 intel_disable_ddi_buf(encoder, old_crtc_state); 2840 2841 /* 2842 * From TGL spec: "If single stream or multi-stream master transcoder: 2843 * Configure Transcoder Clock select to direct no clock to the 2844 * transcoder" 2845 */ 2846 if (DISPLAY_VER(dev_priv) >= 12) 2847 intel_ddi_disable_pipe_clock(old_crtc_state); 2848 2849 intel_pps_vdd_on(intel_dp); 2850 intel_pps_off(intel_dp); 2851 2852 if (!intel_phy_is_tc(dev_priv, phy) || 2853 dig_port->tc_mode != TC_PORT_TBT_ALT) 2854 intel_display_power_put(dev_priv, 2855 dig_port->ddi_io_power_domain, 2856 fetch_and_zero(&dig_port->ddi_io_wakeref)); 2857 2858 intel_ddi_disable_clock(encoder); 2859 } 2860 2861 static void intel_ddi_post_disable_hdmi(struct intel_atomic_state *state, 2862 struct intel_encoder *encoder, 2863 const struct intel_crtc_state *old_crtc_state, 2864 const struct drm_connector_state *old_conn_state) 2865 { 2866 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2867 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2868 struct intel_hdmi *intel_hdmi = &dig_port->hdmi; 2869 2870 dig_port->set_infoframes(encoder, false, 2871 old_crtc_state, old_conn_state); 2872 2873 intel_ddi_disable_pipe_clock(old_crtc_state); 2874 2875 intel_disable_ddi_buf(encoder, old_crtc_state); 2876 2877 intel_display_power_put(dev_priv, 2878 dig_port->ddi_io_power_domain, 2879 fetch_and_zero(&dig_port->ddi_io_wakeref)); 2880 2881 intel_ddi_disable_clock(encoder); 2882 2883 intel_dp_dual_mode_set_tmds_output(intel_hdmi, false); 2884 } 2885 2886 static void intel_ddi_post_disable(struct intel_atomic_state *state, 2887 struct intel_encoder *encoder, 2888 const struct intel_crtc_state *old_crtc_state, 2889 const struct drm_connector_state *old_conn_state) 2890 { 2891 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2892 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2893 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2894 bool is_tc_port = intel_phy_is_tc(dev_priv, phy); 2895 2896 if (!intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_DP_MST)) { 2897 intel_crtc_vblank_off(old_crtc_state); 2898 2899 intel_disable_pipe(old_crtc_state); 2900 2901 intel_vrr_disable(old_crtc_state); 2902 2903 intel_ddi_disable_transcoder_func(old_crtc_state); 2904 2905 intel_dsc_disable(old_crtc_state); 2906 2907 if (DISPLAY_VER(dev_priv) >= 9) 2908 skl_scaler_disable(old_crtc_state); 2909 else 2910 ilk_pfit_disable(old_crtc_state); 2911 } 2912 2913 if (old_crtc_state->bigjoiner_linked_crtc) { 2914 struct intel_atomic_state *state = 2915 to_intel_atomic_state(old_crtc_state->uapi.state); 2916 struct intel_crtc *slave = 2917 old_crtc_state->bigjoiner_linked_crtc; 2918 const struct intel_crtc_state *old_slave_crtc_state = 2919 intel_atomic_get_old_crtc_state(state, slave); 2920 2921 intel_crtc_vblank_off(old_slave_crtc_state); 2922 2923 intel_dsc_disable(old_slave_crtc_state); 2924 skl_scaler_disable(old_slave_crtc_state); 2925 } 2926 2927 /* 2928 * When called 
from DP MST code: 2929 * - old_conn_state will be NULL 2930 * - encoder will be the main encoder (ie. mst->primary) 2931 * - the main connector associated with this port 2932 * won't be active or linked to a crtc 2933 * - old_crtc_state will be the state of the last stream to 2934 * be deactivated on this port, and it may not be the same 2935 * stream that was activated last, but each stream 2936 * should have a state that is identical when it comes to 2937 * the DP link parameters 2938 */ 2939 2940 if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI)) 2941 intel_ddi_post_disable_hdmi(state, encoder, old_crtc_state, 2942 old_conn_state); 2943 else 2944 intel_ddi_post_disable_dp(state, encoder, old_crtc_state, 2945 old_conn_state); 2946 2947 if (intel_crtc_has_dp_encoder(old_crtc_state) || is_tc_port) 2948 intel_display_power_put(dev_priv, 2949 intel_ddi_main_link_aux_domain(dig_port), 2950 fetch_and_zero(&dig_port->aux_wakeref)); 2951 2952 if (is_tc_port) 2953 intel_tc_port_put_link(dig_port); 2954 } 2955 2956 void intel_ddi_fdi_post_disable(struct intel_atomic_state *state, 2957 struct intel_encoder *encoder, 2958 const struct intel_crtc_state *old_crtc_state, 2959 const struct drm_connector_state *old_conn_state) 2960 { 2961 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2962 u32 val; 2963 2964 /* 2965 * Bspec lists this as both step 13 (before DDI_BUF_CTL disable) 2966 * and step 18 (after clearing PORT_CLK_SEL). Based on a BUN, 2967 * step 13 is the correct place for it. Step 18 is where it was 2968 * originally before the BUN. 2969 */ 2970 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 2971 val &= ~FDI_RX_ENABLE; 2972 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 2973 2974 intel_disable_ddi_buf(encoder, old_crtc_state); 2975 intel_ddi_disable_clock(encoder); 2976 2977 val = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A)); 2978 val &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK); 2979 val |= FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2); 2980 intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), val); 2981 2982 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 2983 val &= ~FDI_PCDCLK; 2984 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 2985 2986 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 2987 val &= ~FDI_RX_PLL_ENABLE; 2988 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 2989 } 2990 2991 static void trans_port_sync_stop_link_train(struct intel_atomic_state *state, 2992 struct intel_encoder *encoder, 2993 const struct intel_crtc_state *crtc_state) 2994 { 2995 const struct drm_connector_state *conn_state; 2996 struct drm_connector *conn; 2997 int i; 2998 2999 if (!crtc_state->sync_mode_slaves_mask) 3000 return; 3001 3002 for_each_new_connector_in_state(&state->base, conn, conn_state, i) { 3003 struct intel_encoder *slave_encoder = 3004 to_intel_encoder(conn_state->best_encoder); 3005 struct intel_crtc *slave_crtc = to_intel_crtc(conn_state->crtc); 3006 const struct intel_crtc_state *slave_crtc_state; 3007 3008 if (!slave_crtc) 3009 continue; 3010 3011 slave_crtc_state = 3012 intel_atomic_get_new_crtc_state(state, slave_crtc); 3013 3014 if (slave_crtc_state->master_transcoder != 3015 crtc_state->cpu_transcoder) 3016 continue; 3017 3018 intel_dp_stop_link_train(enc_to_intel_dp(slave_encoder), 3019 slave_crtc_state); 3020 } 3021 3022 usleep_range(200, 400); 3023 3024 intel_dp_stop_link_train(enc_to_intel_dp(encoder), 3025 crtc_state); 3026 } 3027 3028 static void intel_enable_ddi_dp(struct intel_atomic_state *state, 3029 struct
intel_encoder *encoder, 3030 const struct intel_crtc_state *crtc_state, 3031 const struct drm_connector_state *conn_state) 3032 { 3033 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3034 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 3035 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3036 enum port port = encoder->port; 3037 3038 if (port == PORT_A && DISPLAY_VER(dev_priv) < 9) 3039 intel_dp_stop_link_train(intel_dp, crtc_state); 3040 3041 intel_edp_backlight_on(crtc_state, conn_state); 3042 intel_psr_enable(intel_dp, crtc_state, conn_state); 3043 3044 if (!dig_port->lspcon.active || dig_port->dp.has_hdmi_sink) 3045 intel_dp_set_infoframes(encoder, true, crtc_state, conn_state); 3046 3047 intel_edp_drrs_enable(intel_dp, crtc_state); 3048 3049 if (crtc_state->has_audio) 3050 intel_audio_codec_enable(encoder, crtc_state, conn_state); 3051 3052 trans_port_sync_stop_link_train(state, encoder, crtc_state); 3053 } 3054 3055 static i915_reg_t 3056 gen9_chicken_trans_reg_by_port(struct drm_i915_private *dev_priv, 3057 enum port port) 3058 { 3059 static const enum transcoder trans[] = { 3060 [PORT_A] = TRANSCODER_EDP, 3061 [PORT_B] = TRANSCODER_A, 3062 [PORT_C] = TRANSCODER_B, 3063 [PORT_D] = TRANSCODER_C, 3064 [PORT_E] = TRANSCODER_A, 3065 }; 3066 3067 drm_WARN_ON(&dev_priv->drm, DISPLAY_VER(dev_priv) < 9); 3068 3069 if (drm_WARN_ON(&dev_priv->drm, port < PORT_A || port > PORT_E)) 3070 port = PORT_A; 3071 3072 return CHICKEN_TRANS(trans[port]); 3073 } 3074 3075 static void intel_enable_ddi_hdmi(struct intel_atomic_state *state, 3076 struct intel_encoder *encoder, 3077 const struct intel_crtc_state *crtc_state, 3078 const struct drm_connector_state *conn_state) 3079 { 3080 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3081 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3082 struct drm_connector *connector = conn_state->connector; 3083 int level = intel_ddi_hdmi_level(encoder, crtc_state); 3084 enum port port = encoder->port; 3085 3086 if (!intel_hdmi_handle_sink_scrambling(encoder, connector, 3087 crtc_state->hdmi_high_tmds_clock_ratio, 3088 crtc_state->hdmi_scrambling)) 3089 drm_dbg_kms(&dev_priv->drm, 3090 "[CONNECTOR:%d:%s] Failed to configure sink scrambling/TMDS bit clock ratio\n", 3091 connector->base.id, connector->name); 3092 3093 if (DISPLAY_VER(dev_priv) >= 12) 3094 tgl_ddi_vswing_sequence(encoder, crtc_state, level); 3095 else if (IS_DISPLAY_VER(dev_priv, 11)) 3096 icl_ddi_vswing_sequence(encoder, crtc_state, level); 3097 else if (IS_CANNONLAKE(dev_priv)) 3098 cnl_ddi_vswing_sequence(encoder, crtc_state, level); 3099 else if (IS_GEN9_LP(dev_priv)) 3100 bxt_ddi_vswing_sequence(encoder, crtc_state, level); 3101 else 3102 intel_prepare_hdmi_ddi_buffers(encoder, level); 3103 3104 if (IS_GEN9_BC(dev_priv)) 3105 skl_ddi_set_iboost(encoder, crtc_state, level); 3106 3107 /* Display WA #1143: skl,kbl,cfl */ 3108 if (IS_GEN9_BC(dev_priv)) { 3109 /* 3110 * For some reason these chicken bits have been 3111 * stuffed into a transcoder register, even though 3112 * the bits affect a specific DDI port rather than 3113 * a specific transcoder.
3114 */ 3115 i915_reg_t reg = gen9_chicken_trans_reg_by_port(dev_priv, port); 3116 u32 val; 3117 3118 val = intel_de_read(dev_priv, reg); 3119 3120 if (port == PORT_E) 3121 val |= DDIE_TRAINING_OVERRIDE_ENABLE | 3122 DDIE_TRAINING_OVERRIDE_VALUE; 3123 else 3124 val |= DDI_TRAINING_OVERRIDE_ENABLE | 3125 DDI_TRAINING_OVERRIDE_VALUE; 3126 3127 intel_de_write(dev_priv, reg, val); 3128 intel_de_posting_read(dev_priv, reg); 3129 3130 udelay(1); 3131 3132 if (port == PORT_E) 3133 val &= ~(DDIE_TRAINING_OVERRIDE_ENABLE | 3134 DDIE_TRAINING_OVERRIDE_VALUE); 3135 else 3136 val &= ~(DDI_TRAINING_OVERRIDE_ENABLE | 3137 DDI_TRAINING_OVERRIDE_VALUE); 3138 3139 intel_de_write(dev_priv, reg, val); 3140 } 3141 3142 intel_ddi_power_up_lanes(encoder, crtc_state); 3143 3144 /* In HDMI/DVI mode, the port width, and swing/emphasis values 3145 * are ignored so nothing special needs to be done besides 3146 * enabling the port. 3147 */ 3148 intel_de_write(dev_priv, DDI_BUF_CTL(port), 3149 dig_port->saved_port_bits | DDI_BUF_CTL_ENABLE); 3150 3151 if (crtc_state->has_audio) 3152 intel_audio_codec_enable(encoder, crtc_state, conn_state); 3153 } 3154 3155 static void intel_enable_ddi(struct intel_atomic_state *state, 3156 struct intel_encoder *encoder, 3157 const struct intel_crtc_state *crtc_state, 3158 const struct drm_connector_state *conn_state) 3159 { 3160 drm_WARN_ON(state->base.dev, crtc_state->has_pch_encoder); 3161 3162 if (!crtc_state->bigjoiner_slave) 3163 intel_ddi_enable_transcoder_func(encoder, crtc_state); 3164 3165 intel_vrr_enable(encoder, crtc_state); 3166 3167 intel_enable_pipe(crtc_state); 3168 3169 intel_crtc_vblank_on(crtc_state); 3170 3171 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 3172 intel_enable_ddi_hdmi(state, encoder, crtc_state, conn_state); 3173 else 3174 intel_enable_ddi_dp(state, encoder, crtc_state, conn_state); 3175 3176 /* Enable hdcp if it's desired */ 3177 if (conn_state->content_protection == 3178 DRM_MODE_CONTENT_PROTECTION_DESIRED) 3179 intel_hdcp_enable(to_intel_connector(conn_state->connector), 3180 crtc_state, 3181 (u8)conn_state->hdcp_content_type); 3182 } 3183 3184 static void intel_disable_ddi_dp(struct intel_atomic_state *state, 3185 struct intel_encoder *encoder, 3186 const struct intel_crtc_state *old_crtc_state, 3187 const struct drm_connector_state *old_conn_state) 3188 { 3189 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 3190 3191 intel_dp->link_trained = false; 3192 3193 if (old_crtc_state->has_audio) 3194 intel_audio_codec_disable(encoder, 3195 old_crtc_state, old_conn_state); 3196 3197 intel_edp_drrs_disable(intel_dp, old_crtc_state); 3198 intel_psr_disable(intel_dp, old_crtc_state); 3199 intel_edp_backlight_off(old_conn_state); 3200 /* Disable the decompression in DP Sink */ 3201 intel_dp_sink_set_decompression_state(intel_dp, old_crtc_state, 3202 false); 3203 /* Disable Ignore_MSA bit in DP Sink */ 3204 intel_dp_sink_set_msa_timing_par_ignore_state(intel_dp, old_crtc_state, 3205 false); 3206 } 3207 3208 static void intel_disable_ddi_hdmi(struct intel_atomic_state *state, 3209 struct intel_encoder *encoder, 3210 const struct intel_crtc_state *old_crtc_state, 3211 const struct drm_connector_state *old_conn_state) 3212 { 3213 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3214 struct drm_connector *connector = old_conn_state->connector; 3215 3216 if (old_crtc_state->has_audio) 3217 intel_audio_codec_disable(encoder, 3218 old_crtc_state, old_conn_state); 3219 3220 if (!intel_hdmi_handle_sink_scrambling(encoder, connector, 3221 
false, false)) 3222 drm_dbg_kms(&i915->drm, 3223 "[CONNECTOR:%d:%s] Failed to reset sink scrambling/TMDS bit clock ratio\n", 3224 connector->base.id, connector->name); 3225 } 3226 3227 static void intel_disable_ddi(struct intel_atomic_state *state, 3228 struct intel_encoder *encoder, 3229 const struct intel_crtc_state *old_crtc_state, 3230 const struct drm_connector_state *old_conn_state) 3231 { 3232 intel_hdcp_disable(to_intel_connector(old_conn_state->connector)); 3233 3234 if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI)) 3235 intel_disable_ddi_hdmi(state, encoder, old_crtc_state, 3236 old_conn_state); 3237 else 3238 intel_disable_ddi_dp(state, encoder, old_crtc_state, 3239 old_conn_state); 3240 } 3241 3242 static void intel_ddi_update_pipe_dp(struct intel_atomic_state *state, 3243 struct intel_encoder *encoder, 3244 const struct intel_crtc_state *crtc_state, 3245 const struct drm_connector_state *conn_state) 3246 { 3247 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 3248 3249 intel_ddi_set_dp_msa(crtc_state, conn_state); 3250 3251 intel_psr_update(intel_dp, crtc_state, conn_state); 3252 intel_dp_set_infoframes(encoder, true, crtc_state, conn_state); 3253 intel_edp_drrs_update(intel_dp, crtc_state); 3254 3255 intel_panel_update_backlight(state, encoder, crtc_state, conn_state); 3256 } 3257 3258 void intel_ddi_update_pipe(struct intel_atomic_state *state, 3259 struct intel_encoder *encoder, 3260 const struct intel_crtc_state *crtc_state, 3261 const struct drm_connector_state *conn_state) 3262 { 3263 3264 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) && 3265 !intel_encoder_is_mst(encoder)) 3266 intel_ddi_update_pipe_dp(state, encoder, crtc_state, 3267 conn_state); 3268 3269 intel_hdcp_update_pipe(state, encoder, crtc_state, conn_state); 3270 } 3271 3272 static void 3273 intel_ddi_update_prepare(struct intel_atomic_state *state, 3274 struct intel_encoder *encoder, 3275 struct intel_crtc *crtc) 3276 { 3277 struct intel_crtc_state *crtc_state = 3278 crtc ? intel_atomic_get_new_crtc_state(state, crtc) : NULL; 3279 int required_lanes = crtc_state ? 
crtc_state->lane_count : 1; 3280 3281 drm_WARN_ON(state->base.dev, crtc && crtc->active); 3282 3283 intel_tc_port_get_link(enc_to_dig_port(encoder), 3284 required_lanes); 3285 if (crtc_state && crtc_state->hw.active) 3286 intel_update_active_dpll(state, crtc, encoder); 3287 } 3288 3289 static void 3290 intel_ddi_update_complete(struct intel_atomic_state *state, 3291 struct intel_encoder *encoder, 3292 struct intel_crtc *crtc) 3293 { 3294 intel_tc_port_put_link(enc_to_dig_port(encoder)); 3295 } 3296 3297 static void 3298 intel_ddi_pre_pll_enable(struct intel_atomic_state *state, 3299 struct intel_encoder *encoder, 3300 const struct intel_crtc_state *crtc_state, 3301 const struct drm_connector_state *conn_state) 3302 { 3303 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3304 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3305 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3306 bool is_tc_port = intel_phy_is_tc(dev_priv, phy); 3307 3308 if (is_tc_port) 3309 intel_tc_port_get_link(dig_port, crtc_state->lane_count); 3310 3311 if (intel_crtc_has_dp_encoder(crtc_state) || is_tc_port) { 3312 drm_WARN_ON(&dev_priv->drm, dig_port->aux_wakeref); 3313 dig_port->aux_wakeref = 3314 intel_display_power_get(dev_priv, 3315 intel_ddi_main_link_aux_domain(dig_port)); 3316 } 3317 3318 if (is_tc_port && dig_port->tc_mode != TC_PORT_TBT_ALT) 3319 /* 3320 * Program the lane count for static/dynamic connections on 3321 * Type-C ports. Skip this step for TBT. 3322 */ 3323 intel_tc_port_set_fia_lane_count(dig_port, crtc_state->lane_count); 3324 else if (IS_GEN9_LP(dev_priv)) 3325 bxt_ddi_phy_set_lane_optim_mask(encoder, 3326 crtc_state->lane_lat_optim_mask); 3327 } 3328 3329 static void intel_ddi_prepare_link_retrain(struct intel_dp *intel_dp, 3330 const struct intel_crtc_state *crtc_state) 3331 { 3332 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3333 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3334 enum port port = encoder->port; 3335 u32 dp_tp_ctl, ddi_buf_ctl; 3336 bool wait = false; 3337 3338 dp_tp_ctl = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3339 3340 if (dp_tp_ctl & DP_TP_CTL_ENABLE) { 3341 ddi_buf_ctl = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 3342 if (ddi_buf_ctl & DDI_BUF_CTL_ENABLE) { 3343 intel_de_write(dev_priv, DDI_BUF_CTL(port), 3344 ddi_buf_ctl & ~DDI_BUF_CTL_ENABLE); 3345 wait = true; 3346 } 3347 3348 dp_tp_ctl &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 3349 dp_tp_ctl |= DP_TP_CTL_LINK_TRAIN_PAT1; 3350 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl); 3351 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3352 3353 if (wait) 3354 intel_wait_ddi_buf_idle(dev_priv, port); 3355 } 3356 3357 dp_tp_ctl = DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_PAT1; 3358 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 3359 dp_tp_ctl |= DP_TP_CTL_MODE_MST; 3360 } else { 3361 dp_tp_ctl |= DP_TP_CTL_MODE_SST; 3362 if (drm_dp_enhanced_frame_cap(intel_dp->dpcd)) 3363 dp_tp_ctl |= DP_TP_CTL_ENHANCED_FRAME_ENABLE; 3364 } 3365 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl); 3366 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3367 3368 intel_dp->DP |= DDI_BUF_CTL_ENABLE; 3369 intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP); 3370 intel_de_posting_read(dev_priv, DDI_BUF_CTL(port)); 3371 3372 intel_wait_ddi_buf_active(dev_priv, port); 3373 } 3374 3375 static void intel_ddi_set_link_train(struct intel_dp 
*intel_dp, 3376 const struct intel_crtc_state *crtc_state, 3377 u8 dp_train_pat) 3378 { 3379 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3380 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3381 u32 temp; 3382 3383 temp = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3384 3385 temp &= ~DP_TP_CTL_LINK_TRAIN_MASK; 3386 switch (intel_dp_training_pattern_symbol(dp_train_pat)) { 3387 case DP_TRAINING_PATTERN_DISABLE: 3388 temp |= DP_TP_CTL_LINK_TRAIN_NORMAL; 3389 break; 3390 case DP_TRAINING_PATTERN_1: 3391 temp |= DP_TP_CTL_LINK_TRAIN_PAT1; 3392 break; 3393 case DP_TRAINING_PATTERN_2: 3394 temp |= DP_TP_CTL_LINK_TRAIN_PAT2; 3395 break; 3396 case DP_TRAINING_PATTERN_3: 3397 temp |= DP_TP_CTL_LINK_TRAIN_PAT3; 3398 break; 3399 case DP_TRAINING_PATTERN_4: 3400 temp |= DP_TP_CTL_LINK_TRAIN_PAT4; 3401 break; 3402 } 3403 3404 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), temp); 3405 } 3406 3407 static void intel_ddi_set_idle_link_train(struct intel_dp *intel_dp, 3408 const struct intel_crtc_state *crtc_state) 3409 { 3410 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3411 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3412 enum port port = encoder->port; 3413 u32 val; 3414 3415 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3416 val &= ~DP_TP_CTL_LINK_TRAIN_MASK; 3417 val |= DP_TP_CTL_LINK_TRAIN_IDLE; 3418 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 3419 3420 /* 3421 * Until TGL on PORT_A we can have only eDP in SST mode. There the only 3422 * reason we need to set idle transmission mode is to work around a HW 3423 * issue where we enable the pipe while not in idle link-training mode. 3424 * In this case there is requirement to wait for a minimum number of 3425 * idle patterns to be sent. 
3426 */ 3427 if (port == PORT_A && DISPLAY_VER(dev_priv) < 12) 3428 return; 3429 3430 if (intel_de_wait_for_set(dev_priv, 3431 dp_tp_status_reg(encoder, crtc_state), 3432 DP_TP_STATUS_IDLE_DONE, 1)) 3433 drm_err(&dev_priv->drm, 3434 "Timed out waiting for DP idle patterns\n"); 3435 } 3436 3437 static bool intel_ddi_is_audio_enabled(struct drm_i915_private *dev_priv, 3438 enum transcoder cpu_transcoder) 3439 { 3440 if (cpu_transcoder == TRANSCODER_EDP) 3441 return false; 3442 3443 if (!intel_display_power_is_enabled(dev_priv, POWER_DOMAIN_AUDIO)) 3444 return false; 3445 3446 return intel_de_read(dev_priv, HSW_AUD_PIN_ELD_CP_VLD) & 3447 AUDIO_OUTPUT_ENABLE(cpu_transcoder); 3448 } 3449 3450 void intel_ddi_compute_min_voltage_level(struct drm_i915_private *dev_priv, 3451 struct intel_crtc_state *crtc_state) 3452 { 3453 if (DISPLAY_VER(dev_priv) >= 12 && crtc_state->port_clock > 594000) 3454 crtc_state->min_voltage_level = 2; 3455 else if (IS_JSL_EHL(dev_priv) && crtc_state->port_clock > 594000) 3456 crtc_state->min_voltage_level = 3; 3457 else if (DISPLAY_VER(dev_priv) >= 11 && crtc_state->port_clock > 594000) 3458 crtc_state->min_voltage_level = 1; 3459 else if (IS_CANNONLAKE(dev_priv) && crtc_state->port_clock > 594000) 3460 crtc_state->min_voltage_level = 2; 3461 } 3462 3463 static enum transcoder bdw_transcoder_master_readout(struct drm_i915_private *dev_priv, 3464 enum transcoder cpu_transcoder) 3465 { 3466 u32 master_select; 3467 3468 if (DISPLAY_VER(dev_priv) >= 11) { 3469 u32 ctl2 = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL2(cpu_transcoder)); 3470 3471 if ((ctl2 & PORT_SYNC_MODE_ENABLE) == 0) 3472 return INVALID_TRANSCODER; 3473 3474 master_select = REG_FIELD_GET(PORT_SYNC_MODE_MASTER_SELECT_MASK, ctl2); 3475 } else { 3476 u32 ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 3477 3478 if ((ctl & TRANS_DDI_PORT_SYNC_ENABLE) == 0) 3479 return INVALID_TRANSCODER; 3480 3481 master_select = REG_FIELD_GET(TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK, ctl); 3482 } 3483 3484 if (master_select == 0) 3485 return TRANSCODER_EDP; 3486 else 3487 return master_select - 1; 3488 } 3489 3490 static void bdw_get_trans_port_sync_config(struct intel_crtc_state *crtc_state) 3491 { 3492 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); 3493 u32 transcoders = BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | 3494 BIT(TRANSCODER_C) | BIT(TRANSCODER_D); 3495 enum transcoder cpu_transcoder; 3496 3497 crtc_state->master_transcoder = 3498 bdw_transcoder_master_readout(dev_priv, crtc_state->cpu_transcoder); 3499 3500 for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) { 3501 enum intel_display_power_domain power_domain; 3502 intel_wakeref_t trans_wakeref; 3503 3504 power_domain = POWER_DOMAIN_TRANSCODER(cpu_transcoder); 3505 trans_wakeref = intel_display_power_get_if_enabled(dev_priv, 3506 power_domain); 3507 3508 if (!trans_wakeref) 3509 continue; 3510 3511 if (bdw_transcoder_master_readout(dev_priv, cpu_transcoder) == 3512 crtc_state->cpu_transcoder) 3513 crtc_state->sync_mode_slaves_mask |= BIT(cpu_transcoder); 3514 3515 intel_display_power_put(dev_priv, power_domain, trans_wakeref); 3516 } 3517 3518 drm_WARN_ON(&dev_priv->drm, 3519 crtc_state->master_transcoder != INVALID_TRANSCODER && 3520 crtc_state->sync_mode_slaves_mask); 3521 } 3522 3523 static void intel_ddi_read_func_ctl(struct intel_encoder *encoder, 3524 struct intel_crtc_state *pipe_config) 3525 { 3526 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3527 struct intel_crtc *intel_crtc = 
to_intel_crtc(pipe_config->uapi.crtc); 3528 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder; 3529 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3530 u32 temp, flags = 0; 3531 3532 temp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 3533 if (temp & TRANS_DDI_PHSYNC) 3534 flags |= DRM_MODE_FLAG_PHSYNC; 3535 else 3536 flags |= DRM_MODE_FLAG_NHSYNC; 3537 if (temp & TRANS_DDI_PVSYNC) 3538 flags |= DRM_MODE_FLAG_PVSYNC; 3539 else 3540 flags |= DRM_MODE_FLAG_NVSYNC; 3541 3542 pipe_config->hw.adjusted_mode.flags |= flags; 3543 3544 switch (temp & TRANS_DDI_BPC_MASK) { 3545 case TRANS_DDI_BPC_6: 3546 pipe_config->pipe_bpp = 18; 3547 break; 3548 case TRANS_DDI_BPC_8: 3549 pipe_config->pipe_bpp = 24; 3550 break; 3551 case TRANS_DDI_BPC_10: 3552 pipe_config->pipe_bpp = 30; 3553 break; 3554 case TRANS_DDI_BPC_12: 3555 pipe_config->pipe_bpp = 36; 3556 break; 3557 default: 3558 break; 3559 } 3560 3561 switch (temp & TRANS_DDI_MODE_SELECT_MASK) { 3562 case TRANS_DDI_MODE_SELECT_HDMI: 3563 pipe_config->has_hdmi_sink = true; 3564 3565 pipe_config->infoframes.enable |= 3566 intel_hdmi_infoframes_enabled(encoder, pipe_config); 3567 3568 if (pipe_config->infoframes.enable) 3569 pipe_config->has_infoframe = true; 3570 3571 if (temp & TRANS_DDI_HDMI_SCRAMBLING) 3572 pipe_config->hdmi_scrambling = true; 3573 if (temp & TRANS_DDI_HIGH_TMDS_CHAR_RATE) 3574 pipe_config->hdmi_high_tmds_clock_ratio = true; 3575 fallthrough; 3576 case TRANS_DDI_MODE_SELECT_DVI: 3577 pipe_config->output_types |= BIT(INTEL_OUTPUT_HDMI); 3578 pipe_config->lane_count = 4; 3579 break; 3580 case TRANS_DDI_MODE_SELECT_FDI: 3581 pipe_config->output_types |= BIT(INTEL_OUTPUT_ANALOG); 3582 break; 3583 case TRANS_DDI_MODE_SELECT_DP_SST: 3584 if (encoder->type == INTEL_OUTPUT_EDP) 3585 pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP); 3586 else 3587 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP); 3588 pipe_config->lane_count = 3589 ((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1; 3590 intel_dp_get_m_n(intel_crtc, pipe_config); 3591 3592 if (DISPLAY_VER(dev_priv) >= 11) { 3593 i915_reg_t dp_tp_ctl = dp_tp_ctl_reg(encoder, pipe_config); 3594 3595 pipe_config->fec_enable = 3596 intel_de_read(dev_priv, dp_tp_ctl) & DP_TP_CTL_FEC_ENABLE; 3597 3598 drm_dbg_kms(&dev_priv->drm, 3599 "[ENCODER:%d:%s] Fec status: %u\n", 3600 encoder->base.base.id, encoder->base.name, 3601 pipe_config->fec_enable); 3602 } 3603 3604 if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink) 3605 pipe_config->infoframes.enable |= 3606 intel_lspcon_infoframes_enabled(encoder, pipe_config); 3607 else 3608 pipe_config->infoframes.enable |= 3609 intel_hdmi_infoframes_enabled(encoder, pipe_config); 3610 break; 3611 case TRANS_DDI_MODE_SELECT_DP_MST: 3612 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP_MST); 3613 pipe_config->lane_count = 3614 ((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1; 3615 3616 if (DISPLAY_VER(dev_priv) >= 12) 3617 pipe_config->mst_master_transcoder = 3618 REG_FIELD_GET(TRANS_DDI_MST_TRANSPORT_SELECT_MASK, temp); 3619 3620 intel_dp_get_m_n(intel_crtc, pipe_config); 3621 3622 pipe_config->infoframes.enable |= 3623 intel_hdmi_infoframes_enabled(encoder, pipe_config); 3624 break; 3625 default: 3626 break; 3627 } 3628 } 3629 3630 static void intel_ddi_get_config(struct intel_encoder *encoder, 3631 struct intel_crtc_state *pipe_config) 3632 { 3633 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3634 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder; 3635 3636 /* 
XXX: DSI transcoder paranoia */ 3637 if (drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder))) 3638 return; 3639 3640 if (pipe_config->bigjoiner_slave) { 3641 /* read out pipe settings from master */ 3642 enum transcoder save = pipe_config->cpu_transcoder; 3643 3644 /* Our own transcoder needs to be disabled when reading it in intel_ddi_read_func_ctl() */ 3645 WARN_ON(pipe_config->output_types); 3646 pipe_config->cpu_transcoder = (enum transcoder)pipe_config->bigjoiner_linked_crtc->pipe; 3647 intel_ddi_read_func_ctl(encoder, pipe_config); 3648 pipe_config->cpu_transcoder = save; 3649 } else { 3650 intel_ddi_read_func_ctl(encoder, pipe_config); 3651 } 3652 3653 intel_ddi_mso_get_config(encoder, pipe_config); 3654 3655 pipe_config->has_audio = 3656 intel_ddi_is_audio_enabled(dev_priv, cpu_transcoder); 3657 3658 if (encoder->type == INTEL_OUTPUT_EDP && dev_priv->vbt.edp.bpp && 3659 pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) { 3660 /* 3661 * This is a big fat ugly hack. 3662 * 3663 * Some machines in UEFI boot mode provide us a VBT that has 18 3664 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons 3665 * unknown we fail to light up. Yet the same BIOS boots up with 3666 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as 3667 * max, not what it tells us to use. 3668 * 3669 * Note: This will still be broken if the eDP panel is not lit 3670 * up by the BIOS, and thus we can't get the mode at module 3671 * load. 3672 */ 3673 drm_dbg_kms(&dev_priv->drm, 3674 "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n", 3675 pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp); 3676 dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp; 3677 } 3678 3679 if (!pipe_config->bigjoiner_slave) 3680 ddi_dotclock_get(pipe_config); 3681 3682 if (IS_GEN9_LP(dev_priv)) 3683 pipe_config->lane_lat_optim_mask = 3684 bxt_ddi_phy_get_lane_lat_optim_mask(encoder); 3685 3686 intel_ddi_compute_min_voltage_level(dev_priv, pipe_config); 3687 3688 intel_hdmi_read_gcp_infoframe(encoder, pipe_config); 3689 3690 intel_read_infoframe(encoder, pipe_config, 3691 HDMI_INFOFRAME_TYPE_AVI, 3692 &pipe_config->infoframes.avi); 3693 intel_read_infoframe(encoder, pipe_config, 3694 HDMI_INFOFRAME_TYPE_SPD, 3695 &pipe_config->infoframes.spd); 3696 intel_read_infoframe(encoder, pipe_config, 3697 HDMI_INFOFRAME_TYPE_VENDOR, 3698 &pipe_config->infoframes.hdmi); 3699 intel_read_infoframe(encoder, pipe_config, 3700 HDMI_INFOFRAME_TYPE_DRM, 3701 &pipe_config->infoframes.drm); 3702 3703 if (DISPLAY_VER(dev_priv) >= 8) 3704 bdw_get_trans_port_sync_config(pipe_config); 3705 3706 intel_read_dp_sdp(encoder, pipe_config, HDMI_PACKET_TYPE_GAMUT_METADATA); 3707 intel_read_dp_sdp(encoder, pipe_config, DP_SDP_VSC); 3708 } 3709 3710 void intel_ddi_get_clock(struct intel_encoder *encoder, 3711 struct intel_crtc_state *crtc_state, 3712 struct intel_shared_dpll *pll) 3713 { 3714 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3715 enum icl_port_dpll_id port_dpll_id = ICL_PORT_DPLL_DEFAULT; 3716 struct icl_port_dpll *port_dpll = &crtc_state->icl_port_dplls[port_dpll_id]; 3717 bool pll_active; 3718 3719 if (drm_WARN_ON(&i915->drm, !pll)) 3720 return; 3721 3722 port_dpll->pll = pll; 3723 pll_active = intel_dpll_get_hw_state(i915, pll, &port_dpll->hw_state); 3724 drm_WARN_ON(&i915->drm, !pll_active); 3725 3726 icl_set_active_port_dpll(crtc_state, port_dpll_id); 3727 3728 crtc_state->port_clock = intel_dpll_get_freq(i915, crtc_state->shared_dpll, 3729 &crtc_state->dpll_hw_state); 3730 } 3731 3732 static void 
adls_ddi_get_config(struct intel_encoder *encoder, 3733 struct intel_crtc_state *crtc_state) 3734 { 3735 intel_ddi_get_clock(encoder, crtc_state, adls_ddi_get_pll(encoder)); 3736 intel_ddi_get_config(encoder, crtc_state); 3737 } 3738 3739 static void rkl_ddi_get_config(struct intel_encoder *encoder, 3740 struct intel_crtc_state *crtc_state) 3741 { 3742 intel_ddi_get_clock(encoder, crtc_state, rkl_ddi_get_pll(encoder)); 3743 intel_ddi_get_config(encoder, crtc_state); 3744 } 3745 3746 static void dg1_ddi_get_config(struct intel_encoder *encoder, 3747 struct intel_crtc_state *crtc_state) 3748 { 3749 intel_ddi_get_clock(encoder, crtc_state, dg1_ddi_get_pll(encoder)); 3750 intel_ddi_get_config(encoder, crtc_state); 3751 } 3752 3753 static void icl_ddi_combo_get_config(struct intel_encoder *encoder, 3754 struct intel_crtc_state *crtc_state) 3755 { 3756 intel_ddi_get_clock(encoder, crtc_state, icl_ddi_combo_get_pll(encoder)); 3757 intel_ddi_get_config(encoder, crtc_state); 3758 } 3759 3760 static void icl_ddi_tc_get_clock(struct intel_encoder *encoder, 3761 struct intel_crtc_state *crtc_state, 3762 struct intel_shared_dpll *pll) 3763 { 3764 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3765 enum icl_port_dpll_id port_dpll_id; 3766 struct icl_port_dpll *port_dpll; 3767 bool pll_active; 3768 3769 if (drm_WARN_ON(&i915->drm, !pll)) 3770 return; 3771 3772 if (intel_get_shared_dpll_id(i915, pll) == DPLL_ID_ICL_TBTPLL) 3773 port_dpll_id = ICL_PORT_DPLL_DEFAULT; 3774 else 3775 port_dpll_id = ICL_PORT_DPLL_MG_PHY; 3776 3777 port_dpll = &crtc_state->icl_port_dplls[port_dpll_id]; 3778 3779 port_dpll->pll = pll; 3780 pll_active = intel_dpll_get_hw_state(i915, pll, &port_dpll->hw_state); 3781 drm_WARN_ON(&i915->drm, !pll_active); 3782 3783 icl_set_active_port_dpll(crtc_state, port_dpll_id); 3784 3785 if (intel_get_shared_dpll_id(i915, crtc_state->shared_dpll) == DPLL_ID_ICL_TBTPLL) 3786 crtc_state->port_clock = icl_calc_tbt_pll_link(i915, encoder->port); 3787 else 3788 crtc_state->port_clock = intel_dpll_get_freq(i915, crtc_state->shared_dpll, 3789 &crtc_state->dpll_hw_state); 3790 } 3791 3792 static void icl_ddi_tc_get_config(struct intel_encoder *encoder, 3793 struct intel_crtc_state *crtc_state) 3794 { 3795 icl_ddi_tc_get_clock(encoder, crtc_state, icl_ddi_tc_get_pll(encoder)); 3796 intel_ddi_get_config(encoder, crtc_state); 3797 } 3798 3799 static void cnl_ddi_get_config(struct intel_encoder *encoder, 3800 struct intel_crtc_state *crtc_state) 3801 { 3802 intel_ddi_get_clock(encoder, crtc_state, cnl_ddi_get_pll(encoder)); 3803 intel_ddi_get_config(encoder, crtc_state); 3804 } 3805 3806 static void bxt_ddi_get_config(struct intel_encoder *encoder, 3807 struct intel_crtc_state *crtc_state) 3808 { 3809 intel_ddi_get_clock(encoder, crtc_state, bxt_ddi_get_pll(encoder)); 3810 intel_ddi_get_config(encoder, crtc_state); 3811 } 3812 3813 static void skl_ddi_get_config(struct intel_encoder *encoder, 3814 struct intel_crtc_state *crtc_state) 3815 { 3816 intel_ddi_get_clock(encoder, crtc_state, skl_ddi_get_pll(encoder)); 3817 intel_ddi_get_config(encoder, crtc_state); 3818 } 3819 3820 void hsw_ddi_get_config(struct intel_encoder *encoder, 3821 struct intel_crtc_state *crtc_state) 3822 { 3823 intel_ddi_get_clock(encoder, crtc_state, hsw_ddi_get_pll(encoder)); 3824 intel_ddi_get_config(encoder, crtc_state); 3825 } 3826 3827 static void intel_ddi_sync_state(struct intel_encoder *encoder, 3828 const struct intel_crtc_state *crtc_state) 3829 { 3830 if (intel_crtc_has_dp_encoder(crtc_state)) 3831 
intel_dp_sync_state(encoder, crtc_state); 3832 } 3833 3834 static bool intel_ddi_initial_fastset_check(struct intel_encoder *encoder, 3835 struct intel_crtc_state *crtc_state) 3836 { 3837 if (intel_crtc_has_dp_encoder(crtc_state)) 3838 return intel_dp_initial_fastset_check(encoder, crtc_state); 3839 3840 return true; 3841 } 3842 3843 static enum intel_output_type 3844 intel_ddi_compute_output_type(struct intel_encoder *encoder, 3845 struct intel_crtc_state *crtc_state, 3846 struct drm_connector_state *conn_state) 3847 { 3848 switch (conn_state->connector->connector_type) { 3849 case DRM_MODE_CONNECTOR_HDMIA: 3850 return INTEL_OUTPUT_HDMI; 3851 case DRM_MODE_CONNECTOR_eDP: 3852 return INTEL_OUTPUT_EDP; 3853 case DRM_MODE_CONNECTOR_DisplayPort: 3854 return INTEL_OUTPUT_DP; 3855 default: 3856 MISSING_CASE(conn_state->connector->connector_type); 3857 return INTEL_OUTPUT_UNUSED; 3858 } 3859 } 3860 3861 static int intel_ddi_compute_config(struct intel_encoder *encoder, 3862 struct intel_crtc_state *pipe_config, 3863 struct drm_connector_state *conn_state) 3864 { 3865 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc); 3866 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3867 enum port port = encoder->port; 3868 int ret; 3869 3870 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) 3871 pipe_config->cpu_transcoder = TRANSCODER_EDP; 3872 3873 if (intel_crtc_has_type(pipe_config, INTEL_OUTPUT_HDMI)) { 3874 ret = intel_hdmi_compute_config(encoder, pipe_config, conn_state); 3875 } else { 3876 ret = intel_dp_compute_config(encoder, pipe_config, conn_state); 3877 } 3878 3879 if (ret) 3880 return ret; 3881 3882 if (IS_HASWELL(dev_priv) && crtc->pipe == PIPE_A && 3883 pipe_config->cpu_transcoder == TRANSCODER_EDP) 3884 pipe_config->pch_pfit.force_thru = 3885 pipe_config->pch_pfit.enabled || 3886 pipe_config->crc_enabled; 3887 3888 if (IS_GEN9_LP(dev_priv)) 3889 pipe_config->lane_lat_optim_mask = 3890 bxt_ddi_phy_calc_lane_lat_optim_mask(pipe_config->lane_count); 3891 3892 intel_ddi_compute_min_voltage_level(dev_priv, pipe_config); 3893 3894 return 0; 3895 } 3896 3897 static bool mode_equal(const struct drm_display_mode *mode1, 3898 const struct drm_display_mode *mode2) 3899 { 3900 return drm_mode_match(mode1, mode2, 3901 DRM_MODE_MATCH_TIMINGS | 3902 DRM_MODE_MATCH_FLAGS | 3903 DRM_MODE_MATCH_3D_FLAGS) && 3904 mode1->clock == mode2->clock; /* we want an exact match */ 3905 } 3906 3907 static bool m_n_equal(const struct intel_link_m_n *m_n_1, 3908 const struct intel_link_m_n *m_n_2) 3909 { 3910 return m_n_1->tu == m_n_2->tu && 3911 m_n_1->gmch_m == m_n_2->gmch_m && 3912 m_n_1->gmch_n == m_n_2->gmch_n && 3913 m_n_1->link_m == m_n_2->link_m && 3914 m_n_1->link_n == m_n_2->link_n; 3915 } 3916 3917 static bool crtcs_port_sync_compatible(const struct intel_crtc_state *crtc_state1, 3918 const struct intel_crtc_state *crtc_state2) 3919 { 3920 return crtc_state1->hw.active && crtc_state2->hw.active && 3921 crtc_state1->output_types == crtc_state2->output_types && 3922 crtc_state1->output_format == crtc_state2->output_format && 3923 crtc_state1->lane_count == crtc_state2->lane_count && 3924 crtc_state1->port_clock == crtc_state2->port_clock && 3925 mode_equal(&crtc_state1->hw.adjusted_mode, 3926 &crtc_state2->hw.adjusted_mode) && 3927 m_n_equal(&crtc_state1->dp_m_n, &crtc_state2->dp_m_n); 3928 } 3929 3930 static u8 3931 intel_ddi_port_sync_transcoders(const struct intel_crtc_state *ref_crtc_state, 3932 int tile_group_id) 3933 { 3934 struct drm_connector *connector; 3935 
const struct drm_connector_state *conn_state; 3936 struct drm_i915_private *dev_priv = to_i915(ref_crtc_state->uapi.crtc->dev); 3937 struct intel_atomic_state *state = 3938 to_intel_atomic_state(ref_crtc_state->uapi.state); 3939 u8 transcoders = 0; 3940 int i; 3941 3942 /* 3943 * We don't enable port sync on BDW due to missing w/as and 3944 * due to not having adjusted the modeset sequence appropriately. 3945 */ 3946 if (DISPLAY_VER(dev_priv) < 9) 3947 return 0; 3948 3949 if (!intel_crtc_has_type(ref_crtc_state, INTEL_OUTPUT_DP)) 3950 return 0; 3951 3952 for_each_new_connector_in_state(&state->base, connector, conn_state, i) { 3953 struct intel_crtc *crtc = to_intel_crtc(conn_state->crtc); 3954 const struct intel_crtc_state *crtc_state; 3955 3956 if (!crtc) 3957 continue; 3958 3959 if (!connector->has_tile || 3960 connector->tile_group->id != 3961 tile_group_id) 3962 continue; 3963 crtc_state = intel_atomic_get_new_crtc_state(state, 3964 crtc); 3965 if (!crtcs_port_sync_compatible(ref_crtc_state, 3966 crtc_state)) 3967 continue; 3968 transcoders |= BIT(crtc_state->cpu_transcoder); 3969 } 3970 3971 return transcoders; 3972 } 3973 3974 static int intel_ddi_compute_config_late(struct intel_encoder *encoder, 3975 struct intel_crtc_state *crtc_state, 3976 struct drm_connector_state *conn_state) 3977 { 3978 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3979 struct drm_connector *connector = conn_state->connector; 3980 u8 port_sync_transcoders = 0; 3981 3982 drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] [CRTC:%d:%s]", 3983 encoder->base.base.id, encoder->base.name, 3984 crtc_state->uapi.crtc->base.id, crtc_state->uapi.crtc->name); 3985 3986 if (connector->has_tile) 3987 port_sync_transcoders = intel_ddi_port_sync_transcoders(crtc_state, 3988 connector->tile_group->id); 3989 3990 /* 3991 * eDP transcoders cannot be enslaved; 3992 * always make them a master when present. 3993 */ 3994 if (port_sync_transcoders & BIT(TRANSCODER_EDP)) 3995 crtc_state->master_transcoder = TRANSCODER_EDP; 3996 else 3997 crtc_state->master_transcoder = ffs(port_sync_transcoders) - 1; 3998 3999 if (crtc_state->master_transcoder == crtc_state->cpu_transcoder) { 4000 crtc_state->master_transcoder = INVALID_TRANSCODER; 4001 crtc_state->sync_mode_slaves_mask = 4002 port_sync_transcoders & ~BIT(crtc_state->cpu_transcoder); 4003 } 4004 4005 return 0; 4006 } 4007 4008 static void intel_ddi_encoder_destroy(struct drm_encoder *encoder) 4009 { 4010 struct intel_digital_port *dig_port = enc_to_dig_port(to_intel_encoder(encoder)); 4011 4012 intel_dp_encoder_flush_work(encoder); 4013 4014 drm_encoder_cleanup(encoder); 4015 if (dig_port) 4016 kfree(dig_port->hdcp_port_data.streams); 4017 kfree(dig_port); 4018 } 4019 4020 static void intel_ddi_encoder_reset(struct drm_encoder *encoder) 4021 { 4022 struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder)); 4023 4024 intel_dp->reset_link_params = true; 4025 4026 intel_pps_encoder_reset(intel_dp); 4027 } 4028 4029 static const struct drm_encoder_funcs intel_ddi_funcs = { 4030 .reset = intel_ddi_encoder_reset, 4031 .destroy = intel_ddi_encoder_destroy, 4032 }; 4033 4034 static struct intel_connector * 4035 intel_ddi_init_dp_connector(struct intel_digital_port *dig_port) 4036 { 4037 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 4038 struct intel_connector *connector; 4039 enum port port = dig_port->base.port; 4040 4041 connector = intel_connector_alloc(); 4042 if (!connector) 4043 return NULL; 4044 4045 dig_port->dp.output_reg = DDI_BUF_CTL(port);
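/* Wire up the DDI-specific DP link training hooks; the signal level hook below is chosen per platform. */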
4046 dig_port->dp.prepare_link_retrain = intel_ddi_prepare_link_retrain; 4047 dig_port->dp.set_link_train = intel_ddi_set_link_train; 4048 dig_port->dp.set_idle_link_train = intel_ddi_set_idle_link_train; 4049 4050 if (DISPLAY_VER(dev_priv) >= 12) 4051 dig_port->dp.set_signal_levels = tgl_set_signal_levels; 4052 else if (DISPLAY_VER(dev_priv) >= 11) 4053 dig_port->dp.set_signal_levels = icl_set_signal_levels; 4054 else if (IS_CANNONLAKE(dev_priv)) 4055 dig_port->dp.set_signal_levels = cnl_set_signal_levels; 4056 else if (IS_GEN9_LP(dev_priv)) 4057 dig_port->dp.set_signal_levels = bxt_set_signal_levels; 4058 else 4059 dig_port->dp.set_signal_levels = hsw_set_signal_levels; 4060 4061 dig_port->dp.voltage_max = intel_ddi_dp_voltage_max; 4062 dig_port->dp.preemph_max = intel_ddi_dp_preemph_max; 4063 4064 if (!intel_dp_init_connector(dig_port, connector)) { 4065 kfree(connector); 4066 return NULL; 4067 } 4068 4069 return connector; 4070 } 4071 4072 static int modeset_pipe(struct drm_crtc *crtc, 4073 struct drm_modeset_acquire_ctx *ctx) 4074 { 4075 struct drm_atomic_state *state; 4076 struct drm_crtc_state *crtc_state; 4077 int ret; 4078 4079 state = drm_atomic_state_alloc(crtc->dev); 4080 if (!state) 4081 return -ENOMEM; 4082 4083 state->acquire_ctx = ctx; 4084 4085 crtc_state = drm_atomic_get_crtc_state(state, crtc); 4086 if (IS_ERR(crtc_state)) { 4087 ret = PTR_ERR(crtc_state); 4088 goto out; 4089 } 4090 4091 crtc_state->connectors_changed = true; 4092 4093 ret = drm_atomic_commit(state); 4094 out: 4095 drm_atomic_state_put(state); 4096 4097 return ret; 4098 } 4099 4100 static int intel_hdmi_reset_link(struct intel_encoder *encoder, 4101 struct drm_modeset_acquire_ctx *ctx) 4102 { 4103 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4104 struct intel_hdmi *hdmi = enc_to_intel_hdmi(encoder); 4105 struct intel_connector *connector = hdmi->attached_connector; 4106 struct i2c_adapter *adapter = 4107 intel_gmbus_get_adapter(dev_priv, hdmi->ddc_bus); 4108 struct drm_connector_state *conn_state; 4109 struct intel_crtc_state *crtc_state; 4110 struct intel_crtc *crtc; 4111 u8 config; 4112 int ret; 4113 4114 if (!connector || connector->base.status != connector_status_connected) 4115 return 0; 4116 4117 ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex, 4118 ctx); 4119 if (ret) 4120 return ret; 4121 4122 conn_state = connector->base.state; 4123 4124 crtc = to_intel_crtc(conn_state->crtc); 4125 if (!crtc) 4126 return 0; 4127 4128 ret = drm_modeset_lock(&crtc->base.mutex, ctx); 4129 if (ret) 4130 return ret; 4131 4132 crtc_state = to_intel_crtc_state(crtc->base.state); 4133 4134 drm_WARN_ON(&dev_priv->drm, 4135 !intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)); 4136 4137 if (!crtc_state->hw.active) 4138 return 0; 4139 4140 if (!crtc_state->hdmi_high_tmds_clock_ratio && 4141 !crtc_state->hdmi_scrambling) 4142 return 0; 4143 4144 if (conn_state->commit && 4145 !try_wait_for_completion(&conn_state->commit->hw_done)) 4146 return 0; 4147 4148 ret = drm_scdc_readb(adapter, SCDC_TMDS_CONFIG, &config); 4149 if (ret < 0) { 4150 drm_err(&dev_priv->drm, "Failed to read TMDS config: %d\n", 4151 ret); 4152 return 0; 4153 } 4154 4155 if (!!(config & SCDC_TMDS_BIT_CLOCK_RATIO_BY_40) == 4156 crtc_state->hdmi_high_tmds_clock_ratio && 4157 !!(config & SCDC_SCRAMBLING_ENABLE) == 4158 crtc_state->hdmi_scrambling) 4159 return 0; 4160 4161 /* 4162 * HDMI 2.0 says that one should not send scrambled data 4163 * prior to configuring the sink scrambling, and that 4164 * TMDS clock/data 
transmission should be suspended when 4165 * changing the TMDS clock rate in the sink. So let's 4166 * just do a full modeset here, even though some sinks 4167 * would be perfectly happy if we were to just reconfigure 4168 * the SCDC settings on the fly. 4169 */ 4170 return modeset_pipe(&crtc->base, ctx); 4171 } 4172 4173 static enum intel_hotplug_state 4174 intel_ddi_hotplug(struct intel_encoder *encoder, 4175 struct intel_connector *connector) 4176 { 4177 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 4178 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4179 struct intel_dp *intel_dp = &dig_port->dp; 4180 enum phy phy = intel_port_to_phy(i915, encoder->port); 4181 bool is_tc = intel_phy_is_tc(i915, phy); 4182 struct drm_modeset_acquire_ctx ctx; 4183 enum intel_hotplug_state state; 4184 int ret; 4185 4186 if (intel_dp->compliance.test_active && 4187 intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) { 4188 intel_dp_phy_test(encoder); 4189 /* just do the PHY test and nothing else */ 4190 return INTEL_HOTPLUG_UNCHANGED; 4191 } 4192 4193 state = intel_encoder_hotplug(encoder, connector); 4194 4195 drm_modeset_acquire_init(&ctx, 0); 4196 4197 for (;;) { 4198 if (connector->base.connector_type == DRM_MODE_CONNECTOR_HDMIA) 4199 ret = intel_hdmi_reset_link(encoder, &ctx); 4200 else 4201 ret = intel_dp_retrain_link(encoder, &ctx); 4202 4203 if (ret == -EDEADLK) { 4204 drm_modeset_backoff(&ctx); 4205 continue; 4206 } 4207 4208 break; 4209 } 4210 4211 drm_modeset_drop_locks(&ctx); 4212 drm_modeset_acquire_fini(&ctx); 4213 drm_WARN(encoder->base.dev, ret, 4214 "Acquiring modeset locks failed with %i\n", ret); 4215 4216 /* 4217 * Unpowered type-c dongles can take some time to boot and become 4218 * responsive, so here we give those dongles some time to power up 4219 * and then retry the probe. 4220 * 4221 * On many platforms the HDMI live state signal is known to be 4222 * unreliable, so we can't use it to detect if a sink is connected or 4223 * not. Instead we detect if it's connected based on whether we can 4224 * read the EDID or not. That in turn has a problem during disconnect, 4225 * since the HPD interrupt may be raised before the DDC lines get 4226 * disconnected (due to how the required lengths of the DDC vs. HPD 4227 * connector pins are specified) and so we'll still be able to get a 4228 * valid EDID. To solve this, schedule another detection cycle if this 4229 * time around we didn't detect any change in the sink's connection 4230 * status. 4231 * 4232 * Type-c connectors which get their HPD signal deasserted then 4233 * reasserted, without unplugging/replugging the sink from the 4234 * connector, introduce a delay until the AUX channel communication 4235 * becomes functional. Retry the detection for 5 seconds on type-c 4236 * connectors to account for this delay. 4237 */ 4238 if (state == INTEL_HOTPLUG_UNCHANGED && 4239 connector->hotplug_retries < (is_tc ?
5 : 1) && 4240 !dig_port->dp.is_mst) 4241 state = INTEL_HOTPLUG_RETRY; 4242 4243 return state; 4244 } 4245 4246 static bool lpt_digital_port_connected(struct intel_encoder *encoder) 4247 { 4248 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4249 u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin]; 4250 4251 return intel_de_read(dev_priv, SDEISR) & bit; 4252 } 4253 4254 static bool hsw_digital_port_connected(struct intel_encoder *encoder) 4255 { 4256 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4257 u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin]; 4258 4259 return intel_de_read(dev_priv, DEISR) & bit; 4260 } 4261 4262 static bool bdw_digital_port_connected(struct intel_encoder *encoder) 4263 { 4264 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4265 u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin]; 4266 4267 return intel_de_read(dev_priv, GEN8_DE_PORT_ISR) & bit; 4268 } 4269 4270 static struct intel_connector * 4271 intel_ddi_init_hdmi_connector(struct intel_digital_port *dig_port) 4272 { 4273 struct intel_connector *connector; 4274 enum port port = dig_port->base.port; 4275 4276 connector = intel_connector_alloc(); 4277 if (!connector) 4278 return NULL; 4279 4280 dig_port->hdmi.hdmi_reg = DDI_BUF_CTL(port); 4281 intel_hdmi_init_connector(dig_port, connector); 4282 4283 return connector; 4284 } 4285 4286 static bool intel_ddi_a_force_4_lanes(struct intel_digital_port *dig_port) 4287 { 4288 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 4289 4290 if (dig_port->base.port != PORT_A) 4291 return false; 4292 4293 if (dig_port->saved_port_bits & DDI_A_4_LANES) 4294 return false; 4295 4296 /* Broxton/Geminilake: Bspec says that DDI_A_4_LANES is the only 4297 * supported configuration 4298 */ 4299 if (IS_GEN9_LP(dev_priv)) 4300 return true; 4301 4302 /* Cannonlake: Most SKUs don't support DDI_E, and the only 4303 * one that does also has a full A/E split called 4304 * DDI_F, which makes DDI_E useless. However, for this 4305 * case let's trust the VBT info. 4306 */ 4307 if (IS_CANNONLAKE(dev_priv) && 4308 !intel_bios_is_port_present(dev_priv, PORT_E)) 4309 return true; 4310 4311 return false; 4312 } 4313 4314 static int 4315 intel_ddi_max_lanes(struct intel_digital_port *dig_port) 4316 { 4317 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 4318 enum port port = dig_port->base.port; 4319 int max_lanes = 4; 4320 4321 if (DISPLAY_VER(dev_priv) >= 11) 4322 return max_lanes; 4323 4324 if (port == PORT_A || port == PORT_E) { 4325 if (intel_de_read(dev_priv, DDI_BUF_CTL(PORT_A)) & DDI_A_4_LANES) 4326 max_lanes = port == PORT_A ? 4 : 0; 4327 else 4328 /* Both A and E share 2 lanes */ 4329 max_lanes = 2; 4330 } 4331 4332 /* 4333 * Some BIOS might fail to set this bit on port A if eDP 4334 * wasn't lit up at boot. Force this bit set when needed 4335 * so we use the proper lane count for our calculations.
4336 */ 4337 if (intel_ddi_a_force_4_lanes(dig_port)) { 4338 drm_dbg_kms(&dev_priv->drm, 4339 "Forcing DDI_A_4_LANES for port A\n"); 4340 dig_port->saved_port_bits |= DDI_A_4_LANES; 4341 max_lanes = 4; 4342 } 4343 4344 return max_lanes; 4345 } 4346 4347 static bool hti_uses_phy(struct drm_i915_private *i915, enum phy phy) 4348 { 4349 return i915->hti_state & HDPORT_ENABLED && 4350 i915->hti_state & HDPORT_DDI_USED(phy); 4351 } 4352 4353 static enum hpd_pin dg1_hpd_pin(struct drm_i915_private *dev_priv, 4354 enum port port) 4355 { 4356 if (port >= PORT_TC1) 4357 return HPD_PORT_C + port - PORT_TC1; 4358 else 4359 return HPD_PORT_A + port - PORT_A; 4360 } 4361 4362 static enum hpd_pin tgl_hpd_pin(struct drm_i915_private *dev_priv, 4363 enum port port) 4364 { 4365 if (port >= PORT_TC1) 4366 return HPD_PORT_TC1 + port - PORT_TC1; 4367 else 4368 return HPD_PORT_A + port - PORT_A; 4369 } 4370 4371 static enum hpd_pin rkl_hpd_pin(struct drm_i915_private *dev_priv, 4372 enum port port) 4373 { 4374 if (HAS_PCH_TGP(dev_priv)) 4375 return tgl_hpd_pin(dev_priv, port); 4376 4377 if (port >= PORT_TC1) 4378 return HPD_PORT_C + port - PORT_TC1; 4379 else 4380 return HPD_PORT_A + port - PORT_A; 4381 } 4382 4383 static enum hpd_pin icl_hpd_pin(struct drm_i915_private *dev_priv, 4384 enum port port) 4385 { 4386 if (port >= PORT_C) 4387 return HPD_PORT_TC1 + port - PORT_C; 4388 else 4389 return HPD_PORT_A + port - PORT_A; 4390 } 4391 4392 static enum hpd_pin ehl_hpd_pin(struct drm_i915_private *dev_priv, 4393 enum port port) 4394 { 4395 if (port == PORT_D) 4396 return HPD_PORT_A; 4397 4398 if (HAS_PCH_MCC(dev_priv)) 4399 return icl_hpd_pin(dev_priv, port); 4400 4401 return HPD_PORT_A + port - PORT_A; 4402 } 4403 4404 static enum hpd_pin cnl_hpd_pin(struct drm_i915_private *dev_priv, 4405 enum port port) 4406 { 4407 if (port == PORT_F) 4408 return HPD_PORT_E; 4409 4410 return HPD_PORT_A + port - PORT_A; 4411 } 4412 4413 static enum hpd_pin skl_hpd_pin(struct drm_i915_private *dev_priv, enum port port) 4414 { 4415 if (HAS_PCH_TGP(dev_priv)) 4416 return icl_hpd_pin(dev_priv, port); 4417 4418 return HPD_PORT_A + port - PORT_A; 4419 } 4420 4421 static bool intel_ddi_is_tc(struct drm_i915_private *i915, enum port port) 4422 { 4423 if (DISPLAY_VER(i915) >= 12) 4424 return port >= PORT_TC1; 4425 else if (DISPLAY_VER(i915) >= 11) 4426 return port >= PORT_C; 4427 else 4428 return false; 4429 } 4430 4431 #define port_tc_name(port) ((port) - PORT_TC1 + '1') 4432 #define tc_port_name(tc_port) ((tc_port) - TC_PORT_1 + '1') 4433 4434 void intel_ddi_init(struct drm_i915_private *dev_priv, enum port port) 4435 { 4436 struct intel_digital_port *dig_port; 4437 struct intel_encoder *encoder; 4438 const struct intel_bios_encoder_data *devdata; 4439 bool init_hdmi, init_dp; 4440 enum phy phy = intel_port_to_phy(dev_priv, port); 4441 4442 /* 4443 * On platforms with HTI (aka HDPORT), if it's enabled at boot it may 4444 * have taken over some of the PHYs and made them unavailable to the 4445 * driver. In that case we should skip initializing the corresponding 4446 * outputs. 
4447 */ 4448 if (hti_uses_phy(dev_priv, phy)) { 4449 drm_dbg_kms(&dev_priv->drm, "PORT %c / PHY %c reserved by HTI\n", 4450 port_name(port), phy_name(phy)); 4451 return; 4452 } 4453 4454 devdata = intel_bios_encoder_data_lookup(dev_priv, port); 4455 if (!devdata) { 4456 drm_dbg_kms(&dev_priv->drm, 4457 "VBT says port %c is not present\n", 4458 port_name(port)); 4459 return; 4460 } 4461 4462 init_hdmi = intel_bios_encoder_supports_dvi(devdata) || 4463 intel_bios_encoder_supports_hdmi(devdata); 4464 init_dp = intel_bios_encoder_supports_dp(devdata); 4465 4466 if (intel_bios_is_lspcon_present(dev_priv, port)) { 4467 /* 4468 * Lspcon device needs to be driven with DP connector 4469 * with special detection sequence. So make sure DP 4470 * is initialized before lspcon. 4471 */ 4472 init_dp = true; 4473 init_hdmi = false; 4474 drm_dbg_kms(&dev_priv->drm, "VBT says port %c has lspcon\n", 4475 port_name(port)); 4476 } 4477 4478 if (!init_dp && !init_hdmi) { 4479 drm_dbg_kms(&dev_priv->drm, 4480 "VBT says port %c is not DVI/HDMI/DP compatible, respect it\n", 4481 port_name(port)); 4482 return; 4483 } 4484 4485 dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL); 4486 if (!dig_port) 4487 return; 4488 4489 encoder = &dig_port->base; 4490 encoder->devdata = devdata; 4491 4492 if (DISPLAY_VER(dev_priv) >= 12) { 4493 enum tc_port tc_port = intel_port_to_tc(dev_priv, port); 4494 4495 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 4496 DRM_MODE_ENCODER_TMDS, 4497 "DDI %s%c/PHY %s%c", 4498 port >= PORT_TC1 ? "TC" : "", 4499 port >= PORT_TC1 ? port_tc_name(port) : port_name(port), 4500 tc_port != TC_PORT_NONE ? "TC" : "", 4501 tc_port != TC_PORT_NONE ? tc_port_name(tc_port) : phy_name(phy)); 4502 } else if (DISPLAY_VER(dev_priv) >= 11) { 4503 enum tc_port tc_port = intel_port_to_tc(dev_priv, port); 4504 4505 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 4506 DRM_MODE_ENCODER_TMDS, 4507 "DDI %c%s/PHY %s%c", 4508 port_name(port), 4509 port >= PORT_C ? " (TC)" : "", 4510 tc_port != TC_PORT_NONE ? "TC" : "", 4511 tc_port != TC_PORT_NONE ? 
tc_port_name(tc_port) : phy_name(phy)); 4512 } else { 4513 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 4514 DRM_MODE_ENCODER_TMDS, 4515 "DDI %c/PHY %c", port_name(port), phy_name(phy)); 4516 } 4517 4518 mutex_init(&dig_port->hdcp_mutex); 4519 dig_port->num_hdcp_streams = 0; 4520 4521 encoder->hotplug = intel_ddi_hotplug; 4522 encoder->compute_output_type = intel_ddi_compute_output_type; 4523 encoder->compute_config = intel_ddi_compute_config; 4524 encoder->compute_config_late = intel_ddi_compute_config_late; 4525 encoder->enable = intel_enable_ddi; 4526 encoder->pre_pll_enable = intel_ddi_pre_pll_enable; 4527 encoder->pre_enable = intel_ddi_pre_enable; 4528 encoder->disable = intel_disable_ddi; 4529 encoder->post_disable = intel_ddi_post_disable; 4530 encoder->update_pipe = intel_ddi_update_pipe; 4531 encoder->get_hw_state = intel_ddi_get_hw_state; 4532 encoder->sync_state = intel_ddi_sync_state; 4533 encoder->initial_fastset_check = intel_ddi_initial_fastset_check; 4534 encoder->suspend = intel_dp_encoder_suspend; 4535 encoder->shutdown = intel_dp_encoder_shutdown; 4536 encoder->get_power_domains = intel_ddi_get_power_domains; 4537 4538 encoder->type = INTEL_OUTPUT_DDI; 4539 encoder->power_domain = intel_port_to_power_domain(port); 4540 encoder->port = port; 4541 encoder->cloneable = 0; 4542 encoder->pipe_mask = ~0; 4543 4544 if (IS_ALDERLAKE_S(dev_priv)) { 4545 encoder->enable_clock = adls_ddi_enable_clock; 4546 encoder->disable_clock = adls_ddi_disable_clock; 4547 encoder->is_clock_enabled = adls_ddi_is_clock_enabled; 4548 encoder->get_config = adls_ddi_get_config; 4549 } else if (IS_ROCKETLAKE(dev_priv)) { 4550 encoder->enable_clock = rkl_ddi_enable_clock; 4551 encoder->disable_clock = rkl_ddi_disable_clock; 4552 encoder->is_clock_enabled = rkl_ddi_is_clock_enabled; 4553 encoder->get_config = rkl_ddi_get_config; 4554 } else if (IS_DG1(dev_priv)) { 4555 encoder->enable_clock = dg1_ddi_enable_clock; 4556 encoder->disable_clock = dg1_ddi_disable_clock; 4557 encoder->is_clock_enabled = dg1_ddi_is_clock_enabled; 4558 encoder->get_config = dg1_ddi_get_config; 4559 } else if (IS_JSL_EHL(dev_priv)) { 4560 if (intel_ddi_is_tc(dev_priv, port)) { 4561 encoder->enable_clock = jsl_ddi_tc_enable_clock; 4562 encoder->disable_clock = jsl_ddi_tc_disable_clock; 4563 encoder->is_clock_enabled = jsl_ddi_tc_is_clock_enabled; 4564 encoder->get_config = icl_ddi_combo_get_config; 4565 } else { 4566 encoder->enable_clock = icl_ddi_combo_enable_clock; 4567 encoder->disable_clock = icl_ddi_combo_disable_clock; 4568 encoder->is_clock_enabled = icl_ddi_combo_is_clock_enabled; 4569 encoder->get_config = icl_ddi_combo_get_config; 4570 } 4571 } else if (DISPLAY_VER(dev_priv) >= 11) { 4572 if (intel_ddi_is_tc(dev_priv, port)) { 4573 encoder->enable_clock = icl_ddi_tc_enable_clock; 4574 encoder->disable_clock = icl_ddi_tc_disable_clock; 4575 encoder->is_clock_enabled = icl_ddi_tc_is_clock_enabled; 4576 encoder->get_config = icl_ddi_tc_get_config; 4577 } else { 4578 encoder->enable_clock = icl_ddi_combo_enable_clock; 4579 encoder->disable_clock = icl_ddi_combo_disable_clock; 4580 encoder->is_clock_enabled = icl_ddi_combo_is_clock_enabled; 4581 encoder->get_config = icl_ddi_combo_get_config; 4582 } 4583 } else if (IS_CANNONLAKE(dev_priv)) { 4584 encoder->enable_clock = cnl_ddi_enable_clock; 4585 encoder->disable_clock = cnl_ddi_disable_clock; 4586 encoder->is_clock_enabled = cnl_ddi_is_clock_enabled; 4587 encoder->get_config = cnl_ddi_get_config; 4588 } else if (IS_GEN9_LP(dev_priv)) { 4589 /* 
BXT/GLK have fixed PLL->port mapping */ 4590 encoder->get_config = bxt_ddi_get_config; 4591 } else if (IS_GEN9_BC(dev_priv)) { 4592 encoder->enable_clock = skl_ddi_enable_clock; 4593 encoder->disable_clock = skl_ddi_disable_clock; 4594 encoder->is_clock_enabled = skl_ddi_is_clock_enabled; 4595 encoder->get_config = skl_ddi_get_config; 4596 } else if (IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv)) { 4597 encoder->enable_clock = hsw_ddi_enable_clock; 4598 encoder->disable_clock = hsw_ddi_disable_clock; 4599 encoder->is_clock_enabled = hsw_ddi_is_clock_enabled; 4600 encoder->get_config = hsw_ddi_get_config; 4601 } 4602 4603 if (IS_DG1(dev_priv)) 4604 encoder->hpd_pin = dg1_hpd_pin(dev_priv, port); 4605 else if (IS_ROCKETLAKE(dev_priv)) 4606 encoder->hpd_pin = rkl_hpd_pin(dev_priv, port); 4607 else if (DISPLAY_VER(dev_priv) >= 12) 4608 encoder->hpd_pin = tgl_hpd_pin(dev_priv, port); 4609 else if (IS_JSL_EHL(dev_priv)) 4610 encoder->hpd_pin = ehl_hpd_pin(dev_priv, port); 4611 else if (IS_DISPLAY_VER(dev_priv, 11)) 4612 encoder->hpd_pin = icl_hpd_pin(dev_priv, port); 4613 else if (IS_DISPLAY_VER(dev_priv, 10)) 4614 encoder->hpd_pin = cnl_hpd_pin(dev_priv, port); 4615 else if (IS_DISPLAY_VER(dev_priv, 9)) 4616 encoder->hpd_pin = skl_hpd_pin(dev_priv, port); 4617 else 4618 encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port); 4619 4620 if (DISPLAY_VER(dev_priv) >= 11) 4621 dig_port->saved_port_bits = 4622 intel_de_read(dev_priv, DDI_BUF_CTL(port)) 4623 & DDI_BUF_PORT_REVERSAL; 4624 else 4625 dig_port->saved_port_bits = 4626 intel_de_read(dev_priv, DDI_BUF_CTL(port)) 4627 & (DDI_BUF_PORT_REVERSAL | DDI_A_4_LANES); 4628 4629 if (intel_bios_is_lane_reversal_needed(dev_priv, port)) 4630 dig_port->saved_port_bits |= DDI_BUF_PORT_REVERSAL; 4631 4632 dig_port->dp.output_reg = INVALID_MMIO_REG; 4633 dig_port->max_lanes = intel_ddi_max_lanes(dig_port); 4634 dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port); 4635 4636 if (intel_phy_is_tc(dev_priv, phy)) { 4637 bool is_legacy = 4638 !intel_bios_encoder_supports_typec_usb(devdata) && 4639 !intel_bios_encoder_supports_tbt(devdata); 4640 4641 intel_tc_port_init(dig_port, is_legacy); 4642 4643 encoder->update_prepare = intel_ddi_update_prepare; 4644 encoder->update_complete = intel_ddi_update_complete; 4645 } 4646 4647 drm_WARN_ON(&dev_priv->drm, port > PORT_I); 4648 dig_port->ddi_io_power_domain = POWER_DOMAIN_PORT_DDI_A_IO + 4649 port - PORT_A; 4650 4651 if (init_dp) { 4652 if (!intel_ddi_init_dp_connector(dig_port)) 4653 goto err; 4654 4655 dig_port->hpd_pulse = intel_dp_hpd_pulse; 4656 4657 /* Splitter enable for eDP MSO is supported for pipe A only. */ 4658 if (dig_port->dp.mso_link_count) 4659 encoder->pipe_mask = BIT(PIPE_A); 4660 } 4661 4662 /* In theory we don't need the encoder->type check, but leave it just in 4663 * case we have some really bad VBTs... 
*/ 4664 if (encoder->type != INTEL_OUTPUT_EDP && init_hdmi) { 4665 if (!intel_ddi_init_hdmi_connector(dig_port)) 4666 goto err; 4667 } 4668 4669 if (DISPLAY_VER(dev_priv) >= 11) { 4670 if (intel_phy_is_tc(dev_priv, phy)) 4671 dig_port->connected = intel_tc_port_connected; 4672 else 4673 dig_port->connected = lpt_digital_port_connected; 4674 } else if (DISPLAY_VER(dev_priv) >= 8) { 4675 if (port == PORT_A || IS_GEN9_LP(dev_priv)) 4676 dig_port->connected = bdw_digital_port_connected; 4677 else 4678 dig_port->connected = lpt_digital_port_connected; 4679 } else { 4680 if (port == PORT_A) 4681 dig_port->connected = hsw_digital_port_connected; 4682 else 4683 dig_port->connected = lpt_digital_port_connected; 4684 } 4685 4686 intel_infoframe_init(dig_port); 4687 4688 return; 4689 4690 err: 4691 drm_encoder_cleanup(&encoder->base); 4692 kfree(dig_port); 4693 } 4694
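/*
 * Illustrative sketch only (not wired up anywhere): a helper that could
 * factor out the lane-count decode that intel_ddi_read_func_ctl() above
 * performs twice, for the SST and MST cases. It relies only on the existing
 * DDI_PORT_WIDTH_MASK/DDI_PORT_WIDTH_SHIFT definitions; the helper name is
 * hypothetical and not part of the driver.
 */
static inline int intel_ddi_func_ctl_lane_count(u32 trans_ddi_func_ctl)
{
	/* Mirrors the "(port width field) + 1" decode used in intel_ddi_read_func_ctl(). */
	return ((trans_ddi_func_ctl & DDI_PORT_WIDTH_MASK) >>
		DDI_PORT_WIDTH_SHIFT) + 1;
}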