// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 * DisplayPort support for G4x,ILK,SNB,IVB,VLV,CHV (HSW+ handled by the DDI code).
 */

#include "g4x_dp.h"
#include "intel_audio.h"
#include "intel_connector.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_link_training.h"
#include "intel_dpio_phy.h"
#include "intel_fifo_underrun.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_panel.h"
#include "intel_pps.h"
#include "intel_sideband.h"

struct dp_link_dpll {
	int clock;
	struct dpll dpll;
};

static const struct dp_link_dpll g4x_dpll[] = {
	{ 162000,
		{ .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8 } },
	{ 270000,
		{ .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2 } }
};

static const struct dp_link_dpll pch_dpll[] = {
	{ 162000,
		{ .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9 } },
	{ 270000,
		{ .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8 } }
};

static const struct dp_link_dpll vlv_dpll[] = {
	{ 162000,
		{ .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81 } },
	{ 270000,
		{ .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27 } }
};

/*
 * CHV supports eDP 1.4, which has more link rates.
 * Only the fixed rates are provided below; the variable rates are excluded.
 */
static const struct dp_link_dpll chv_dpll[] = {
	/*
	 * CHV requires programming a fractional divider for m2.
	 * m2 is stored in fixed point format using the formula below:
	 * (m2_int << 22) | m2_fraction
	 */
	{ 162000,	/* m2_int = 32, m2_fraction = 1677722 */
		{ .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a } },
	{ 270000,	/* m2_int = 27, m2_fraction = 0 */
		{ .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 } },
};

const struct dpll *vlv_get_dpll(struct drm_i915_private *i915)
{
	return IS_CHERRYVIEW(i915) ? &chv_dpll[0].dpll : &vlv_dpll[0].dpll;
}
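
/*
 * g4x_dp_set_clock() looks up the fixed DPLL dividers for the requested DP
 * link rate (162000 or 270000 kHz port clock) in the tables above and copies
 * them into the crtc state; if no entry matches, clock_set is left unset.
 */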

void g4x_dp_set_clock(struct intel_encoder *encoder,
		      struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct dp_link_dpll *divisor = NULL;
	int i, count = 0;

	if (IS_G4X(dev_priv)) {
		divisor = g4x_dpll;
		count = ARRAY_SIZE(g4x_dpll);
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		divisor = pch_dpll;
		count = ARRAY_SIZE(pch_dpll);
	} else if (IS_CHERRYVIEW(dev_priv)) {
		divisor = chv_dpll;
		count = ARRAY_SIZE(chv_dpll);
	} else if (IS_VALLEYVIEW(dev_priv)) {
		divisor = vlv_dpll;
		count = ARRAY_SIZE(vlv_dpll);
	}

	if (divisor && count) {
		for (i = 0; i < count; i++) {
			if (pipe_config->port_clock == divisor[i].clock) {
				pipe_config->dpll = divisor[i].dpll;
				pipe_config->clock_set = true;
				break;
			}
		}
	}
}
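
/*
 * intel_dp_prepare() builds the DP port register value (link parameters,
 * sync polarities, lane count, enhanced framing, pipe/transcoder select)
 * in intel_dp->DP without enabling the port yet.
 */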

static void intel_dp_prepare(struct intel_encoder *encoder,
			     const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;

	intel_dp_set_link_params(intel_dp,
				 pipe_config->port_clock,
				 pipe_config->lane_count);

	/*
	 * There are four kinds of DP registers:
	 * IBX PCH
	 * SNB CPU
	 * IVB CPU
	 * CPT PCH
	 *
	 * IBX PCH and CPU are the same for almost everything,
	 * except that the CPU DP PLL is configured in this
	 * register
	 *
	 * CPT PCH is quite different, having many bits moved
	 * to the TRANS_DP_CTL register instead. That
	 * configuration happens (oddly) in ilk_pch_enable
	 */

	/* Preserve the BIOS-computed detected bit. This is
	 * supposed to be read-only.
	 */
	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;

	/* Handle DP bits in common between all three register formats */
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);

	/* Split out the IBX/CPU vs CPT settings */

	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) {
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe);
	} else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp;

		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		trans_dp = intel_de_read(dev_priv, TRANS_DP_CTL(crtc->pipe));
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			trans_dp |= TRANS_DP_ENH_FRAMING;
		else
			trans_dp &= ~TRANS_DP_ENH_FRAMING;
		intel_de_write(dev_priv, TRANS_DP_CTL(crtc->pipe), trans_dp);
	} else {
		if (IS_G4X(dev_priv) && pipe_config->limited_color_range)
			intel_dp->DP |= DP_COLOR_RANGE_16_235;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (IS_CHERRYVIEW(dev_priv))
			intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe);
		else
			intel_dp->DP |= DP_PIPE_SEL(crtc->pipe);
	}
}

static void assert_dp_port(struct intel_dp *intel_dp, bool state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	bool cur_state = intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN;

	I915_STATE_WARN(cur_state != state,
			"[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n",
			dig_port->base.base.base.id, dig_port->base.base.name,
			onoff(state), onoff(cur_state));
}
#define assert_dp_port_disabled(d) assert_dp_port((d), false)

static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
{
	bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE;

	I915_STATE_WARN(cur_state != state,
			"eDP PLL state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
#define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
#define assert_edp_pll_disabled(d) assert_edp_pll((d), false)
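
/*
 * On ILK/SNB/IVB the eDP PLL feeding CPU port A is controlled through the
 * DP_A register. ilk_edp_pll_on()/ilk_edp_pll_off() toggle it; the pipe and
 * the port must both be disabled while doing so (asserted below).
 */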

static void ilk_edp_pll_on(struct intel_dp *intel_dp,
			   const struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_pipe_disabled(dev_priv, pipe_config->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_disabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n",
		    pipe_config->port_clock);

	intel_dp->DP &= ~DP_PLL_FREQ_MASK;

	if (pipe_config->port_clock == 162000)
		intel_dp->DP |= DP_PLL_FREQ_162MHZ;
	else
		intel_dp->DP |= DP_PLL_FREQ_270MHZ;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(500);

	/*
	 * [DevILK] Workaround required when enabling DP PLL
	 * while a pipe is enabled going to FDI:
	 * 1. Wait for the start of vertical blank on the enabled pipe going to FDI
	 * 2. Program DP PLL enable
	 */
	if (IS_IRONLAKE(dev_priv))
		intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe);

	intel_dp->DP |= DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static void ilk_edp_pll_off(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_pipe_disabled(dev_priv, old_crtc_state->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_enabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n");

	intel_dp->DP &= ~DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv,
				 enum port port, enum pipe *pipe)
{
	enum pipe p;

	for_each_pipe(dev_priv, p) {
		u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p));

		if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) {
			*pipe = p;
			return true;
		}
	}

	drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n",
		    port_name(port));

	/* must initialize pipe to something for the asserts */
	*pipe = PIPE_A;

	return false;
}

bool g4x_dp_port_enabled(struct drm_i915_private *dev_priv,
			 i915_reg_t dp_reg, enum port port,
			 enum pipe *pipe)
{
	bool ret;
	u32 val;

	val = intel_de_read(dev_priv, dp_reg);

	ret = val & DP_PORT_EN;

	/* asserts want to know the pipe even if the port is disabled */
	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		*pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB;
	else if (HAS_PCH_CPT(dev_priv) && port != PORT_A)
		ret &= cpt_dp_port_selected(dev_priv, port, pipe);
	else if (IS_CHERRYVIEW(dev_priv))
		*pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV;
	else
		*pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT;

	return ret;
}

static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
				  enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	intel_wakeref_t wakeref;
	bool ret;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	ret = g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				  encoder->port, pipe);

	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);

	return ret;
}
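
/*
 * intel_dp_get_config() reads the current port configuration (sync
 * polarities, lane count, link clock, audio, color range) back from the
 * hardware into the crtc state.
 */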

static void intel_dp_get_config(struct intel_encoder *encoder,
				struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 tmp, flags = 0;
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);

	if (encoder->type == INTEL_OUTPUT_EDP)
		pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
	else
		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);

	tmp = intel_de_read(dev_priv, intel_dp->output_reg);

	pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;

	if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp = intel_de_read(dev_priv,
					     TRANS_DP_CTL(crtc->pipe));

		if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	} else {
		if (tmp & DP_SYNC_HS_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (tmp & DP_SYNC_VS_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	}

	pipe_config->hw.adjusted_mode.flags |= flags;

	if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235)
		pipe_config->limited_color_range = true;

	pipe_config->lane_count =
		((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;

	intel_dp_get_m_n(crtc, pipe_config);

	if (port == PORT_A) {
		if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
			pipe_config->port_clock = 162000;
		else
			pipe_config->port_clock = 270000;
	}

	pipe_config->hw.adjusted_mode.crtc_clock =
		intel_dotclock_calculate(pipe_config->port_clock,
					 &pipe_config->dp_m_n);

	if (intel_dp_is_edp(intel_dp) && dev_priv->vbt.edp.bpp &&
	    pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) {
		/*
		 * This is a big fat ugly hack.
		 *
		 * Some machines in UEFI boot mode provide us a VBT that has 18
		 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons
		 * unknown we fail to light up. Yet the same BIOS boots up with
		 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as
		 * max, not what it tells us to use.
		 *
		 * Note: This will still be broken if the eDP panel is not lit
		 * up by the BIOS, and thus we can't get the mode at module
		 * load.
		 */
		drm_dbg_kms(&dev_priv->drm,
			    "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
			    pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp);
		dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp;
	}
}
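
/*
 * intel_dp_link_down() puts the link into the idle training pattern and then
 * turns the port off, including the IBX transcoder A workaround further down.
 */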

static void
intel_dp_link_down(struct intel_encoder *encoder,
		   const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	enum port port = encoder->port;
	u32 DP = intel_dp->DP;

	if (drm_WARN_ON(&dev_priv->drm,
			(intel_de_read(dev_priv, intel_dp->output_reg) &
			 DP_PORT_EN) == 0))
		return;

	drm_dbg_kms(&dev_priv->drm, "\n");

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A)) {
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
		DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
	} else {
		DP &= ~DP_LINK_TRAIN_MASK;
		DP |= DP_LINK_TRAIN_PAT_IDLE;
	}
	intel_de_write(dev_priv, intel_dp->output_reg, DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
	intel_de_write(dev_priv, intel_dp->output_reg, DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	/*
	 * HW workaround for IBX, we need to move the port
	 * to transcoder A after disabling it to allow the
	 * matching HDMI port to be enabled on transcoder A.
	 */
	if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) {
		/*
		 * We get CPU/PCH FIFO underruns on the other pipe when
		 * doing the workaround. Sweep them under the rug.
		 */
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);

		/* always enable with pattern 1 (as per spec) */
		DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK);
		DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) |
			DP_LINK_TRAIN_PAT_1;
		intel_de_write(dev_priv, intel_dp->output_reg, DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		DP &= ~DP_PORT_EN;
		intel_de_write(dev_priv, intel_dp->output_reg, DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_wait_for_vblank_if_active(dev_priv, PIPE_A);
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
	}

	msleep(intel_dp->pps.panel_power_down_delay);

	intel_dp->DP = DP;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = INVALID_PIPE;
	}
}

static void intel_disable_dp(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *old_crtc_state,
			     const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_dp->link_trained = false;

	if (old_crtc_state->has_audio)
		intel_audio_codec_disable(encoder,
					  old_crtc_state, old_conn_state);

	/*
	 * Make sure the panel is off before trying to change the mode.
	 * But also ensure that we have vdd while we switch off the panel.
	 */
	intel_pps_vdd_on(intel_dp);
	intel_edp_backlight_off(old_conn_state);
	intel_dp_set_power(intel_dp, DP_SET_POWER_D3);
	intel_pps_off(intel_dp);
}

static void g4x_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void vlv_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void g4x_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	/*
	 * Bspec does not list a specific disable sequence for g4x DP.
	 * Follow the ilk+ sequence (disable pipe before the port) for
	 * g4x DP as it does not suffer from underruns like the normal
	 * g4x modeset sequence (disable pipe after the port).
	 */
	intel_dp_link_down(encoder, old_crtc_state);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_off(intel_dp, old_crtc_state);
}

static void vlv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	intel_dp_link_down(encoder, old_crtc_state);
}

static void chv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	intel_dp_link_down(encoder, old_crtc_state);

	vlv_dpio_get(dev_priv);

	/* Assert data lane reset */
	chv_data_lane_soft_reset(encoder, old_crtc_state, true);

	vlv_dpio_put(dev_priv);
}
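
/*
 * The link training pattern is programmed via different bitfields depending
 * on the register layout: IVB CPU port A and CPT PCH ports use the *_CPT
 * encoding, everything else uses the original g4x one. The matching helper
 * is selected in g4x_dp_init().
 */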

static void
cpt_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 *DP = &intel_dp->DP;

	*DP &= ~DP_LINK_TRAIN_MASK_CPT;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		*DP |= DP_LINK_TRAIN_OFF_CPT;
		break;
	case DP_TRAINING_PATTERN_1:
		*DP |= DP_LINK_TRAIN_PAT_1_CPT;
		break;
	case DP_TRAINING_PATTERN_2:
		*DP |= DP_LINK_TRAIN_PAT_2_CPT;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void
g4x_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 *DP = &intel_dp->DP;

	*DP &= ~DP_LINK_TRAIN_MASK;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		*DP |= DP_LINK_TRAIN_OFF;
		break;
	case DP_TRAINING_PATTERN_1:
		*DP |= DP_LINK_TRAIN_PAT_1;
		break;
	case DP_TRAINING_PATTERN_2:
		*DP |= DP_LINK_TRAIN_PAT_2;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void intel_dp_enable_port(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/* enable with pattern 1 (as per spec) */

	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
					       DP_TRAINING_PATTERN_1);

	/*
	 * Magic for VLV/CHV. We _must_ first set up the register
	 * without actually enabling the port, and then do another
	 * write to enable the port. Otherwise link training will
	 * fail when the power sequencer is freshly used for this port.
	 */
	intel_dp->DP |= DP_PORT_EN;
	if (crtc_state->has_audio)
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}
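
/*
 * intel_enable_dp() handles the common enable sequence: set up the power
 * sequencer (VLV/CHV), enable the port, run the panel power on sequence,
 * train the link and finally enable audio if requested.
 */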

static void intel_enable_dp(struct intel_atomic_state *state,
			    struct intel_encoder *encoder,
			    const struct intel_crtc_state *pipe_config,
			    const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg);
	enum pipe pipe = crtc->pipe;
	intel_wakeref_t wakeref;

	if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN))
		return;

	with_intel_pps_lock(intel_dp, wakeref) {
		if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
			vlv_pps_init(encoder, pipe_config);

		intel_dp_enable_port(intel_dp, pipe_config);

		intel_pps_vdd_on_unlocked(intel_dp);
		intel_pps_on_unlocked(intel_dp);
		intel_pps_vdd_off_unlocked(intel_dp, true);
	}

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		unsigned int lane_mask = 0x0;

		if (IS_CHERRYVIEW(dev_priv))
			lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);

		vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
				    lane_mask);
	}

	intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
	intel_dp_configure_protocol_converter(intel_dp, pipe_config);
	intel_dp_check_frl_training(intel_dp);
	intel_dp_pcon_dsc_configure(intel_dp, pipe_config);
	intel_dp_start_link_train(intel_dp, pipe_config);
	intel_dp_stop_link_train(intel_dp, pipe_config);

	if (pipe_config->has_audio) {
		drm_dbg(&dev_priv->drm, "Enabling DP audio on pipe %c\n",
			pipe_name(pipe));
		intel_audio_codec_enable(encoder, pipe_config, conn_state);
	}
}

static void g4x_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_enable_dp(state, encoder, pipe_config, conn_state);
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void vlv_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void g4x_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	intel_dp_prepare(encoder, pipe_config);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_on(intel_dp, pipe_config);
}

static void vlv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	vlv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);
}

static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	vlv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	chv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);

	/* Second common lane will stay alive on its own now */
	chv_phy_release_cl2_override(encoder);
}

static void chv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	chv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_dp_post_pll_disable(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *old_crtc_state,
				    const struct drm_connector_state *old_conn_state)
{
	chv_phy_post_pll_disable(encoder, old_crtc_state);
}

static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
}

static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
}

static u8 intel_dp_preemph_max_2(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_2;
}

static u8 intel_dp_preemph_max_3(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
}
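
/*
 * vlv_set_signal_levels() and chv_set_signal_levels() translate the requested
 * voltage swing / pre-emphasis level (train_set) into DPIO PHY register
 * values; the tables below cover the supported combinations.
 */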

static void vlv_set_signal_levels(struct intel_dp *intel_dp,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	unsigned long demph_reg_value, preemph_reg_value,
		uniqtranscale_reg_value;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		preemph_reg_value = 0x0004000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x552AB83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5548B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B245555;
			uniqtranscale_reg_value = 0x5560B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x5598DA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		preemph_reg_value = 0x0002000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5552B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404848;
			uniqtranscale_reg_value = 0x5580B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		preemph_reg_value = 0x0000000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B305555;
			uniqtranscale_reg_value = 0x5570B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B2B4040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		preemph_reg_value = 0x0006000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x1B405555;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	vlv_set_phy_signal_level(encoder, crtc_state,
				 demph_reg_value, preemph_reg_value,
				 uniqtranscale_reg_value, 0);
}

static void chv_set_signal_levels(struct intel_dp *intel_dp,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	u32 deemph_reg_value, margin_reg_value;
	bool uniq_trans_scale = false;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 128;
			margin_reg_value = 52;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 128;
			margin_reg_value = 77;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 128;
			margin_reg_value = 102;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			deemph_reg_value = 128;
			margin_reg_value = 154;
			uniq_trans_scale = true;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 85;
			margin_reg_value = 78;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 85;
			margin_reg_value = 116;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 85;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 64;
			margin_reg_value = 104;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 64;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 43;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	chv_set_phy_signal_level(encoder, crtc_state,
				 deemph_reg_value, margin_reg_value,
				 uniq_trans_scale);
}
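
/*
 * On g4x/ilk ports the voltage swing and pre-emphasis are encoded directly
 * in the DP port register via the DP_VOLTAGE_* and DP_PRE_EMPHASIS_* fields.
 */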

static u32 g4x_signal_levels(u8 train_set)
{
	u32 signal_levels = 0;

	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
	default:
		signal_levels |= DP_VOLTAGE_0_4;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
		signal_levels |= DP_VOLTAGE_0_6;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
		signal_levels |= DP_VOLTAGE_0_8;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
		signal_levels |= DP_VOLTAGE_1_2;
		break;
	}
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
	default:
		signal_levels |= DP_PRE_EMPHASIS_0;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		signal_levels |= DP_PRE_EMPHASIS_3_5;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		signal_levels |= DP_PRE_EMPHASIS_6;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		signal_levels |= DP_PRE_EMPHASIS_9_5;
		break;
	}
	return signal_levels;
}

static void
g4x_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = g4x_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK);
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}
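
/*
 * SNB and IVB CPU eDP (port A) use dedicated EDP_LINK_TRAIN_* fields in the
 * DP_A register instead of the g4x DP_VOLTAGE/DP_PRE_EMPHASIS encoding,
 * hence the separate helpers below.
 */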

/* SNB CPU eDP voltage swing and pre-emphasis control */
static u32 snb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}

static void
snb_cpu_edp_set_signal_levels(struct intel_dp *intel_dp,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = snb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* IVB CPU eDP voltage swing and pre-emphasis control */
static u32 ivb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}

static void
ivb_cpu_edp_set_signal_levels(struct intel_dp *intel_dp,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = ivb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/*
 * If the display is now connected, check the link status; there have been
 * known issues of link loss triggering a long pulse.
 *
 * Some sinks (e.g. ASUS PB287Q) seem to perform some
 * weird HPD ping pong during modesets. So we can apparently
 * end up with HPD going low during a modeset, and then
 * going back up soon after. And once that happens we must
 * retrain the link to get a picture. That's in case no
 * userspace component reacted to the intermittent HPD dip.
 */
static enum intel_hotplug_state
intel_dp_hotplug(struct intel_encoder *encoder,
		 struct intel_connector *connector)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_modeset_acquire_ctx ctx;
	enum intel_hotplug_state state;
	int ret;

	if (intel_dp->compliance.test_active &&
	    intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
		intel_dp_phy_test(encoder);
		/* just do the PHY test and nothing else */
		return INTEL_HOTPLUG_UNCHANGED;
	}

	state = intel_encoder_hotplug(encoder, connector);

	drm_modeset_acquire_init(&ctx, 0);

	for (;;) {
		ret = intel_dp_retrain_link(encoder, &ctx);

		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);

	/*
	 * Keeping it consistent with intel_ddi_hotplug() and
	 * intel_hdmi_hotplug().
	 */
	if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
		state = INTEL_HOTPLUG_RETRY;

	return state;
}
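
/*
 * Hotplug live-status checks: PCH ports read SDEISR (ibx), the ilk+ CPU
 * port A reads DEISR, and g4x/gm45 read PORT_HOTPLUG_STAT with
 * platform-specific bit definitions. The matching helper is selected in
 * g4x_dp_init().
 */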

static bool ibx_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, SDEISR) & bit;
}

static bool g4x_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

static bool gm45_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_GM45;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

static bool ilk_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, DEISR) & bit;
}

static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	kfree(enc_to_dig_port(to_intel_encoder(encoder)));
}

enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	enum pipe pipe;

	if (g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				encoder->port, &pipe))
		return pipe;

	return INVALID_PIPE;
}

static void intel_dp_encoder_reset(struct drm_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));

	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);

	intel_dp->reset_link_params = true;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
	}

	intel_pps_encoder_reset(intel_dp);
}

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_dp_encoder_destroy,
};
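
/*
 * g4x_dp_init() creates the encoder and connector for a pre-DDI DP port and
 * wires up the platform-specific enable/disable, link training and signal
 * level hooks declared above.
 */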

bool g4x_dp_init(struct drm_i915_private *dev_priv,
		 i915_reg_t output_reg, enum port port)
{
	struct intel_digital_port *dig_port;
	struct intel_encoder *intel_encoder;
	struct drm_encoder *encoder;
	struct intel_connector *intel_connector;

	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
		return false;

	intel_connector = intel_connector_alloc();
	if (!intel_connector)
		goto err_connector_alloc;

	intel_encoder = &dig_port->base;
	encoder = &intel_encoder->base;

	mutex_init(&dig_port->hdcp_mutex);

	if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
			     &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
			     "DP %c", port_name(port)))
		goto err_encoder_init;

	intel_encoder->hotplug = intel_dp_hotplug;
	intel_encoder->compute_config = intel_dp_compute_config;
	intel_encoder->get_hw_state = intel_dp_get_hw_state;
	intel_encoder->get_config = intel_dp_get_config;
	intel_encoder->sync_state = intel_dp_sync_state;
	intel_encoder->initial_fastset_check = intel_dp_initial_fastset_check;
	intel_encoder->update_pipe = intel_panel_update_backlight;
	intel_encoder->suspend = intel_dp_encoder_suspend;
	intel_encoder->shutdown = intel_dp_encoder_shutdown;
	if (IS_CHERRYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
		intel_encoder->pre_enable = chv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = chv_post_disable_dp;
		intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
	} else if (IS_VALLEYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
		intel_encoder->pre_enable = vlv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = vlv_post_disable_dp;
	} else {
		intel_encoder->pre_enable = g4x_pre_enable_dp;
		intel_encoder->enable = g4x_enable_dp;
		intel_encoder->disable = g4x_disable_dp;
		intel_encoder->post_disable = g4x_post_disable_dp;
	}

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A))
		dig_port->dp.set_link_train = cpt_set_link_train;
	else
		dig_port->dp.set_link_train = g4x_set_link_train;

	if (IS_CHERRYVIEW(dev_priv))
		dig_port->dp.set_signal_levels = chv_set_signal_levels;
	else if (IS_VALLEYVIEW(dev_priv))
		dig_port->dp.set_signal_levels = vlv_set_signal_levels;
	else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		dig_port->dp.set_signal_levels = ivb_cpu_edp_set_signal_levels;
	else if (IS_SANDYBRIDGE(dev_priv) && port == PORT_A)
		dig_port->dp.set_signal_levels = snb_cpu_edp_set_signal_levels;
	else
		dig_port->dp.set_signal_levels = g4x_set_signal_levels;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
	    (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
		dig_port->dp.preemph_max = intel_dp_preemph_max_3;
		dig_port->dp.voltage_max = intel_dp_voltage_max_3;
	} else {
		dig_port->dp.preemph_max = intel_dp_preemph_max_2;
		dig_port->dp.voltage_max = intel_dp_voltage_max_2;
	}

	dig_port->dp.output_reg = output_reg;
	dig_port->max_lanes = 4;

	intel_encoder->type = INTEL_OUTPUT_DP;
	intel_encoder->power_domain = intel_port_to_power_domain(port);
	if (IS_CHERRYVIEW(dev_priv)) {
		if (port == PORT_D)
			intel_encoder->pipe_mask = BIT(PIPE_C);
		else
			intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
	} else {
		intel_encoder->pipe_mask = ~0;
	}
	intel_encoder->cloneable = 0;
	intel_encoder->port = port;
	intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);

	dig_port->hpd_pulse = intel_dp_hpd_pulse;
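
	/* Pick the live-status helper matching the platform's hotplug registers. */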

	if (HAS_GMCH(dev_priv)) {
		if (IS_GM45(dev_priv))
			dig_port->connected = gm45_digital_port_connected;
		else
			dig_port->connected = g4x_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = ilk_digital_port_connected;
		else
			dig_port->connected = ibx_digital_port_connected;
	}

	if (port != PORT_A)
		intel_infoframe_init(dig_port);

	dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);
	if (!intel_dp_init_connector(dig_port, intel_connector))
		goto err_init_connector;

	return true;

err_init_connector:
	drm_encoder_cleanup(encoder);
err_encoder_init:
	kfree(intel_connector);
err_connector_alloc:
	kfree(dig_port);
	return false;
}