// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 * DisplayPort support for G4x,ILK,SNB,IVB,VLV,CHV (HSW+ handled by the DDI code).
 */

#include "g4x_dp.h"
#include "intel_audio.h"
#include "intel_connector.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_link_training.h"
#include "intel_dpio_phy.h"
#include "intel_fifo_underrun.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_panel.h"
#include "intel_pps.h"
#include "intel_sideband.h"

struct dp_link_dpll {
	int clock;
	struct dpll dpll;
};

static const struct dp_link_dpll g4x_dpll[] = {
	{ 162000,
	  { .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8 } },
	{ 270000,
	  { .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2 } }
};

static const struct dp_link_dpll pch_dpll[] = {
	{ 162000,
	  { .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9 } },
	{ 270000,
	  { .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8 } }
};

static const struct dp_link_dpll vlv_dpll[] = {
	{ 162000,
	  { .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81 } },
	{ 270000,
	  { .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27 } }
};

/*
 * CHV supports eDP 1.4, which has more link rates.
 * Only the fixed rates are provided below; the variable rates are excluded.
 */
static const struct dp_link_dpll chv_dpll[] = {
	/*
	 * CHV requires programming a fractional division for m2.
	 * m2 is stored in fixed point format using the formula below:
	 * (m2_int << 22) | m2_fraction
	 */
	{ 162000,	/* m2_int = 32, m2_fraction = 1677722 */
	  { .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a } },
	{ 270000,	/* m2_int = 27, m2_fraction = 0 */
	  { .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 } },
};

const struct dpll *vlv_get_dpll(struct drm_i915_private *i915)
{
	return IS_CHERRYVIEW(i915) ? &chv_dpll[0].dpll : &vlv_dpll[0].dpll;
}

void g4x_dp_set_clock(struct intel_encoder *encoder,
		      struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct dp_link_dpll *divisor = NULL;
	int i, count = 0;

	if (IS_G4X(dev_priv)) {
		divisor = g4x_dpll;
		count = ARRAY_SIZE(g4x_dpll);
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		divisor = pch_dpll;
		count = ARRAY_SIZE(pch_dpll);
	} else if (IS_CHERRYVIEW(dev_priv)) {
		divisor = chv_dpll;
		count = ARRAY_SIZE(chv_dpll);
	} else if (IS_VALLEYVIEW(dev_priv)) {
		divisor = vlv_dpll;
		count = ARRAY_SIZE(vlv_dpll);
	}

	if (divisor && count) {
		for (i = 0; i < count; i++) {
			if (pipe_config->port_clock == divisor[i].clock) {
				pipe_config->dpll = divisor[i].dpll;
				pipe_config->clock_set = true;
				break;
			}
		}
	}
}

static void intel_dp_prepare(struct intel_encoder *encoder,
			     const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;

	intel_dp_set_link_params(intel_dp,
				 pipe_config->port_clock,
				 pipe_config->lane_count);

	/*
	 * There are four kinds of DP registers:
	 *	IBX PCH
	 *	SNB CPU
	 *	IVB CPU
	 *	CPT PCH
	 *
	 * IBX PCH and CPU are the same for almost everything,
	 * except that the CPU DP PLL is configured in this
	 * register
	 *
	 * CPT PCH is quite different, having many bits moved
	 * to the TRANS_DP_CTL register instead. That
	 * configuration happens (oddly) in ilk_pch_enable
	 */

	/* Preserve the BIOS-computed detected bit. This is
	 * supposed to be read-only.
	 */
	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;

	/* Handle DP bits in common between all three register formats */
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);

	/* Split out the IBX/CPU vs CPT settings */

	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) {
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe);
	} else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp;

		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		trans_dp = intel_de_read(dev_priv, TRANS_DP_CTL(crtc->pipe));
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			trans_dp |= TRANS_DP_ENH_FRAMING;
		else
			trans_dp &= ~TRANS_DP_ENH_FRAMING;
		intel_de_write(dev_priv, TRANS_DP_CTL(crtc->pipe), trans_dp);
	} else {
		if (IS_G4X(dev_priv) && pipe_config->limited_color_range)
			intel_dp->DP |= DP_COLOR_RANGE_16_235;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (IS_CHERRYVIEW(dev_priv))
			intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe);
		else
			intel_dp->DP |= DP_PIPE_SEL(crtc->pipe);
	}
}

static void assert_dp_port(struct intel_dp *intel_dp, bool state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	bool cur_state = intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN;

	I915_STATE_WARN(cur_state != state,
			"[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n",
			dig_port->base.base.base.id, dig_port->base.base.name,
			onoff(state), onoff(cur_state));
}
#define assert_dp_port_disabled(d) assert_dp_port((d), false)

static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
{
	bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE;

	I915_STATE_WARN(cur_state != state,
			"eDP PLL state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
#define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
#define assert_edp_pll_disabled(d) assert_edp_pll((d), false)

static void ilk_edp_pll_on(struct intel_dp *intel_dp,
			   const struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_pipe_disabled(dev_priv, pipe_config->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_disabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n",
		    pipe_config->port_clock);

	intel_dp->DP &= ~DP_PLL_FREQ_MASK;

	if (pipe_config->port_clock == 162000)
		intel_dp->DP |= DP_PLL_FREQ_162MHZ;
	else
		intel_dp->DP |= DP_PLL_FREQ_270MHZ;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(500);

	/*
	 * [DevILK] Workaround required when enabling DP PLL
	 * while a pipe is enabled going to FDI:
	 * 1. Wait for the start of vertical blank on the enabled pipe going to FDI
	 * 2. Program DP PLL enable
	 */
	if (IS_IRONLAKE(dev_priv))
		intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe);

	intel_dp->DP |= DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static void ilk_edp_pll_off(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_pipe_disabled(dev_priv, old_crtc_state->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_enabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n");

	intel_dp->DP &= ~DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv,
				 enum port port, enum pipe *pipe)
{
	enum pipe p;

	for_each_pipe(dev_priv, p) {
		u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p));

		if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) {
			*pipe = p;
			return true;
		}
	}

	drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n",
		    port_name(port));

	/* must initialize pipe to something for the asserts */
	*pipe = PIPE_A;

	return false;
}

bool g4x_dp_port_enabled(struct drm_i915_private *dev_priv,
			 i915_reg_t dp_reg, enum port port,
			 enum pipe *pipe)
{
	bool ret;
	u32 val;

	val = intel_de_read(dev_priv, dp_reg);

	ret = val & DP_PORT_EN;

	/* asserts want to know the pipe even if the port is disabled */
	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		*pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB;
	else if (HAS_PCH_CPT(dev_priv) && port != PORT_A)
		ret &= cpt_dp_port_selected(dev_priv, port, pipe);
	else if (IS_CHERRYVIEW(dev_priv))
		*pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV;
	else
		*pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT;

	return ret;
}

static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
				  enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	intel_wakeref_t wakeref;
	bool ret;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	ret = g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				  encoder->port, pipe);

	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);

	return ret;
}

static void intel_dp_get_config(struct intel_encoder *encoder,
				struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 tmp, flags = 0;
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);

	if (encoder->type == INTEL_OUTPUT_EDP)
		pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
	else
		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);

	tmp = intel_de_read(dev_priv, intel_dp->output_reg);

	pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;

	if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp = intel_de_read(dev_priv,
					     TRANS_DP_CTL(crtc->pipe));

		if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	} else {
		if (tmp & DP_SYNC_HS_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (tmp & DP_SYNC_VS_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	}

	pipe_config->hw.adjusted_mode.flags |= flags;

	if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235)
		pipe_config->limited_color_range = true;

	pipe_config->lane_count =
		((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;

	intel_dp_get_m_n(crtc, pipe_config);

	if (port == PORT_A) {
		if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
			pipe_config->port_clock = 162000;
		else
			pipe_config->port_clock = 270000;
	}

	pipe_config->hw.adjusted_mode.crtc_clock =
		intel_dotclock_calculate(pipe_config->port_clock,
					 &pipe_config->dp_m_n);

	if (intel_dp_is_edp(intel_dp) && dev_priv->vbt.edp.bpp &&
	    pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) {
		/*
		 * This is a big fat ugly hack.
		 *
		 * Some machines in UEFI boot mode provide us a VBT that has 18
		 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons
		 * unknown we fail to light up. Yet the same BIOS boots up with
		 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as
		 * max, not what it tells us to use.
		 *
		 * Note: This will still be broken if the eDP panel is not lit
		 * up by the BIOS, and thus we can't get the mode at module
		 * load.
		 */
		drm_dbg_kms(&dev_priv->drm,
			    "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
			    pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp);
		dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp;
	}
}

static void
intel_dp_link_down(struct intel_encoder *encoder,
		   const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	enum port port = encoder->port;
	u32 DP = intel_dp->DP;

	if (drm_WARN_ON(&dev_priv->drm,
			(intel_de_read(dev_priv, intel_dp->output_reg) &
			 DP_PORT_EN) == 0))
		return;

	drm_dbg_kms(&dev_priv->drm, "\n");

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A)) {
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
		DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
	} else {
		DP &= ~DP_LINK_TRAIN_MASK;
		DP |= DP_LINK_TRAIN_PAT_IDLE;
	}
	intel_de_write(dev_priv, intel_dp->output_reg, DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
	intel_de_write(dev_priv, intel_dp->output_reg, DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	/*
	 * HW workaround for IBX, we need to move the port
	 * to transcoder A after disabling it to allow the
	 * matching HDMI port to be enabled on transcoder A.
	 */
	if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) {
		/*
		 * We get CPU/PCH FIFO underruns on the other pipe when
		 * doing the workaround. Sweep them under the rug.
		 */
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);

		/* always enable with pattern 1 (as per spec) */
		DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK);
		DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) |
			DP_LINK_TRAIN_PAT_1;
		intel_de_write(dev_priv, intel_dp->output_reg, DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		DP &= ~DP_PORT_EN;
		intel_de_write(dev_priv, intel_dp->output_reg, DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_wait_for_vblank_if_active(dev_priv, PIPE_A);
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
	}

	msleep(intel_dp->pps.panel_power_down_delay);

	intel_dp->DP = DP;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = INVALID_PIPE;
	}
}

static void intel_disable_dp(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *old_crtc_state,
			     const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_dp->link_trained = false;

	if (old_crtc_state->has_audio)
		intel_audio_codec_disable(encoder,
					  old_crtc_state, old_conn_state);

	/*
	 * Make sure the panel is off before trying to change the mode.
	 * But also ensure that we have vdd while we switch off the panel.
	 */
	intel_pps_vdd_on(intel_dp);
	intel_edp_backlight_off(old_conn_state);
	intel_dp_set_power(intel_dp, DP_SET_POWER_D3);
	intel_pps_off(intel_dp);
}

static void g4x_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void vlv_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void g4x_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	/*
	 * Bspec does not list a specific disable sequence for g4x DP.
	 * Follow the ilk+ sequence (disable pipe before the port) for
	 * g4x DP as it does not suffer from underruns like the normal
	 * g4x modeset sequence (disable pipe after the port).
	 */
	intel_dp_link_down(encoder, old_crtc_state);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_off(intel_dp, old_crtc_state);
}

static void vlv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	intel_dp_link_down(encoder, old_crtc_state);
}

static void chv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	intel_dp_link_down(encoder, old_crtc_state);

	vlv_dpio_get(dev_priv);

	/* Assert data lane reset */
	chv_data_lane_soft_reset(encoder, old_crtc_state, true);

	vlv_dpio_put(dev_priv);
}

static void
cpt_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 *DP = &intel_dp->DP;

	*DP &= ~DP_LINK_TRAIN_MASK_CPT;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		*DP |= DP_LINK_TRAIN_OFF_CPT;
		break;
	case DP_TRAINING_PATTERN_1:
		*DP |= DP_LINK_TRAIN_PAT_1_CPT;
		break;
	case DP_TRAINING_PATTERN_2:
		*DP |= DP_LINK_TRAIN_PAT_2_CPT;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void
g4x_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u32 *DP = &intel_dp->DP;

	*DP &= ~DP_LINK_TRAIN_MASK;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		*DP |= DP_LINK_TRAIN_OFF;
		break;
	case DP_TRAINING_PATTERN_1:
		*DP |= DP_LINK_TRAIN_PAT_1;
		break;
	case DP_TRAINING_PATTERN_2:
		*DP |= DP_LINK_TRAIN_PAT_2;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void intel_dp_enable_port(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/* enable with pattern 1 (as per spec) */

	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
					       DP_TRAINING_PATTERN_1);

	/*
	 * Magic for VLV/CHV. We _must_ first set up the register
	 * without actually enabling the port, and then do another
	 * write to enable the port. Otherwise link training will
	 * fail when the power sequencer is freshly used for this port.
	 */
	intel_dp->DP |= DP_PORT_EN;
	if (crtc_state->has_audio)
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void intel_enable_dp(struct intel_atomic_state *state,
			    struct intel_encoder *encoder,
			    const struct intel_crtc_state *pipe_config,
			    const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg);
	enum pipe pipe = crtc->pipe;
	intel_wakeref_t wakeref;

	if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN))
		return;

	with_intel_pps_lock(intel_dp, wakeref) {
		if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
			vlv_pps_init(encoder, pipe_config);

		intel_dp_enable_port(intel_dp, pipe_config);

		intel_pps_vdd_on_unlocked(intel_dp);
		intel_pps_on_unlocked(intel_dp);
		intel_pps_vdd_off_unlocked(intel_dp, true);
	}

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		unsigned int lane_mask = 0x0;

		if (IS_CHERRYVIEW(dev_priv))
			lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);

		vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
				    lane_mask);
	}

	intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
	intel_dp_configure_protocol_converter(intel_dp, pipe_config);
	intel_dp_check_frl_training(intel_dp);
	intel_dp_pcon_dsc_configure(intel_dp, pipe_config);
	intel_dp_start_link_train(intel_dp, pipe_config);
	intel_dp_stop_link_train(intel_dp, pipe_config);

	if (pipe_config->has_audio) {
		drm_dbg(&dev_priv->drm, "Enabling DP audio on pipe %c\n",
			pipe_name(pipe));
		intel_audio_codec_enable(encoder, pipe_config, conn_state);
	}
}

static void g4x_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_enable_dp(state, encoder, pipe_config, conn_state);
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void vlv_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void g4x_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	intel_dp_prepare(encoder, pipe_config);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_on(intel_dp, pipe_config);
}

static void vlv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	vlv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);
}

static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	vlv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	chv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);

	/* Second common lane will stay alive on its own now */
	chv_phy_release_cl2_override(encoder);
}

static void chv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	chv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_dp_post_pll_disable(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *old_crtc_state,
				    const struct drm_connector_state *old_conn_state)
{
	chv_phy_post_pll_disable(encoder, old_crtc_state);
}

static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
}

static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
}

static u8 intel_dp_preemph_max_2(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_2;
}

static u8 intel_dp_preemph_max_3(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
}

static void vlv_set_signal_levels(struct intel_dp *intel_dp,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	unsigned long demph_reg_value, preemph_reg_value,
		uniqtranscale_reg_value;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		preemph_reg_value = 0x0004000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x552AB83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5548B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B245555;
			uniqtranscale_reg_value = 0x5560B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x5598DA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		preemph_reg_value = 0x0002000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5552B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404848;
			uniqtranscale_reg_value = 0x5580B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		preemph_reg_value = 0x0000000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B305555;
			uniqtranscale_reg_value = 0x5570B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B2B4040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		preemph_reg_value = 0x0006000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x1B405555;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	vlv_set_phy_signal_level(encoder, crtc_state,
				 demph_reg_value, preemph_reg_value,
				 uniqtranscale_reg_value, 0);
}

static void chv_set_signal_levels(struct intel_dp *intel_dp,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	u32 deemph_reg_value, margin_reg_value;
	bool uniq_trans_scale = false;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 128;
			margin_reg_value = 52;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 128;
			margin_reg_value = 77;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 128;
			margin_reg_value = 102;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			deemph_reg_value = 128;
			margin_reg_value = 154;
			uniq_trans_scale = true;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 85;
			margin_reg_value = 78;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 85;
			margin_reg_value = 116;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 85;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 64;
			margin_reg_value = 104;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 64;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 43;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	chv_set_phy_signal_level(encoder, crtc_state,
				 deemph_reg_value, margin_reg_value,
				 uniq_trans_scale);
}

static u32 g4x_signal_levels(u8 train_set)
{
	u32 signal_levels = 0;

	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
	default:
		signal_levels |= DP_VOLTAGE_0_4;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
		signal_levels |= DP_VOLTAGE_0_6;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
		signal_levels |= DP_VOLTAGE_0_8;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
		signal_levels |= DP_VOLTAGE_1_2;
		break;
	}
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
	default:
		signal_levels |= DP_PRE_EMPHASIS_0;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		signal_levels |= DP_PRE_EMPHASIS_3_5;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		signal_levels |= DP_PRE_EMPHASIS_6;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		signal_levels |= DP_PRE_EMPHASIS_9_5;
		break;
	}
	return signal_levels;
}

static void
g4x_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = g4x_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK);
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* SNB CPU eDP voltage swing and pre-emphasis control */
static u32 snb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}

static void
snb_cpu_edp_set_signal_levels(struct intel_dp *intel_dp,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = snb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* IVB CPU eDP voltage swing and pre-emphasis control */
static u32 ivb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}

static void
ivb_cpu_edp_set_signal_levels(struct intel_dp *intel_dp,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = ivb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/*
 * If the display is now connected, check the link status; there have been
 * known issues of link loss triggering a long pulse.
 *
 * Some sinks (eg. ASUS PB287Q) seem to perform some
 * weird HPD ping pong during modesets. So we can apparently
 * end up with HPD going low during a modeset, and then
 * going back up soon after. And once that happens we must
 * retrain the link to get a picture. That's in case no
 * userspace component reacted to the intermittent HPD dip.
 */
static enum intel_hotplug_state
intel_dp_hotplug(struct intel_encoder *encoder,
		 struct intel_connector *connector)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_modeset_acquire_ctx ctx;
	enum intel_hotplug_state state;
	int ret;

	if (intel_dp->compliance.test_active &&
	    intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
		intel_dp_phy_test(encoder);
		/* just do the PHY test and nothing else */
		return INTEL_HOTPLUG_UNCHANGED;
	}

	state = intel_encoder_hotplug(encoder, connector);

	drm_modeset_acquire_init(&ctx, 0);

	for (;;) {
		ret = intel_dp_retrain_link(encoder, &ctx);

		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);

	/*
	 * Keeping it consistent with intel_ddi_hotplug() and
	 * intel_hdmi_hotplug().
	 */
	if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
		state = INTEL_HOTPLUG_RETRY;

	return state;
}

static bool ibx_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, SDEISR) & bit;
}

static bool g4x_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

static bool gm45_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_GM45;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

static bool ilk_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, DEISR) & bit;
}

static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	kfree(enc_to_dig_port(to_intel_encoder(encoder)));
}

enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	enum pipe pipe;

	if (g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				encoder->port, &pipe))
		return pipe;

	return INVALID_PIPE;
}

static void intel_dp_encoder_reset(struct drm_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));

	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);

	intel_dp->reset_link_params = true;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
	}

	intel_pps_encoder_reset(intel_dp);
}

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_dp_encoder_destroy,
};

bool g4x_dp_init(struct drm_i915_private *dev_priv,
		 i915_reg_t output_reg, enum port port)
{
	struct intel_digital_port *dig_port;
	struct intel_encoder *intel_encoder;
	struct drm_encoder *encoder;
	struct intel_connector *intel_connector;

	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
		return false;

	intel_connector = intel_connector_alloc();
	if (!intel_connector)
		goto err_connector_alloc;

	intel_encoder = &dig_port->base;
	encoder = &intel_encoder->base;

	mutex_init(&dig_port->hdcp_mutex);

	if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
			     &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
			     "DP %c", port_name(port)))
		goto err_encoder_init;

	intel_encoder->hotplug = intel_dp_hotplug;
	intel_encoder->compute_config = intel_dp_compute_config;
	intel_encoder->get_hw_state = intel_dp_get_hw_state;
	intel_encoder->get_config = intel_dp_get_config;
	intel_encoder->sync_state = intel_dp_sync_state;
	intel_encoder->initial_fastset_check = intel_dp_initial_fastset_check;
	intel_encoder->update_pipe = intel_panel_update_backlight;
	intel_encoder->suspend = intel_dp_encoder_suspend;
	intel_encoder->shutdown = intel_dp_encoder_shutdown;
	if (IS_CHERRYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
		intel_encoder->pre_enable = chv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = chv_post_disable_dp;
		intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
	} else if (IS_VALLEYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
		intel_encoder->pre_enable = vlv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = vlv_post_disable_dp;
	} else {
		intel_encoder->pre_enable = g4x_pre_enable_dp;
		intel_encoder->enable = g4x_enable_dp;
		intel_encoder->disable = g4x_disable_dp;
		intel_encoder->post_disable = g4x_post_disable_dp;
	}

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A))
		dig_port->dp.set_link_train = cpt_set_link_train;
	else
		dig_port->dp.set_link_train = g4x_set_link_train;

	if (IS_CHERRYVIEW(dev_priv))
		dig_port->dp.set_signal_levels = chv_set_signal_levels;
	else if (IS_VALLEYVIEW(dev_priv))
		dig_port->dp.set_signal_levels = vlv_set_signal_levels;
	else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		dig_port->dp.set_signal_levels = ivb_cpu_edp_set_signal_levels;
	else if (IS_SANDYBRIDGE(dev_priv) && port == PORT_A)
		dig_port->dp.set_signal_levels = snb_cpu_edp_set_signal_levels;
	else
		dig_port->dp.set_signal_levels = g4x_set_signal_levels;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
	    (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
		dig_port->dp.preemph_max = intel_dp_preemph_max_3;
		dig_port->dp.voltage_max = intel_dp_voltage_max_3;
	} else {
		dig_port->dp.preemph_max = intel_dp_preemph_max_2;
		dig_port->dp.voltage_max = intel_dp_voltage_max_2;
	}

	dig_port->dp.output_reg = output_reg;
	dig_port->max_lanes = 4;

	intel_encoder->type = INTEL_OUTPUT_DP;
	intel_encoder->power_domain = intel_port_to_power_domain(port);
	if (IS_CHERRYVIEW(dev_priv)) {
		if (port == PORT_D)
			intel_encoder->pipe_mask = BIT(PIPE_C);
		else
			intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
	} else {
		intel_encoder->pipe_mask = ~0;
	}
	intel_encoder->cloneable = 0;
	intel_encoder->port = port;
	intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);

	dig_port->hpd_pulse = intel_dp_hpd_pulse;

	if (HAS_GMCH(dev_priv)) {
		if (IS_GM45(dev_priv))
			dig_port->connected = gm45_digital_port_connected;
		else
			dig_port->connected = g4x_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = ilk_digital_port_connected;
		else
			dig_port->connected = ibx_digital_port_connected;
	}

	if (port != PORT_A)
		intel_infoframe_init(dig_port);

	dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);
	if (!intel_dp_init_connector(dig_port, intel_connector))
		goto err_init_connector;

	return true;

err_init_connector:
	drm_encoder_cleanup(encoder);
err_encoder_init:
	kfree(intel_connector);
err_connector_alloc:
	kfree(dig_port);
	return false;
}