// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 * DisplayPort support for G4x, ILK, SNB, IVB, VLV, CHV (HSW+ is handled by the DDI code).
 */

#include "g4x_dp.h"
#include "intel_audio.h"
#include "intel_backlight.h"
#include "intel_connector.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_link_training.h"
#include "intel_dpio_phy.h"
#include "intel_fifo_underrun.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_pps.h"
#include "vlv_sideband.h"

struct dp_link_dpll {
	int clock;
	struct dpll dpll;
};

static const struct dp_link_dpll g4x_dpll[] = {
	{ 162000,
		{ .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8 } },
	{ 270000,
		{ .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2 } }
};

static const struct dp_link_dpll pch_dpll[] = {
	{ 162000,
		{ .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9 } },
	{ 270000,
		{ .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8 } }
};

static const struct dp_link_dpll vlv_dpll[] = {
	{ 162000,
		{ .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81 } },
	{ 270000,
		{ .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27 } }
};

/*
 * CHV supports eDP 1.4, which has more link rates.
 * Below we only list the fixed rates, excluding the variable rates.
 */
static const struct dp_link_dpll chv_dpll[] = {
	/*
	 * CHV requires programming fractional division for m2.
	 * m2 is stored in fixed point format using the formula below:
	 * (m2_int << 22) | m2_fraction
	 */
	{ 162000,	/* m2_int = 32, m2_fraction = 1677722 */
		{ .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a } },
	{ 270000,	/* m2_int = 27, m2_fraction = 0 */
		{ .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 } },
};

const struct dpll *vlv_get_dpll(struct drm_i915_private *i915)
{
	return IS_CHERRYVIEW(i915) ? &chv_dpll[0].dpll : &vlv_dpll[0].dpll;
}

void g4x_dp_set_clock(struct intel_encoder *encoder,
		      struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct dp_link_dpll *divisor = NULL;
	int i, count = 0;

	if (IS_G4X(dev_priv)) {
		divisor = g4x_dpll;
		count = ARRAY_SIZE(g4x_dpll);
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		divisor = pch_dpll;
		count = ARRAY_SIZE(pch_dpll);
	} else if (IS_CHERRYVIEW(dev_priv)) {
		divisor = chv_dpll;
		count = ARRAY_SIZE(chv_dpll);
	} else if (IS_VALLEYVIEW(dev_priv)) {
		divisor = vlv_dpll;
		count = ARRAY_SIZE(vlv_dpll);
	}

	if (divisor && count) {
		for (i = 0; i < count; i++) {
			if (pipe_config->port_clock == divisor[i].clock) {
				pipe_config->dpll = divisor[i].dpll;
				pipe_config->clock_set = true;
				break;
			}
		}
	}
}

static void intel_dp_prepare(struct intel_encoder *encoder,
			     const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;

	intel_dp_set_link_params(intel_dp,
				 pipe_config->port_clock,
				 pipe_config->lane_count);

	/*
	 * There are four kinds of DP registers:
	 *	IBX PCH
	 *	SNB CPU
	 *	IVB CPU
	 *	CPT PCH
	 *
	 * IBX PCH and CPU are the same for almost everything,
	 * except that the CPU DP PLL is configured in this
	 * register
	 *
	 * CPT PCH is quite different, having many bits moved
	 * to the TRANS_DP_CTL register instead. That
	 * configuration happens (oddly) in ilk_pch_enable
	 */

	/* Preserve the BIOS-computed detected bit. This is
	 * supposed to be read-only.
	 */
	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;

	/* Handle DP bits in common between all three register formats */
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);

	/* Split out the IBX/CPU vs CPT settings */

	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) {
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe);
	} else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp;

		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		trans_dp = intel_de_read(dev_priv, TRANS_DP_CTL(crtc->pipe));
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			trans_dp |= TRANS_DP_ENH_FRAMING;
		else
			trans_dp &= ~TRANS_DP_ENH_FRAMING;
		intel_de_write(dev_priv, TRANS_DP_CTL(crtc->pipe), trans_dp);
	} else {
		if (IS_G4X(dev_priv) && pipe_config->limited_color_range)
			intel_dp->DP |= DP_COLOR_RANGE_16_235;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (IS_CHERRYVIEW(dev_priv))
			intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe);
		else
			intel_dp->DP |= DP_PIPE_SEL(crtc->pipe);
	}
}

static void assert_dp_port(struct intel_dp *intel_dp, bool state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	bool cur_state = intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN;

	I915_STATE_WARN(cur_state != state,
			"[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n",
			dig_port->base.base.base.id, dig_port->base.base.name,
			onoff(state), onoff(cur_state));
}
#define assert_dp_port_disabled(d) assert_dp_port((d), false)

static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
{
	bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE;

	I915_STATE_WARN(cur_state != state,
			"eDP PLL state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
#define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
#define assert_edp_pll_disabled(d) assert_edp_pll((d), false)

static void ilk_edp_pll_on(struct intel_dp *intel_dp,
			   const struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_transcoder_disabled(dev_priv, pipe_config->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_disabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n",
		    pipe_config->port_clock);

	intel_dp->DP &= ~DP_PLL_FREQ_MASK;

	if (pipe_config->port_clock == 162000)
		intel_dp->DP |= DP_PLL_FREQ_162MHZ;
	else
		intel_dp->DP |= DP_PLL_FREQ_270MHZ;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
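	/* Presumably lets the newly selected PLL frequency settle before the
	 * PLL is enabled below; the delay value is carried over from the
	 * original sequence.
	 */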
	udelay(500);

	/*
	 * [DevILK] Work around required when enabling DP PLL
	 * while a pipe is enabled going to FDI:
	 * 1. Wait for the start of vertical blank on the enabled pipe going to FDI
	 * 2. Program DP PLL enable
	 */
	if (IS_IRONLAKE(dev_priv))
		intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe);

	intel_dp->DP |= DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static void ilk_edp_pll_off(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_transcoder_disabled(dev_priv, old_crtc_state->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_enabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n");

	intel_dp->DP &= ~DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv,
				 enum port port, enum pipe *pipe)
{
	enum pipe p;

	for_each_pipe(dev_priv, p) {
		u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p));

		if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) {
			*pipe = p;
			return true;
		}
	}

	drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n",
		    port_name(port));

	/* must initialize pipe to something for the asserts */
	*pipe = PIPE_A;

	return false;
}

bool g4x_dp_port_enabled(struct drm_i915_private *dev_priv,
			 i915_reg_t dp_reg, enum port port,
			 enum pipe *pipe)
{
	bool ret;
	u32 val;

	val = intel_de_read(dev_priv, dp_reg);

	ret = val & DP_PORT_EN;

	/* asserts want to know the pipe even if the port is disabled */
	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		*pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB;
	else if (HAS_PCH_CPT(dev_priv) && port != PORT_A)
		ret &= cpt_dp_port_selected(dev_priv, port, pipe);
	else if (IS_CHERRYVIEW(dev_priv))
		*pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV;
	else
		*pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT;

	return ret;
}

static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
				  enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	intel_wakeref_t wakeref;
	bool ret;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	ret = g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				  encoder->port, pipe);

	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);

	return ret;
}

static void intel_dp_get_config(struct intel_encoder *encoder,
				struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 tmp, flags = 0;
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);

	if (encoder->type == INTEL_OUTPUT_EDP)
		pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
	else
		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);

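	/* Decode sync polarities, audio enable, lane count, etc. from the port register */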
	tmp = intel_de_read(dev_priv, intel_dp->output_reg);

	pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;

	if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp = intel_de_read(dev_priv,
					     TRANS_DP_CTL(crtc->pipe));

		if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	} else {
		if (tmp & DP_SYNC_HS_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (tmp & DP_SYNC_VS_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	}

	pipe_config->hw.adjusted_mode.flags |= flags;

	if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235)
		pipe_config->limited_color_range = true;

	pipe_config->lane_count =
		((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;

	intel_dp_get_m_n(crtc, pipe_config);

	if (port == PORT_A) {
		if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
			pipe_config->port_clock = 162000;
		else
			pipe_config->port_clock = 270000;
	}

	pipe_config->hw.adjusted_mode.crtc_clock =
		intel_dotclock_calculate(pipe_config->port_clock,
					 &pipe_config->dp_m_n);

	if (intel_dp_is_edp(intel_dp) && dev_priv->vbt.edp.bpp &&
	    pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) {
		/*
		 * This is a big fat ugly hack.
		 *
		 * Some machines in UEFI boot mode provide us a VBT that has 18
		 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons
		 * unknown we fail to light up. Yet the same BIOS boots up with
		 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as
		 * max, not what it tells us to use.
		 *
		 * Note: This will still be broken if the eDP panel is not lit
		 * up by the BIOS, and thus we can't get the mode at module
		 * load.
		 */
		drm_dbg_kms(&dev_priv->drm,
			    "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
			    pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp);
		dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp;
	}
}

static void
intel_dp_link_down(struct intel_encoder *encoder,
		   const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	enum port port = encoder->port;

	if (drm_WARN_ON(&dev_priv->drm,
			(intel_de_read(dev_priv, intel_dp->output_reg) &
			 DP_PORT_EN) == 0))
		return;

	drm_dbg_kms(&dev_priv->drm, "\n");

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A)) {
		intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;
		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
	} else {
		intel_dp->DP &= ~DP_LINK_TRAIN_MASK;
		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE;
	}
	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	intel_dp->DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	/*
	 * HW workaround for IBX, we need to move the port
	 * to transcoder A after disabling it to allow the
	 * matching HDMI port to be enabled on transcoder A.
	 */
	if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) {
		/*
		 * We get CPU/PCH FIFO underruns on the other pipe when
		 * doing the workaround. Sweep them under the rug.
		 */
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);

		/* always enable with pattern 1 (as per spec) */
		intel_dp->DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK);
		intel_dp->DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) |
			DP_LINK_TRAIN_PAT_1;
		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_dp->DP &= ~DP_PORT_EN;
		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_wait_for_vblank_if_active(dev_priv, PIPE_A);
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
	}

	msleep(intel_dp->pps.panel_power_down_delay);

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = INVALID_PIPE;
	}
}

static void intel_disable_dp(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *old_crtc_state,
			     const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_dp->link_trained = false;

	if (old_crtc_state->has_audio)
		intel_audio_codec_disable(encoder,
					  old_crtc_state, old_conn_state);

	/*
	 * Make sure the panel is off before trying to change the mode.
	 * But also ensure that we have vdd while we switch off the panel.
	 */
	intel_pps_vdd_on(intel_dp);
	intel_edp_backlight_off(old_conn_state);
	intel_dp_set_power(intel_dp, DP_SET_POWER_D3);
	intel_pps_off(intel_dp);
}

static void g4x_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void vlv_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void g4x_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	/*
	 * Bspec does not list a specific disable sequence for g4x DP.
	 * Follow the ilk+ sequence (disable pipe before the port) for
	 * g4x DP as it does not suffer from underruns like the normal
	 * g4x modeset sequence (disable pipe after the port).
	 */
	intel_dp_link_down(encoder, old_crtc_state);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_off(intel_dp, old_crtc_state);
}

static void vlv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	intel_dp_link_down(encoder, old_crtc_state);
}

static void chv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	intel_dp_link_down(encoder, old_crtc_state);

	vlv_dpio_get(dev_priv);

	/* Assert data lane reset */
	chv_data_lane_soft_reset(encoder, old_crtc_state, true);

	vlv_dpio_put(dev_priv);
}

static void
cpt_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
		break;
	case DP_TRAINING_PATTERN_1:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_1_CPT;
		break;
	case DP_TRAINING_PATTERN_2:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_2_CPT;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void
g4x_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	intel_dp->DP &= ~DP_LINK_TRAIN_MASK;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		intel_dp->DP |= DP_LINK_TRAIN_OFF;
		break;
	case DP_TRAINING_PATTERN_1:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_1;
		break;
	case DP_TRAINING_PATTERN_2:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_2;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void intel_dp_enable_port(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/* enable with pattern 1 (as per spec) */

	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
					       DP_PHY_DPRX, DP_TRAINING_PATTERN_1);

	/*
	 * Magic for VLV/CHV. We _must_ first set up the register
	 * without actually enabling the port, and then do another
	 * write to enable the port. Otherwise link training will
	 * fail when the power sequencer is freshly used for this port.
	 */
	intel_dp->DP |= DP_PORT_EN;
	if (crtc_state->has_audio)
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void intel_enable_dp(struct intel_atomic_state *state,
			    struct intel_encoder *encoder,
			    const struct intel_crtc_state *pipe_config,
			    const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg);
	enum pipe pipe = crtc->pipe;
	intel_wakeref_t wakeref;

	if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN))
		return;

	with_intel_pps_lock(intel_dp, wakeref) {
		if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
			vlv_pps_init(encoder, pipe_config);

		intel_dp_enable_port(intel_dp, pipe_config);

		intel_pps_vdd_on_unlocked(intel_dp);
		intel_pps_on_unlocked(intel_dp);
		intel_pps_vdd_off_unlocked(intel_dp, true);
	}

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		unsigned int lane_mask = 0x0;

		if (IS_CHERRYVIEW(dev_priv))
			lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);

		vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
				    lane_mask);
	}

	intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
	intel_dp_configure_protocol_converter(intel_dp, pipe_config);
	intel_dp_check_frl_training(intel_dp);
	intel_dp_pcon_dsc_configure(intel_dp, pipe_config);
	intel_dp_start_link_train(intel_dp, pipe_config);
	intel_dp_stop_link_train(intel_dp, pipe_config);

	if (pipe_config->has_audio) {
		drm_dbg(&dev_priv->drm, "Enabling DP audio on pipe %c\n",
			pipe_name(pipe));
		intel_audio_codec_enable(encoder, pipe_config, conn_state);
	}
}

static void g4x_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_enable_dp(state, encoder, pipe_config, conn_state);
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void vlv_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void g4x_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	intel_dp_prepare(encoder, pipe_config);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_on(intel_dp, pipe_config);
}

static void vlv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	vlv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);
}

static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	vlv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	chv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);

	/* Second common lane will stay alive on its own now */
	chv_phy_release_cl2_override(encoder);
}

static void chv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	chv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_dp_post_pll_disable(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *old_crtc_state,
				    const struct drm_connector_state *old_conn_state)
{
	chv_phy_post_pll_disable(encoder, old_crtc_state);
}

static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
}

static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
}

static u8 intel_dp_preemph_max_2(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_2;
}

static u8 intel_dp_preemph_max_3(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
}

static void vlv_set_signal_levels(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	unsigned long demph_reg_value, preemph_reg_value,
		uniqtranscale_reg_value;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		preemph_reg_value = 0x0004000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x552AB83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5548B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B245555;
			uniqtranscale_reg_value = 0x5560B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x5598DA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		preemph_reg_value = 0x0002000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5552B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404848;
			uniqtranscale_reg_value = 0x5580B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		preemph_reg_value = 0x0000000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B305555;
			uniqtranscale_reg_value = 0x5570B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B2B4040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		preemph_reg_value = 0x0006000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x1B405555;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	vlv_set_phy_signal_level(encoder, crtc_state,
				 demph_reg_value, preemph_reg_value,
				 uniqtranscale_reg_value, 0);
}

static void chv_set_signal_levels(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 deemph_reg_value, margin_reg_value;
	bool uniq_trans_scale = false;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 128;
			margin_reg_value = 52;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 128;
			margin_reg_value = 77;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 128;
			margin_reg_value = 102;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			deemph_reg_value = 128;
			margin_reg_value = 154;
			uniq_trans_scale = true;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 85;
			margin_reg_value = 78;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 85;
			margin_reg_value = 116;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 85;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 64;
			margin_reg_value = 104;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 64;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 43;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	chv_set_phy_signal_level(encoder, crtc_state,
				 deemph_reg_value, margin_reg_value,
				 uniq_trans_scale);
}

static u32 g4x_signal_levels(u8 train_set)
{
	u32 signal_levels = 0;

	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
	default:
		signal_levels |= DP_VOLTAGE_0_4;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
		signal_levels |= DP_VOLTAGE_0_6;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
		signal_levels |= DP_VOLTAGE_0_8;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
		signal_levels |= DP_VOLTAGE_1_2;
		break;
	}
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
	default:
		signal_levels |= DP_PRE_EMPHASIS_0;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		signal_levels |= DP_PRE_EMPHASIS_3_5;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		signal_levels |= DP_PRE_EMPHASIS_6;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		signal_levels |= DP_PRE_EMPHASIS_9_5;
		break;
	}
	return signal_levels;
}

static void
g4x_set_signal_levels(struct intel_encoder *encoder,
		      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = g4x_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK);
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* SNB CPU eDP voltage swing and pre-emphasis control */
static u32 snb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}

static void
snb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = snb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* IVB CPU eDP voltage swing and pre-emphasis control */
static u32 ivb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}

static void
ivb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = ivb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/*
 * If the display is now connected, check the link status, as there have
 * been known issues of link loss triggering a long pulse.
 *
 * Some sinks (eg. ASUS PB287Q) seem to perform some
 * weird HPD ping pong during modesets. So we can apparently
 * end up with HPD going low during a modeset, and then
 * going back up soon after. And once that happens we must
 * retrain the link to get a picture. That's in case no
 * userspace component reacted to the intermittent HPD dip.
 */
static enum intel_hotplug_state
intel_dp_hotplug(struct intel_encoder *encoder,
		 struct intel_connector *connector)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_modeset_acquire_ctx ctx;
	enum intel_hotplug_state state;
	int ret;

	if (intel_dp->compliance.test_active &&
	    intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
		intel_dp_phy_test(encoder);
		/* just do the PHY test and nothing else */
		return INTEL_HOTPLUG_UNCHANGED;
	}

	state = intel_encoder_hotplug(encoder, connector);

	drm_modeset_acquire_init(&ctx, 0);

	for (;;) {
		ret = intel_dp_retrain_link(encoder, &ctx);

		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);

	/*
	 * Keeping it consistent with intel_ddi_hotplug() and
	 * intel_hdmi_hotplug().
	 */
	if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
		state = INTEL_HOTPLUG_RETRY;

	return state;
}

static bool ibx_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, SDEISR) & bit;
}

static bool g4x_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

static bool gm45_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_GM45;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

static bool ilk_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, DEISR) & bit;
}

static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	kfree(enc_to_dig_port(to_intel_encoder(encoder)));
}

enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	enum pipe pipe;

	if (g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				encoder->port, &pipe))
		return pipe;

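	/* the port is not enabled on any pipe */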
	return INVALID_PIPE;
}

static void intel_dp_encoder_reset(struct drm_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));

	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);

	intel_dp->reset_link_params = true;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
	}

	intel_pps_encoder_reset(intel_dp);
}

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_dp_encoder_destroy,
};

bool g4x_dp_init(struct drm_i915_private *dev_priv,
		 i915_reg_t output_reg, enum port port)
{
	struct intel_digital_port *dig_port;
	struct intel_encoder *intel_encoder;
	struct drm_encoder *encoder;
	struct intel_connector *intel_connector;

	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
		return false;

	intel_connector = intel_connector_alloc();
	if (!intel_connector)
		goto err_connector_alloc;

	intel_encoder = &dig_port->base;
	encoder = &intel_encoder->base;

	mutex_init(&dig_port->hdcp_mutex);

	if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
			     &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
			     "DP %c", port_name(port)))
		goto err_encoder_init;

	intel_encoder->hotplug = intel_dp_hotplug;
	intel_encoder->compute_config = intel_dp_compute_config;
	intel_encoder->get_hw_state = intel_dp_get_hw_state;
	intel_encoder->get_config = intel_dp_get_config;
	intel_encoder->sync_state = intel_dp_sync_state;
	intel_encoder->initial_fastset_check = intel_dp_initial_fastset_check;
	intel_encoder->update_pipe = intel_backlight_update;
	intel_encoder->suspend = intel_dp_encoder_suspend;
	intel_encoder->shutdown = intel_dp_encoder_shutdown;
	if (IS_CHERRYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
		intel_encoder->pre_enable = chv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = chv_post_disable_dp;
		intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
	} else if (IS_VALLEYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
		intel_encoder->pre_enable = vlv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = vlv_post_disable_dp;
	} else {
		intel_encoder->pre_enable = g4x_pre_enable_dp;
		intel_encoder->enable = g4x_enable_dp;
		intel_encoder->disable = g4x_disable_dp;
		intel_encoder->post_disable = g4x_post_disable_dp;
	}

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A))
		dig_port->dp.set_link_train = cpt_set_link_train;
	else
		dig_port->dp.set_link_train = g4x_set_link_train;

	if (IS_CHERRYVIEW(dev_priv))
		intel_encoder->set_signal_levels = chv_set_signal_levels;
	else if (IS_VALLEYVIEW(dev_priv))
		intel_encoder->set_signal_levels = vlv_set_signal_levels;
	else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		intel_encoder->set_signal_levels = ivb_cpu_edp_set_signal_levels;
	else if (IS_SANDYBRIDGE(dev_priv) && port == PORT_A)
		intel_encoder->set_signal_levels = snb_cpu_edp_set_signal_levels;
	else
		intel_encoder->set_signal_levels = g4x_set_signal_levels;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
	    (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
		dig_port->dp.preemph_max = intel_dp_preemph_max_3;
		dig_port->dp.voltage_max = intel_dp_voltage_max_3;
	} else {
		dig_port->dp.preemph_max = intel_dp_preemph_max_2;
		dig_port->dp.voltage_max = intel_dp_voltage_max_2;
	}

	dig_port->dp.output_reg = output_reg;
	dig_port->max_lanes = 4;

	intel_encoder->type = INTEL_OUTPUT_DP;
	intel_encoder->power_domain = intel_port_to_power_domain(port);
	if (IS_CHERRYVIEW(dev_priv)) {
		if (port == PORT_D)
			intel_encoder->pipe_mask = BIT(PIPE_C);
		else
			intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
	} else {
		intel_encoder->pipe_mask = ~0;
	}
	intel_encoder->cloneable = 0;
	intel_encoder->port = port;
	intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);

	dig_port->hpd_pulse = intel_dp_hpd_pulse;

	if (HAS_GMCH(dev_priv)) {
		if (IS_GM45(dev_priv))
			dig_port->connected = gm45_digital_port_connected;
		else
			dig_port->connected = g4x_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = ilk_digital_port_connected;
		else
			dig_port->connected = ibx_digital_port_connected;
	}

	if (port != PORT_A)
		intel_infoframe_init(dig_port);

	dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);
	if (!intel_dp_init_connector(dig_port, intel_connector))
		goto err_init_connector;

	return true;

err_init_connector:
	drm_encoder_cleanup(encoder);
err_encoder_init:
	kfree(intel_connector);
err_connector_alloc:
	kfree(dig_port);
	return false;
}