// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 * DisplayPort support for G4x,ILK,SNB,IVB,VLV,CHV (HSW+ handled by the DDI code).
 */

#include "g4x_dp.h"
#include "intel_audio.h"
#include "intel_backlight.h"
#include "intel_connector.h"
#include "intel_crtc.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_link_training.h"
#include "intel_dpio_phy.h"
#include "intel_fifo_underrun.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_pps.h"
#include "vlv_sideband.h"

struct dp_link_dpll {
	int clock;
	struct dpll dpll;
};

static const struct dp_link_dpll g4x_dpll[] = {
	{ 162000,
		{ .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8 } },
	{ 270000,
		{ .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2 } }
};

static const struct dp_link_dpll pch_dpll[] = {
	{ 162000,
		{ .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9 } },
	{ 270000,
		{ .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8 } }
};

static const struct dp_link_dpll vlv_dpll[] = {
	{ 162000,
		{ .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81 } },
	{ 270000,
		{ .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27 } }
};

/*
 * CHV supports eDP 1.4, which has more link rates.
 * Only the fixed link rates are provided below; the variable rates are excluded.
 */
static const struct dp_link_dpll chv_dpll[] = {
	/*
	 * CHV requires programming a fractional divider for m2.
	 * m2 is stored in fixed-point format using the formula below:
	 * (m2_int << 22) | m2_fraction
	 */
	{ 162000,	/* m2_int = 32, m2_fraction = 1677722 */
		{ .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a } },
	{ 270000,	/* m2_int = 27, m2_fraction = 0 */
		{ .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 } },
};
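
/*
 * Example: for the 162000 entry above the encoded value is
 * (32 << 22) | 1677722 == 0x819999a, i.e. m2 ~= 32.4 with 22 fractional bits.
 */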

const struct dpll *vlv_get_dpll(struct drm_i915_private *i915)
{
	return IS_CHERRYVIEW(i915) ? &chv_dpll[0].dpll : &vlv_dpll[0].dpll;
}

void g4x_dp_set_clock(struct intel_encoder *encoder,
		      struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct dp_link_dpll *divisor = NULL;
	int i, count = 0;

	if (IS_G4X(dev_priv)) {
		divisor = g4x_dpll;
		count = ARRAY_SIZE(g4x_dpll);
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		divisor = pch_dpll;
		count = ARRAY_SIZE(pch_dpll);
	} else if (IS_CHERRYVIEW(dev_priv)) {
		divisor = chv_dpll;
		count = ARRAY_SIZE(chv_dpll);
	} else if (IS_VALLEYVIEW(dev_priv)) {
		divisor = vlv_dpll;
		count = ARRAY_SIZE(vlv_dpll);
	}

	if (divisor && count) {
		for (i = 0; i < count; i++) {
			if (pipe_config->port_clock == divisor[i].clock) {
				pipe_config->dpll = divisor[i].dpll;
				pipe_config->clock_set = true;
				break;
			}
		}
	}
}

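/*
 * Note: this only computes the port register value cached in intel_dp->DP
 * (plus TRANS_DP_CTL for CPT); the DP port register itself is written later,
 * when the PLL/port is enabled and the link training pattern is programmed.
 */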
static void intel_dp_prepare(struct intel_encoder *encoder,
			     const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;

	intel_dp_set_link_params(intel_dp,
				 pipe_config->port_clock,
				 pipe_config->lane_count);

	/*
	 * There are four kinds of DP registers:
	 *	IBX PCH
	 *	SNB CPU
	 *	IVB CPU
	 *	CPT PCH
	 *
	 * IBX PCH and CPU are the same for almost everything,
	 * except that the CPU DP PLL is configured in this
	 * register
	 *
	 * CPT PCH is quite different, having many bits moved
	 * to the TRANS_DP_CTL register instead. That
	 * configuration happens (oddly) in ilk_pch_enable
	 */

	/* Preserve the BIOS-computed detected bit. This is
	 * supposed to be read-only.
	 */
	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;

	/* Handle DP bits in common between all three register formats */
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);

	/* Split out the IBX/CPU vs CPT settings */

	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) {
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe);
	} else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp;

		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		trans_dp = intel_de_read(dev_priv, TRANS_DP_CTL(crtc->pipe));
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			trans_dp |= TRANS_DP_ENH_FRAMING;
		else
			trans_dp &= ~TRANS_DP_ENH_FRAMING;
		intel_de_write(dev_priv, TRANS_DP_CTL(crtc->pipe), trans_dp);
	} else {
		if (IS_G4X(dev_priv) && pipe_config->limited_color_range)
			intel_dp->DP |= DP_COLOR_RANGE_16_235;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (IS_CHERRYVIEW(dev_priv))
			intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe);
		else
			intel_dp->DP |= DP_PIPE_SEL(crtc->pipe);
	}
}

static void assert_dp_port(struct intel_dp *intel_dp, bool state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	bool cur_state = intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN;

	I915_STATE_WARN(cur_state != state,
			"[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n",
			dig_port->base.base.base.id, dig_port->base.base.name,
			onoff(state), onoff(cur_state));
}
#define assert_dp_port_disabled(d) assert_dp_port((d), false)

static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
{
	bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE;

	I915_STATE_WARN(cur_state != state,
			"eDP PLL state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
#define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
#define assert_edp_pll_disabled(d) assert_edp_pll((d), false)

static void ilk_edp_pll_on(struct intel_dp *intel_dp,
			   const struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_transcoder_disabled(dev_priv, pipe_config->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_disabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n",
		    pipe_config->port_clock);

	intel_dp->DP &= ~DP_PLL_FREQ_MASK;

	if (pipe_config->port_clock == 162000)
		intel_dp->DP |= DP_PLL_FREQ_162MHZ;
	else
		intel_dp->DP |= DP_PLL_FREQ_270MHZ;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(500);

	/*
	 * [DevILK] Work around required when enabling DP PLL
	 * while a pipe is enabled going to FDI:
	 * 1. Wait for the start of vertical blank on the enabled pipe going to FDI
	 * 2. Program DP PLL enable
	 */
	if (IS_IRONLAKE(dev_priv))
		intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe);

	intel_dp->DP |= DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static void ilk_edp_pll_off(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_transcoder_disabled(dev_priv, old_crtc_state->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_enabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n");

	intel_dp->DP &= ~DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv,
				 enum port port, enum pipe *pipe)
{
	enum pipe p;

	for_each_pipe(dev_priv, p) {
		u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p));

		if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) {
			*pipe = p;
			return true;
		}
	}

	drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n",
		    port_name(port));

	/* must initialize pipe to something for the asserts */
	*pipe = PIPE_A;

	return false;
}

bool g4x_dp_port_enabled(struct drm_i915_private *dev_priv,
			 i915_reg_t dp_reg, enum port port,
			 enum pipe *pipe)
{
	bool ret;
	u32 val;

	val = intel_de_read(dev_priv, dp_reg);

	ret = val & DP_PORT_EN;

	/* asserts want to know the pipe even if the port is disabled */
	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		*pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB;
	else if (HAS_PCH_CPT(dev_priv) && port != PORT_A)
		ret &= cpt_dp_port_selected(dev_priv, port, pipe);
	else if (IS_CHERRYVIEW(dev_priv))
		*pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV;
	else
		*pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT;

	return ret;
}

static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
				  enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	intel_wakeref_t wakeref;
	bool ret;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	ret = g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				  encoder->port, pipe);

	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);

	return ret;
}

static void intel_dp_get_config(struct intel_encoder *encoder,
				struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 tmp, flags = 0;
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);

	if (encoder->type == INTEL_OUTPUT_EDP)
		pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
	else
		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);

	tmp = intel_de_read(dev_priv, intel_dp->output_reg);

	pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;

	if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp = intel_de_read(dev_priv,
					     TRANS_DP_CTL(crtc->pipe));

		if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	} else {
		if (tmp & DP_SYNC_HS_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (tmp & DP_SYNC_VS_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	}

	pipe_config->hw.adjusted_mode.flags |= flags;

	if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235)
		pipe_config->limited_color_range = true;

	pipe_config->lane_count =
		((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;

	intel_dp_get_m_n(crtc, pipe_config);

	if (port == PORT_A) {
		if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
			pipe_config->port_clock = 162000;
		else
			pipe_config->port_clock = 270000;
	}

	pipe_config->hw.adjusted_mode.crtc_clock =
		intel_dotclock_calculate(pipe_config->port_clock,
					 &pipe_config->dp_m_n);

	if (intel_dp_is_edp(intel_dp) && dev_priv->vbt.edp.bpp &&
	    pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) {
		/*
		 * This is a big fat ugly hack.
		 *
		 * Some machines in UEFI boot mode provide us a VBT that has 18
		 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons
		 * unknown we fail to light up. Yet the same BIOS boots up with
		 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as
		 * max, not what it tells us to use.
		 *
		 * Note: This will still be broken if the eDP panel is not lit
		 * up by the BIOS, and thus we can't get the mode at module
		 * load.
		 */
		drm_dbg_kms(&dev_priv->drm,
			    "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
			    pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp);
		dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp;
	}
}

static void
intel_dp_link_down(struct intel_encoder *encoder,
		   const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	enum port port = encoder->port;

	if (drm_WARN_ON(&dev_priv->drm,
			(intel_de_read(dev_priv, intel_dp->output_reg) &
			 DP_PORT_EN) == 0))
		return;

	drm_dbg_kms(&dev_priv->drm, "\n");

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A)) {
		intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;
		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
	} else {
		intel_dp->DP &= ~DP_LINK_TRAIN_MASK;
		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE;
	}
	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	intel_dp->DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	/*
	 * HW workaround for IBX, we need to move the port
	 * to transcoder A after disabling it to allow the
	 * matching HDMI port to be enabled on transcoder A.
	 */
	if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) {
		/*
		 * We get CPU/PCH FIFO underruns on the other pipe when
		 * doing the workaround. Sweep them under the rug.
		 */
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);

		/* always enable with pattern 1 (as per spec) */
		intel_dp->DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK);
		intel_dp->DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) |
			DP_LINK_TRAIN_PAT_1;
		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_dp->DP &= ~DP_PORT_EN;
		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_wait_for_vblank_if_active(dev_priv, PIPE_A);
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
	}

	msleep(intel_dp->pps.panel_power_down_delay);

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = INVALID_PIPE;
	}
}

static void intel_disable_dp(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *old_crtc_state,
			     const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_dp->link_trained = false;

	if (old_crtc_state->has_audio)
		intel_audio_codec_disable(encoder,
					  old_crtc_state, old_conn_state);

	/*
	 * Make sure the panel is off before trying to change the mode.
	 * But also ensure that we have vdd while we switch off the panel.
	 */
	intel_pps_vdd_on(intel_dp);
	intel_edp_backlight_off(old_conn_state);
	intel_dp_set_power(intel_dp, DP_SET_POWER_D3);
	intel_pps_off(intel_dp);
}

static void g4x_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void vlv_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void g4x_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	/*
	 * Bspec does not list a specific disable sequence for g4x DP.
	 * Follow the ilk+ sequence (disable pipe before the port) for
	 * g4x DP as it does not suffer from underruns like the normal
	 * g4x modeset sequence (disable pipe after the port).
	 */
	intel_dp_link_down(encoder, old_crtc_state);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_off(intel_dp, old_crtc_state);
}

static void vlv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	intel_dp_link_down(encoder, old_crtc_state);
}

static void chv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	intel_dp_link_down(encoder, old_crtc_state);

	vlv_dpio_get(dev_priv);

	/* Assert data lane reset */
	chv_data_lane_soft_reset(encoder, old_crtc_state, true);

	vlv_dpio_put(dev_priv);
}

static void
cpt_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
		break;
	case DP_TRAINING_PATTERN_1:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_1_CPT;
		break;
	case DP_TRAINING_PATTERN_2:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_2_CPT;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void
g4x_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	intel_dp->DP &= ~DP_LINK_TRAIN_MASK;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		intel_dp->DP |= DP_LINK_TRAIN_OFF;
		break;
	case DP_TRAINING_PATTERN_1:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_1;
		break;
	case DP_TRAINING_PATTERN_2:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_2;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void intel_dp_enable_port(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/* enable with pattern 1 (as per spec) */

	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
					       DP_PHY_DPRX, DP_TRAINING_PATTERN_1);

	/*
	 * Magic for VLV/CHV. We _must_ first set up the register
	 * without actually enabling the port, and then do another
	 * write to enable the port. Otherwise link training will
	 * fail when the power sequencer is freshly used for this port.
	 */
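	/*
	 * The register write without DP_PORT_EN is done above via the
	 * ->set_link_train() hook (pattern 1); the write below is the
	 * second one that actually enables the port.
	 */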
	intel_dp->DP |= DP_PORT_EN;
	if (crtc_state->has_audio)
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void intel_enable_dp(struct intel_atomic_state *state,
			    struct intel_encoder *encoder,
			    const struct intel_crtc_state *pipe_config,
			    const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg);
	enum pipe pipe = crtc->pipe;
	intel_wakeref_t wakeref;

	if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN))
		return;

	with_intel_pps_lock(intel_dp, wakeref) {
		if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
			vlv_pps_init(encoder, pipe_config);

		intel_dp_enable_port(intel_dp, pipe_config);

		intel_pps_vdd_on_unlocked(intel_dp);
		intel_pps_on_unlocked(intel_dp);
		intel_pps_vdd_off_unlocked(intel_dp, true);
	}

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		unsigned int lane_mask = 0x0;

		if (IS_CHERRYVIEW(dev_priv))
			lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);

		vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
				    lane_mask);
	}

	intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
	intel_dp_configure_protocol_converter(intel_dp, pipe_config);
	intel_dp_check_frl_training(intel_dp);
	intel_dp_pcon_dsc_configure(intel_dp, pipe_config);
	intel_dp_start_link_train(intel_dp, pipe_config);
	intel_dp_stop_link_train(intel_dp, pipe_config);

	if (pipe_config->has_audio) {
		drm_dbg(&dev_priv->drm, "Enabling DP audio on pipe %c\n",
			pipe_name(pipe));
		intel_audio_codec_enable(encoder, pipe_config, conn_state);
	}
}

static void g4x_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_enable_dp(state, encoder, pipe_config, conn_state);
	intel_edp_backlight_on(pipe_config, conn_state);
}

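/*
 * On VLV/CHV the port is enabled and the link trained from the pre_enable
 * hook (vlv_pre_enable_dp()/chv_pre_enable_dp() below), so the enable hook
 * only needs to turn on the backlight.
 */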
static void vlv_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void g4x_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	intel_dp_prepare(encoder, pipe_config);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_on(intel_dp, pipe_config);
}

static void vlv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	vlv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);
}

static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	vlv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	chv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);

	/* Second common lane will stay alive on its own now */
	chv_phy_release_cl2_override(encoder);
}

static void chv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	chv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_dp_post_pll_disable(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *old_crtc_state,
				    const struct drm_connector_state *old_conn_state)
{
	chv_phy_post_pll_disable(encoder, old_crtc_state);
}

static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
}

static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
}

static u8 intel_dp_preemph_max_2(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_2;
}

static u8 intel_dp_preemph_max_3(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
}

static void vlv_set_signal_levels(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	unsigned long demph_reg_value, preemph_reg_value,
		uniqtranscale_reg_value;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		preemph_reg_value = 0x0004000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x552AB83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5548B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B245555;
			uniqtranscale_reg_value = 0x5560B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x5598DA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		preemph_reg_value = 0x0002000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5552B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404848;
			uniqtranscale_reg_value = 0x5580B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		preemph_reg_value = 0x0000000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B305555;
			uniqtranscale_reg_value = 0x5570B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B2B4040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		preemph_reg_value = 0x0006000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x1B405555;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	vlv_set_phy_signal_level(encoder, crtc_state,
				 demph_reg_value, preemph_reg_value,
				 uniqtranscale_reg_value, 0);
}

static void chv_set_signal_levels(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 deemph_reg_value, margin_reg_value;
	bool uniq_trans_scale = false;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 128;
			margin_reg_value = 52;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 128;
			margin_reg_value = 77;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 128;
			margin_reg_value = 102;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			deemph_reg_value = 128;
			margin_reg_value = 154;
			uniq_trans_scale = true;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 85;
			margin_reg_value = 78;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 85;
			margin_reg_value = 116;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 85;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 64;
			margin_reg_value = 104;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 64;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 43;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	chv_set_phy_signal_level(encoder, crtc_state,
				 deemph_reg_value, margin_reg_value,
				 uniq_trans_scale);
}

static u32 g4x_signal_levels(u8 train_set)
{
	u32 signal_levels = 0;

	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
	default:
		signal_levels |= DP_VOLTAGE_0_4;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
		signal_levels |= DP_VOLTAGE_0_6;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
		signal_levels |= DP_VOLTAGE_0_8;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
		signal_levels |= DP_VOLTAGE_1_2;
		break;
	}
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
	default:
		signal_levels |= DP_PRE_EMPHASIS_0;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		signal_levels |= DP_PRE_EMPHASIS_3_5;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		signal_levels |= DP_PRE_EMPHASIS_6;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		signal_levels |= DP_PRE_EMPHASIS_9_5;
		break;
	}
	return signal_levels;
}

static void
g4x_set_signal_levels(struct intel_encoder *encoder,
		      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = g4x_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK);
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* SNB CPU eDP voltage swing and pre-emphasis control */
static u32 snb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}

static void
snb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = snb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* IVB CPU eDP voltage swing and pre-emphasis control */
static u32 ivb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}

static void
ivb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = ivb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/*
 * If the display is now connected, check the link status; there have been
 * known issues of link loss triggering a long pulse.
 *
 * Some sinks (eg. ASUS PB287Q) seem to perform some
 * weird HPD ping pong during modesets. So we can apparently
 * end up with HPD going low during a modeset, and then
 * going back up soon after. And once that happens we must
 * retrain the link to get a picture. That's in case no
 * userspace component reacted to intermittent HPD dip.
 */
static enum intel_hotplug_state
intel_dp_hotplug(struct intel_encoder *encoder,
		 struct intel_connector *connector)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_modeset_acquire_ctx ctx;
	enum intel_hotplug_state state;
	int ret;

	if (intel_dp->compliance.test_active &&
	    intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
		intel_dp_phy_test(encoder);
		/* just do the PHY test and nothing else */
		return INTEL_HOTPLUG_UNCHANGED;
	}

	state = intel_encoder_hotplug(encoder, connector);

	drm_modeset_acquire_init(&ctx, 0);

	for (;;) {
		ret = intel_dp_retrain_link(encoder, &ctx);

		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);

	/*
	 * Keeping it consistent with intel_ddi_hotplug() and
	 * intel_hdmi_hotplug().
	 */
	if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
		state = INTEL_HOTPLUG_RETRY;

	return state;
}

static bool ibx_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, SDEISR) & bit;
}

static bool g4x_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

static bool gm45_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_GM45;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

static bool ilk_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, DEISR) & bit;
}

static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	kfree(enc_to_dig_port(to_intel_encoder(encoder)));
}

enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	enum pipe pipe;

	if (g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				encoder->port, &pipe))
		return pipe;

	return INVALID_PIPE;
}

static void intel_dp_encoder_reset(struct drm_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));

	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);

	intel_dp->reset_link_params = true;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
	}

	intel_pps_encoder_reset(intel_dp);
}

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_dp_encoder_destroy,
};

bool g4x_dp_init(struct drm_i915_private *dev_priv,
		 i915_reg_t output_reg, enum port port)
{
	struct intel_digital_port *dig_port;
	struct intel_encoder *intel_encoder;
	struct drm_encoder *encoder;
	struct intel_connector *intel_connector;

	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
		return false;

	intel_connector = intel_connector_alloc();
	if (!intel_connector)
		goto err_connector_alloc;

	intel_encoder = &dig_port->base;
	encoder = &intel_encoder->base;

	mutex_init(&dig_port->hdcp_mutex);

	if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
			     &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
			     "DP %c", port_name(port)))
		goto err_encoder_init;

	intel_encoder->hotplug = intel_dp_hotplug;
	intel_encoder->compute_config = intel_dp_compute_config;
	intel_encoder->get_hw_state = intel_dp_get_hw_state;
	intel_encoder->get_config = intel_dp_get_config;
	intel_encoder->sync_state = intel_dp_sync_state;
	intel_encoder->initial_fastset_check = intel_dp_initial_fastset_check;
	intel_encoder->update_pipe = intel_backlight_update;
	intel_encoder->suspend = intel_dp_encoder_suspend;
	intel_encoder->shutdown = intel_dp_encoder_shutdown;
	if (IS_CHERRYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
		intel_encoder->pre_enable = chv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = chv_post_disable_dp;
		intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
	} else if (IS_VALLEYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
		intel_encoder->pre_enable = vlv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = vlv_post_disable_dp;
	} else {
		intel_encoder->pre_enable = g4x_pre_enable_dp;
		intel_encoder->enable = g4x_enable_dp;
		intel_encoder->disable = g4x_disable_dp;
		intel_encoder->post_disable = g4x_post_disable_dp;
	}

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A))
		dig_port->dp.set_link_train = cpt_set_link_train;
	else
		dig_port->dp.set_link_train = g4x_set_link_train;

	if (IS_CHERRYVIEW(dev_priv))
		intel_encoder->set_signal_levels = chv_set_signal_levels;
	else if (IS_VALLEYVIEW(dev_priv))
		intel_encoder->set_signal_levels = vlv_set_signal_levels;
	else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		intel_encoder->set_signal_levels = ivb_cpu_edp_set_signal_levels;
	else if (IS_SANDYBRIDGE(dev_priv) && port == PORT_A)
		intel_encoder->set_signal_levels = snb_cpu_edp_set_signal_levels;
	else
		intel_encoder->set_signal_levels = g4x_set_signal_levels;

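	/*
	 * VLV/CHV and PCH-attached ports support the full vswing/pre-emphasis
	 * range; g4x and the ILK/SNB/IVB CPU eDP port (port A) are limited to
	 * level 2.
	 */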
	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
	    (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
		dig_port->dp.preemph_max = intel_dp_preemph_max_3;
		dig_port->dp.voltage_max = intel_dp_voltage_max_3;
	} else {
		dig_port->dp.preemph_max = intel_dp_preemph_max_2;
		dig_port->dp.voltage_max = intel_dp_voltage_max_2;
	}

	dig_port->dp.output_reg = output_reg;
	dig_port->max_lanes = 4;

	intel_encoder->type = INTEL_OUTPUT_DP;
	intel_encoder->power_domain = intel_port_to_power_domain(port);
	if (IS_CHERRYVIEW(dev_priv)) {
		if (port == PORT_D)
			intel_encoder->pipe_mask = BIT(PIPE_C);
		else
			intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
	} else {
		intel_encoder->pipe_mask = ~0;
	}
	intel_encoder->cloneable = 0;
	intel_encoder->port = port;
	intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);

	dig_port->hpd_pulse = intel_dp_hpd_pulse;

	if (HAS_GMCH(dev_priv)) {
		if (IS_GM45(dev_priv))
			dig_port->connected = gm45_digital_port_connected;
		else
			dig_port->connected = g4x_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = ilk_digital_port_connected;
		else
			dig_port->connected = ibx_digital_port_connected;
	}

	if (port != PORT_A)
		intel_infoframe_init(dig_port);

	dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);
	if (!intel_dp_init_connector(dig_port, intel_connector))
		goto err_init_connector;

	return true;

err_init_connector:
	drm_encoder_cleanup(encoder);
err_encoder_init:
	kfree(intel_connector);
err_connector_alloc:
	kfree(dig_port);
	return false;
}