// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 * DisplayPort support for G4x,ILK,SNB,IVB,VLV,CHV (HSW+ handled by the DDI code).
 */

#include "g4x_dp.h"
#include "intel_audio.h"
#include "intel_backlight.h"
#include "intel_connector.h"
#include "intel_crtc.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_link_training.h"
#include "intel_dpio_phy.h"
#include "intel_fifo_underrun.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_pch_display.h"
#include "intel_pps.h"
#include "vlv_sideband.h"

struct dp_link_dpll {
	int clock;
	struct dpll dpll;
};

static const struct dp_link_dpll g4x_dpll[] = {
	{ 162000,
		{ .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8 } },
	{ 270000,
		{ .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2 } }
};

static const struct dp_link_dpll pch_dpll[] = {
	{ 162000,
		{ .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9 } },
	{ 270000,
		{ .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8 } }
};

static const struct dp_link_dpll vlv_dpll[] = {
	{ 162000,
		{ .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81 } },
	{ 270000,
		{ .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27 } }
};

/*
 * CHV supports eDP 1.4, which has more link rates.
 * Only the fixed rates are provided below; the variable rates are excluded.
 */
static const struct dp_link_dpll chv_dpll[] = {
	/*
	 * CHV requires programming fractional division for m2.
	 * m2 is stored in fixed point format using the formula below:
	 * (m2_int << 22) | m2_fraction
	 */
	{ 162000,	/* m2_int = 32, m2_fraction = 1677722 */
		{ .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a } },
	{ 270000,	/* m2_int = 27, m2_fraction = 0 */
		{ .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 } },
};

const struct dpll *vlv_get_dpll(struct drm_i915_private *i915)
{
	return IS_CHERRYVIEW(i915) ? &chv_dpll[0].dpll : &vlv_dpll[0].dpll;
}

void g4x_dp_set_clock(struct intel_encoder *encoder,
		      struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct dp_link_dpll *divisor = NULL;
	int i, count = 0;

	if (IS_G4X(dev_priv)) {
		divisor = g4x_dpll;
		count = ARRAY_SIZE(g4x_dpll);
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		divisor = pch_dpll;
		count = ARRAY_SIZE(pch_dpll);
	} else if (IS_CHERRYVIEW(dev_priv)) {
		divisor = chv_dpll;
		count = ARRAY_SIZE(chv_dpll);
	} else if (IS_VALLEYVIEW(dev_priv)) {
		divisor = vlv_dpll;
		count = ARRAY_SIZE(vlv_dpll);
	}

	if (divisor && count) {
		for (i = 0; i < count; i++) {
			if (pipe_config->port_clock == divisor[i].clock) {
				pipe_config->dpll = divisor[i].dpll;
				pipe_config->clock_set = true;
				break;
			}
		}
	}
}

static void intel_dp_prepare(struct intel_encoder *encoder,
			     const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;

	intel_dp_set_link_params(intel_dp,
				 pipe_config->port_clock,
				 pipe_config->lane_count);

	/*
	 * There are four kinds of DP registers:
	 *	IBX PCH
	 *	SNB CPU
	 *	IVB CPU
	 *	CPT PCH
	 *
	 * IBX PCH and CPU are the same for almost everything,
	 * except that the CPU DP PLL is configured in this
	 * register
	 *
	 * CPT PCH is quite different, having many bits moved
	 * to the TRANS_DP_CTL register instead. That
	 * configuration happens (oddly) in ilk_pch_enable
	 */

	/* Preserve the BIOS-computed detected bit. This is
	 * supposed to be read-only.
	 */
	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;

	/* Handle DP bits in common between all three register formats */
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);

	/* Split out the IBX/CPU vs CPT settings */

	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) {
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe);
	} else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp;

		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		trans_dp = intel_de_read(dev_priv, TRANS_DP_CTL(crtc->pipe));
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			trans_dp |= TRANS_DP_ENH_FRAMING;
		else
			trans_dp &= ~TRANS_DP_ENH_FRAMING;
		intel_de_write(dev_priv, TRANS_DP_CTL(crtc->pipe), trans_dp);
	} else {
		if (IS_G4X(dev_priv) && pipe_config->limited_color_range)
			intel_dp->DP |= DP_COLOR_RANGE_16_235;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (IS_CHERRYVIEW(dev_priv))
			intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe);
		else
			intel_dp->DP |= DP_PIPE_SEL(crtc->pipe);
	}
}

static void assert_dp_port(struct intel_dp *intel_dp, bool state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	bool cur_state = intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN;

	I915_STATE_WARN(cur_state != state,
			"[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n",
			dig_port->base.base.base.id, dig_port->base.base.name,
			onoff(state), onoff(cur_state));
}
#define assert_dp_port_disabled(d) assert_dp_port((d), false)

static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
{
	bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE;

	I915_STATE_WARN(cur_state != state,
			"eDP PLL state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
#define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
#define assert_edp_pll_disabled(d) assert_edp_pll((d), false)

static void ilk_edp_pll_on(struct intel_dp *intel_dp,
			   const struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_transcoder_disabled(dev_priv, pipe_config->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_disabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n",
		    pipe_config->port_clock);

	intel_dp->DP &= ~DP_PLL_FREQ_MASK;

	if (pipe_config->port_clock == 162000)
		intel_dp->DP |= DP_PLL_FREQ_162MHZ;
	else
		intel_dp->DP |= DP_PLL_FREQ_270MHZ;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(500);

	/*
	 * [DevILK] Workaround required when enabling DP PLL
	 * while a pipe is enabled going to FDI:
	 * 1. Wait for the start of vertical blank on the enabled pipe going to FDI
	 * 2. Program DP PLL enable
	 */
	if (IS_IRONLAKE(dev_priv))
		intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe);

	intel_dp->DP |= DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static void ilk_edp_pll_off(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_transcoder_disabled(dev_priv, old_crtc_state->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_enabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n");

	intel_dp->DP &= ~DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv,
				 enum port port, enum pipe *pipe)
{
	enum pipe p;

	for_each_pipe(dev_priv, p) {
		u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p));

		if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) {
			*pipe = p;
			return true;
		}
	}

	drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n",
		    port_name(port));

	/* must initialize pipe to something for the asserts */
	*pipe = PIPE_A;

	return false;
}

bool g4x_dp_port_enabled(struct drm_i915_private *dev_priv,
			 i915_reg_t dp_reg, enum port port,
			 enum pipe *pipe)
{
	bool ret;
	u32 val;

	val = intel_de_read(dev_priv, dp_reg);

	ret = val & DP_PORT_EN;

	/* asserts want to know the pipe even if the port is disabled */
	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		*pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB;
	else if (HAS_PCH_CPT(dev_priv) && port != PORT_A)
		ret &= cpt_dp_port_selected(dev_priv, port, pipe);
	else if (IS_CHERRYVIEW(dev_priv))
		*pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV;
	else
		*pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT;

	return ret;
}

static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
				  enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	intel_wakeref_t wakeref;
	bool ret;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	ret = g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				  encoder->port, pipe);

	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);

	return ret;
}

static void g4x_dp_get_m_n(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);

	if (crtc_state->has_pch_encoder) {
		intel_pch_transcoder_get_m1_n1(crtc, &crtc_state->dp_m_n);
		intel_pch_transcoder_get_m2_n2(crtc, &crtc_state->dp_m2_n2);
	} else {
		intel_cpu_transcoder_get_m1_n1(crtc, crtc_state->cpu_transcoder,
					       &crtc_state->dp_m_n);
		intel_cpu_transcoder_get_m2_n2(crtc, crtc_state->cpu_transcoder,
					       &crtc_state->dp_m2_n2);
	}
}

static void intel_dp_get_config(struct intel_encoder *encoder,
				struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 tmp, flags = 0;
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);

	if (encoder->type == INTEL_OUTPUT_EDP)
		pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
	else
		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);

	tmp = intel_de_read(dev_priv, intel_dp->output_reg);

	pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;

	if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp = intel_de_read(dev_priv,
					     TRANS_DP_CTL(crtc->pipe));

		if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	} else {
		if (tmp & DP_SYNC_HS_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (tmp & DP_SYNC_VS_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	}

	pipe_config->hw.adjusted_mode.flags |= flags;

	if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235)
		pipe_config->limited_color_range = true;

	pipe_config->lane_count =
		((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;

	g4x_dp_get_m_n(pipe_config);

	if (port == PORT_A) {
		if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
			pipe_config->port_clock = 162000;
		else
			pipe_config->port_clock = 270000;
	}

	pipe_config->hw.adjusted_mode.crtc_clock =
		intel_dotclock_calculate(pipe_config->port_clock,
					 &pipe_config->dp_m_n);

	if (intel_dp_is_edp(intel_dp) && dev_priv->vbt.edp.bpp &&
	    pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) {
		/*
		 * This is a big fat ugly hack.
		 *
		 * Some machines in UEFI boot mode provide us a VBT that has 18
		 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons
		 * unknown we fail to light up. Yet the same BIOS boots up with
		 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as
		 * max, not what it tells us to use.
		 *
		 * Note: This will still be broken if the eDP panel is not lit
		 * up by the BIOS, and thus we can't get the mode at module
		 * load.
		 */
		drm_dbg_kms(&dev_priv->drm,
			    "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
			    pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp);
		dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp;
	}
}

static void
intel_dp_link_down(struct intel_encoder *encoder,
		   const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	enum port port = encoder->port;

	if (drm_WARN_ON(&dev_priv->drm,
			(intel_de_read(dev_priv, intel_dp->output_reg) &
			 DP_PORT_EN) == 0))
		return;

	drm_dbg_kms(&dev_priv->drm, "\n");

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A)) {
		intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;
		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
	} else {
		intel_dp->DP &= ~DP_LINK_TRAIN_MASK;
		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE;
	}
	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	intel_dp->DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	/*
	 * HW workaround for IBX: we need to move the port
	 * to transcoder A after disabling it to allow the
	 * matching HDMI port to be enabled on transcoder A.
	 */
	if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) {
		/*
		 * We get CPU/PCH FIFO underruns on the other pipe when
		 * doing the workaround. Sweep them under the rug.
		 */
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);

		/* always enable with pattern 1 (as per spec) */
		intel_dp->DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK);
		intel_dp->DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) |
			DP_LINK_TRAIN_PAT_1;
		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_dp->DP &= ~DP_PORT_EN;
		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_wait_for_vblank_if_active(dev_priv, PIPE_A);
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
	}

	msleep(intel_dp->pps.panel_power_down_delay);

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = INVALID_PIPE;
	}
}

static void intel_disable_dp(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *old_crtc_state,
			     const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_dp->link_trained = false;

	if (old_crtc_state->has_audio)
		intel_audio_codec_disable(encoder,
					  old_crtc_state, old_conn_state);

	/*
	 * Make sure the panel is off before trying to change the mode.
	 * But also ensure that we have vdd while we switch off the panel.
	 */
	intel_pps_vdd_on(intel_dp);
	intel_edp_backlight_off(old_conn_state);
	intel_dp_set_power(intel_dp, DP_SET_POWER_D3);
	intel_pps_off(intel_dp);
}

static void g4x_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void vlv_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void g4x_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	/*
	 * Bspec does not list a specific disable sequence for g4x DP.
	 * Follow the ilk+ sequence (disable pipe before the port) for
	 * g4x DP as it does not suffer from underruns like the normal
	 * g4x modeset sequence (disable pipe after the port).
	 */
	intel_dp_link_down(encoder, old_crtc_state);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_off(intel_dp, old_crtc_state);
}

static void vlv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	intel_dp_link_down(encoder, old_crtc_state);
}

static void chv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	intel_dp_link_down(encoder, old_crtc_state);

	vlv_dpio_get(dev_priv);

	/* Assert data lane reset */
	chv_data_lane_soft_reset(encoder, old_crtc_state, true);

	vlv_dpio_put(dev_priv);
}

static void
cpt_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
		break;
	case DP_TRAINING_PATTERN_1:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_1_CPT;
		break;
	case DP_TRAINING_PATTERN_2:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_2_CPT;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void
g4x_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	intel_dp->DP &= ~DP_LINK_TRAIN_MASK;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		intel_dp->DP |= DP_LINK_TRAIN_OFF;
		break;
	case DP_TRAINING_PATTERN_1:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_1;
		break;
	case DP_TRAINING_PATTERN_2:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_2;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void intel_dp_enable_port(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/* enable with pattern 1 (as per spec) */

	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
					       DP_PHY_DPRX, DP_TRAINING_PATTERN_1);

	/*
	 * Magic for VLV/CHV. We _must_ first set up the register
	 * without actually enabling the port, and then do another
	 * write to enable the port. Otherwise link training will
	 * fail when the power sequencer is freshly used for this port.
	 */
	intel_dp->DP |= DP_PORT_EN;
	if (crtc_state->has_audio)
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void intel_enable_dp(struct intel_atomic_state *state,
			    struct intel_encoder *encoder,
			    const struct intel_crtc_state *pipe_config,
			    const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg);
	enum pipe pipe = crtc->pipe;
	intel_wakeref_t wakeref;

	if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN))
		return;

	with_intel_pps_lock(intel_dp, wakeref) {
		if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
			vlv_pps_init(encoder, pipe_config);

		intel_dp_enable_port(intel_dp, pipe_config);

		intel_pps_vdd_on_unlocked(intel_dp);
		intel_pps_on_unlocked(intel_dp);
		intel_pps_vdd_off_unlocked(intel_dp, true);
	}

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		unsigned int lane_mask = 0x0;

		if (IS_CHERRYVIEW(dev_priv))
			lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);

		vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
				    lane_mask);
	}

	intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
	intel_dp_configure_protocol_converter(intel_dp, pipe_config);
	intel_dp_check_frl_training(intel_dp);
	intel_dp_pcon_dsc_configure(intel_dp, pipe_config);
	intel_dp_start_link_train(intel_dp, pipe_config);
	intel_dp_stop_link_train(intel_dp, pipe_config);

	if (pipe_config->has_audio) {
		drm_dbg(&dev_priv->drm, "Enabling DP audio on pipe %c\n",
			pipe_name(pipe));
		intel_audio_codec_enable(encoder, pipe_config, conn_state);
	}
}

static void g4x_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_enable_dp(state, encoder, pipe_config, conn_state);
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void vlv_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void g4x_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	intel_dp_prepare(encoder, pipe_config);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_on(intel_dp, pipe_config);
}

static void vlv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	vlv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);
}

static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	vlv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	chv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);

	/* Second common lane will stay alive on its own now */
	chv_phy_release_cl2_override(encoder);
}

static void chv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	chv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_dp_post_pll_disable(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *old_crtc_state,
				    const struct drm_connector_state *old_conn_state)
{
	chv_phy_post_pll_disable(encoder, old_crtc_state);
}

static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
}

static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
}

static u8 intel_dp_preemph_max_2(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_2;
}

static u8 intel_dp_preemph_max_3(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
}

static void vlv_set_signal_levels(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	unsigned long demph_reg_value, preemph_reg_value,
		uniqtranscale_reg_value;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		preemph_reg_value = 0x0004000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x552AB83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5548B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B245555;
			uniqtranscale_reg_value = 0x5560B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x5598DA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		preemph_reg_value = 0x0002000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5552B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404848;
			uniqtranscale_reg_value = 0x5580B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		preemph_reg_value = 0x0000000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B305555;
			uniqtranscale_reg_value = 0x5570B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B2B4040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		preemph_reg_value = 0x0006000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x1B405555;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	vlv_set_phy_signal_level(encoder, crtc_state,
				 demph_reg_value, preemph_reg_value,
				 uniqtranscale_reg_value, 0);
}

static void chv_set_signal_levels(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 deemph_reg_value, margin_reg_value;
	bool uniq_trans_scale = false;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 128;
			margin_reg_value = 52;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 128;
			margin_reg_value = 77;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 128;
			margin_reg_value = 102;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			deemph_reg_value = 128;
			margin_reg_value = 154;
			uniq_trans_scale = true;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 85;
			margin_reg_value = 78;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 85;
			margin_reg_value = 116;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 85;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 64;
			margin_reg_value = 104;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 64;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 43;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	chv_set_phy_signal_level(encoder, crtc_state,
				 deemph_reg_value, margin_reg_value,
				 uniq_trans_scale);
}

static u32 g4x_signal_levels(u8 train_set)
{
	u32 signal_levels = 0;

	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
	default:
		signal_levels |= DP_VOLTAGE_0_4;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
		signal_levels |= DP_VOLTAGE_0_6;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
		signal_levels |= DP_VOLTAGE_0_8;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
		signal_levels |= DP_VOLTAGE_1_2;
		break;
	}
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
	default:
		signal_levels |= DP_PRE_EMPHASIS_0;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		signal_levels |= DP_PRE_EMPHASIS_3_5;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		signal_levels |= DP_PRE_EMPHASIS_6;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		signal_levels |= DP_PRE_EMPHASIS_9_5;
		break;
	}
	return signal_levels;
}

static void
g4x_set_signal_levels(struct intel_encoder *encoder,
		      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = g4x_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK);
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* SNB CPU eDP voltage swing and pre-emphasis control */
static u32 snb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}

static void
snb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = snb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* IVB CPU eDP voltage swing and pre-emphasis control */
static u32 ivb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}

static void
ivb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = ivb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/*
 * If the display is now connected, check the link status;
 * there have been known issues of link loss triggering
 * a long pulse.
 *
 * Some sinks (e.g. ASUS PB287Q) seem to perform some
 * weird HPD ping pong during modesets. So we can apparently
 * end up with HPD going low during a modeset, and then
 * going back up soon after. And once that happens we must
 * retrain the link to get a picture. That's in case no
 * userspace component reacted to intermittent HPD dip.
 */
static enum intel_hotplug_state
intel_dp_hotplug(struct intel_encoder *encoder,
		 struct intel_connector *connector)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_modeset_acquire_ctx ctx;
	enum intel_hotplug_state state;
	int ret;

	if (intel_dp->compliance.test_active &&
	    intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
		intel_dp_phy_test(encoder);
		/* just do the PHY test and nothing else */
		return INTEL_HOTPLUG_UNCHANGED;
	}

	state = intel_encoder_hotplug(encoder, connector);

	drm_modeset_acquire_init(&ctx, 0);

	for (;;) {
		ret = intel_dp_retrain_link(encoder, &ctx);

		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);

	/*
	 * Keeping it consistent with intel_ddi_hotplug() and
	 * intel_hdmi_hotplug().
	 */
	if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
		state = INTEL_HOTPLUG_RETRY;

	return state;
}

static bool ibx_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, SDEISR) & bit;
}

static bool g4x_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

static bool gm45_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_GM45;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

static bool ilk_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, DEISR) & bit;
}

static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	kfree(enc_to_dig_port(to_intel_encoder(encoder)));
}

enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	enum pipe pipe;

	if (g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				encoder->port, &pipe))
		return pipe;

	return INVALID_PIPE;
}

static void intel_dp_encoder_reset(struct drm_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));

	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);

	intel_dp->reset_link_params = true;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
	}

	intel_pps_encoder_reset(intel_dp);
}

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_dp_encoder_destroy,
};

bool g4x_dp_init(struct drm_i915_private *dev_priv,
		 i915_reg_t output_reg, enum port port)
{
	struct intel_digital_port *dig_port;
	struct intel_encoder *intel_encoder;
	struct drm_encoder *encoder;
	struct intel_connector *intel_connector;

	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
		return false;

	intel_connector = intel_connector_alloc();
	if (!intel_connector)
		goto err_connector_alloc;

	intel_encoder = &dig_port->base;
	encoder = &intel_encoder->base;

	mutex_init(&dig_port->hdcp_mutex);

	if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
			     &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
			     "DP %c", port_name(port)))
		goto err_encoder_init;

	intel_encoder->hotplug = intel_dp_hotplug;
	intel_encoder->compute_config = intel_dp_compute_config;
	intel_encoder->get_hw_state = intel_dp_get_hw_state;
	intel_encoder->get_config = intel_dp_get_config;
	intel_encoder->sync_state = intel_dp_sync_state;
	intel_encoder->initial_fastset_check = intel_dp_initial_fastset_check;
	intel_encoder->update_pipe = intel_backlight_update;
	intel_encoder->suspend = intel_dp_encoder_suspend;
	intel_encoder->shutdown = intel_dp_encoder_shutdown;
	if (IS_CHERRYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
		intel_encoder->pre_enable = chv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = chv_post_disable_dp;
		intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
	} else if (IS_VALLEYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
		intel_encoder->pre_enable = vlv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = vlv_post_disable_dp;
	} else {
		intel_encoder->pre_enable = g4x_pre_enable_dp;
		intel_encoder->enable = g4x_enable_dp;
		intel_encoder->disable = g4x_disable_dp;
		intel_encoder->post_disable = g4x_post_disable_dp;
	}

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A))
		dig_port->dp.set_link_train = cpt_set_link_train;
	else
		dig_port->dp.set_link_train = g4x_set_link_train;

	if (IS_CHERRYVIEW(dev_priv))
		intel_encoder->set_signal_levels = chv_set_signal_levels;
	else if (IS_VALLEYVIEW(dev_priv))
		intel_encoder->set_signal_levels = vlv_set_signal_levels;
	else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		intel_encoder->set_signal_levels = ivb_cpu_edp_set_signal_levels;
	else if (IS_SANDYBRIDGE(dev_priv) && port == PORT_A)
		intel_encoder->set_signal_levels = snb_cpu_edp_set_signal_levels;
	else
		intel_encoder->set_signal_levels = g4x_set_signal_levels;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
	    (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
		dig_port->dp.preemph_max = intel_dp_preemph_max_3;
		dig_port->dp.voltage_max = intel_dp_voltage_max_3;
	} else {
		dig_port->dp.preemph_max = intel_dp_preemph_max_2;
		dig_port->dp.voltage_max = intel_dp_voltage_max_2;
	}

	dig_port->dp.output_reg = output_reg;
	dig_port->max_lanes = 4;

	intel_encoder->type = INTEL_OUTPUT_DP;
	intel_encoder->power_domain = intel_port_to_power_domain(port);
	if (IS_CHERRYVIEW(dev_priv)) {
		if (port == PORT_D)
			intel_encoder->pipe_mask = BIT(PIPE_C);
		else
			intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
	} else {
		intel_encoder->pipe_mask = ~0;
	}
	intel_encoder->cloneable = 0;
	intel_encoder->port = port;
	intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);

	dig_port->hpd_pulse = intel_dp_hpd_pulse;

	if (HAS_GMCH(dev_priv)) {
		if (IS_GM45(dev_priv))
			dig_port->connected = gm45_digital_port_connected;
		else
			dig_port->connected = g4x_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = ilk_digital_port_connected;
		else
			dig_port->connected = ibx_digital_port_connected;
	}

	if (port != PORT_A)
		intel_infoframe_init(dig_port);

	dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);
	if (!intel_dp_init_connector(dig_port, intel_connector))
		goto err_init_connector;

	return true;

err_init_connector:
	drm_encoder_cleanup(encoder);
err_encoder_init:
	kfree(intel_connector);
err_connector_alloc:
	kfree(dig_port);
	return false;
}