1 // SPDX-License-Identifier: MIT 2 /* 3 * Copyright © 2020 Intel Corporation 4 * 5 * DisplayPort support for G4x,ILK,SNB,IVB,VLV,CHV (HSW+ handled by the DDI code). 6 */ 7 8 #include <linux/string_helpers.h> 9 10 #include "g4x_dp.h" 11 #include "intel_audio.h" 12 #include "intel_backlight.h" 13 #include "intel_connector.h" 14 #include "intel_crtc.h" 15 #include "intel_de.h" 16 #include "intel_display_types.h" 17 #include "intel_dp.h" 18 #include "intel_dp_link_training.h" 19 #include "intel_dpio_phy.h" 20 #include "intel_fifo_underrun.h" 21 #include "intel_hdmi.h" 22 #include "intel_hotplug.h" 23 #include "intel_pch_display.h" 24 #include "intel_pps.h" 25 #include "vlv_sideband.h" 26 27 static const struct dpll g4x_dpll[] = { 28 { .dot = 162000, .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8, }, 29 { .dot = 270000, .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2, }, 30 }; 31 32 static const struct dpll pch_dpll[] = { 33 { .dot = 162000, .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9, }, 34 { .dot = 270000, .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8, }, 35 }; 36 37 static const struct dpll vlv_dpll[] = { 38 { .dot = 162000, .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81, }, 39 { .dot = 270000, .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27, }, 40 }; 41 42 static const struct dpll chv_dpll[] = { 43 /* m2 is .22 binary fixed point */ 44 { .dot = 162000, .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a /* 32.4 */ }, 45 { .dot = 270000, .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 /* 27.0 */ }, 46 }; 47 48 const struct dpll *vlv_get_dpll(struct drm_i915_private *i915) 49 { 50 return IS_CHERRYVIEW(i915) ? 
&chv_dpll[0] : &vlv_dpll[0]; 51 } 52 53 void g4x_dp_set_clock(struct intel_encoder *encoder, 54 struct intel_crtc_state *pipe_config) 55 { 56 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 57 const struct dpll *divisor = NULL; 58 int i, count = 0; 59 60 if (IS_G4X(dev_priv)) { 61 divisor = g4x_dpll; 62 count = ARRAY_SIZE(g4x_dpll); 63 } else if (HAS_PCH_SPLIT(dev_priv)) { 64 divisor = pch_dpll; 65 count = ARRAY_SIZE(pch_dpll); 66 } else if (IS_CHERRYVIEW(dev_priv)) { 67 divisor = chv_dpll; 68 count = ARRAY_SIZE(chv_dpll); 69 } else if (IS_VALLEYVIEW(dev_priv)) { 70 divisor = vlv_dpll; 71 count = ARRAY_SIZE(vlv_dpll); 72 } 73 74 if (divisor && count) { 75 for (i = 0; i < count; i++) { 76 if (pipe_config->port_clock == divisor[i].dot) { 77 pipe_config->dpll = divisor[i]; 78 pipe_config->clock_set = true; 79 break; 80 } 81 } 82 } 83 } 84 85 static void intel_dp_prepare(struct intel_encoder *encoder, 86 const struct intel_crtc_state *pipe_config) 87 { 88 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 89 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 90 enum port port = encoder->port; 91 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc); 92 const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode; 93 94 intel_dp_set_link_params(intel_dp, 95 pipe_config->port_clock, 96 pipe_config->lane_count); 97 98 /* 99 * There are four kinds of DP registers: 100 * IBX PCH 101 * SNB CPU 102 * IVB CPU 103 * CPT PCH 104 * 105 * IBX PCH and CPU are the same for almost everything, 106 * except that the CPU DP PLL is configured in this 107 * register 108 * 109 * CPT PCH is quite different, having many bits moved 110 * to the TRANS_DP_CTL register instead. That 111 * configuration happens (oddly) in ilk_pch_enable 112 */ 113 114 /* Preserve the BIOS-computed detected bit. This is 115 * supposed to be read-only. 
116 */ 117 intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED; 118 119 /* Handle DP bits in common between all three register formats */ 120 intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0; 121 intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count); 122 123 /* Split out the IBX/CPU vs CPT settings */ 124 125 if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) { 126 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) 127 intel_dp->DP |= DP_SYNC_HS_HIGH; 128 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC) 129 intel_dp->DP |= DP_SYNC_VS_HIGH; 130 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT; 131 132 if (drm_dp_enhanced_frame_cap(intel_dp->dpcd)) 133 intel_dp->DP |= DP_ENHANCED_FRAMING; 134 135 intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe); 136 } else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) { 137 u32 trans_dp; 138 139 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT; 140 141 trans_dp = intel_de_read(dev_priv, TRANS_DP_CTL(crtc->pipe)); 142 if (drm_dp_enhanced_frame_cap(intel_dp->dpcd)) 143 trans_dp |= TRANS_DP_ENH_FRAMING; 144 else 145 trans_dp &= ~TRANS_DP_ENH_FRAMING; 146 intel_de_write(dev_priv, TRANS_DP_CTL(crtc->pipe), trans_dp); 147 } else { 148 if (IS_G4X(dev_priv) && pipe_config->limited_color_range) 149 intel_dp->DP |= DP_COLOR_RANGE_16_235; 150 151 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) 152 intel_dp->DP |= DP_SYNC_HS_HIGH; 153 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC) 154 intel_dp->DP |= DP_SYNC_VS_HIGH; 155 intel_dp->DP |= DP_LINK_TRAIN_OFF; 156 157 if (drm_dp_enhanced_frame_cap(intel_dp->dpcd)) 158 intel_dp->DP |= DP_ENHANCED_FRAMING; 159 160 if (IS_CHERRYVIEW(dev_priv)) 161 intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe); 162 else 163 intel_dp->DP |= DP_PIPE_SEL(crtc->pipe); 164 } 165 } 166 167 static void assert_dp_port(struct intel_dp *intel_dp, bool state) 168 { 169 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp); 170 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 171 bool cur_state = 
intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN; 172 173 I915_STATE_WARN(cur_state != state, 174 "[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n", 175 dig_port->base.base.base.id, dig_port->base.base.name, 176 str_on_off(state), str_on_off(cur_state)); 177 } 178 #define assert_dp_port_disabled(d) assert_dp_port((d), false) 179 180 static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state) 181 { 182 bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE; 183 184 I915_STATE_WARN(cur_state != state, 185 "eDP PLL state assertion failure (expected %s, current %s)\n", 186 str_on_off(state), str_on_off(cur_state)); 187 } 188 #define assert_edp_pll_enabled(d) assert_edp_pll((d), true) 189 #define assert_edp_pll_disabled(d) assert_edp_pll((d), false) 190 191 static void ilk_edp_pll_on(struct intel_dp *intel_dp, 192 const struct intel_crtc_state *pipe_config) 193 { 194 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc); 195 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 196 197 assert_transcoder_disabled(dev_priv, pipe_config->cpu_transcoder); 198 assert_dp_port_disabled(intel_dp); 199 assert_edp_pll_disabled(dev_priv); 200 201 drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n", 202 pipe_config->port_clock); 203 204 intel_dp->DP &= ~DP_PLL_FREQ_MASK; 205 206 if (pipe_config->port_clock == 162000) 207 intel_dp->DP |= DP_PLL_FREQ_162MHZ; 208 else 209 intel_dp->DP |= DP_PLL_FREQ_270MHZ; 210 211 intel_de_write(dev_priv, DP_A, intel_dp->DP); 212 intel_de_posting_read(dev_priv, DP_A); 213 udelay(500); 214 215 /* 216 * [DevILK] Work around required when enabling DP PLL 217 * while a pipe is enabled going to FDI: 218 * 1. Wait for the start of vertical blank on the enabled pipe going to FDI 219 * 2. 
Program DP PLL enable 220 */ 221 if (IS_IRONLAKE(dev_priv)) 222 intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe); 223 224 intel_dp->DP |= DP_PLL_ENABLE; 225 226 intel_de_write(dev_priv, DP_A, intel_dp->DP); 227 intel_de_posting_read(dev_priv, DP_A); 228 udelay(200); 229 } 230 231 static void ilk_edp_pll_off(struct intel_dp *intel_dp, 232 const struct intel_crtc_state *old_crtc_state) 233 { 234 struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc); 235 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 236 237 assert_transcoder_disabled(dev_priv, old_crtc_state->cpu_transcoder); 238 assert_dp_port_disabled(intel_dp); 239 assert_edp_pll_enabled(dev_priv); 240 241 drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n"); 242 243 intel_dp->DP &= ~DP_PLL_ENABLE; 244 245 intel_de_write(dev_priv, DP_A, intel_dp->DP); 246 intel_de_posting_read(dev_priv, DP_A); 247 udelay(200); 248 } 249 250 static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv, 251 enum port port, enum pipe *pipe) 252 { 253 enum pipe p; 254 255 for_each_pipe(dev_priv, p) { 256 u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p)); 257 258 if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) { 259 *pipe = p; 260 return true; 261 } 262 } 263 264 drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n", 265 port_name(port)); 266 267 /* must initialize pipe to something for the asserts */ 268 *pipe = PIPE_A; 269 270 return false; 271 } 272 273 bool g4x_dp_port_enabled(struct drm_i915_private *dev_priv, 274 i915_reg_t dp_reg, enum port port, 275 enum pipe *pipe) 276 { 277 bool ret; 278 u32 val; 279 280 val = intel_de_read(dev_priv, dp_reg); 281 282 ret = val & DP_PORT_EN; 283 284 /* asserts want to know the pipe even if the port is disabled */ 285 if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) 286 *pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB; 287 else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) 288 ret &= cpt_dp_port_selected(dev_priv, port, pipe); 
289 else if (IS_CHERRYVIEW(dev_priv)) 290 *pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV; 291 else 292 *pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT; 293 294 return ret; 295 } 296 297 static bool intel_dp_get_hw_state(struct intel_encoder *encoder, 298 enum pipe *pipe) 299 { 300 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 301 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 302 intel_wakeref_t wakeref; 303 bool ret; 304 305 wakeref = intel_display_power_get_if_enabled(dev_priv, 306 encoder->power_domain); 307 if (!wakeref) 308 return false; 309 310 ret = g4x_dp_port_enabled(dev_priv, intel_dp->output_reg, 311 encoder->port, pipe); 312 313 intel_display_power_put(dev_priv, encoder->power_domain, wakeref); 314 315 return ret; 316 } 317 318 static void g4x_dp_get_m_n(struct intel_crtc_state *crtc_state) 319 { 320 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 321 322 if (crtc_state->has_pch_encoder) { 323 intel_pch_transcoder_get_m1_n1(crtc, &crtc_state->dp_m_n); 324 intel_pch_transcoder_get_m2_n2(crtc, &crtc_state->dp_m2_n2); 325 } else { 326 intel_cpu_transcoder_get_m1_n1(crtc, crtc_state->cpu_transcoder, 327 &crtc_state->dp_m_n); 328 intel_cpu_transcoder_get_m2_n2(crtc, crtc_state->cpu_transcoder, 329 &crtc_state->dp_m2_n2); 330 } 331 } 332 333 static void intel_dp_get_config(struct intel_encoder *encoder, 334 struct intel_crtc_state *pipe_config) 335 { 336 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 337 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 338 u32 tmp, flags = 0; 339 enum port port = encoder->port; 340 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc); 341 342 if (encoder->type == INTEL_OUTPUT_EDP) 343 pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP); 344 else 345 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP); 346 347 tmp = intel_de_read(dev_priv, intel_dp->output_reg); 348 349 pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != 
PORT_A; 350 351 if (HAS_PCH_CPT(dev_priv) && port != PORT_A) { 352 u32 trans_dp = intel_de_read(dev_priv, 353 TRANS_DP_CTL(crtc->pipe)); 354 355 if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH) 356 flags |= DRM_MODE_FLAG_PHSYNC; 357 else 358 flags |= DRM_MODE_FLAG_NHSYNC; 359 360 if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH) 361 flags |= DRM_MODE_FLAG_PVSYNC; 362 else 363 flags |= DRM_MODE_FLAG_NVSYNC; 364 } else { 365 if (tmp & DP_SYNC_HS_HIGH) 366 flags |= DRM_MODE_FLAG_PHSYNC; 367 else 368 flags |= DRM_MODE_FLAG_NHSYNC; 369 370 if (tmp & DP_SYNC_VS_HIGH) 371 flags |= DRM_MODE_FLAG_PVSYNC; 372 else 373 flags |= DRM_MODE_FLAG_NVSYNC; 374 } 375 376 pipe_config->hw.adjusted_mode.flags |= flags; 377 378 if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235) 379 pipe_config->limited_color_range = true; 380 381 pipe_config->lane_count = 382 ((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1; 383 384 g4x_dp_get_m_n(pipe_config); 385 386 if (port == PORT_A) { 387 if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ) 388 pipe_config->port_clock = 162000; 389 else 390 pipe_config->port_clock = 270000; 391 } 392 393 pipe_config->hw.adjusted_mode.crtc_clock = 394 intel_dotclock_calculate(pipe_config->port_clock, 395 &pipe_config->dp_m_n); 396 397 if (intel_dp_is_edp(intel_dp) && dev_priv->vbt.edp.bpp && 398 pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) { 399 /* 400 * This is a big fat ugly hack. 401 * 402 * Some machines in UEFI boot mode provide us a VBT that has 18 403 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons 404 * unknown we fail to light up. Yet the same BIOS boots up with 405 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as 406 * max, not what it tells us to use. 407 * 408 * Note: This will still be broken if the eDP panel is not lit 409 * up by the BIOS, and thus we can't get the mode at module 410 * load. 
411 */ 412 drm_dbg_kms(&dev_priv->drm, 413 "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n", 414 pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp); 415 dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp; 416 } 417 } 418 419 static void 420 intel_dp_link_down(struct intel_encoder *encoder, 421 const struct intel_crtc_state *old_crtc_state) 422 { 423 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 424 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 425 struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc); 426 enum port port = encoder->port; 427 428 if (drm_WARN_ON(&dev_priv->drm, 429 (intel_de_read(dev_priv, intel_dp->output_reg) & 430 DP_PORT_EN) == 0)) 431 return; 432 433 drm_dbg_kms(&dev_priv->drm, "\n"); 434 435 if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) || 436 (HAS_PCH_CPT(dev_priv) && port != PORT_A)) { 437 intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT; 438 intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE_CPT; 439 } else { 440 intel_dp->DP &= ~DP_LINK_TRAIN_MASK; 441 intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE; 442 } 443 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP); 444 intel_de_posting_read(dev_priv, intel_dp->output_reg); 445 446 intel_dp->DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE); 447 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP); 448 intel_de_posting_read(dev_priv, intel_dp->output_reg); 449 450 /* 451 * HW workaround for IBX, we need to move the port 452 * to transcoder A after disabling it to allow the 453 * matching HDMI port to be enabled on transcoder A. 454 */ 455 if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) { 456 /* 457 * We get CPU/PCH FIFO underruns on the other pipe when 458 * doing the workaround. Sweep them under the rug. 
459 */ 460 intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false); 461 intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false); 462 463 /* always enable with pattern 1 (as per spec) */ 464 intel_dp->DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK); 465 intel_dp->DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) | 466 DP_LINK_TRAIN_PAT_1; 467 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP); 468 intel_de_posting_read(dev_priv, intel_dp->output_reg); 469 470 intel_dp->DP &= ~DP_PORT_EN; 471 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP); 472 intel_de_posting_read(dev_priv, intel_dp->output_reg); 473 474 intel_wait_for_vblank_if_active(dev_priv, PIPE_A); 475 intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true); 476 intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true); 477 } 478 479 msleep(intel_dp->pps.panel_power_down_delay); 480 481 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) { 482 intel_wakeref_t wakeref; 483 484 with_intel_pps_lock(intel_dp, wakeref) 485 intel_dp->pps.active_pipe = INVALID_PIPE; 486 } 487 } 488 489 static void intel_disable_dp(struct intel_atomic_state *state, 490 struct intel_encoder *encoder, 491 const struct intel_crtc_state *old_crtc_state, 492 const struct drm_connector_state *old_conn_state) 493 { 494 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 495 496 intel_dp->link_trained = false; 497 498 intel_audio_codec_disable(encoder, old_crtc_state, old_conn_state); 499 500 /* 501 * Make sure the panel is off before trying to change the mode. 502 * But also ensure that we have vdd while we switch off the panel. 
503 */ 504 intel_pps_vdd_on(intel_dp); 505 intel_edp_backlight_off(old_conn_state); 506 intel_dp_set_power(intel_dp, DP_SET_POWER_D3); 507 intel_pps_off(intel_dp); 508 } 509 510 static void g4x_disable_dp(struct intel_atomic_state *state, 511 struct intel_encoder *encoder, 512 const struct intel_crtc_state *old_crtc_state, 513 const struct drm_connector_state *old_conn_state) 514 { 515 intel_disable_dp(state, encoder, old_crtc_state, old_conn_state); 516 } 517 518 static void vlv_disable_dp(struct intel_atomic_state *state, 519 struct intel_encoder *encoder, 520 const struct intel_crtc_state *old_crtc_state, 521 const struct drm_connector_state *old_conn_state) 522 { 523 intel_disable_dp(state, encoder, old_crtc_state, old_conn_state); 524 } 525 526 static void g4x_post_disable_dp(struct intel_atomic_state *state, 527 struct intel_encoder *encoder, 528 const struct intel_crtc_state *old_crtc_state, 529 const struct drm_connector_state *old_conn_state) 530 { 531 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 532 enum port port = encoder->port; 533 534 /* 535 * Bspec does not list a specific disable sequence for g4x DP. 536 * Follow the ilk+ sequence (disable pipe before the port) for 537 * g4x DP as it does not suffer from underruns like the normal 538 * g4x modeset sequence (disable pipe after the port). 
539 */ 540 intel_dp_link_down(encoder, old_crtc_state); 541 542 /* Only ilk+ has port A */ 543 if (port == PORT_A) 544 ilk_edp_pll_off(intel_dp, old_crtc_state); 545 } 546 547 static void vlv_post_disable_dp(struct intel_atomic_state *state, 548 struct intel_encoder *encoder, 549 const struct intel_crtc_state *old_crtc_state, 550 const struct drm_connector_state *old_conn_state) 551 { 552 intel_dp_link_down(encoder, old_crtc_state); 553 } 554 555 static void chv_post_disable_dp(struct intel_atomic_state *state, 556 struct intel_encoder *encoder, 557 const struct intel_crtc_state *old_crtc_state, 558 const struct drm_connector_state *old_conn_state) 559 { 560 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 561 562 intel_dp_link_down(encoder, old_crtc_state); 563 564 vlv_dpio_get(dev_priv); 565 566 /* Assert data lane reset */ 567 chv_data_lane_soft_reset(encoder, old_crtc_state, true); 568 569 vlv_dpio_put(dev_priv); 570 } 571 572 static void 573 cpt_set_link_train(struct intel_dp *intel_dp, 574 const struct intel_crtc_state *crtc_state, 575 u8 dp_train_pat) 576 { 577 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); 578 579 intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT; 580 581 switch (intel_dp_training_pattern_symbol(dp_train_pat)) { 582 case DP_TRAINING_PATTERN_DISABLE: 583 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT; 584 break; 585 case DP_TRAINING_PATTERN_1: 586 intel_dp->DP |= DP_LINK_TRAIN_PAT_1_CPT; 587 break; 588 case DP_TRAINING_PATTERN_2: 589 intel_dp->DP |= DP_LINK_TRAIN_PAT_2_CPT; 590 break; 591 default: 592 MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat)); 593 return; 594 } 595 596 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP); 597 intel_de_posting_read(dev_priv, intel_dp->output_reg); 598 } 599 600 static void 601 g4x_set_link_train(struct intel_dp *intel_dp, 602 const struct intel_crtc_state *crtc_state, 603 u8 dp_train_pat) 604 { 605 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); 606 607 intel_dp->DP 
&= ~DP_LINK_TRAIN_MASK; 608 609 switch (intel_dp_training_pattern_symbol(dp_train_pat)) { 610 case DP_TRAINING_PATTERN_DISABLE: 611 intel_dp->DP |= DP_LINK_TRAIN_OFF; 612 break; 613 case DP_TRAINING_PATTERN_1: 614 intel_dp->DP |= DP_LINK_TRAIN_PAT_1; 615 break; 616 case DP_TRAINING_PATTERN_2: 617 intel_dp->DP |= DP_LINK_TRAIN_PAT_2; 618 break; 619 default: 620 MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat)); 621 return; 622 } 623 624 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP); 625 intel_de_posting_read(dev_priv, intel_dp->output_reg); 626 } 627 628 static void intel_dp_enable_port(struct intel_dp *intel_dp, 629 const struct intel_crtc_state *crtc_state) 630 { 631 struct drm_i915_private *dev_priv = dp_to_i915(intel_dp); 632 633 /* enable with pattern 1 (as per spec) */ 634 635 intel_dp_program_link_training_pattern(intel_dp, crtc_state, 636 DP_PHY_DPRX, DP_TRAINING_PATTERN_1); 637 638 /* 639 * Magic for VLV/CHV. We _must_ first set up the register 640 * without actually enabling the port, and then do another 641 * write to enable the port. Otherwise link training will 642 * fail when the power sequencer is freshly used for this port. 
643 */ 644 intel_dp->DP |= DP_PORT_EN; 645 if (crtc_state->has_audio) 646 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE; 647 648 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP); 649 intel_de_posting_read(dev_priv, intel_dp->output_reg); 650 } 651 652 static void intel_enable_dp(struct intel_atomic_state *state, 653 struct intel_encoder *encoder, 654 const struct intel_crtc_state *pipe_config, 655 const struct drm_connector_state *conn_state) 656 { 657 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 658 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 659 u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg); 660 intel_wakeref_t wakeref; 661 662 if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN)) 663 return; 664 665 with_intel_pps_lock(intel_dp, wakeref) { 666 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) 667 vlv_pps_init(encoder, pipe_config); 668 669 intel_dp_enable_port(intel_dp, pipe_config); 670 671 intel_pps_vdd_on_unlocked(intel_dp); 672 intel_pps_on_unlocked(intel_dp); 673 intel_pps_vdd_off_unlocked(intel_dp, true); 674 } 675 676 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) { 677 unsigned int lane_mask = 0x0; 678 679 if (IS_CHERRYVIEW(dev_priv)) 680 lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count); 681 682 vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp), 683 lane_mask); 684 } 685 686 intel_dp_set_power(intel_dp, DP_SET_POWER_D0); 687 intel_dp_configure_protocol_converter(intel_dp, pipe_config); 688 intel_dp_check_frl_training(intel_dp); 689 intel_dp_pcon_dsc_configure(intel_dp, pipe_config); 690 intel_dp_start_link_train(intel_dp, pipe_config); 691 intel_dp_stop_link_train(intel_dp, pipe_config); 692 693 intel_audio_codec_enable(encoder, pipe_config, conn_state); 694 } 695 696 static void g4x_enable_dp(struct intel_atomic_state *state, 697 struct intel_encoder *encoder, 698 const struct intel_crtc_state *pipe_config, 699 const struct drm_connector_state *conn_state) 700 { 701 
intel_enable_dp(state, encoder, pipe_config, conn_state); 702 intel_edp_backlight_on(pipe_config, conn_state); 703 } 704 705 static void vlv_enable_dp(struct intel_atomic_state *state, 706 struct intel_encoder *encoder, 707 const struct intel_crtc_state *pipe_config, 708 const struct drm_connector_state *conn_state) 709 { 710 intel_edp_backlight_on(pipe_config, conn_state); 711 } 712 713 static void g4x_pre_enable_dp(struct intel_atomic_state *state, 714 struct intel_encoder *encoder, 715 const struct intel_crtc_state *pipe_config, 716 const struct drm_connector_state *conn_state) 717 { 718 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 719 enum port port = encoder->port; 720 721 intel_dp_prepare(encoder, pipe_config); 722 723 /* Only ilk+ has port A */ 724 if (port == PORT_A) 725 ilk_edp_pll_on(intel_dp, pipe_config); 726 } 727 728 static void vlv_pre_enable_dp(struct intel_atomic_state *state, 729 struct intel_encoder *encoder, 730 const struct intel_crtc_state *pipe_config, 731 const struct drm_connector_state *conn_state) 732 { 733 vlv_phy_pre_encoder_enable(encoder, pipe_config); 734 735 intel_enable_dp(state, encoder, pipe_config, conn_state); 736 } 737 738 static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state, 739 struct intel_encoder *encoder, 740 const struct intel_crtc_state *pipe_config, 741 const struct drm_connector_state *conn_state) 742 { 743 intel_dp_prepare(encoder, pipe_config); 744 745 vlv_phy_pre_pll_enable(encoder, pipe_config); 746 } 747 748 static void chv_pre_enable_dp(struct intel_atomic_state *state, 749 struct intel_encoder *encoder, 750 const struct intel_crtc_state *pipe_config, 751 const struct drm_connector_state *conn_state) 752 { 753 chv_phy_pre_encoder_enable(encoder, pipe_config); 754 755 intel_enable_dp(state, encoder, pipe_config, conn_state); 756 757 /* Second common lane will stay alive on its own now */ 758 chv_phy_release_cl2_override(encoder); 759 } 760 761 static void chv_dp_pre_pll_enable(struct 
intel_atomic_state *state, 762 struct intel_encoder *encoder, 763 const struct intel_crtc_state *pipe_config, 764 const struct drm_connector_state *conn_state) 765 { 766 intel_dp_prepare(encoder, pipe_config); 767 768 chv_phy_pre_pll_enable(encoder, pipe_config); 769 } 770 771 static void chv_dp_post_pll_disable(struct intel_atomic_state *state, 772 struct intel_encoder *encoder, 773 const struct intel_crtc_state *old_crtc_state, 774 const struct drm_connector_state *old_conn_state) 775 { 776 chv_phy_post_pll_disable(encoder, old_crtc_state); 777 } 778 779 static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp, 780 const struct intel_crtc_state *crtc_state) 781 { 782 return DP_TRAIN_VOLTAGE_SWING_LEVEL_2; 783 } 784 785 static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp, 786 const struct intel_crtc_state *crtc_state) 787 { 788 return DP_TRAIN_VOLTAGE_SWING_LEVEL_3; 789 } 790 791 static u8 intel_dp_preemph_max_2(struct intel_dp *intel_dp) 792 { 793 return DP_TRAIN_PRE_EMPH_LEVEL_2; 794 } 795 796 static u8 intel_dp_preemph_max_3(struct intel_dp *intel_dp) 797 { 798 return DP_TRAIN_PRE_EMPH_LEVEL_3; 799 } 800 801 static void vlv_set_signal_levels(struct intel_encoder *encoder, 802 const struct intel_crtc_state *crtc_state) 803 { 804 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 805 unsigned long demph_reg_value, preemph_reg_value, 806 uniqtranscale_reg_value; 807 u8 train_set = intel_dp->train_set[0]; 808 809 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) { 810 case DP_TRAIN_PRE_EMPH_LEVEL_0: 811 preemph_reg_value = 0x0004000; 812 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) { 813 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0: 814 demph_reg_value = 0x2B405555; 815 uniqtranscale_reg_value = 0x552AB83A; 816 break; 817 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1: 818 demph_reg_value = 0x2B404040; 819 uniqtranscale_reg_value = 0x5548B83A; 820 break; 821 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2: 822 demph_reg_value = 0x2B245555; 823 uniqtranscale_reg_value = 0x5560B83A; 
824 break; 825 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3: 826 demph_reg_value = 0x2B405555; 827 uniqtranscale_reg_value = 0x5598DA3A; 828 break; 829 default: 830 return; 831 } 832 break; 833 case DP_TRAIN_PRE_EMPH_LEVEL_1: 834 preemph_reg_value = 0x0002000; 835 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) { 836 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0: 837 demph_reg_value = 0x2B404040; 838 uniqtranscale_reg_value = 0x5552B83A; 839 break; 840 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1: 841 demph_reg_value = 0x2B404848; 842 uniqtranscale_reg_value = 0x5580B83A; 843 break; 844 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2: 845 demph_reg_value = 0x2B404040; 846 uniqtranscale_reg_value = 0x55ADDA3A; 847 break; 848 default: 849 return; 850 } 851 break; 852 case DP_TRAIN_PRE_EMPH_LEVEL_2: 853 preemph_reg_value = 0x0000000; 854 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) { 855 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0: 856 demph_reg_value = 0x2B305555; 857 uniqtranscale_reg_value = 0x5570B83A; 858 break; 859 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1: 860 demph_reg_value = 0x2B2B4040; 861 uniqtranscale_reg_value = 0x55ADDA3A; 862 break; 863 default: 864 return; 865 } 866 break; 867 case DP_TRAIN_PRE_EMPH_LEVEL_3: 868 preemph_reg_value = 0x0006000; 869 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) { 870 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0: 871 demph_reg_value = 0x1B405555; 872 uniqtranscale_reg_value = 0x55ADDA3A; 873 break; 874 default: 875 return; 876 } 877 break; 878 default: 879 return; 880 } 881 882 vlv_set_phy_signal_level(encoder, crtc_state, 883 demph_reg_value, preemph_reg_value, 884 uniqtranscale_reg_value, 0); 885 } 886 887 static void chv_set_signal_levels(struct intel_encoder *encoder, 888 const struct intel_crtc_state *crtc_state) 889 { 890 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 891 u32 deemph_reg_value, margin_reg_value; 892 bool uniq_trans_scale = false; 893 u8 train_set = intel_dp->train_set[0]; 894 895 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) { 896 case 
DP_TRAIN_PRE_EMPH_LEVEL_0: 897 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) { 898 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0: 899 deemph_reg_value = 128; 900 margin_reg_value = 52; 901 break; 902 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1: 903 deemph_reg_value = 128; 904 margin_reg_value = 77; 905 break; 906 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2: 907 deemph_reg_value = 128; 908 margin_reg_value = 102; 909 break; 910 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3: 911 deemph_reg_value = 128; 912 margin_reg_value = 154; 913 uniq_trans_scale = true; 914 break; 915 default: 916 return; 917 } 918 break; 919 case DP_TRAIN_PRE_EMPH_LEVEL_1: 920 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) { 921 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0: 922 deemph_reg_value = 85; 923 margin_reg_value = 78; 924 break; 925 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1: 926 deemph_reg_value = 85; 927 margin_reg_value = 116; 928 break; 929 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2: 930 deemph_reg_value = 85; 931 margin_reg_value = 154; 932 break; 933 default: 934 return; 935 } 936 break; 937 case DP_TRAIN_PRE_EMPH_LEVEL_2: 938 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) { 939 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0: 940 deemph_reg_value = 64; 941 margin_reg_value = 104; 942 break; 943 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1: 944 deemph_reg_value = 64; 945 margin_reg_value = 154; 946 break; 947 default: 948 return; 949 } 950 break; 951 case DP_TRAIN_PRE_EMPH_LEVEL_3: 952 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) { 953 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0: 954 deemph_reg_value = 43; 955 margin_reg_value = 154; 956 break; 957 default: 958 return; 959 } 960 break; 961 default: 962 return; 963 } 964 965 chv_set_phy_signal_level(encoder, crtc_state, 966 deemph_reg_value, margin_reg_value, 967 uniq_trans_scale); 968 } 969 970 static u32 g4x_signal_levels(u8 train_set) 971 { 972 u32 signal_levels = 0; 973 974 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) { 975 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0: 976 default: 977 signal_levels |= 
DP_VOLTAGE_0_4; 978 break; 979 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1: 980 signal_levels |= DP_VOLTAGE_0_6; 981 break; 982 case DP_TRAIN_VOLTAGE_SWING_LEVEL_2: 983 signal_levels |= DP_VOLTAGE_0_8; 984 break; 985 case DP_TRAIN_VOLTAGE_SWING_LEVEL_3: 986 signal_levels |= DP_VOLTAGE_1_2; 987 break; 988 } 989 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) { 990 case DP_TRAIN_PRE_EMPH_LEVEL_0: 991 default: 992 signal_levels |= DP_PRE_EMPHASIS_0; 993 break; 994 case DP_TRAIN_PRE_EMPH_LEVEL_1: 995 signal_levels |= DP_PRE_EMPHASIS_3_5; 996 break; 997 case DP_TRAIN_PRE_EMPH_LEVEL_2: 998 signal_levels |= DP_PRE_EMPHASIS_6; 999 break; 1000 case DP_TRAIN_PRE_EMPH_LEVEL_3: 1001 signal_levels |= DP_PRE_EMPHASIS_9_5; 1002 break; 1003 } 1004 return signal_levels; 1005 } 1006 1007 static void 1008 g4x_set_signal_levels(struct intel_encoder *encoder, 1009 const struct intel_crtc_state *crtc_state) 1010 { 1011 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1012 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1013 u8 train_set = intel_dp->train_set[0]; 1014 u32 signal_levels; 1015 1016 signal_levels = g4x_signal_levels(train_set); 1017 1018 drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n", 1019 signal_levels); 1020 1021 intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK); 1022 intel_dp->DP |= signal_levels; 1023 1024 intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP); 1025 intel_de_posting_read(dev_priv, intel_dp->output_reg); 1026 } 1027 1028 /* SNB CPU eDP voltage swing and pre-emphasis control */ 1029 static u32 snb_cpu_edp_signal_levels(u8 train_set) 1030 { 1031 u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK | 1032 DP_TRAIN_PRE_EMPHASIS_MASK); 1033 1034 switch (signal_levels) { 1035 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0: 1036 case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0: 1037 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B; 1038 case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | 
DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		/* combination not representable on SNB: log and use lowest */
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}

/*
 * Apply the current link-training drive settings to an SNB CPU eDP
 * port: rewrite the EDP_LINK_TRAIN field of the cached port register
 * value and latch it to the hardware.
 */
static void
snb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = snb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	/* read back to post the write */
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* IVB CPU eDP voltage swing and pre-emphasis control */
static u32 ivb_cpu_edp_signal_levels(u8 train_set)
{
	/* IVB encodes swing+emphasis as a single combined field. */
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		/* combination not representable on IVB: log and use 500mV */
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}

/*
 * Apply the current link-training drive settings to an IVB CPU eDP
 * port: rewrite the EDP_LINK_TRAIN field of the cached port register
 * value and latch it to the hardware.
 */
static void
ivb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = ivb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	/* read back to post the write */
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/*
 * If display is now connected check links status,
 * there has been known issues of link loss triggering
 * long pulse.
 *
 * Some sinks (eg. ASUS PB287Q) seem to perform some
 * weird HPD ping pong during modesets. So we can apparently
 * end up with HPD going low during a modeset, and then
 * going back up soon after. And once that happens we must
 * retrain the link to get a picture. That's in case no
 * userspace component reacted to intermittent HPD dip.
 */
static enum intel_hotplug_state
intel_dp_hotplug(struct intel_encoder *encoder,
		 struct intel_connector *connector)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_modeset_acquire_ctx ctx;
	enum intel_hotplug_state state;
	int ret;

	if (intel_dp->compliance.test_active &&
	    intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
		intel_dp_phy_test(encoder);
		/* just do the PHY test and nothing else */
		return INTEL_HOTPLUG_UNCHANGED;
	}

	state = intel_encoder_hotplug(encoder, connector);

	drm_modeset_acquire_init(&ctx, 0);

	/* Retry the retrain until the modeset locks stop deadlocking. */
	for (;;) {
		ret = intel_dp_retrain_link(encoder, &ctx);

		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);

	/*
	 * Keeping it consistent with intel_ddi_hotplug() and
	 * intel_hdmi_hotplug().
	 */
	if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
		state = INTEL_HOTPLUG_RETRY;

	return state;
}

/* Live hotplug status of an IBX/CPT PCH port, from SDEISR. */
static bool ibx_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, SDEISR) & bit;
}

/* Live hotplug status of a G4x port, from PORT_HOTPLUG_STAT. */
static bool g4x_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

/* Like g4x_digital_port_connected(), but with the GM45 bit layout. */
static bool gm45_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_GM45;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

/* Live hotplug status of an ILK CPU port, from DEISR. */
static bool ilk_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, DEISR) & bit;
}

static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	/* frees the intel_digital_port allocated in g4x_dp_init() */
	kfree(enc_to_dig_port(to_intel_encoder(encoder)));
}

/*
 * Return the pipe currently driving this VLV/CHV DP port according to
 * the port register, or INVALID_PIPE if the port is disabled.
 */
enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	enum pipe pipe;

	if (g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				encoder->port, &pipe))
		return pipe;

	return INVALID_PIPE;
}

/*
 * drm_encoder_funcs.reset: resync the cached port register value and
 * PPS state with the hardware (e.g. after resume).
 */
static void intel_dp_encoder_reset(struct drm_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));

	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);

	intel_dp->reset_link_params = true;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		/* read the currently active pipe under the PPS lock */
		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
	}

	intel_pps_encoder_reset(intel_dp);
}

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_dp_encoder_destroy,
};

/*
 * Register a pre-DDI DP encoder+connector for @port at @output_reg,
 * selecting the platform-specific enable/disable, link-training and
 * signal-level hooks. Returns true on success, false on any failure
 * (all partially-created objects are unwound).
 */
bool g4x_dp_init(struct drm_i915_private *dev_priv,
		 i915_reg_t output_reg, enum port port)
{
	struct intel_digital_port *dig_port;
	struct intel_encoder *intel_encoder;
	struct drm_encoder *encoder;
	struct intel_connector *intel_connector;

	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
		return false;

	intel_connector = intel_connector_alloc();
	if (!intel_connector)
		goto err_connector_alloc;

	intel_encoder = &dig_port->base;
	encoder = &intel_encoder->base;

	mutex_init(&dig_port->hdcp_mutex);

	/*
	 * NOTE(review): encoder type is reported as TMDS rather than a
	 * DP-specific type — presumably historical; confirm before changing.
	 */
	if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
			     &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
			     "DP %c", port_name(port)))
		goto err_encoder_init;

	intel_encoder->hotplug = intel_dp_hotplug;
	intel_encoder->compute_config = intel_dp_compute_config;
	intel_encoder->get_hw_state = intel_dp_get_hw_state;
	intel_encoder->get_config = intel_dp_get_config;
	intel_encoder->sync_state = intel_dp_sync_state;
	intel_encoder->initial_fastset_check = intel_dp_initial_fastset_check;
	intel_encoder->update_pipe = intel_backlight_update;
	intel_encoder->suspend = intel_dp_encoder_suspend;
	intel_encoder->shutdown = intel_dp_encoder_shutdown;

	/* Platform-specific modeset sequence hooks. */
	if (IS_CHERRYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
		intel_encoder->pre_enable = chv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = chv_post_disable_dp;
		intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
	} else if (IS_VALLEYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
		intel_encoder->pre_enable = vlv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = vlv_post_disable_dp;
	} else {
		intel_encoder->pre_enable = g4x_pre_enable_dp;
		intel_encoder->enable = g4x_enable_dp;
		intel_encoder->disable = g4x_disable_dp;
		intel_encoder->post_disable = g4x_post_disable_dp;
	}

	/* IVB port A and CPT PCH ports use the CPT-style link training. */
	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A))
		dig_port->dp.set_link_train = cpt_set_link_train;
	else
		dig_port->dp.set_link_train = g4x_set_link_train;

	if (IS_CHERRYVIEW(dev_priv))
		intel_encoder->set_signal_levels = chv_set_signal_levels;
	else if (IS_VALLEYVIEW(dev_priv))
		intel_encoder->set_signal_levels = vlv_set_signal_levels;
	else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		intel_encoder->set_signal_levels = ivb_cpu_edp_set_signal_levels;
	else if (IS_SANDYBRIDGE(dev_priv) && port == PORT_A)
		intel_encoder->set_signal_levels = snb_cpu_edp_set_signal_levels;
	else
		intel_encoder->set_signal_levels = g4x_set_signal_levels;

	/* Maximum swing/pre-emphasis levels the port can drive. */
	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
	    (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
		dig_port->dp.preemph_max = intel_dp_preemph_max_3;
		dig_port->dp.voltage_max = intel_dp_voltage_max_3;
	} else {
		dig_port->dp.preemph_max = intel_dp_preemph_max_2;
		dig_port->dp.voltage_max = intel_dp_voltage_max_2;
	}

	dig_port->dp.output_reg = output_reg;
	dig_port->max_lanes = 4;

	intel_encoder->type = INTEL_OUTPUT_DP;
	intel_encoder->power_domain = intel_port_to_power_domain(port);
	if (IS_CHERRYVIEW(dev_priv)) {
		/* On CHV, port D can only be driven by pipe C. */
		if (port == PORT_D)
			intel_encoder->pipe_mask = BIT(PIPE_C);
		else
			intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
	} else {
		intel_encoder->pipe_mask = ~0;
	}
	intel_encoder->cloneable = 0;
	intel_encoder->port = port;
	intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);

	dig_port->hpd_pulse = intel_dp_hpd_pulse;

	/* Pick the live-status helper matching the platform's HPD registers. */
	if (HAS_GMCH(dev_priv)) {
		if (IS_GM45(dev_priv))
			dig_port->connected = gm45_digital_port_connected;
		else
			dig_port->connected = g4x_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = ilk_digital_port_connected;
		else
			dig_port->connected = ibx_digital_port_connected;
	}

	if (port != PORT_A)
		intel_infoframe_init(dig_port);

	dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);
	if (!intel_dp_init_connector(dig_port, intel_connector))
		goto err_init_connector;

	return true;

err_init_connector:
	drm_encoder_cleanup(encoder);
err_encoder_init:
	kfree(intel_connector);
err_connector_alloc:
	kfree(dig_port);
	return false;
}