// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 * DisplayPort support for G4x,ILK,SNB,IVB,VLV,CHV (HSW+ handled by the DDI code).
 */

#include <linux/string_helpers.h>

#include "g4x_dp.h"
#include "i915_reg.h"
#include "intel_audio.h"
#include "intel_backlight.h"
#include "intel_connector.h"
#include "intel_crtc.h"
#include "intel_de.h"
#include "intel_display_power.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_link_training.h"
#include "intel_dpio_phy.h"
#include "intel_fifo_underrun.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_pch_display.h"
#include "intel_pps.h"
#include "vlv_sideband.h"

static const struct dpll g4x_dpll[] = {
	{ .dot = 162000, .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8, },
	{ .dot = 270000, .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2, },
};

static const struct dpll pch_dpll[] = {
	{ .dot = 162000, .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9, },
	{ .dot = 270000, .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8, },
};

static const struct dpll vlv_dpll[] = {
	{ .dot = 162000, .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81, },
	{ .dot = 270000, .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27, },
};

static const struct dpll chv_dpll[] = {
	/* m2 is .22 binary fixed point */
	{ .dot = 162000, .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a /* 32.4 */ },
	{ .dot = 270000, .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 /* 27.0 */ },
};

const struct dpll *vlv_get_dpll(struct drm_i915_private *i915)
{
	return IS_CHERRYVIEW(i915) ? &chv_dpll[0] : &vlv_dpll[0];
}

void g4x_dp_set_clock(struct intel_encoder *encoder,
		      struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct dpll *divisor = NULL;
	int i, count = 0;

	if (IS_G4X(dev_priv)) {
		divisor = g4x_dpll;
		count = ARRAY_SIZE(g4x_dpll);
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		divisor = pch_dpll;
		count = ARRAY_SIZE(pch_dpll);
	} else if (IS_CHERRYVIEW(dev_priv)) {
		divisor = chv_dpll;
		count = ARRAY_SIZE(chv_dpll);
	} else if (IS_VALLEYVIEW(dev_priv)) {
		divisor = vlv_dpll;
		count = ARRAY_SIZE(vlv_dpll);
	}

	if (divisor && count) {
		for (i = 0; i < count; i++) {
			if (pipe_config->port_clock == divisor[i].dot) {
				pipe_config->dpll = divisor[i];
				pipe_config->clock_set = true;
				break;
			}
		}
	}
}

static void intel_dp_prepare(struct intel_encoder *encoder,
			     const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;

	intel_dp_set_link_params(intel_dp,
				 pipe_config->port_clock,
				 pipe_config->lane_count);

	/*
	 * There are four kinds of DP registers:
	 *	IBX PCH
	 *	SNB CPU
	 *	IVB CPU
	 *	CPT PCH
	 *
	 * IBX PCH and CPU are the same for almost everything,
	 * except that the CPU DP PLL is configured in this
	 * register
	 *
	 * CPT PCH is quite different, having many bits moved
	 * to the TRANS_DP_CTL register instead. That
	 * configuration happens (oddly) in ilk_pch_enable
	 */

	/* Preserve the BIOS-computed detected bit. This is
	 * supposed to be read-only.
	 */
	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;

	/* Handle DP bits in common between all three register formats */
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);

	/* Split out the IBX/CPU vs CPT settings */

	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) {
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe);
	} else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp;

		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		trans_dp = intel_de_read(dev_priv, TRANS_DP_CTL(crtc->pipe));
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			trans_dp |= TRANS_DP_ENH_FRAMING;
		else
			trans_dp &= ~TRANS_DP_ENH_FRAMING;
		intel_de_write(dev_priv, TRANS_DP_CTL(crtc->pipe), trans_dp);
	} else {
		if (IS_G4X(dev_priv) && pipe_config->limited_color_range)
			intel_dp->DP |= DP_COLOR_RANGE_16_235;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (IS_CHERRYVIEW(dev_priv))
			intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe);
		else
			intel_dp->DP |= DP_PIPE_SEL(crtc->pipe);
	}
}

static void assert_dp_port(struct intel_dp *intel_dp, bool state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	bool cur_state = intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN;

	I915_STATE_WARN(cur_state != state,
			"[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n",
			dig_port->base.base.base.id, dig_port->base.base.name,
			str_on_off(state), str_on_off(cur_state));
}
#define assert_dp_port_disabled(d) assert_dp_port((d), false)

static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
{
	bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE;

	I915_STATE_WARN(cur_state != state,
			"eDP PLL state assertion failure (expected %s, current %s)\n",
			str_on_off(state), str_on_off(cur_state));
}
#define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
#define assert_edp_pll_disabled(d) assert_edp_pll((d), false)

static void ilk_edp_pll_on(struct intel_dp *intel_dp,
			   const struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_transcoder_disabled(dev_priv, pipe_config->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_disabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n",
		    pipe_config->port_clock);

	intel_dp->DP &= ~DP_PLL_FREQ_MASK;

	if (pipe_config->port_clock == 162000)
		intel_dp->DP |= DP_PLL_FREQ_162MHZ;
	else
		intel_dp->DP |= DP_PLL_FREQ_270MHZ;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(500);

	/*
	 * [DevILK] Work around required when enabling DP PLL
	 * while a pipe is enabled going to FDI:
	 * 1. Wait for the start of vertical blank on the enabled pipe going to FDI
	 * 2. Program DP PLL enable
	 */
	if (IS_IRONLAKE(dev_priv))
		intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe);

	intel_dp->DP |= DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static void ilk_edp_pll_off(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_transcoder_disabled(dev_priv, old_crtc_state->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_enabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n");

	intel_dp->DP &= ~DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv,
				 enum port port, enum pipe *pipe)
{
	enum pipe p;

	for_each_pipe(dev_priv, p) {
		u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p));

		if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) {
			*pipe = p;
			return true;
		}
	}

	drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n",
		    port_name(port));

	/* must initialize pipe to something for the asserts */
	*pipe = PIPE_A;

	return false;
}

bool g4x_dp_port_enabled(struct drm_i915_private *dev_priv,
			 i915_reg_t dp_reg, enum port port,
			 enum pipe *pipe)
{
	bool ret;
	u32 val;

	val = intel_de_read(dev_priv, dp_reg);

	ret = val & DP_PORT_EN;

	/* asserts want to know the pipe even if the port is disabled */
	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		*pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB;
	else if (HAS_PCH_CPT(dev_priv) && port != PORT_A)
		ret &= cpt_dp_port_selected(dev_priv, port, pipe);
	else if (IS_CHERRYVIEW(dev_priv))
		*pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV;
	else
		*pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT;

	return ret;
}

static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
				  enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	intel_wakeref_t wakeref;
	bool ret;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	ret = g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				  encoder->port, pipe);

	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);

	return ret;
}

static void g4x_dp_get_m_n(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);

	if (crtc_state->has_pch_encoder) {
		intel_pch_transcoder_get_m1_n1(crtc, &crtc_state->dp_m_n);
		intel_pch_transcoder_get_m2_n2(crtc, &crtc_state->dp_m2_n2);
	} else {
		intel_cpu_transcoder_get_m1_n1(crtc, crtc_state->cpu_transcoder,
					       &crtc_state->dp_m_n);
		intel_cpu_transcoder_get_m2_n2(crtc, crtc_state->cpu_transcoder,
					       &crtc_state->dp_m2_n2);
	}
}

static void intel_dp_get_config(struct intel_encoder *encoder,
				struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 tmp, flags = 0;
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);

	if (encoder->type == INTEL_OUTPUT_EDP)
		pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
	else
		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);

	tmp = intel_de_read(dev_priv, intel_dp->output_reg);

	pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;

	if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp = intel_de_read(dev_priv,
					     TRANS_DP_CTL(crtc->pipe));

		if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	} else {
		if (tmp & DP_SYNC_HS_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (tmp & DP_SYNC_VS_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	}

	pipe_config->hw.adjusted_mode.flags |= flags;

	if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235)
		pipe_config->limited_color_range = true;

	pipe_config->lane_count =
		((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;

	g4x_dp_get_m_n(pipe_config);

	if (port == PORT_A) {
		if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
			pipe_config->port_clock = 162000;
		else
			pipe_config->port_clock = 270000;
	}

	pipe_config->hw.adjusted_mode.crtc_clock =
		intel_dotclock_calculate(pipe_config->port_clock,
					 &pipe_config->dp_m_n);

	if (intel_dp_is_edp(intel_dp))
		intel_edp_fixup_vbt_bpp(encoder, pipe_config->pipe_bpp);
}

static void
intel_dp_link_down(struct intel_encoder *encoder,
		   const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	enum port port = encoder->port;

	if (drm_WARN_ON(&dev_priv->drm,
			(intel_de_read(dev_priv, intel_dp->output_reg) &
			 DP_PORT_EN) == 0))
		return;

	drm_dbg_kms(&dev_priv->drm, "\n");

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A)) {
		intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;
		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
	} else {
		intel_dp->DP &= ~DP_LINK_TRAIN_MASK;
		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE;
	}
	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	intel_dp->DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	/*
	 * HW workaround for IBX, we need to move the port
	 * to transcoder A after disabling it to allow the
	 * matching HDMI port to be enabled on transcoder A.
	 */
	if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) {
		/*
		 * We get CPU/PCH FIFO underruns on the other pipe when
		 * doing the workaround. Sweep them under the rug.
		 */
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);

		/* always enable with pattern 1 (as per spec) */
		intel_dp->DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK);
		intel_dp->DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) |
			DP_LINK_TRAIN_PAT_1;
		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_dp->DP &= ~DP_PORT_EN;
		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_wait_for_vblank_if_active(dev_priv, PIPE_A);
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
	}

	msleep(intel_dp->pps.panel_power_down_delay);

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = INVALID_PIPE;
	}
}

static void intel_disable_dp(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *old_crtc_state,
			     const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_dp->link_trained = false;

	intel_audio_codec_disable(encoder, old_crtc_state, old_conn_state);

	/*
	 * Make sure the panel is off before trying to change the mode.
	 * But also ensure that we have vdd while we switch off the panel.
	 */
	intel_pps_vdd_on(intel_dp);
	intel_edp_backlight_off(old_conn_state);
	intel_dp_set_power(intel_dp, DP_SET_POWER_D3);
	intel_pps_off(intel_dp);
}

static void g4x_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void vlv_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void g4x_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	/*
	 * Bspec does not list a specific disable sequence for g4x DP.
	 * Follow the ilk+ sequence (disable pipe before the port) for
	 * g4x DP as it does not suffer from underruns like the normal
	 * g4x modeset sequence (disable pipe after the port).
	 */
	intel_dp_link_down(encoder, old_crtc_state);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_off(intel_dp, old_crtc_state);
}

static void vlv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	intel_dp_link_down(encoder, old_crtc_state);
}

static void chv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	intel_dp_link_down(encoder, old_crtc_state);

	vlv_dpio_get(dev_priv);

	/* Assert data lane reset */
	chv_data_lane_soft_reset(encoder, old_crtc_state, true);

	vlv_dpio_put(dev_priv);
}

static void
cpt_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
		break;
	case DP_TRAINING_PATTERN_1:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_1_CPT;
		break;
	case DP_TRAINING_PATTERN_2:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_2_CPT;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void
g4x_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	intel_dp->DP &= ~DP_LINK_TRAIN_MASK;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		intel_dp->DP |= DP_LINK_TRAIN_OFF;
		break;
	case DP_TRAINING_PATTERN_1:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_1;
		break;
	case DP_TRAINING_PATTERN_2:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_2;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void intel_dp_enable_port(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/* enable with pattern 1 (as per spec) */

	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
					       DP_PHY_DPRX, DP_TRAINING_PATTERN_1);

	/*
	 * Magic for VLV/CHV. We _must_ first set up the register
	 * without actually enabling the port, and then do another
	 * write to enable the port. Otherwise link training will
	 * fail when the power sequencer is freshly used for this port.
	 */
	intel_dp->DP |= DP_PORT_EN;
	if (crtc_state->has_audio)
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void intel_enable_dp(struct intel_atomic_state *state,
			    struct intel_encoder *encoder,
			    const struct intel_crtc_state *pipe_config,
			    const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg);
	intel_wakeref_t wakeref;

	if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN))
		return;

	with_intel_pps_lock(intel_dp, wakeref) {
		if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
			vlv_pps_init(encoder, pipe_config);

		intel_dp_enable_port(intel_dp, pipe_config);

		intel_pps_vdd_on_unlocked(intel_dp);
		intel_pps_on_unlocked(intel_dp);
		intel_pps_vdd_off_unlocked(intel_dp, true);
	}

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		unsigned int lane_mask = 0x0;

		if (IS_CHERRYVIEW(dev_priv))
			lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);

		vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
				    lane_mask);
	}

	intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
	intel_dp_configure_protocol_converter(intel_dp, pipe_config);
	intel_dp_check_frl_training(intel_dp);
	intel_dp_pcon_dsc_configure(intel_dp, pipe_config);
	intel_dp_start_link_train(intel_dp, pipe_config);
	intel_dp_stop_link_train(intel_dp, pipe_config);
}

static void g4x_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_enable_dp(state, encoder, pipe_config, conn_state);
	intel_audio_codec_enable(encoder, pipe_config, conn_state);
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void vlv_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_audio_codec_enable(encoder, pipe_config, conn_state);
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void g4x_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	intel_dp_prepare(encoder, pipe_config);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_on(intel_dp, pipe_config);
}

static void vlv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	vlv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);
}

static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	vlv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	chv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);

	/* Second common lane will stay alive on its own now */
	chv_phy_release_cl2_override(encoder);
}

static void chv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	chv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_dp_post_pll_disable(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *old_crtc_state,
				    const struct drm_connector_state *old_conn_state)
{
	chv_phy_post_pll_disable(encoder, old_crtc_state);
}

static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
}

static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
}

static u8 intel_dp_preemph_max_2(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_2;
}

static u8 intel_dp_preemph_max_3(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
}

static void vlv_set_signal_levels(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	unsigned long demph_reg_value, preemph_reg_value,
		uniqtranscale_reg_value;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		preemph_reg_value = 0x0004000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x552AB83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5548B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B245555;
			uniqtranscale_reg_value = 0x5560B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x5598DA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		preemph_reg_value = 0x0002000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5552B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404848;
			uniqtranscale_reg_value = 0x5580B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		preemph_reg_value = 0x0000000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B305555;
			uniqtranscale_reg_value = 0x5570B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B2B4040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		preemph_reg_value = 0x0006000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x1B405555;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	vlv_set_phy_signal_level(encoder, crtc_state,
				 demph_reg_value, preemph_reg_value,
				 uniqtranscale_reg_value, 0);
}

static void chv_set_signal_levels(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 deemph_reg_value, margin_reg_value;
	bool uniq_trans_scale = false;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 128;
			margin_reg_value = 52;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 128;
			margin_reg_value = 77;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 128;
			margin_reg_value = 102;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			deemph_reg_value = 128;
			margin_reg_value = 154;
			uniq_trans_scale = true;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 85;
			margin_reg_value = 78;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 85;
			margin_reg_value = 116;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 85;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 64;
			margin_reg_value = 104;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 64;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 43;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	chv_set_phy_signal_level(encoder, crtc_state,
				 deemph_reg_value, margin_reg_value,
				 uniq_trans_scale);
}

static u32 g4x_signal_levels(u8 train_set)
{
	u32 signal_levels = 0;

	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
	default:
		signal_levels |= DP_VOLTAGE_0_4;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
		signal_levels |= DP_VOLTAGE_0_6;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
		signal_levels |= DP_VOLTAGE_0_8;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
		signal_levels |= DP_VOLTAGE_1_2;
		break;
	}
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
	default:
		signal_levels |= DP_PRE_EMPHASIS_0;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		signal_levels |= DP_PRE_EMPHASIS_3_5;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		signal_levels |= DP_PRE_EMPHASIS_6;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		signal_levels |= DP_PRE_EMPHASIS_9_5;
		break;
	}
	return signal_levels;
}

static void
g4x_set_signal_levels(struct intel_encoder *encoder,
		      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = g4x_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK);
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* SNB CPU eDP voltage swing and pre-emphasis control */
static u32 snb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}

static void
snb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = snb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* IVB CPU eDP voltage swing and pre-emphasis control */
static u32 ivb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}

static void
ivb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = ivb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/*
 * If display is now connected check links status,
 * there has been known issues of link loss triggering
 * long pulse.
 *
 * Some sinks (eg. ASUS PB287Q) seem to perform some
 * weird HPD ping pong during modesets. So we can apparently
 * end up with HPD going low during a modeset, and then
 * going back up soon after. And once that happens we must
 * retrain the link to get a picture. That's in case no
 * userspace component reacted to intermittent HPD dip.
 */
static enum intel_hotplug_state
intel_dp_hotplug(struct intel_encoder *encoder,
		 struct intel_connector *connector)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_modeset_acquire_ctx ctx;
	enum intel_hotplug_state state;
	int ret;

	if (intel_dp->compliance.test_active &&
	    intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
		intel_dp_phy_test(encoder);
		/* just do the PHY test and nothing else */
		return INTEL_HOTPLUG_UNCHANGED;
	}

	state = intel_encoder_hotplug(encoder, connector);

	drm_modeset_acquire_init(&ctx, 0);

	for (;;) {
		ret = intel_dp_retrain_link(encoder, &ctx);

		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);

	/*
	 * Keeping it consistent with intel_ddi_hotplug() and
	 * intel_hdmi_hotplug().
	 */
	if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
		state = INTEL_HOTPLUG_RETRY;

	return state;
}

static bool ibx_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->display.hotplug.pch_hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, SDEISR) & bit;
}

static bool g4x_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

static bool gm45_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_GM45;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

static bool ilk_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->display.hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, DEISR) & bit;
}

static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	kfree(enc_to_dig_port(to_intel_encoder(encoder)));
}

enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	enum pipe pipe;

	if (g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				encoder->port, &pipe))
		return pipe;

	return INVALID_PIPE;
}

static void intel_dp_encoder_reset(struct drm_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));

	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);

	intel_dp->reset_link_params = true;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
	}

	intel_pps_encoder_reset(intel_dp);
}

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_dp_encoder_destroy,
};

bool g4x_dp_init(struct drm_i915_private *dev_priv,
		 i915_reg_t output_reg, enum port port)
{
	struct intel_digital_port *dig_port;
	struct intel_encoder *intel_encoder;
	struct drm_encoder *encoder;
	struct intel_connector *intel_connector;

	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
		return false;

	intel_connector = intel_connector_alloc();
	if (!intel_connector)
		goto err_connector_alloc;

	intel_encoder = &dig_port->base;
	encoder = &intel_encoder->base;

	mutex_init(&dig_port->hdcp_mutex);

	if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
			     &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
			     "DP %c", port_name(port)))
		goto err_encoder_init;

	intel_encoder->hotplug = intel_dp_hotplug;
	intel_encoder->compute_config = intel_dp_compute_config;
	intel_encoder->get_hw_state = intel_dp_get_hw_state;
	intel_encoder->get_config = intel_dp_get_config;
	intel_encoder->sync_state = intel_dp_sync_state;
	intel_encoder->initial_fastset_check = intel_dp_initial_fastset_check;
	intel_encoder->update_pipe = intel_backlight_update;
	intel_encoder->suspend = intel_dp_encoder_suspend;
	intel_encoder->shutdown = intel_dp_encoder_shutdown;
	if (IS_CHERRYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
		intel_encoder->pre_enable = chv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = chv_post_disable_dp;
		intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
	} else if (IS_VALLEYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
		intel_encoder->pre_enable = vlv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = vlv_post_disable_dp;
	} else {
		intel_encoder->pre_enable = g4x_pre_enable_dp;
		intel_encoder->enable = g4x_enable_dp;
		intel_encoder->disable = g4x_disable_dp;
		intel_encoder->post_disable = g4x_post_disable_dp;
	}

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A))
		dig_port->dp.set_link_train = cpt_set_link_train;
	else
		dig_port->dp.set_link_train = g4x_set_link_train;

	if (IS_CHERRYVIEW(dev_priv))
		intel_encoder->set_signal_levels = chv_set_signal_levels;
	else if (IS_VALLEYVIEW(dev_priv))
		intel_encoder->set_signal_levels = vlv_set_signal_levels;
	else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		intel_encoder->set_signal_levels = ivb_cpu_edp_set_signal_levels;
	else if (IS_SANDYBRIDGE(dev_priv) && port == PORT_A)
		intel_encoder->set_signal_levels = snb_cpu_edp_set_signal_levels;
	else
		intel_encoder->set_signal_levels = g4x_set_signal_levels;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
	    (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
		dig_port->dp.preemph_max = intel_dp_preemph_max_3;
		dig_port->dp.voltage_max = intel_dp_voltage_max_3;
	} else {
		dig_port->dp.preemph_max = intel_dp_preemph_max_2;
		dig_port->dp.voltage_max = intel_dp_voltage_max_2;
	}

	dig_port->dp.output_reg = output_reg;
	dig_port->max_lanes = 4;

	intel_encoder->type = INTEL_OUTPUT_DP;
	intel_encoder->power_domain = intel_display_power_ddi_lanes_domain(dev_priv, port);
	if (IS_CHERRYVIEW(dev_priv)) {
		if (port == PORT_D)
			intel_encoder->pipe_mask = BIT(PIPE_C);
		else
			intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
	} else {
		intel_encoder->pipe_mask = ~0;
	}
	intel_encoder->cloneable = 0;
	intel_encoder->port = port;
	intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);

	dig_port->hpd_pulse = intel_dp_hpd_pulse;

	if (HAS_GMCH(dev_priv)) {
		if (IS_GM45(dev_priv))
			dig_port->connected = gm45_digital_port_connected;
		else
			dig_port->connected = g4x_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = ilk_digital_port_connected;
		else
			dig_port->connected = ibx_digital_port_connected;
	}

	if (port != PORT_A)
		intel_infoframe_init(dig_port);

	dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);
	if (!intel_dp_init_connector(dig_port, intel_connector))
		goto err_init_connector;

	return true;

err_init_connector:
	drm_encoder_cleanup(encoder);
err_encoder_init:
	kfree(intel_connector);
err_connector_alloc:
	kfree(dig_port);
	return false;
}