xref: /openbmc/linux/drivers/gpu/drm/i915/display/g4x_dp.c (revision 48ca54e3)
// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 * DisplayPort support for G4x, ILK, SNB, IVB, VLV, CHV (HSW+ handled by the DDI code).
 */

#include <linux/string_helpers.h>

#include "g4x_dp.h"
#include "intel_audio.h"
#include "intel_backlight.h"
#include "intel_connector.h"
#include "intel_crtc.h"
#include "intel_de.h"
#include "intel_display_power.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_link_training.h"
#include "intel_dpio_phy.h"
#include "intel_fifo_underrun.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_pch_display.h"
#include "intel_pps.h"
#include "vlv_sideband.h"

static const struct dpll g4x_dpll[] = {
	{ .dot = 162000, .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8, },
	{ .dot = 270000, .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2, },
};

static const struct dpll pch_dpll[] = {
	{ .dot = 162000, .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9, },
	{ .dot = 270000, .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8, },
};

static const struct dpll vlv_dpll[] = {
	{ .dot = 162000, .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81, },
	{ .dot = 270000, .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27, },
};

static const struct dpll chv_dpll[] = {
	/* m2 is .22 binary fixed point */
	{ .dot = 162000, .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a /* 32.4 */ },
	{ .dot = 270000, .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 /* 27.0 */ },
};

const struct dpll *vlv_get_dpll(struct drm_i915_private *i915)
{
	return IS_CHERRYVIEW(i915) ? &chv_dpll[0] : &vlv_dpll[0];
}

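/*
 * Look up the fixed DPLL divider values for the requested DP link rate
 * (1.62 or 2.7 GHz) and store them in the crtc state, marking the clock
 * as already computed.
 */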
void g4x_dp_set_clock(struct intel_encoder *encoder,
		      struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct dpll *divisor = NULL;
	int i, count = 0;

	if (IS_G4X(dev_priv)) {
		divisor = g4x_dpll;
		count = ARRAY_SIZE(g4x_dpll);
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		divisor = pch_dpll;
		count = ARRAY_SIZE(pch_dpll);
	} else if (IS_CHERRYVIEW(dev_priv)) {
		divisor = chv_dpll;
		count = ARRAY_SIZE(chv_dpll);
	} else if (IS_VALLEYVIEW(dev_priv)) {
		divisor = vlv_dpll;
		count = ARRAY_SIZE(vlv_dpll);
	}

	if (divisor && count) {
		for (i = 0; i < count; i++) {
			if (pipe_config->port_clock == divisor[i].dot) {
				pipe_config->dpll = divisor[i];
				pipe_config->clock_set = true;
				break;
			}
		}
	}
}

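/*
 * Compute the DP port register value (intel_dp->DP) for the new mode:
 * link parameters, lane count, sync polarities, enhanced framing and
 * pipe/transcoder selection. The port itself is not enabled here.
 */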
static void intel_dp_prepare(struct intel_encoder *encoder,
			     const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;

	intel_dp_set_link_params(intel_dp,
				 pipe_config->port_clock,
				 pipe_config->lane_count);

	/*
	 * There are four kinds of DP registers:
	 * IBX PCH
	 * SNB CPU
	 * IVB CPU
	 * CPT PCH
	 *
	 * IBX PCH and CPU are the same for almost everything,
	 * except that the CPU DP PLL is configured in this
	 * register
	 *
	 * CPT PCH is quite different, having many bits moved
	 * to the TRANS_DP_CTL register instead. That
	 * configuration happens (oddly) in ilk_pch_enable
	 */

	/* Preserve the BIOS-computed detected bit. This is
	 * supposed to be read-only.
	 */
	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;

	/* Handle DP bits in common between all three register formats */
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);

	/* Split out the IBX/CPU vs CPT settings */

	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) {
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe);
	} else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp;

		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		trans_dp = intel_de_read(dev_priv, TRANS_DP_CTL(crtc->pipe));
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			trans_dp |= TRANS_DP_ENH_FRAMING;
		else
			trans_dp &= ~TRANS_DP_ENH_FRAMING;
		intel_de_write(dev_priv, TRANS_DP_CTL(crtc->pipe), trans_dp);
	} else {
		if (IS_G4X(dev_priv) && pipe_config->limited_color_range)
			intel_dp->DP |= DP_COLOR_RANGE_16_235;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (IS_CHERRYVIEW(dev_priv))
			intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe);
		else
			intel_dp->DP |= DP_PIPE_SEL(crtc->pipe);
	}
}

static void assert_dp_port(struct intel_dp *intel_dp, bool state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	bool cur_state = intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN;

	I915_STATE_WARN(cur_state != state,
			"[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n",
			dig_port->base.base.base.id, dig_port->base.base.name,
			str_on_off(state), str_on_off(cur_state));
}
#define assert_dp_port_disabled(d) assert_dp_port((d), false)

static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
{
	bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE;

	I915_STATE_WARN(cur_state != state,
			"eDP PLL state assertion failure (expected %s, current %s)\n",
			str_on_off(state), str_on_off(cur_state));
}
#define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
#define assert_edp_pll_disabled(d) assert_edp_pll((d), false)

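/*
 * Enable the CPU eDP PLL (port A only) at the frequency matching the
 * port clock. Must be called with the port, transcoder and PLL still
 * disabled, as the asserts below check.
 */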
static void ilk_edp_pll_on(struct intel_dp *intel_dp,
			   const struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_transcoder_disabled(dev_priv, pipe_config->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_disabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n",
		    pipe_config->port_clock);

	intel_dp->DP &= ~DP_PLL_FREQ_MASK;

	if (pipe_config->port_clock == 162000)
		intel_dp->DP |= DP_PLL_FREQ_162MHZ;
	else
		intel_dp->DP |= DP_PLL_FREQ_270MHZ;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(500);

	/*
	 * [DevILK] Work around required when enabling DP PLL
	 * while a pipe is enabled going to FDI:
	 * 1. Wait for the start of vertical blank on the enabled pipe going to FDI
	 * 2. Program DP PLL enable
	 */
	if (IS_IRONLAKE(dev_priv))
		intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe);

	intel_dp->DP |= DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static void ilk_edp_pll_off(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_transcoder_disabled(dev_priv, old_crtc_state->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_enabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n");

	intel_dp->DP &= ~DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv,
				 enum port port, enum pipe *pipe)
{
	enum pipe p;

	for_each_pipe(dev_priv, p) {
		u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p));

		if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) {
			*pipe = p;
			return true;
		}
	}

	drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n",
		    port_name(port));

	/* must initialize pipe to something for the asserts */
	*pipe = PIPE_A;

	return false;
}

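/*
 * Read out whether the DP port is enabled and which pipe it is attached
 * to, taking the different pipe select encodings (IVB port A, CPT,
 * CHV, g4x/VLV) into account.
 */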
bool g4x_dp_port_enabled(struct drm_i915_private *dev_priv,
			 i915_reg_t dp_reg, enum port port,
			 enum pipe *pipe)
{
	bool ret;
	u32 val;

	val = intel_de_read(dev_priv, dp_reg);

	ret = val & DP_PORT_EN;

	/* asserts want to know the pipe even if the port is disabled */
	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		*pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB;
	else if (HAS_PCH_CPT(dev_priv) && port != PORT_A)
		ret &= cpt_dp_port_selected(dev_priv, port, pipe);
	else if (IS_CHERRYVIEW(dev_priv))
		*pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV;
	else
		*pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT;

	return ret;
}

static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
				  enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	intel_wakeref_t wakeref;
	bool ret;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	ret = g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				  encoder->port, pipe);

	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);

	return ret;
}

static void g4x_dp_get_m_n(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);

	if (crtc_state->has_pch_encoder) {
		intel_pch_transcoder_get_m1_n1(crtc, &crtc_state->dp_m_n);
		intel_pch_transcoder_get_m2_n2(crtc, &crtc_state->dp_m2_n2);
	} else {
		intel_cpu_transcoder_get_m1_n1(crtc, crtc_state->cpu_transcoder,
					       &crtc_state->dp_m_n);
		intel_cpu_transcoder_get_m2_n2(crtc, crtc_state->cpu_transcoder,
					       &crtc_state->dp_m2_n2);
	}
}

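/*
 * Read the current hardware state of the DP port back into the crtc
 * state: output type, audio, sync polarities, color range, lane count,
 * M/N values and port clock.
 */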
static void intel_dp_get_config(struct intel_encoder *encoder,
				struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 tmp, flags = 0;
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);

	if (encoder->type == INTEL_OUTPUT_EDP)
		pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
	else
		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);

	tmp = intel_de_read(dev_priv, intel_dp->output_reg);

	pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;

	if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp = intel_de_read(dev_priv,
					     TRANS_DP_CTL(crtc->pipe));

		if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	} else {
		if (tmp & DP_SYNC_HS_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (tmp & DP_SYNC_VS_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	}

	pipe_config->hw.adjusted_mode.flags |= flags;

	if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235)
		pipe_config->limited_color_range = true;

	pipe_config->lane_count =
		((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;

	g4x_dp_get_m_n(pipe_config);

	if (port == PORT_A) {
		if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
			pipe_config->port_clock = 162000;
		else
			pipe_config->port_clock = 270000;
	}

	pipe_config->hw.adjusted_mode.crtc_clock =
		intel_dotclock_calculate(pipe_config->port_clock,
					 &pipe_config->dp_m_n);

	if (intel_dp_is_edp(intel_dp) && dev_priv->vbt.edp.bpp &&
	    pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) {
		/*
		 * This is a big fat ugly hack.
		 *
		 * Some machines in UEFI boot mode provide us a VBT that has 18
		 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons
		 * unknown we fail to light up. Yet the same BIOS boots up with
		 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as
		 * max, not what it tells us to use.
		 *
		 * Note: This will still be broken if the eDP panel is not lit
		 * up by the BIOS, and thus we can't get the mode at module
		 * load.
		 */
		drm_dbg_kms(&dev_priv->drm,
			    "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
			    pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp);
		dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp;
	}
}

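/*
 * Shut the link down: switch the port to the idle training pattern,
 * clear the port enable and audio bits, and apply the IBX transcoder A
 * workaround where needed.
 */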
static void
intel_dp_link_down(struct intel_encoder *encoder,
		   const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	enum port port = encoder->port;

	if (drm_WARN_ON(&dev_priv->drm,
			(intel_de_read(dev_priv, intel_dp->output_reg) &
			 DP_PORT_EN) == 0))
		return;

	drm_dbg_kms(&dev_priv->drm, "\n");

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A)) {
		intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;
		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
	} else {
		intel_dp->DP &= ~DP_LINK_TRAIN_MASK;
		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE;
	}
	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	intel_dp->DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	/*
	 * HW workaround for IBX, we need to move the port
	 * to transcoder A after disabling it to allow the
	 * matching HDMI port to be enabled on transcoder A.
	 */
	if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) {
		/*
		 * We get CPU/PCH FIFO underruns on the other pipe when
		 * doing the workaround. Sweep them under the rug.
		 */
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);

		/* always enable with pattern 1 (as per spec) */
		intel_dp->DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK);
		intel_dp->DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) |
			DP_LINK_TRAIN_PAT_1;
		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_dp->DP &= ~DP_PORT_EN;
		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_wait_for_vblank_if_active(dev_priv, PIPE_A);
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
	}

	msleep(intel_dp->pps.panel_power_down_delay);

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = INVALID_PIPE;
	}
}

static void intel_disable_dp(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *old_crtc_state,
			     const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_dp->link_trained = false;

	intel_audio_codec_disable(encoder, old_crtc_state, old_conn_state);

	/*
	 * Make sure the panel is off before trying to change the mode.
	 * But also ensure that we have vdd while we switch off the panel.
	 */
	intel_pps_vdd_on(intel_dp);
	intel_edp_backlight_off(old_conn_state);
	intel_dp_set_power(intel_dp, DP_SET_POWER_D3);
	intel_pps_off(intel_dp);
}

static void g4x_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void vlv_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void g4x_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	/*
	 * Bspec does not list a specific disable sequence for g4x DP.
	 * Follow the ilk+ sequence (disable pipe before the port) for
	 * g4x DP as it does not suffer from underruns like the normal
	 * g4x modeset sequence (disable pipe after the port).
	 */
	intel_dp_link_down(encoder, old_crtc_state);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_off(intel_dp, old_crtc_state);
}

static void vlv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	intel_dp_link_down(encoder, old_crtc_state);
}

static void chv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	intel_dp_link_down(encoder, old_crtc_state);

	vlv_dpio_get(dev_priv);

	/* Assert data lane reset */
	chv_data_lane_soft_reset(encoder, old_crtc_state, true);

	vlv_dpio_put(dev_priv);
}

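/*
 * Program the requested link training pattern into the CPT/PPT style
 * DP port register (also used for IVB CPU eDP on port A).
 */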
static void
cpt_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
		break;
	case DP_TRAINING_PATTERN_1:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_1_CPT;
		break;
	case DP_TRAINING_PATTERN_2:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_2_CPT;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void
g4x_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	intel_dp->DP &= ~DP_LINK_TRAIN_MASK;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		intel_dp->DP |= DP_LINK_TRAIN_OFF;
		break;
	case DP_TRAINING_PATTERN_1:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_1;
		break;
	case DP_TRAINING_PATTERN_2:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_2;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

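/*
 * Turn the port on: training pattern 1 is written first without
 * DP_PORT_EN, then a second write sets the port enable (and audio)
 * bits, see the VLV/CHV note below.
 */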
static void intel_dp_enable_port(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/* enable with pattern 1 (as per spec) */

	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
					       DP_PHY_DPRX, DP_TRAINING_PATTERN_1);

	/*
	 * Magic for VLV/CHV. We _must_ first set up the register
	 * without actually enabling the port, and then do another
	 * write to enable the port. Otherwise link training will
	 * fail when the power sequencer is freshly used for this port.
	 */
	intel_dp->DP |= DP_PORT_EN;
	if (crtc_state->has_audio)
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

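/*
 * Full DP enable sequence shared by all platforms handled here: power
 * sequencer / VDD handling, port enable, sink power up, link training
 * and audio enable.
 */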
static void intel_enable_dp(struct intel_atomic_state *state,
			    struct intel_encoder *encoder,
			    const struct intel_crtc_state *pipe_config,
			    const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg);
	intel_wakeref_t wakeref;

	if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN))
		return;

	with_intel_pps_lock(intel_dp, wakeref) {
		if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
			vlv_pps_init(encoder, pipe_config);

		intel_dp_enable_port(intel_dp, pipe_config);

		intel_pps_vdd_on_unlocked(intel_dp);
		intel_pps_on_unlocked(intel_dp);
		intel_pps_vdd_off_unlocked(intel_dp, true);
	}

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		unsigned int lane_mask = 0x0;

		if (IS_CHERRYVIEW(dev_priv))
			lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);

		vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
				    lane_mask);
	}

	intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
	intel_dp_configure_protocol_converter(intel_dp, pipe_config);
	intel_dp_check_frl_training(intel_dp);
	intel_dp_pcon_dsc_configure(intel_dp, pipe_config);
	intel_dp_start_link_train(intel_dp, pipe_config);
	intel_dp_stop_link_train(intel_dp, pipe_config);

	intel_audio_codec_enable(encoder, pipe_config, conn_state);
}

static void g4x_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_enable_dp(state, encoder, pipe_config, conn_state);
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void vlv_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void g4x_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	intel_dp_prepare(encoder, pipe_config);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_on(intel_dp, pipe_config);
}

static void vlv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	vlv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);
}

static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	vlv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	chv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);

	/* Second common lane will stay alive on its own now */
	chv_phy_release_cl2_override(encoder);
}

static void chv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	chv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_dp_post_pll_disable(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *old_crtc_state,
				    const struct drm_connector_state *old_conn_state)
{
	chv_phy_post_pll_disable(encoder, old_crtc_state);
}

static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
}

static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
}

static u8 intel_dp_preemph_max_2(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_2;
}

static u8 intel_dp_preemph_max_3(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
}

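/*
 * Translate the voltage swing / pre-emphasis levels requested during
 * link training into the VLV PHY register values.
 */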
static void vlv_set_signal_levels(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	unsigned long demph_reg_value, preemph_reg_value,
		uniqtranscale_reg_value;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		preemph_reg_value = 0x0004000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x552AB83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5548B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B245555;
			uniqtranscale_reg_value = 0x5560B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x5598DA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		preemph_reg_value = 0x0002000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5552B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404848;
			uniqtranscale_reg_value = 0x5580B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		preemph_reg_value = 0x0000000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B305555;
			uniqtranscale_reg_value = 0x5570B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B2B4040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		preemph_reg_value = 0x0006000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x1B405555;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	vlv_set_phy_signal_level(encoder, crtc_state,
				 demph_reg_value, preemph_reg_value,
				 uniqtranscale_reg_value, 0);
}

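/*
 * Translate the voltage swing / pre-emphasis levels requested during
 * link training into the CHV PHY de-emphasis and margin values.
 */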
static void chv_set_signal_levels(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 deemph_reg_value, margin_reg_value;
	bool uniq_trans_scale = false;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 128;
			margin_reg_value = 52;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 128;
			margin_reg_value = 77;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 128;
			margin_reg_value = 102;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			deemph_reg_value = 128;
			margin_reg_value = 154;
			uniq_trans_scale = true;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 85;
			margin_reg_value = 78;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 85;
			margin_reg_value = 116;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 85;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 64;
			margin_reg_value = 104;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 64;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 43;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	chv_set_phy_signal_level(encoder, crtc_state,
				 deemph_reg_value, margin_reg_value,
				 uniq_trans_scale);
}

static u32 g4x_signal_levels(u8 train_set)
{
	u32 signal_levels = 0;

	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
	default:
		signal_levels |= DP_VOLTAGE_0_4;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
		signal_levels |= DP_VOLTAGE_0_6;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
		signal_levels |= DP_VOLTAGE_0_8;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
		signal_levels |= DP_VOLTAGE_1_2;
		break;
	}
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
	default:
		signal_levels |= DP_PRE_EMPHASIS_0;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		signal_levels |= DP_PRE_EMPHASIS_3_5;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		signal_levels |= DP_PRE_EMPHASIS_6;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		signal_levels |= DP_PRE_EMPHASIS_9_5;
		break;
	}
	return signal_levels;
}

static void
g4x_set_signal_levels(struct intel_encoder *encoder,
		      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = g4x_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK);
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* SNB CPU eDP voltage swing and pre-emphasis control */
static u32 snb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}

static void
snb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = snb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* IVB CPU eDP voltage swing and pre-emphasis control */
static u32 ivb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}

static void
ivb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = ivb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/*
 * If the display is now connected, check the link status; there have
 * been known issues of link loss triggering a long pulse.
 *
 * Some sinks (e.g. ASUS PB287Q) seem to perform some weird HPD ping
 * pong during modesets. So we can apparently end up with HPD going low
 * during a modeset, and then going back up soon after. Once that
 * happens we must retrain the link to get a picture, in case no
 * userspace component reacted to the intermittent HPD dip.
 */
static enum intel_hotplug_state
intel_dp_hotplug(struct intel_encoder *encoder,
		 struct intel_connector *connector)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_modeset_acquire_ctx ctx;
	enum intel_hotplug_state state;
	int ret;

	if (intel_dp->compliance.test_active &&
	    intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
		intel_dp_phy_test(encoder);
		/* just do the PHY test and nothing else */
		return INTEL_HOTPLUG_UNCHANGED;
	}

	state = intel_encoder_hotplug(encoder, connector);

	drm_modeset_acquire_init(&ctx, 0);

	for (;;) {
		ret = intel_dp_retrain_link(encoder, &ctx);

		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);

	/*
	 * Keeping it consistent with intel_ddi_hotplug() and
	 * intel_hdmi_hotplug().
	 */
	if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
		state = INTEL_HOTPLUG_RETRY;

	return state;
}

static bool ibx_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, SDEISR) & bit;
}

static bool g4x_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

static bool gm45_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_GM45;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_GM45;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
}

static bool ilk_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, DEISR) & bit;
}

static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	kfree(enc_to_dig_port(to_intel_encoder(encoder)));
}

enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	enum pipe pipe;

	if (g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				encoder->port, &pipe))
		return pipe;

	return INVALID_PIPE;
}

static void intel_dp_encoder_reset(struct drm_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));

	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);

	intel_dp->reset_link_params = true;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
	}

	intel_pps_encoder_reset(intel_dp);
}

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_dp_encoder_destroy,
};

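/*
 * Create the encoder and connector for a g4x/ilk/snb/ivb/vlv/chv DP
 * port and hook up the platform specific enable/disable, link training
 * and signal level vfuncs.
 */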
bool g4x_dp_init(struct drm_i915_private *dev_priv,
		 i915_reg_t output_reg, enum port port)
{
	struct intel_digital_port *dig_port;
	struct intel_encoder *intel_encoder;
	struct drm_encoder *encoder;
	struct intel_connector *intel_connector;

	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
		return false;

	intel_connector = intel_connector_alloc();
	if (!intel_connector)
		goto err_connector_alloc;

	intel_encoder = &dig_port->base;
	encoder = &intel_encoder->base;

	mutex_init(&dig_port->hdcp_mutex);

	if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
			     &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
			     "DP %c", port_name(port)))
		goto err_encoder_init;

	intel_encoder->hotplug = intel_dp_hotplug;
	intel_encoder->compute_config = intel_dp_compute_config;
	intel_encoder->get_hw_state = intel_dp_get_hw_state;
	intel_encoder->get_config = intel_dp_get_config;
	intel_encoder->sync_state = intel_dp_sync_state;
	intel_encoder->initial_fastset_check = intel_dp_initial_fastset_check;
	intel_encoder->update_pipe = intel_backlight_update;
	intel_encoder->suspend = intel_dp_encoder_suspend;
	intel_encoder->shutdown = intel_dp_encoder_shutdown;
	if (IS_CHERRYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
		intel_encoder->pre_enable = chv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = chv_post_disable_dp;
		intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
	} else if (IS_VALLEYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
		intel_encoder->pre_enable = vlv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = vlv_post_disable_dp;
	} else {
		intel_encoder->pre_enable = g4x_pre_enable_dp;
		intel_encoder->enable = g4x_enable_dp;
		intel_encoder->disable = g4x_disable_dp;
		intel_encoder->post_disable = g4x_post_disable_dp;
	}

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A))
		dig_port->dp.set_link_train = cpt_set_link_train;
	else
		dig_port->dp.set_link_train = g4x_set_link_train;

	if (IS_CHERRYVIEW(dev_priv))
		intel_encoder->set_signal_levels = chv_set_signal_levels;
	else if (IS_VALLEYVIEW(dev_priv))
		intel_encoder->set_signal_levels = vlv_set_signal_levels;
	else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		intel_encoder->set_signal_levels = ivb_cpu_edp_set_signal_levels;
	else if (IS_SANDYBRIDGE(dev_priv) && port == PORT_A)
		intel_encoder->set_signal_levels = snb_cpu_edp_set_signal_levels;
	else
		intel_encoder->set_signal_levels = g4x_set_signal_levels;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
	    (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
		dig_port->dp.preemph_max = intel_dp_preemph_max_3;
		dig_port->dp.voltage_max = intel_dp_voltage_max_3;
	} else {
		dig_port->dp.preemph_max = intel_dp_preemph_max_2;
		dig_port->dp.voltage_max = intel_dp_voltage_max_2;
	}

	dig_port->dp.output_reg = output_reg;
	dig_port->max_lanes = 4;

	intel_encoder->type = INTEL_OUTPUT_DP;
	intel_encoder->power_domain = intel_display_power_ddi_lanes_domain(dev_priv, port);
	if (IS_CHERRYVIEW(dev_priv)) {
		if (port == PORT_D)
			intel_encoder->pipe_mask = BIT(PIPE_C);
		else
			intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
	} else {
		intel_encoder->pipe_mask = ~0;
	}
	intel_encoder->cloneable = 0;
	intel_encoder->port = port;
	intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);

	dig_port->hpd_pulse = intel_dp_hpd_pulse;

	if (HAS_GMCH(dev_priv)) {
		if (IS_GM45(dev_priv))
			dig_port->connected = gm45_digital_port_connected;
		else
			dig_port->connected = g4x_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = ilk_digital_port_connected;
		else
			dig_port->connected = ibx_digital_port_connected;
	}

	if (port != PORT_A)
		intel_infoframe_init(dig_port);

	dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);
	if (!intel_dp_init_connector(dig_port, intel_connector))
		goto err_init_connector;

	return true;

err_init_connector:
	drm_encoder_cleanup(encoder);
err_encoder_init:
	kfree(intel_connector);
err_connector_alloc:
	kfree(dig_port);
	return false;
}