xref: /openbmc/linux/drivers/gpu/drm/i915/display/g4x_dp.c (revision 185c8f33)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2020 Intel Corporation
4  *
5  * DisplayPort support for G4x, ILK, SNB, IVB, VLV, CHV (HSW+ handled by the DDI code).
6  */
7 
8 #include <linux/string_helpers.h>
9 
10 #include "g4x_dp.h"
11 #include "i915_reg.h"
12 #include "intel_audio.h"
13 #include "intel_backlight.h"
14 #include "intel_connector.h"
15 #include "intel_crtc.h"
16 #include "intel_de.h"
17 #include "intel_display_power.h"
18 #include "intel_display_types.h"
19 #include "intel_dp.h"
20 #include "intel_dp_aux.h"
21 #include "intel_dp_link_training.h"
22 #include "intel_dpio_phy.h"
23 #include "intel_fifo_underrun.h"
24 #include "intel_hdmi.h"
25 #include "intel_hotplug.h"
26 #include "intel_pch_display.h"
27 #include "intel_pps.h"
28 #include "vlv_sideband.h"
29 
30 static const struct dpll g4x_dpll[] = {
31 	{ .dot = 162000, .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8, },
32 	{ .dot = 270000, .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2, },
33 };
34 
35 static const struct dpll pch_dpll[] = {
36 	{ .dot = 162000, .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9, },
37 	{ .dot = 270000, .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8, },
38 };
39 
40 static const struct dpll vlv_dpll[] = {
41 	{ .dot = 162000, .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81, },
42 	{ .dot = 270000, .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27, },
43 };
44 
45 static const struct dpll chv_dpll[] = {
46 	/* m2 is .22 binary fixed point */
47 	{ .dot = 162000, .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a /* 32.4 */ },
48 	{ .dot = 270000, .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 /* 27.0 */ },
49 };
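/*
 * m2 above is in .22 binary fixed point, i.e. value / 2^22:
 * 0x819999a / 4194304 ~ 32.4 and 0x6c00000 / 4194304 = 27.0.
 */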
50 
51 const struct dpll *vlv_get_dpll(struct drm_i915_private *i915)
52 {
53 	return IS_CHERRYVIEW(i915) ? &chv_dpll[0] : &vlv_dpll[0];
54 }
55 
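/*
 * Look up fixed DPLL dividers for the requested DP link rate
 * (162 or 270 MHz) in the per-platform tables above and, on a
 * match, copy them into the crtc state.
 */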
56 void g4x_dp_set_clock(struct intel_encoder *encoder,
57 		      struct intel_crtc_state *pipe_config)
58 {
59 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
60 	const struct dpll *divisor = NULL;
61 	int i, count = 0;
62 
63 	if (IS_G4X(dev_priv)) {
64 		divisor = g4x_dpll;
65 		count = ARRAY_SIZE(g4x_dpll);
66 	} else if (HAS_PCH_SPLIT(dev_priv)) {
67 		divisor = pch_dpll;
68 		count = ARRAY_SIZE(pch_dpll);
69 	} else if (IS_CHERRYVIEW(dev_priv)) {
70 		divisor = chv_dpll;
71 		count = ARRAY_SIZE(chv_dpll);
72 	} else if (IS_VALLEYVIEW(dev_priv)) {
73 		divisor = vlv_dpll;
74 		count = ARRAY_SIZE(vlv_dpll);
75 	}
76 
77 	if (divisor && count) {
78 		for (i = 0; i < count; i++) {
79 			if (pipe_config->port_clock == divisor[i].dot) {
80 				pipe_config->dpll = divisor[i];
81 				pipe_config->clock_set = true;
82 				break;
83 			}
84 		}
85 	}
86 }
87 
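/*
 * Build up the DP port register value in intel_dp->DP according to
 * the platform-specific register layout; it is written to the
 * hardware later in the enable sequence.
 */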
88 static void intel_dp_prepare(struct intel_encoder *encoder,
89 			     const struct intel_crtc_state *pipe_config)
90 {
91 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
92 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
93 	enum port port = encoder->port;
94 	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
95 	const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
96 
97 	intel_dp_set_link_params(intel_dp,
98 				 pipe_config->port_clock,
99 				 pipe_config->lane_count);
100 
101 	/*
102 	 * There are four kinds of DP registers:
103 	 * IBX PCH
104 	 * SNB CPU
105 	 * IVB CPU
106 	 * CPT PCH
107 	 *
108 	 * The IBX PCH and CPU DP registers are almost identical,
109 	 * except that the CPU DP PLL is configured in this
110 	 * register.
111 	 *
112 	 * CPT PCH is quite different, having many bits moved
113 	 * to the TRANS_DP_CTL register instead. That
114 	 * configuration happens (oddly) in ilk_pch_enable
115 	 */
116 
117 	/* Preserve the BIOS-computed detected bit. This is
118 	 * supposed to be read-only.
119 	 */
120 	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;
121 
122 	/* Handle DP bits in common between all three register formats */
123 	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
124 	intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);
125 
126 	/* Split out the IBX/CPU vs CPT settings */
127 
128 	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) {
129 		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
130 			intel_dp->DP |= DP_SYNC_HS_HIGH;
131 		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
132 			intel_dp->DP |= DP_SYNC_VS_HIGH;
133 		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
134 
135 		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
136 			intel_dp->DP |= DP_ENHANCED_FRAMING;
137 
138 		intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe);
139 	} else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
140 		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
141 
142 		intel_de_rmw(dev_priv, TRANS_DP_CTL(crtc->pipe),
143 			     TRANS_DP_ENH_FRAMING,
144 			     drm_dp_enhanced_frame_cap(intel_dp->dpcd) ?
145 			     TRANS_DP_ENH_FRAMING : 0);
146 	} else {
147 		if (IS_G4X(dev_priv) && pipe_config->limited_color_range)
148 			intel_dp->DP |= DP_COLOR_RANGE_16_235;
149 
150 		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
151 			intel_dp->DP |= DP_SYNC_HS_HIGH;
152 		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
153 			intel_dp->DP |= DP_SYNC_VS_HIGH;
154 		intel_dp->DP |= DP_LINK_TRAIN_OFF;
155 
156 		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
157 			intel_dp->DP |= DP_ENHANCED_FRAMING;
158 
159 		if (IS_CHERRYVIEW(dev_priv))
160 			intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe);
161 		else
162 			intel_dp->DP |= DP_PIPE_SEL(crtc->pipe);
163 	}
164 }
165 
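/*
 * State asserts: warn if the DP port enable or eDP PLL enable bit
 * does not match the expected state.
 */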
166 static void assert_dp_port(struct intel_dp *intel_dp, bool state)
167 {
168 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
169 	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
170 	bool cur_state = intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN;
171 
172 	I915_STATE_WARN(dev_priv, cur_state != state,
173 			"[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n",
174 			dig_port->base.base.base.id, dig_port->base.base.name,
175 			str_on_off(state), str_on_off(cur_state));
176 }
177 #define assert_dp_port_disabled(d) assert_dp_port((d), false)
178 
179 static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
180 {
181 	bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE;
182 
183 	I915_STATE_WARN(dev_priv, cur_state != state,
184 			"eDP PLL state assertion failure (expected %s, current %s)\n",
185 			str_on_off(state), str_on_off(cur_state));
186 }
187 #define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
188 #define assert_edp_pll_disabled(d) assert_edp_pll((d), false)
189 
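/*
 * The CPU eDP port (port A) on ILK/SNB/IVB has a dedicated DP PLL
 * controlled through DP_A; it is enabled while the port and
 * transcoder are still off, and disabled only after the port is off.
 */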
190 static void ilk_edp_pll_on(struct intel_dp *intel_dp,
191 			   const struct intel_crtc_state *pipe_config)
192 {
193 	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
194 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
195 
196 	assert_transcoder_disabled(dev_priv, pipe_config->cpu_transcoder);
197 	assert_dp_port_disabled(intel_dp);
198 	assert_edp_pll_disabled(dev_priv);
199 
200 	drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n",
201 		    pipe_config->port_clock);
202 
203 	intel_dp->DP &= ~DP_PLL_FREQ_MASK;
204 
205 	if (pipe_config->port_clock == 162000)
206 		intel_dp->DP |= DP_PLL_FREQ_162MHZ;
207 	else
208 		intel_dp->DP |= DP_PLL_FREQ_270MHZ;
209 
210 	intel_de_write(dev_priv, DP_A, intel_dp->DP);
211 	intel_de_posting_read(dev_priv, DP_A);
212 	udelay(500);
213 
214 	/*
215 	 * [DevILK] Workaround required when enabling DP PLL
216 	 * while a pipe is enabled going to FDI:
217 	 * 1. Wait for the start of vertical blank on the enabled pipe going to FDI
218 	 * 2. Program DP PLL enable
219 	 */
220 	if (IS_IRONLAKE(dev_priv))
221 		intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe);
222 
223 	intel_dp->DP |= DP_PLL_ENABLE;
224 
225 	intel_de_write(dev_priv, DP_A, intel_dp->DP);
226 	intel_de_posting_read(dev_priv, DP_A);
227 	udelay(200);
228 }
229 
230 static void ilk_edp_pll_off(struct intel_dp *intel_dp,
231 			    const struct intel_crtc_state *old_crtc_state)
232 {
233 	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
234 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
235 
236 	assert_transcoder_disabled(dev_priv, old_crtc_state->cpu_transcoder);
237 	assert_dp_port_disabled(intel_dp);
238 	assert_edp_pll_enabled(dev_priv);
239 
240 	drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n");
241 
242 	intel_dp->DP &= ~DP_PLL_ENABLE;
243 
244 	intel_de_write(dev_priv, DP_A, intel_dp->DP);
245 	intel_de_posting_read(dev_priv, DP_A);
246 	udelay(200);
247 }
248 
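/*
 * On CPT the pipe<->port routing lives in TRANS_DP_CTL, so scan all
 * pipes to find which one has this port selected.
 */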
249 static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv,
250 				 enum port port, enum pipe *pipe)
251 {
252 	enum pipe p;
253 
254 	for_each_pipe(dev_priv, p) {
255 		u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p));
256 
257 		if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) {
258 			*pipe = p;
259 			return true;
260 		}
261 	}
262 
263 	drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n",
264 		    port_name(port));
265 
266 	/* must initialize pipe to something for the asserts */
267 	*pipe = PIPE_A;
268 
269 	return false;
270 }
271 
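/*
 * Report whether the DP port is enabled, and also decode which pipe
 * it is routed to (the asserts want the pipe even for a disabled port).
 */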
272 bool g4x_dp_port_enabled(struct drm_i915_private *dev_priv,
273 			 i915_reg_t dp_reg, enum port port,
274 			 enum pipe *pipe)
275 {
276 	bool ret;
277 	u32 val;
278 
279 	val = intel_de_read(dev_priv, dp_reg);
280 
281 	ret = val & DP_PORT_EN;
282 
283 	/* asserts want to know the pipe even if the port is disabled */
284 	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
285 		*pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB;
286 	else if (HAS_PCH_CPT(dev_priv) && port != PORT_A)
287 		ret &= cpt_dp_port_selected(dev_priv, port, pipe);
288 	else if (IS_CHERRYVIEW(dev_priv))
289 		*pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV;
290 	else
291 		*pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT;
292 
293 	return ret;
294 }
295 
296 static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
297 				  enum pipe *pipe)
298 {
299 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
300 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
301 	intel_wakeref_t wakeref;
302 	bool ret;
303 
304 	wakeref = intel_display_power_get_if_enabled(dev_priv,
305 						     encoder->power_domain);
306 	if (!wakeref)
307 		return false;
308 
309 	ret = g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
310 				  encoder->port, pipe);
311 
312 	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);
313 
314 	return ret;
315 }
316 
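/* Read back the DP link M/N (and M2/N2) values from the PCH or CPU transcoder. */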
317 static void g4x_dp_get_m_n(struct intel_crtc_state *crtc_state)
318 {
319 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
320 
321 	if (crtc_state->has_pch_encoder) {
322 		intel_pch_transcoder_get_m1_n1(crtc, &crtc_state->dp_m_n);
323 		intel_pch_transcoder_get_m2_n2(crtc, &crtc_state->dp_m2_n2);
324 	} else {
325 		intel_cpu_transcoder_get_m1_n1(crtc, crtc_state->cpu_transcoder,
326 					       &crtc_state->dp_m_n);
327 		intel_cpu_transcoder_get_m2_n2(crtc, crtc_state->cpu_transcoder,
328 					       &crtc_state->dp_m2_n2);
329 	}
330 }
331 
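/*
 * Read the current DP state (sync polarities, lane count, audio,
 * color range, M/N, port clock) back from the hardware into
 * pipe_config during state readout.
 */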
332 static void intel_dp_get_config(struct intel_encoder *encoder,
333 				struct intel_crtc_state *pipe_config)
334 {
335 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
336 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
337 	u32 tmp, flags = 0;
338 	enum port port = encoder->port;
339 	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
340 
341 	if (encoder->type == INTEL_OUTPUT_EDP)
342 		pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
343 	else
344 		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);
345 
346 	tmp = intel_de_read(dev_priv, intel_dp->output_reg);
347 
348 	pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;
349 
350 	if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
351 		u32 trans_dp = intel_de_read(dev_priv,
352 					     TRANS_DP_CTL(crtc->pipe));
353 
354 		if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
355 			flags |= DRM_MODE_FLAG_PHSYNC;
356 		else
357 			flags |= DRM_MODE_FLAG_NHSYNC;
358 
359 		if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
360 			flags |= DRM_MODE_FLAG_PVSYNC;
361 		else
362 			flags |= DRM_MODE_FLAG_NVSYNC;
363 	} else {
364 		if (tmp & DP_SYNC_HS_HIGH)
365 			flags |= DRM_MODE_FLAG_PHSYNC;
366 		else
367 			flags |= DRM_MODE_FLAG_NHSYNC;
368 
369 		if (tmp & DP_SYNC_VS_HIGH)
370 			flags |= DRM_MODE_FLAG_PVSYNC;
371 		else
372 			flags |= DRM_MODE_FLAG_NVSYNC;
373 	}
374 
375 	pipe_config->hw.adjusted_mode.flags |= flags;
376 
377 	if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235)
378 		pipe_config->limited_color_range = true;
379 
380 	pipe_config->lane_count =
381 		((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;
382 
383 	g4x_dp_get_m_n(pipe_config);
384 
385 	if (port == PORT_A) {
386 		if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
387 			pipe_config->port_clock = 162000;
388 		else
389 			pipe_config->port_clock = 270000;
390 	}
391 
392 	pipe_config->hw.adjusted_mode.crtc_clock =
393 		intel_dotclock_calculate(pipe_config->port_clock,
394 					 &pipe_config->dp_m_n);
395 
396 	if (intel_dp_is_edp(intel_dp))
397 		intel_edp_fixup_vbt_bpp(encoder, pipe_config->pipe_bpp);
398 
399 	intel_audio_codec_get_config(encoder, pipe_config);
400 }
401 
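/*
 * Turn the DP port off: drop the link to the idle/training-off
 * pattern, clear the port enable bit, and apply the IBX pipe B ->
 * transcoder A workaround where required.
 */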
402 static void
403 intel_dp_link_down(struct intel_encoder *encoder,
404 		   const struct intel_crtc_state *old_crtc_state)
405 {
406 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
407 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
408 	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
409 	enum port port = encoder->port;
410 
411 	if (drm_WARN_ON(&dev_priv->drm,
412 			(intel_de_read(dev_priv, intel_dp->output_reg) &
413 			 DP_PORT_EN) == 0))
414 		return;
415 
416 	drm_dbg_kms(&dev_priv->drm, "\n");
417 
418 	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
419 	    (HAS_PCH_CPT(dev_priv) && port != PORT_A)) {
420 		intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;
421 		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
422 	} else {
423 		intel_dp->DP &= ~DP_LINK_TRAIN_MASK;
424 		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE;
425 	}
426 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
427 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
428 
429 	intel_dp->DP &= ~(DP_PORT_EN | DP_AUDIO_OUTPUT_ENABLE);
430 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
431 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
432 
433 	/*
434 	 * HW workaround for IBX, we need to move the port
435 	 * to transcoder A after disabling it to allow the
436 	 * matching HDMI port to be enabled on transcoder A.
437 	 */
438 	if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) {
439 		/*
440 		 * We get CPU/PCH FIFO underruns on the other pipe when
441 		 * doing the workaround. Sweep them under the rug.
442 		 */
443 		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
444 		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);
445 
446 		/* always enable with pattern 1 (as per spec) */
447 		intel_dp->DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK);
448 		intel_dp->DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) |
449 			DP_LINK_TRAIN_PAT_1;
450 		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
451 		intel_de_posting_read(dev_priv, intel_dp->output_reg);
452 
453 		intel_dp->DP &= ~DP_PORT_EN;
454 		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
455 		intel_de_posting_read(dev_priv, intel_dp->output_reg);
456 
457 		intel_wait_for_vblank_if_active(dev_priv, PIPE_A);
458 		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
459 		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
460 	}
461 
462 	msleep(intel_dp->pps.panel_power_down_delay);
463 
464 	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
465 		intel_wakeref_t wakeref;
466 
467 		with_intel_pps_lock(intel_dp, wakeref)
468 			intel_dp->pps.active_pipe = INVALID_PIPE;
469 	}
470 }
471 
472 static void intel_disable_dp(struct intel_atomic_state *state,
473 			     struct intel_encoder *encoder,
474 			     const struct intel_crtc_state *old_crtc_state,
475 			     const struct drm_connector_state *old_conn_state)
476 {
477 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
478 
479 	intel_dp->link_trained = false;
480 
481 	intel_audio_codec_disable(encoder, old_crtc_state, old_conn_state);
482 
483 	/*
484 	 * Make sure the panel is off before trying to change the mode.
485 	 * But also ensure that we have vdd while we switch off the panel.
486 	 */
487 	intel_pps_vdd_on(intel_dp);
488 	intel_edp_backlight_off(old_conn_state);
489 	intel_dp_set_power(intel_dp, DP_SET_POWER_D3);
490 	intel_pps_off(intel_dp);
491 }
492 
493 static void g4x_disable_dp(struct intel_atomic_state *state,
494 			   struct intel_encoder *encoder,
495 			   const struct intel_crtc_state *old_crtc_state,
496 			   const struct drm_connector_state *old_conn_state)
497 {
498 	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
499 }
500 
501 static void vlv_disable_dp(struct intel_atomic_state *state,
502 			   struct intel_encoder *encoder,
503 			   const struct intel_crtc_state *old_crtc_state,
504 			   const struct drm_connector_state *old_conn_state)
505 {
506 	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
507 }
508 
509 static void g4x_post_disable_dp(struct intel_atomic_state *state,
510 				struct intel_encoder *encoder,
511 				const struct intel_crtc_state *old_crtc_state,
512 				const struct drm_connector_state *old_conn_state)
513 {
514 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
515 	enum port port = encoder->port;
516 
517 	/*
518 	 * Bspec does not list a specific disable sequence for g4x DP.
519 	 * Follow the ilk+ sequence (disable pipe before the port) for
520 	 * g4x DP as it does not suffer from underruns like the normal
521 	 * g4x modeset sequence (disable pipe after the port).
522 	 */
523 	intel_dp_link_down(encoder, old_crtc_state);
524 
525 	/* Only ilk+ has port A */
526 	if (port == PORT_A)
527 		ilk_edp_pll_off(intel_dp, old_crtc_state);
528 }
529 
530 static void vlv_post_disable_dp(struct intel_atomic_state *state,
531 				struct intel_encoder *encoder,
532 				const struct intel_crtc_state *old_crtc_state,
533 				const struct drm_connector_state *old_conn_state)
534 {
535 	intel_dp_link_down(encoder, old_crtc_state);
536 }
537 
538 static void chv_post_disable_dp(struct intel_atomic_state *state,
539 				struct intel_encoder *encoder,
540 				const struct intel_crtc_state *old_crtc_state,
541 				const struct drm_connector_state *old_conn_state)
542 {
543 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
544 
545 	intel_dp_link_down(encoder, old_crtc_state);
546 
547 	vlv_dpio_get(dev_priv);
548 
549 	/* Assert data lane reset */
550 	chv_data_lane_soft_reset(encoder, old_crtc_state, true);
551 
552 	vlv_dpio_put(dev_priv);
553 }
554 
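/*
 * Program the requested link training pattern into the DP port
 * register, using the CPT-style field encoding.
 */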
555 static void
556 cpt_set_link_train(struct intel_dp *intel_dp,
557 		   const struct intel_crtc_state *crtc_state,
558 		   u8 dp_train_pat)
559 {
560 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
561 
562 	intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;
563 
564 	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
565 	case DP_TRAINING_PATTERN_DISABLE:
566 		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
567 		break;
568 	case DP_TRAINING_PATTERN_1:
569 		intel_dp->DP |= DP_LINK_TRAIN_PAT_1_CPT;
570 		break;
571 	case DP_TRAINING_PATTERN_2:
572 		intel_dp->DP |= DP_LINK_TRAIN_PAT_2_CPT;
573 		break;
574 	default:
575 		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
576 		return;
577 	}
578 
579 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
580 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
581 }
582 
583 static void
584 g4x_set_link_train(struct intel_dp *intel_dp,
585 		   const struct intel_crtc_state *crtc_state,
586 		   u8 dp_train_pat)
587 {
588 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
589 
590 	intel_dp->DP &= ~DP_LINK_TRAIN_MASK;
591 
592 	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
593 	case DP_TRAINING_PATTERN_DISABLE:
594 		intel_dp->DP |= DP_LINK_TRAIN_OFF;
595 		break;
596 	case DP_TRAINING_PATTERN_1:
597 		intel_dp->DP |= DP_LINK_TRAIN_PAT_1;
598 		break;
599 	case DP_TRAINING_PATTERN_2:
600 		intel_dp->DP |= DP_LINK_TRAIN_PAT_2;
601 		break;
602 	default:
603 		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
604 		return;
605 	}
606 
607 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
608 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
609 }
610 
611 static void intel_dp_enable_port(struct intel_dp *intel_dp,
612 				 const struct intel_crtc_state *crtc_state)
613 {
614 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
615 
616 	/* enable with pattern 1 (as per spec) */
617 
618 	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
619 					       DP_PHY_DPRX, DP_TRAINING_PATTERN_1);
620 
621 	/*
622 	 * Magic for VLV/CHV. We _must_ first set up the register
623 	 * without actually enabling the port, and then do another
624 	 * write to enable the port. Otherwise link training will
625 	 * fail when the power sequencer is freshly used for this port.
626 	 */
627 	intel_dp->DP |= DP_PORT_EN;
628 	if (crtc_state->has_audio)
629 		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
630 
631 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
632 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
633 }
634 
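/*
 * Full DP enable sequence: enable the port with training pattern 1
 * under the PPS lock, run the panel power on sequence, wait for the
 * VLV/CHV PHY lanes where applicable, then link train.
 */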
635 static void intel_enable_dp(struct intel_atomic_state *state,
636 			    struct intel_encoder *encoder,
637 			    const struct intel_crtc_state *pipe_config,
638 			    const struct drm_connector_state *conn_state)
639 {
640 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
641 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
642 	u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg);
643 	intel_wakeref_t wakeref;
644 
645 	if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN))
646 		return;
647 
648 	with_intel_pps_lock(intel_dp, wakeref) {
649 		if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
650 			vlv_pps_init(encoder, pipe_config);
651 
652 		intel_dp_enable_port(intel_dp, pipe_config);
653 
654 		intel_pps_vdd_on_unlocked(intel_dp);
655 		intel_pps_on_unlocked(intel_dp);
656 		intel_pps_vdd_off_unlocked(intel_dp, true);
657 	}
658 
659 	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
660 		unsigned int lane_mask = 0x0;
661 
662 		if (IS_CHERRYVIEW(dev_priv))
663 			lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);
664 
665 		vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
666 				    lane_mask);
667 	}
668 
669 	intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
670 	intel_dp_configure_protocol_converter(intel_dp, pipe_config);
671 	intel_dp_check_frl_training(intel_dp);
672 	intel_dp_pcon_dsc_configure(intel_dp, pipe_config);
673 	intel_dp_start_link_train(intel_dp, pipe_config);
674 	intel_dp_stop_link_train(intel_dp, pipe_config);
675 }
676 
677 static void g4x_enable_dp(struct intel_atomic_state *state,
678 			  struct intel_encoder *encoder,
679 			  const struct intel_crtc_state *pipe_config,
680 			  const struct drm_connector_state *conn_state)
681 {
682 	intel_enable_dp(state, encoder, pipe_config, conn_state);
683 	intel_audio_codec_enable(encoder, pipe_config, conn_state);
684 	intel_edp_backlight_on(pipe_config, conn_state);
685 }
686 
687 static void vlv_enable_dp(struct intel_atomic_state *state,
688 			  struct intel_encoder *encoder,
689 			  const struct intel_crtc_state *pipe_config,
690 			  const struct drm_connector_state *conn_state)
691 {
692 	intel_audio_codec_enable(encoder, pipe_config, conn_state);
693 	intel_edp_backlight_on(pipe_config, conn_state);
694 }
695 
696 static void g4x_pre_enable_dp(struct intel_atomic_state *state,
697 			      struct intel_encoder *encoder,
698 			      const struct intel_crtc_state *pipe_config,
699 			      const struct drm_connector_state *conn_state)
700 {
701 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
702 	enum port port = encoder->port;
703 
704 	intel_dp_prepare(encoder, pipe_config);
705 
706 	/* Only ilk+ has port A */
707 	if (port == PORT_A)
708 		ilk_edp_pll_on(intel_dp, pipe_config);
709 }
710 
711 static void vlv_pre_enable_dp(struct intel_atomic_state *state,
712 			      struct intel_encoder *encoder,
713 			      const struct intel_crtc_state *pipe_config,
714 			      const struct drm_connector_state *conn_state)
715 {
716 	vlv_phy_pre_encoder_enable(encoder, pipe_config);
717 
718 	intel_enable_dp(state, encoder, pipe_config, conn_state);
719 }
720 
721 static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state,
722 				  struct intel_encoder *encoder,
723 				  const struct intel_crtc_state *pipe_config,
724 				  const struct drm_connector_state *conn_state)
725 {
726 	intel_dp_prepare(encoder, pipe_config);
727 
728 	vlv_phy_pre_pll_enable(encoder, pipe_config);
729 }
730 
731 static void chv_pre_enable_dp(struct intel_atomic_state *state,
732 			      struct intel_encoder *encoder,
733 			      const struct intel_crtc_state *pipe_config,
734 			      const struct drm_connector_state *conn_state)
735 {
736 	chv_phy_pre_encoder_enable(encoder, pipe_config);
737 
738 	intel_enable_dp(state, encoder, pipe_config, conn_state);
739 
740 	/* Second common lane will stay alive on its own now */
741 	chv_phy_release_cl2_override(encoder);
742 }
743 
744 static void chv_dp_pre_pll_enable(struct intel_atomic_state *state,
745 				  struct intel_encoder *encoder,
746 				  const struct intel_crtc_state *pipe_config,
747 				  const struct drm_connector_state *conn_state)
748 {
749 	intel_dp_prepare(encoder, pipe_config);
750 
751 	chv_phy_pre_pll_enable(encoder, pipe_config);
752 }
753 
754 static void chv_dp_post_pll_disable(struct intel_atomic_state *state,
755 				    struct intel_encoder *encoder,
756 				    const struct intel_crtc_state *old_crtc_state,
757 				    const struct drm_connector_state *old_conn_state)
758 {
759 	chv_phy_post_pll_disable(encoder, old_crtc_state);
760 }
761 
762 static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp,
763 				 const struct intel_crtc_state *crtc_state)
764 {
765 	return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
766 }
767 
768 static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp,
769 				 const struct intel_crtc_state *crtc_state)
770 {
771 	return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
772 }
773 
774 static u8 intel_dp_preemph_max_2(struct intel_dp *intel_dp)
775 {
776 	return DP_TRAIN_PRE_EMPH_LEVEL_2;
777 }
778 
779 static u8 intel_dp_preemph_max_3(struct intel_dp *intel_dp)
780 {
781 	return DP_TRAIN_PRE_EMPH_LEVEL_3;
782 }
783 
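/*
 * Map the DP vswing/pre-emphasis level requested by link training to
 * the VLV DPIO PHY register values (de-emphasis, pre-emphasis and
 * uniqtranscale words).
 */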
784 static void vlv_set_signal_levels(struct intel_encoder *encoder,
785 				  const struct intel_crtc_state *crtc_state)
786 {
787 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
788 	unsigned long demph_reg_value, preemph_reg_value,
789 		uniqtranscale_reg_value;
790 	u8 train_set = intel_dp->train_set[0];
791 
792 	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
793 	case DP_TRAIN_PRE_EMPH_LEVEL_0:
794 		preemph_reg_value = 0x0004000;
795 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
796 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
797 			demph_reg_value = 0x2B405555;
798 			uniqtranscale_reg_value = 0x552AB83A;
799 			break;
800 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
801 			demph_reg_value = 0x2B404040;
802 			uniqtranscale_reg_value = 0x5548B83A;
803 			break;
804 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
805 			demph_reg_value = 0x2B245555;
806 			uniqtranscale_reg_value = 0x5560B83A;
807 			break;
808 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
809 			demph_reg_value = 0x2B405555;
810 			uniqtranscale_reg_value = 0x5598DA3A;
811 			break;
812 		default:
813 			return;
814 		}
815 		break;
816 	case DP_TRAIN_PRE_EMPH_LEVEL_1:
817 		preemph_reg_value = 0x0002000;
818 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
819 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
820 			demph_reg_value = 0x2B404040;
821 			uniqtranscale_reg_value = 0x5552B83A;
822 			break;
823 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
824 			demph_reg_value = 0x2B404848;
825 			uniqtranscale_reg_value = 0x5580B83A;
826 			break;
827 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
828 			demph_reg_value = 0x2B404040;
829 			uniqtranscale_reg_value = 0x55ADDA3A;
830 			break;
831 		default:
832 			return;
833 		}
834 		break;
835 	case DP_TRAIN_PRE_EMPH_LEVEL_2:
836 		preemph_reg_value = 0x0000000;
837 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
838 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
839 			demph_reg_value = 0x2B305555;
840 			uniqtranscale_reg_value = 0x5570B83A;
841 			break;
842 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
843 			demph_reg_value = 0x2B2B4040;
844 			uniqtranscale_reg_value = 0x55ADDA3A;
845 			break;
846 		default:
847 			return;
848 		}
849 		break;
850 	case DP_TRAIN_PRE_EMPH_LEVEL_3:
851 		preemph_reg_value = 0x0006000;
852 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
853 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
854 			demph_reg_value = 0x1B405555;
855 			uniqtranscale_reg_value = 0x55ADDA3A;
856 			break;
857 		default:
858 			return;
859 		}
860 		break;
861 	default:
862 		return;
863 	}
864 
865 	vlv_set_phy_signal_level(encoder, crtc_state,
866 				 demph_reg_value, preemph_reg_value,
867 				 uniqtranscale_reg_value, 0);
868 }
869 
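/*
 * Same mapping for CHV: translate the requested vswing/pre-emphasis
 * level into de-emphasis and margin values for the CHV PHY.
 */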
870 static void chv_set_signal_levels(struct intel_encoder *encoder,
871 				  const struct intel_crtc_state *crtc_state)
872 {
873 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
874 	u32 deemph_reg_value, margin_reg_value;
875 	bool uniq_trans_scale = false;
876 	u8 train_set = intel_dp->train_set[0];
877 
878 	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
879 	case DP_TRAIN_PRE_EMPH_LEVEL_0:
880 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
881 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
882 			deemph_reg_value = 128;
883 			margin_reg_value = 52;
884 			break;
885 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
886 			deemph_reg_value = 128;
887 			margin_reg_value = 77;
888 			break;
889 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
890 			deemph_reg_value = 128;
891 			margin_reg_value = 102;
892 			break;
893 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
894 			deemph_reg_value = 128;
895 			margin_reg_value = 154;
896 			uniq_trans_scale = true;
897 			break;
898 		default:
899 			return;
900 		}
901 		break;
902 	case DP_TRAIN_PRE_EMPH_LEVEL_1:
903 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
904 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
905 			deemph_reg_value = 85;
906 			margin_reg_value = 78;
907 			break;
908 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
909 			deemph_reg_value = 85;
910 			margin_reg_value = 116;
911 			break;
912 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
913 			deemph_reg_value = 85;
914 			margin_reg_value = 154;
915 			break;
916 		default:
917 			return;
918 		}
919 		break;
920 	case DP_TRAIN_PRE_EMPH_LEVEL_2:
921 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
922 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
923 			deemph_reg_value = 64;
924 			margin_reg_value = 104;
925 			break;
926 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
927 			deemph_reg_value = 64;
928 			margin_reg_value = 154;
929 			break;
930 		default:
931 			return;
932 		}
933 		break;
934 	case DP_TRAIN_PRE_EMPH_LEVEL_3:
935 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
936 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
937 			deemph_reg_value = 43;
938 			margin_reg_value = 154;
939 			break;
940 		default:
941 			return;
942 		}
943 		break;
944 	default:
945 		return;
946 	}
947 
948 	chv_set_phy_signal_level(encoder, crtc_state,
949 				 deemph_reg_value, margin_reg_value,
950 				 uniq_trans_scale);
951 }
952 
953 static u32 g4x_signal_levels(u8 train_set)
954 {
955 	u32 signal_levels = 0;
956 
957 	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
958 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
959 	default:
960 		signal_levels |= DP_VOLTAGE_0_4;
961 		break;
962 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
963 		signal_levels |= DP_VOLTAGE_0_6;
964 		break;
965 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
966 		signal_levels |= DP_VOLTAGE_0_8;
967 		break;
968 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
969 		signal_levels |= DP_VOLTAGE_1_2;
970 		break;
971 	}
972 	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
973 	case DP_TRAIN_PRE_EMPH_LEVEL_0:
974 	default:
975 		signal_levels |= DP_PRE_EMPHASIS_0;
976 		break;
977 	case DP_TRAIN_PRE_EMPH_LEVEL_1:
978 		signal_levels |= DP_PRE_EMPHASIS_3_5;
979 		break;
980 	case DP_TRAIN_PRE_EMPH_LEVEL_2:
981 		signal_levels |= DP_PRE_EMPHASIS_6;
982 		break;
983 	case DP_TRAIN_PRE_EMPH_LEVEL_3:
984 		signal_levels |= DP_PRE_EMPHASIS_9_5;
985 		break;
986 	}
987 	return signal_levels;
988 }
989 
990 static void
991 g4x_set_signal_levels(struct intel_encoder *encoder,
992 		      const struct intel_crtc_state *crtc_state)
993 {
994 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
995 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
996 	u8 train_set = intel_dp->train_set[0];
997 	u32 signal_levels;
998 
999 	signal_levels = g4x_signal_levels(train_set);
1000 
1001 	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
1002 		    signal_levels);
1003 
1004 	intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK);
1005 	intel_dp->DP |= signal_levels;
1006 
1007 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
1008 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
1009 }
1010 
1011 /* SNB CPU eDP voltage swing and pre-emphasis control */
1012 static u32 snb_cpu_edp_signal_levels(u8 train_set)
1013 {
1014 	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
1015 					DP_TRAIN_PRE_EMPHASIS_MASK);
1016 
1017 	switch (signal_levels) {
1018 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1019 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1020 		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
1021 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1022 		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
1023 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1024 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1025 		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
1026 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1027 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1028 		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
1029 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1030 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1031 		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
1032 	default:
1033 		MISSING_CASE(signal_levels);
1034 		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
1035 	}
1036 }
1037 
1038 static void
1039 snb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
1040 			      const struct intel_crtc_state *crtc_state)
1041 {
1042 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1043 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1044 	u8 train_set = intel_dp->train_set[0];
1045 	u32 signal_levels;
1046 
1047 	signal_levels = snb_cpu_edp_signal_levels(train_set);
1048 
1049 	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
1050 		    signal_levels);
1051 
1052 	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
1053 	intel_dp->DP |= signal_levels;
1054 
1055 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
1056 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
1057 }
1058 
1059 /* IVB CPU eDP voltage swing and pre-emphasis control */
1060 static u32 ivb_cpu_edp_signal_levels(u8 train_set)
1061 {
1062 	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
1063 					DP_TRAIN_PRE_EMPHASIS_MASK);
1064 
1065 	switch (signal_levels) {
1066 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1067 		return EDP_LINK_TRAIN_400MV_0DB_IVB;
1068 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1069 		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
1070 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1071 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1072 		return EDP_LINK_TRAIN_400MV_6DB_IVB;
1073 
1074 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1075 		return EDP_LINK_TRAIN_600MV_0DB_IVB;
1076 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1077 		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;
1078 
1079 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1080 		return EDP_LINK_TRAIN_800MV_0DB_IVB;
1081 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1082 		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;
1083 
1084 	default:
1085 		MISSING_CASE(signal_levels);
1086 		return EDP_LINK_TRAIN_500MV_0DB_IVB;
1087 	}
1088 }
1089 
1090 static void
1091 ivb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
1092 			      const struct intel_crtc_state *crtc_state)
1093 {
1094 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1095 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1096 	u8 train_set = intel_dp->train_set[0];
1097 	u32 signal_levels;
1098 
1099 	signal_levels = ivb_cpu_edp_signal_levels(train_set);
1100 
1101 	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
1102 		    signal_levels);
1103 
1104 	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
1105 	intel_dp->DP |= signal_levels;
1106 
1107 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
1108 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
1109 }
1110 
1111 /*
1112  * If the display is now connected, check the link status;
1113  * there have been known issues of link loss triggering a
1114  * long HPD pulse.
1115  *
1116  * Some sinks (e.g. ASUS PB287Q) seem to perform some
1117  * weird HPD ping pong during modesets. So we can apparently
1118  * end up with HPD going low during a modeset, and then
1119  * going back up soon after. Once that happens we must
1120  * retrain the link to get a picture, in case no
1121  * userspace component reacted to the intermittent HPD dip.
1122  */
1123 static enum intel_hotplug_state
1124 intel_dp_hotplug(struct intel_encoder *encoder,
1125 		 struct intel_connector *connector)
1126 {
1127 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1128 	struct drm_modeset_acquire_ctx ctx;
1129 	enum intel_hotplug_state state;
1130 	int ret;
1131 
1132 	if (intel_dp->compliance.test_active &&
1133 	    intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
1134 		intel_dp_phy_test(encoder);
1135 		/* just do the PHY test and nothing else */
1136 		return INTEL_HOTPLUG_UNCHANGED;
1137 	}
1138 
1139 	state = intel_encoder_hotplug(encoder, connector);
1140 
1141 	drm_modeset_acquire_init(&ctx, 0);
1142 
1143 	for (;;) {
1144 		ret = intel_dp_retrain_link(encoder, &ctx);
1145 
1146 		if (ret == -EDEADLK) {
1147 			drm_modeset_backoff(&ctx);
1148 			continue;
1149 		}
1150 
1151 		break;
1152 	}
1153 
1154 	drm_modeset_drop_locks(&ctx);
1155 	drm_modeset_acquire_fini(&ctx);
1156 	drm_WARN(encoder->base.dev, ret,
1157 		 "Acquiring modeset locks failed with %i\n", ret);
1158 
1159 	/*
1160 	 * Keeping it consistent with intel_ddi_hotplug() and
1161 	 * intel_hdmi_hotplug().
1162 	 */
1163 	if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
1164 		state = INTEL_HOTPLUG_RETRY;
1165 
1166 	return state;
1167 }
1168 
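/*
 * Live status ("is something physically connected") checks, reading
 * the platform-specific hotplug status bit for this port.
 */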
1169 static bool ibx_digital_port_connected(struct intel_encoder *encoder)
1170 {
1171 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1172 	u32 bit = dev_priv->display.hotplug.pch_hpd[encoder->hpd_pin];
1173 
1174 	return intel_de_read(dev_priv, SDEISR) & bit;
1175 }
1176 
1177 static bool g4x_digital_port_connected(struct intel_encoder *encoder)
1178 {
1179 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1180 	u32 bit;
1181 
1182 	switch (encoder->hpd_pin) {
1183 	case HPD_PORT_B:
1184 		bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
1185 		break;
1186 	case HPD_PORT_C:
1187 		bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
1188 		break;
1189 	case HPD_PORT_D:
1190 		bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
1191 		break;
1192 	default:
1193 		MISSING_CASE(encoder->hpd_pin);
1194 		return false;
1195 	}
1196 
1197 	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
1198 }
1199 
1200 static bool ilk_digital_port_connected(struct intel_encoder *encoder)
1201 {
1202 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1203 	u32 bit = dev_priv->display.hotplug.hpd[encoder->hpd_pin];
1204 
1205 	return intel_de_read(dev_priv, DEISR) & bit;
1206 }
1207 
1208 static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
1209 {
1210 	intel_dp_encoder_flush_work(encoder);
1211 
1212 	drm_encoder_cleanup(encoder);
1213 	kfree(enc_to_dig_port(to_intel_encoder(encoder)));
1214 }
1215 
1216 enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
1217 {
1218 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1219 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1220 	enum pipe pipe;
1221 
1222 	if (g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
1223 				encoder->port, &pipe))
1224 		return pipe;
1225 
1226 	return INVALID_PIPE;
1227 }
1228 
1229 static void intel_dp_encoder_reset(struct drm_encoder *encoder)
1230 {
1231 	struct drm_i915_private *dev_priv = to_i915(encoder->dev);
1232 	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));
1233 
1234 	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);
1235 
1236 	intel_dp->reset_link_params = true;
1237 
1238 	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
1239 		intel_wakeref_t wakeref;
1240 
1241 		with_intel_pps_lock(intel_dp, wakeref)
1242 			intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
1243 	}
1244 
1245 	intel_pps_encoder_reset(intel_dp);
1246 }
1247 
1248 static const struct drm_encoder_funcs intel_dp_enc_funcs = {
1249 	.reset = intel_dp_encoder_reset,
1250 	.destroy = intel_dp_encoder_destroy,
1251 };
1252 
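/*
 * Create and register a g4x-style DP encoder/connector pair: allocate
 * the digital port, set up the DRM encoder, and wire in the
 * platform-specific enable/disable, link training and signal level
 * hooks for this platform.
 */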
1253 bool g4x_dp_init(struct drm_i915_private *dev_priv,
1254 		 i915_reg_t output_reg, enum port port)
1255 {
1256 	const struct intel_bios_encoder_data *devdata;
1257 	struct intel_digital_port *dig_port;
1258 	struct intel_encoder *intel_encoder;
1259 	struct drm_encoder *encoder;
1260 	struct intel_connector *intel_connector;
1261 
1262 	devdata = intel_bios_encoder_data_lookup(dev_priv, port);
1263 
1264 	/* FIXME bail? */
1265 	if (!devdata)
1266 		drm_dbg_kms(&dev_priv->drm, "No VBT child device for DP-%c\n",
1267 			    port_name(port));
1268 
1269 	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
1270 	if (!dig_port)
1271 		return false;
1272 
1273 	intel_connector = intel_connector_alloc();
1274 	if (!intel_connector)
1275 		goto err_connector_alloc;
1276 
1277 	intel_encoder = &dig_port->base;
1278 	encoder = &intel_encoder->base;
1279 
1280 	intel_encoder->devdata = devdata;
1281 
1282 	mutex_init(&dig_port->hdcp_mutex);
1283 
1284 	if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
1285 			     &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
1286 			     "DP %c", port_name(port)))
1287 		goto err_encoder_init;
1288 
1289 	intel_encoder->hotplug = intel_dp_hotplug;
1290 	intel_encoder->compute_config = intel_dp_compute_config;
1291 	intel_encoder->get_hw_state = intel_dp_get_hw_state;
1292 	intel_encoder->get_config = intel_dp_get_config;
1293 	intel_encoder->sync_state = intel_dp_sync_state;
1294 	intel_encoder->initial_fastset_check = intel_dp_initial_fastset_check;
1295 	intel_encoder->update_pipe = intel_backlight_update;
1296 	intel_encoder->suspend = intel_dp_encoder_suspend;
1297 	intel_encoder->shutdown = intel_dp_encoder_shutdown;
1298 	if (IS_CHERRYVIEW(dev_priv)) {
1299 		intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
1300 		intel_encoder->pre_enable = chv_pre_enable_dp;
1301 		intel_encoder->enable = vlv_enable_dp;
1302 		intel_encoder->disable = vlv_disable_dp;
1303 		intel_encoder->post_disable = chv_post_disable_dp;
1304 		intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
1305 	} else if (IS_VALLEYVIEW(dev_priv)) {
1306 		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
1307 		intel_encoder->pre_enable = vlv_pre_enable_dp;
1308 		intel_encoder->enable = vlv_enable_dp;
1309 		intel_encoder->disable = vlv_disable_dp;
1310 		intel_encoder->post_disable = vlv_post_disable_dp;
1311 	} else {
1312 		intel_encoder->pre_enable = g4x_pre_enable_dp;
1313 		intel_encoder->enable = g4x_enable_dp;
1314 		intel_encoder->disable = g4x_disable_dp;
1315 		intel_encoder->post_disable = g4x_post_disable_dp;
1316 	}
1317 
1318 	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
1319 	    (HAS_PCH_CPT(dev_priv) && port != PORT_A))
1320 		dig_port->dp.set_link_train = cpt_set_link_train;
1321 	else
1322 		dig_port->dp.set_link_train = g4x_set_link_train;
1323 
1324 	if (IS_CHERRYVIEW(dev_priv))
1325 		intel_encoder->set_signal_levels = chv_set_signal_levels;
1326 	else if (IS_VALLEYVIEW(dev_priv))
1327 		intel_encoder->set_signal_levels = vlv_set_signal_levels;
1328 	else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
1329 		intel_encoder->set_signal_levels = ivb_cpu_edp_set_signal_levels;
1330 	else if (IS_SANDYBRIDGE(dev_priv) && port == PORT_A)
1331 		intel_encoder->set_signal_levels = snb_cpu_edp_set_signal_levels;
1332 	else
1333 		intel_encoder->set_signal_levels = g4x_set_signal_levels;
1334 
1335 	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
1336 	    (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
1337 		dig_port->dp.preemph_max = intel_dp_preemph_max_3;
1338 		dig_port->dp.voltage_max = intel_dp_voltage_max_3;
1339 	} else {
1340 		dig_port->dp.preemph_max = intel_dp_preemph_max_2;
1341 		dig_port->dp.voltage_max = intel_dp_voltage_max_2;
1342 	}
1343 
1344 	dig_port->dp.output_reg = output_reg;
1345 	dig_port->max_lanes = 4;
1346 
1347 	intel_encoder->type = INTEL_OUTPUT_DP;
1348 	intel_encoder->power_domain = intel_display_power_ddi_lanes_domain(dev_priv, port);
1349 	if (IS_CHERRYVIEW(dev_priv)) {
1350 		if (port == PORT_D)
1351 			intel_encoder->pipe_mask = BIT(PIPE_C);
1352 		else
1353 			intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
1354 	} else {
1355 		intel_encoder->pipe_mask = ~0;
1356 	}
1357 	intel_encoder->cloneable = 0;
1358 	intel_encoder->port = port;
1359 	intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);
1360 
1361 	dig_port->hpd_pulse = intel_dp_hpd_pulse;
1362 
1363 	if (HAS_GMCH(dev_priv)) {
1364 		dig_port->connected = g4x_digital_port_connected;
1365 	} else {
1366 		if (port == PORT_A)
1367 			dig_port->connected = ilk_digital_port_connected;
1368 		else
1369 			dig_port->connected = ibx_digital_port_connected;
1370 	}
1371 
1372 	if (port != PORT_A)
1373 		intel_infoframe_init(dig_port);
1374 
1375 	dig_port->aux_ch = intel_dp_aux_ch(intel_encoder);
1376 	if (!intel_dp_init_connector(dig_port, intel_connector))
1377 		goto err_init_connector;
1378 
1379 	return true;
1380 
1381 err_init_connector:
1382 	drm_encoder_cleanup(encoder);
1383 err_encoder_init:
1384 	kfree(intel_connector);
1385 err_connector_alloc:
1386 	kfree(dig_port);
1387 	return false;
1388 }
1389