1 /*
2  * Copyright © 2008-2015 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21  * IN THE SOFTWARE.
22  */
23 
24 #include "i915_drv.h"
25 #include "intel_display_types.h"
26 #include "intel_dp.h"
27 #include "intel_dp_link_training.h"
28 
29 static void intel_dp_reset_lttpr_common_caps(struct intel_dp *intel_dp)
30 {
31 	memset(intel_dp->lttpr_common_caps, 0, sizeof(intel_dp->lttpr_common_caps));
32 }
33 
34 static void intel_dp_reset_lttpr_count(struct intel_dp *intel_dp)
35 {
36 	intel_dp->lttpr_common_caps[DP_PHY_REPEATER_CNT -
37 				    DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV] = 0;
38 }
39 
40 static const char *intel_dp_phy_name(enum drm_dp_phy dp_phy,
41 				     char *buf, size_t buf_size)
42 {
43 	if (dp_phy == DP_PHY_DPRX)
44 		snprintf(buf, buf_size, "DPRX");
45 	else
46 		snprintf(buf, buf_size, "LTTPR %d", dp_phy - DP_PHY_LTTPR1 + 1);
47 
48 	return buf;
49 }
50 
51 static u8 *intel_dp_lttpr_phy_caps(struct intel_dp *intel_dp,
52 				   enum drm_dp_phy dp_phy)
53 {
54 	return intel_dp->lttpr_phy_caps[dp_phy - DP_PHY_LTTPR1];
55 }
56 
57 static void intel_dp_read_lttpr_phy_caps(struct intel_dp *intel_dp,
58 					 enum drm_dp_phy dp_phy)
59 {
60 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
61 	u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);
62 	char phy_name[10];
63 
64 	intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name));
65 
66 	if (drm_dp_read_lttpr_phy_caps(&intel_dp->aux, dp_phy, phy_caps) < 0) {
67 		drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
68 			    "[ENCODER:%d:%s][%s] failed to read the PHY caps\n",
69 			    encoder->base.base.id, encoder->base.name, phy_name);
70 		return;
71 	}
72 
73 	drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
74 		    "[ENCODER:%d:%s][%s] PHY capabilities: %*ph\n",
75 		    encoder->base.base.id, encoder->base.name, phy_name,
76 		    (int)sizeof(intel_dp->lttpr_phy_caps[0]),
77 		    phy_caps);
78 }
79 
80 static bool intel_dp_read_lttpr_common_caps(struct intel_dp *intel_dp)
81 {
82 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
83 	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
84 
85 	if (intel_dp_is_edp(intel_dp))
86 		return false;
87 
88 	/*
89 	 * Detecting LTTPRs must be avoided on platforms with an AUX timeout
90 	 * period < 3.2ms. (see DP Standard v2.0, 2.11.2, 3.6.6.1).
91 	 */
92 	if (DISPLAY_VER(i915) < 10 || IS_GEMINILAKE(i915))
93 		return false;
94 
95 	if (drm_dp_read_lttpr_common_caps(&intel_dp->aux,
96 					  intel_dp->lttpr_common_caps) < 0)
97 		goto reset_caps;
98 
99 	drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
100 		    "[ENCODER:%d:%s] LTTPR common capabilities: %*ph\n",
101 		    encoder->base.base.id, encoder->base.name,
102 		    (int)sizeof(intel_dp->lttpr_common_caps),
103 		    intel_dp->lttpr_common_caps);
104 
105 	/* The minimum value of LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV is 1.4 */
106 	if (intel_dp->lttpr_common_caps[0] < 0x14)
107 		goto reset_caps;
108 
109 	return true;
110 
111 reset_caps:
112 	intel_dp_reset_lttpr_common_caps(intel_dp);
113 	return false;
114 }
115 
116 static bool
117 intel_dp_set_lttpr_transparent_mode(struct intel_dp *intel_dp, bool enable)
118 {
119 	u8 val = enable ? DP_PHY_REPEATER_MODE_TRANSPARENT :
120 			  DP_PHY_REPEATER_MODE_NON_TRANSPARENT;
121 
122 	return drm_dp_dpcd_write(&intel_dp->aux, DP_PHY_REPEATER_MODE, &val, 1) == 1;
123 }
124 
/*
 * Detect LTTPRs and switch them to non-transparent link training mode if
 * possible. Returns the number of detected LTTPRs when non-transparent mode
 * was set, or 0 when no LTTPRs were detected / the link will be trained in
 * transparent mode.
 */
static int intel_dp_init_lttpr(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	int lttpr_count;
	int i;

	/* No valid common caps -> no LTTPRs to initialize. */
	if (!intel_dp_read_lttpr_common_caps(intel_dp))
		return 0;

	/* Negative means an invalid/unsupported repeater count field. */
	lttpr_count = drm_dp_lttpr_count(intel_dp->lttpr_common_caps);
	/*
	 * Prevent setting LTTPR transparent mode explicitly if no LTTPRs are
	 * detected as this breaks link training at least on the Dell WD19TB
	 * dock.
	 */
	if (lttpr_count == 0)
		return 0;

	/*
	 * See DP Standard v2.0 3.6.6.1. about the explicit disabling of
	 * non-transparent mode and the disable->enable non-transparent mode
	 * sequence.
	 */
	intel_dp_set_lttpr_transparent_mode(intel_dp, true);

	/*
	 * In case of unsupported number of LTTPRs or failing to switch to
	 * non-transparent mode fall-back to transparent link training mode,
	 * still taking into account any LTTPR common lane- rate/count limits.
	 */
	if (lttpr_count < 0)
		return 0;

	if (!intel_dp_set_lttpr_transparent_mode(intel_dp, false)) {
		drm_dbg_kms(&i915->drm,
			    "[ENCODER:%d:%s] Switching to LTTPR non-transparent LT mode failed, fall-back to transparent mode\n",
			    encoder->base.base.id, encoder->base.name);

		/* Re-enable transparent mode and forget the repeater count. */
		intel_dp_set_lttpr_transparent_mode(intel_dp, true);
		intel_dp_reset_lttpr_count(intel_dp);

		return 0;
	}

	/* Cache the per-PHY capabilities of each detected LTTPR. */
	for (i = 0; i < lttpr_count; i++)
		intel_dp_read_lttpr_phy_caps(intel_dp, DP_PHY_LTTPR(i));

	return lttpr_count;
}
175 
176 /**
177  * intel_dp_init_lttpr_and_dprx_caps - detect LTTPR and DPRX caps, init the LTTPR link training mode
178  * @intel_dp: Intel DP struct
179  *
180  * Read the LTTPR common and DPRX capabilities and switch to non-transparent
181  * link training mode if any is detected and read the PHY capabilities for all
182  * detected LTTPRs. In case of an LTTPR detection error or if the number of
183  * LTTPRs is more than is supported (8), fall back to the no-LTTPR,
184  * transparent mode link training mode.
185  *
186  * Returns:
187  *   >0  if LTTPRs were detected and the non-transparent LT mode was set. The
188  *       DPRX capabilities are read out.
189  *    0  if no LTTPRs or more than 8 LTTPRs were detected or in case of a
190  *       detection failure and the transparent LT mode was set. The DPRX
191  *       capabilities are read out.
192  *   <0  Reading out the DPRX capabilities failed.
193  */
194 int intel_dp_init_lttpr_and_dprx_caps(struct intel_dp *intel_dp)
195 {
196 	int lttpr_count = intel_dp_init_lttpr(intel_dp);
197 
198 	/* The DPTX shall read the DPRX caps after LTTPR detection. */
199 	if (drm_dp_read_dpcd_caps(&intel_dp->aux, intel_dp->dpcd)) {
200 		intel_dp_reset_lttpr_common_caps(intel_dp);
201 		return -EIO;
202 	}
203 
204 	return lttpr_count;
205 }
206 
207 static u8 dp_voltage_max(u8 preemph)
208 {
209 	switch (preemph & DP_TRAIN_PRE_EMPHASIS_MASK) {
210 	case DP_TRAIN_PRE_EMPH_LEVEL_0:
211 		return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
212 	case DP_TRAIN_PRE_EMPH_LEVEL_1:
213 		return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
214 	case DP_TRAIN_PRE_EMPH_LEVEL_2:
215 		return DP_TRAIN_VOLTAGE_SWING_LEVEL_1;
216 	case DP_TRAIN_PRE_EMPH_LEVEL_3:
217 	default:
218 		return DP_TRAIN_VOLTAGE_SWING_LEVEL_0;
219 	}
220 }
221 
222 static u8 intel_dp_lttpr_voltage_max(struct intel_dp *intel_dp,
223 				     enum drm_dp_phy dp_phy)
224 {
225 	const u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);
226 
227 	if (drm_dp_lttpr_voltage_swing_level_3_supported(phy_caps))
228 		return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
229 	else
230 		return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
231 }
232 
233 static u8 intel_dp_lttpr_preemph_max(struct intel_dp *intel_dp,
234 				     enum drm_dp_phy dp_phy)
235 {
236 	const u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);
237 
238 	if (drm_dp_lttpr_pre_emphasis_level_3_supported(phy_caps))
239 		return DP_TRAIN_PRE_EMPH_LEVEL_3;
240 	else
241 		return DP_TRAIN_PRE_EMPH_LEVEL_2;
242 }
243 
244 static bool
245 intel_dp_phy_is_downstream_of_source(struct intel_dp *intel_dp,
246 				     enum drm_dp_phy dp_phy)
247 {
248 	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
249 	int lttpr_count = drm_dp_lttpr_count(intel_dp->lttpr_common_caps);
250 
251 	drm_WARN_ON_ONCE(&i915->drm, lttpr_count <= 0 && dp_phy != DP_PHY_DPRX);
252 
253 	return lttpr_count <= 0 || dp_phy == DP_PHY_LTTPR(lttpr_count - 1);
254 }
255 
256 static u8 intel_dp_phy_voltage_max(struct intel_dp *intel_dp,
257 				   const struct intel_crtc_state *crtc_state,
258 				   enum drm_dp_phy dp_phy)
259 {
260 	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
261 	u8 voltage_max;
262 
263 	/*
264 	 * Get voltage_max from the DPTX_PHY (source or LTTPR) upstream from
265 	 * the DPRX_PHY we train.
266 	 */
267 	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
268 		voltage_max = intel_dp->voltage_max(intel_dp, crtc_state);
269 	else
270 		voltage_max = intel_dp_lttpr_voltage_max(intel_dp, dp_phy + 1);
271 
272 	drm_WARN_ON_ONCE(&i915->drm,
273 			 voltage_max != DP_TRAIN_VOLTAGE_SWING_LEVEL_2 &&
274 			 voltage_max != DP_TRAIN_VOLTAGE_SWING_LEVEL_3);
275 
276 	return voltage_max;
277 }
278 
279 static u8 intel_dp_phy_preemph_max(struct intel_dp *intel_dp,
280 				   enum drm_dp_phy dp_phy)
281 {
282 	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
283 	u8 preemph_max;
284 
285 	/*
286 	 * Get preemph_max from the DPTX_PHY (source or LTTPR) upstream from
287 	 * the DPRX_PHY we train.
288 	 */
289 	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
290 		preemph_max = intel_dp->preemph_max(intel_dp);
291 	else
292 		preemph_max = intel_dp_lttpr_preemph_max(intel_dp, dp_phy + 1);
293 
294 	drm_WARN_ON_ONCE(&i915->drm,
295 			 preemph_max != DP_TRAIN_PRE_EMPH_LEVEL_2 &&
296 			 preemph_max != DP_TRAIN_PRE_EMPH_LEVEL_3);
297 
298 	return preemph_max;
299 }
300 
301 static bool has_per_lane_signal_levels(struct intel_dp *intel_dp,
302 				       enum drm_dp_phy dp_phy)
303 {
304 	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
305 
306 	return !intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy) ||
307 		DISPLAY_VER(i915) >= 11;
308 }
309 
310 /* 128b/132b */
311 static u8 intel_dp_get_lane_adjust_tx_ffe_preset(struct intel_dp *intel_dp,
312 						 const struct intel_crtc_state *crtc_state,
313 						 enum drm_dp_phy dp_phy,
314 						 const u8 link_status[DP_LINK_STATUS_SIZE],
315 						 int lane)
316 {
317 	u8 tx_ffe = 0;
318 
319 	if (has_per_lane_signal_levels(intel_dp, dp_phy)) {
320 		lane = min(lane, crtc_state->lane_count - 1);
321 		tx_ffe = drm_dp_get_adjust_tx_ffe_preset(link_status, lane);
322 	} else {
323 		for (lane = 0; lane < crtc_state->lane_count; lane++)
324 			tx_ffe = max(tx_ffe, drm_dp_get_adjust_tx_ffe_preset(link_status, lane));
325 	}
326 
327 	return tx_ffe;
328 }
329 
330 /* 8b/10b */
331 static u8 intel_dp_get_lane_adjust_vswing_preemph(struct intel_dp *intel_dp,
332 						  const struct intel_crtc_state *crtc_state,
333 						  enum drm_dp_phy dp_phy,
334 						  const u8 link_status[DP_LINK_STATUS_SIZE],
335 						  int lane)
336 {
337 	u8 v = 0;
338 	u8 p = 0;
339 	u8 voltage_max;
340 	u8 preemph_max;
341 
342 	if (has_per_lane_signal_levels(intel_dp, dp_phy)) {
343 		lane = min(lane, crtc_state->lane_count - 1);
344 
345 		v = drm_dp_get_adjust_request_voltage(link_status, lane);
346 		p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
347 	} else {
348 		for (lane = 0; lane < crtc_state->lane_count; lane++) {
349 			v = max(v, drm_dp_get_adjust_request_voltage(link_status, lane));
350 			p = max(p, drm_dp_get_adjust_request_pre_emphasis(link_status, lane));
351 		}
352 	}
353 
354 	preemph_max = intel_dp_phy_preemph_max(intel_dp, dp_phy);
355 	if (p >= preemph_max)
356 		p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
357 
358 	v = min(v, dp_voltage_max(p));
359 
360 	voltage_max = intel_dp_phy_voltage_max(intel_dp, crtc_state, dp_phy);
361 	if (v >= voltage_max)
362 		v = voltage_max | DP_TRAIN_MAX_SWING_REACHED;
363 
364 	return v | p;
365 }
366 
367 static u8 intel_dp_get_lane_adjust_train(struct intel_dp *intel_dp,
368 					 const struct intel_crtc_state *crtc_state,
369 					 enum drm_dp_phy dp_phy,
370 					 const u8 link_status[DP_LINK_STATUS_SIZE],
371 					 int lane)
372 {
373 	if (intel_dp_is_uhbr(crtc_state))
374 		return intel_dp_get_lane_adjust_tx_ffe_preset(intel_dp, crtc_state,
375 							      dp_phy, link_status, lane);
376 	else
377 		return intel_dp_get_lane_adjust_vswing_preemph(intel_dp, crtc_state,
378 							       dp_phy, link_status, lane);
379 }
380 
/*
 * Debug-log helpers: expand the requested vswing/pre-emphasis (8b/10b) or
 * TX FFE preset (128b/132b) adjustment values of all 4 lanes from a raw
 * link status, matching the "%d/%d/%d/%d" TRAIN_REQ_FMT format.
 */
#define TRAIN_REQ_FMT "%d/%d/%d/%d"
#define _TRAIN_REQ_VSWING_ARGS(link_status, lane) \
	(drm_dp_get_adjust_request_voltage((link_status), (lane)) >> DP_TRAIN_VOLTAGE_SWING_SHIFT)
#define TRAIN_REQ_VSWING_ARGS(link_status) \
	_TRAIN_REQ_VSWING_ARGS(link_status, 0), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 1), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 2), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 3)
#define _TRAIN_REQ_PREEMPH_ARGS(link_status, lane) \
	(drm_dp_get_adjust_request_pre_emphasis((link_status), (lane)) >> DP_TRAIN_PRE_EMPHASIS_SHIFT)
#define TRAIN_REQ_PREEMPH_ARGS(link_status) \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 0), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 1), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 2), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 3)
#define _TRAIN_REQ_TX_FFE_ARGS(link_status, lane) \
	drm_dp_get_adjust_tx_ffe_preset((link_status), (lane))
#define TRAIN_REQ_TX_FFE_ARGS(link_status) \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 0), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 1), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 2), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 3)
403 
404 void
405 intel_dp_get_adjust_train(struct intel_dp *intel_dp,
406 			  const struct intel_crtc_state *crtc_state,
407 			  enum drm_dp_phy dp_phy,
408 			  const u8 link_status[DP_LINK_STATUS_SIZE])
409 {
410 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
411 	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
412 	char phy_name[10];
413 	int lane;
414 
415 	if (intel_dp_is_uhbr(crtc_state)) {
416 		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 128b/132b, lanes: %d, "
417 			    "TX FFE request: " TRAIN_REQ_FMT "\n",
418 			    encoder->base.base.id, encoder->base.name,
419 			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
420 			    crtc_state->lane_count,
421 			    TRAIN_REQ_TX_FFE_ARGS(link_status));
422 	} else {
423 		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 8b/10b, lanes: %d, "
424 			    "vswing request: " TRAIN_REQ_FMT ", "
425 			    "pre-emphasis request: " TRAIN_REQ_FMT "\n",
426 			    encoder->base.base.id, encoder->base.name,
427 			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
428 			    crtc_state->lane_count,
429 			    TRAIN_REQ_VSWING_ARGS(link_status),
430 			    TRAIN_REQ_PREEMPH_ARGS(link_status));
431 	}
432 
433 	for (lane = 0; lane < 4; lane++)
434 		intel_dp->train_set[lane] =
435 			intel_dp_get_lane_adjust_train(intel_dp, crtc_state,
436 						       dp_phy, link_status, lane);
437 }
438 
439 static int intel_dp_training_pattern_set_reg(struct intel_dp *intel_dp,
440 					     enum drm_dp_phy dp_phy)
441 {
442 	return dp_phy == DP_PHY_DPRX ?
443 		DP_TRAINING_PATTERN_SET :
444 		DP_TRAINING_PATTERN_SET_PHY_REPEATER(dp_phy);
445 }
446 
447 static bool
448 intel_dp_set_link_train(struct intel_dp *intel_dp,
449 			const struct intel_crtc_state *crtc_state,
450 			enum drm_dp_phy dp_phy,
451 			u8 dp_train_pat)
452 {
453 	int reg = intel_dp_training_pattern_set_reg(intel_dp, dp_phy);
454 	u8 buf[sizeof(intel_dp->train_set) + 1];
455 	int len;
456 
457 	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
458 					       dp_phy, dp_train_pat);
459 
460 	buf[0] = dp_train_pat;
461 	/* DP_TRAINING_LANEx_SET follow DP_TRAINING_PATTERN_SET */
462 	memcpy(buf + 1, intel_dp->train_set, crtc_state->lane_count);
463 	len = crtc_state->lane_count + 1;
464 
465 	return drm_dp_dpcd_write(&intel_dp->aux, reg, buf, len) == len;
466 }
467 
468 static char dp_training_pattern_name(u8 train_pat)
469 {
470 	switch (train_pat) {
471 	case DP_TRAINING_PATTERN_1:
472 	case DP_TRAINING_PATTERN_2:
473 	case DP_TRAINING_PATTERN_3:
474 		return '0' + train_pat;
475 	case DP_TRAINING_PATTERN_4:
476 		return '4';
477 	default:
478 		MISSING_CASE(train_pat);
479 		return '?';
480 	}
481 }
482 
483 void
484 intel_dp_program_link_training_pattern(struct intel_dp *intel_dp,
485 				       const struct intel_crtc_state *crtc_state,
486 				       enum drm_dp_phy dp_phy,
487 				       u8 dp_train_pat)
488 {
489 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
490 	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
491 	u8 train_pat = intel_dp_training_pattern_symbol(dp_train_pat);
492 	char phy_name[10];
493 
494 	if (train_pat != DP_TRAINING_PATTERN_DISABLE)
495 		drm_dbg_kms(&i915->drm,
496 			    "[ENCODER:%d:%s][%s] Using DP training pattern TPS%c\n",
497 			    encoder->base.base.id, encoder->base.name,
498 			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
499 			    dp_training_pattern_name(train_pat));
500 
501 	intel_dp->set_link_train(intel_dp, crtc_state, dp_train_pat);
502 }
503 
/*
 * Debug-log helpers: expand the programmed vswing/pre-emphasis (8b/10b) or
 * TX FFE preset (128b/132b) levels of all 4 lanes from a train_set array,
 * appending "(max)" when the max level flag is set, matching TRAIN_SET_FMT.
 */
#define TRAIN_SET_FMT "%d%s/%d%s/%d%s/%d%s"
#define _TRAIN_SET_VSWING_ARGS(train_set) \
	((train_set) & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT, \
	(train_set) & DP_TRAIN_MAX_SWING_REACHED ? "(max)" : ""
#define TRAIN_SET_VSWING_ARGS(train_set) \
	_TRAIN_SET_VSWING_ARGS((train_set)[0]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[1]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[2]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[3])
#define _TRAIN_SET_PREEMPH_ARGS(train_set) \
	((train_set) & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT, \
	(train_set) & DP_TRAIN_MAX_PRE_EMPHASIS_REACHED ? "(max)" : ""
#define TRAIN_SET_PREEMPH_ARGS(train_set) \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[0]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[1]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[2]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[3])
#define _TRAIN_SET_TX_FFE_ARGS(train_set) \
	((train_set) & DP_TX_FFE_PRESET_VALUE_MASK), ""
#define TRAIN_SET_TX_FFE_ARGS(train_set) \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[0]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[1]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[2]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[3])
528 
529 void intel_dp_set_signal_levels(struct intel_dp *intel_dp,
530 				const struct intel_crtc_state *crtc_state,
531 				enum drm_dp_phy dp_phy)
532 {
533 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
534 	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
535 	char phy_name[10];
536 
537 	if (intel_dp_is_uhbr(crtc_state)) {
538 		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 128b/132b, lanes: %d, "
539 			    "TX FFE presets: " TRAIN_SET_FMT "\n",
540 			    encoder->base.base.id, encoder->base.name,
541 			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
542 			    crtc_state->lane_count,
543 			    TRAIN_SET_TX_FFE_ARGS(intel_dp->train_set));
544 	} else {
545 		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 8b/10b, lanes: %d, "
546 			    "vswing levels: " TRAIN_SET_FMT ", "
547 			    "pre-emphasis levels: " TRAIN_SET_FMT "\n",
548 			    encoder->base.base.id, encoder->base.name,
549 			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
550 			    crtc_state->lane_count,
551 			    TRAIN_SET_VSWING_ARGS(intel_dp->train_set),
552 			    TRAIN_SET_PREEMPH_ARGS(intel_dp->train_set));
553 	}
554 
555 	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
556 		encoder->set_signal_levels(encoder, crtc_state);
557 }
558 
559 static bool
560 intel_dp_reset_link_train(struct intel_dp *intel_dp,
561 			  const struct intel_crtc_state *crtc_state,
562 			  enum drm_dp_phy dp_phy,
563 			  u8 dp_train_pat)
564 {
565 	memset(intel_dp->train_set, 0, sizeof(intel_dp->train_set));
566 	intel_dp_set_signal_levels(intel_dp, crtc_state, dp_phy);
567 	return intel_dp_set_link_train(intel_dp, crtc_state, dp_phy, dp_train_pat);
568 }
569 
570 static bool
571 intel_dp_update_link_train(struct intel_dp *intel_dp,
572 			   const struct intel_crtc_state *crtc_state,
573 			   enum drm_dp_phy dp_phy)
574 {
575 	int reg = dp_phy == DP_PHY_DPRX ?
576 			    DP_TRAINING_LANE0_SET :
577 			    DP_TRAINING_LANE0_SET_PHY_REPEATER(dp_phy);
578 	int ret;
579 
580 	intel_dp_set_signal_levels(intel_dp, crtc_state, dp_phy);
581 
582 	ret = drm_dp_dpcd_write(&intel_dp->aux, reg,
583 				intel_dp->train_set, crtc_state->lane_count);
584 
585 	return ret == crtc_state->lane_count;
586 }
587 
588 /* 128b/132b */
589 static bool intel_dp_lane_max_tx_ffe_reached(u8 train_set_lane)
590 {
591 	return (train_set_lane & DP_TX_FFE_PRESET_VALUE_MASK) ==
592 		DP_TX_FFE_PRESET_VALUE_MASK;
593 }
594 
595 /*
596  * 8b/10b
597  *
598  * FIXME: The DP spec is very confusing here, also the Link CTS spec seems to
599  * have self contradicting tests around this area.
600  *
601  * In lieu of better ideas let's just stop when we've reached the max supported
602  * vswing with its max pre-emphasis, which is either 2+1 or 3+0 depending on
603  * whether vswing level 3 is supported or not.
604  */
605 static bool intel_dp_lane_max_vswing_reached(u8 train_set_lane)
606 {
607 	u8 v = (train_set_lane & DP_TRAIN_VOLTAGE_SWING_MASK) >>
608 		DP_TRAIN_VOLTAGE_SWING_SHIFT;
609 	u8 p = (train_set_lane & DP_TRAIN_PRE_EMPHASIS_MASK) >>
610 		DP_TRAIN_PRE_EMPHASIS_SHIFT;
611 
612 	if ((train_set_lane & DP_TRAIN_MAX_SWING_REACHED) == 0)
613 		return false;
614 
615 	if (v + p != 3)
616 		return false;
617 
618 	return true;
619 }
620 
621 static bool intel_dp_link_max_vswing_reached(struct intel_dp *intel_dp,
622 					     const struct intel_crtc_state *crtc_state)
623 {
624 	int lane;
625 
626 	for (lane = 0; lane < crtc_state->lane_count; lane++) {
627 		u8 train_set_lane = intel_dp->train_set[lane];
628 
629 		if (intel_dp_is_uhbr(crtc_state)) {
630 			if (!intel_dp_lane_max_tx_ffe_reached(train_set_lane))
631 				return false;
632 		} else {
633 			if (!intel_dp_lane_max_vswing_reached(train_set_lane))
634 				return false;
635 		}
636 	}
637 
638 	return true;
639 }
640 
641 /*
642  * Prepare link training by configuring the link parameters. On DDI platforms
643  * also enable the port here.
644  */
645 static bool
646 intel_dp_prepare_link_train(struct intel_dp *intel_dp,
647 			    const struct intel_crtc_state *crtc_state)
648 {
649 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
650 	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
651 	u8 link_config[2];
652 	u8 link_bw, rate_select;
653 
654 	if (intel_dp->prepare_link_retrain)
655 		intel_dp->prepare_link_retrain(intel_dp, crtc_state);
656 
657 	intel_dp_compute_rate(intel_dp, crtc_state->port_clock,
658 			      &link_bw, &rate_select);
659 
660 	if (link_bw)
661 		drm_dbg_kms(&i915->drm,
662 			    "[ENCODER:%d:%s] Using LINK_BW_SET value %02x\n",
663 			    encoder->base.base.id, encoder->base.name, link_bw);
664 	else
665 		drm_dbg_kms(&i915->drm,
666 			    "[ENCODER:%d:%s] Using LINK_RATE_SET value %02x\n",
667 			    encoder->base.base.id, encoder->base.name, rate_select);
668 
669 	/* Write the link configuration data */
670 	link_config[0] = link_bw;
671 	link_config[1] = crtc_state->lane_count;
672 	if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
673 		link_config[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
674 	drm_dp_dpcd_write(&intel_dp->aux, DP_LINK_BW_SET, link_config, 2);
675 
676 	/* eDP 1.4 rate select method. */
677 	if (!link_bw)
678 		drm_dp_dpcd_write(&intel_dp->aux, DP_LINK_RATE_SET,
679 				  &rate_select, 1);
680 
681 	link_config[0] = crtc_state->vrr.enable ? DP_MSA_TIMING_PAR_IGNORE_EN : 0;
682 	link_config[1] = intel_dp_is_uhbr(crtc_state) ?
683 		DP_SET_ANSI_128B132B : DP_SET_ANSI_8B10B;
684 	drm_dp_dpcd_write(&intel_dp->aux, DP_DOWNSPREAD_CTRL, link_config, 2);
685 
686 	return true;
687 }
688 
689 static bool intel_dp_adjust_request_changed(const struct intel_crtc_state *crtc_state,
690 					    const u8 old_link_status[DP_LINK_STATUS_SIZE],
691 					    const u8 new_link_status[DP_LINK_STATUS_SIZE])
692 {
693 	int lane;
694 
695 	for (lane = 0; lane < crtc_state->lane_count; lane++) {
696 		u8 old, new;
697 
698 		if (intel_dp_is_uhbr(crtc_state)) {
699 			old = drm_dp_get_adjust_tx_ffe_preset(old_link_status, lane);
700 			new = drm_dp_get_adjust_tx_ffe_preset(new_link_status, lane);
701 		} else {
702 			old = drm_dp_get_adjust_request_voltage(old_link_status, lane) |
703 				drm_dp_get_adjust_request_pre_emphasis(old_link_status, lane);
704 			new = drm_dp_get_adjust_request_voltage(new_link_status, lane) |
705 				drm_dp_get_adjust_request_pre_emphasis(new_link_status, lane);
706 		}
707 
708 		if (old != new)
709 			return true;
710 	}
711 
712 	return false;
713 }
714 
void
intel_dp_dump_link_status(struct intel_dp *intel_dp, enum drm_dp_phy dp_phy,
			  const u8 link_status[DP_LINK_STATUS_SIZE])
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	char phy_name[10];

	/*
	 * Dump the first 6 raw link status bytes for debugging: per-lane
	 * status (ln0_1, ln2_3), inter-lane alignment, sink status and the
	 * per-lane adjustment requests.
	 */
	drm_dbg_kms(&i915->drm,
		    "[ENCODER:%d:%s][%s] ln0_1:0x%x ln2_3:0x%x align:0x%x sink:0x%x adj_req0_1:0x%x adj_req2_3:0x%x\n",
		    encoder->base.base.id, encoder->base.name,
		    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
		    link_status[0], link_status[1], link_status[2],
		    link_status[3], link_status[4], link_status[5]);
}
730 
731 /*
732  * Perform the link training clock recovery phase on the given DP PHY using
733  * training pattern 1.
734  */
735 static bool
736 intel_dp_link_training_clock_recovery(struct intel_dp *intel_dp,
737 				      const struct intel_crtc_state *crtc_state,
738 				      enum drm_dp_phy dp_phy)
739 {
740 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
741 	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
742 	u8 old_link_status[DP_LINK_STATUS_SIZE] = {};
743 	int voltage_tries, cr_tries, max_cr_tries;
744 	u8 link_status[DP_LINK_STATUS_SIZE];
745 	bool max_vswing_reached = false;
746 	char phy_name[10];
747 	int delay_us;
748 
749 	delay_us = drm_dp_read_clock_recovery_delay(&intel_dp->aux,
750 						    intel_dp->dpcd, dp_phy,
751 						    intel_dp_is_uhbr(crtc_state));
752 
753 	intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name));
754 
755 	/* clock recovery */
756 	if (!intel_dp_reset_link_train(intel_dp, crtc_state, dp_phy,
757 				       DP_TRAINING_PATTERN_1 |
758 				       DP_LINK_SCRAMBLING_DISABLE)) {
759 		drm_err(&i915->drm, "[ENCODER:%d:%s][%s] Failed to enable link training\n",
760 			encoder->base.base.id, encoder->base.name, phy_name);
761 		return false;
762 	}
763 
764 	/*
765 	 * The DP 1.4 spec defines the max clock recovery retries value
766 	 * as 10 but for pre-DP 1.4 devices we set a very tolerant
767 	 * retry limit of 80 (4 voltage levels x 4 preemphasis levels x
768 	 * x 5 identical voltage retries). Since the previous specs didn't
769 	 * define a limit and created the possibility of an infinite loop
770 	 * we want to prevent any sync from triggering that corner case.
771 	 */
772 	if (intel_dp->dpcd[DP_DPCD_REV] >= DP_DPCD_REV_14)
773 		max_cr_tries = 10;
774 	else
775 		max_cr_tries = 80;
776 
777 	voltage_tries = 1;
778 	for (cr_tries = 0; cr_tries < max_cr_tries; ++cr_tries) {
779 		usleep_range(delay_us, 2 * delay_us);
780 
781 		if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, dp_phy,
782 						     link_status) < 0) {
783 			drm_err(&i915->drm, "[ENCODER:%d:%s][%s] Failed to get link status\n",
784 				encoder->base.base.id, encoder->base.name, phy_name);
785 			return false;
786 		}
787 
788 		if (drm_dp_clock_recovery_ok(link_status, crtc_state->lane_count)) {
789 			drm_dbg_kms(&i915->drm,
790 				    "[ENCODER:%d:%s][%s] Clock recovery OK\n",
791 				    encoder->base.base.id, encoder->base.name, phy_name);
792 			return true;
793 		}
794 
795 		if (voltage_tries == 5) {
796 			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
797 			drm_dbg_kms(&i915->drm,
798 				    "[ENCODER:%d:%s][%s] Same voltage tried 5 times\n",
799 				    encoder->base.base.id, encoder->base.name, phy_name);
800 			return false;
801 		}
802 
803 		if (max_vswing_reached) {
804 			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
805 			drm_dbg_kms(&i915->drm,
806 				    "[ENCODER:%d:%s][%s] Max Voltage Swing reached\n",
807 				    encoder->base.base.id, encoder->base.name, phy_name);
808 			return false;
809 		}
810 
811 		/* Update training set as requested by target */
812 		intel_dp_get_adjust_train(intel_dp, crtc_state, dp_phy,
813 					  link_status);
814 		if (!intel_dp_update_link_train(intel_dp, crtc_state, dp_phy)) {
815 			drm_err(&i915->drm,
816 				"[ENCODER:%d:%s][%s] Failed to update link training\n",
817 				encoder->base.base.id, encoder->base.name, phy_name);
818 			return false;
819 		}
820 
821 		if (!intel_dp_adjust_request_changed(crtc_state, old_link_status, link_status))
822 			++voltage_tries;
823 		else
824 			voltage_tries = 1;
825 
826 		memcpy(old_link_status, link_status, sizeof(link_status));
827 
828 		if (intel_dp_link_max_vswing_reached(intel_dp, crtc_state))
829 			max_vswing_reached = true;
830 	}
831 
832 	intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
833 	drm_err(&i915->drm,
834 		"[ENCODER:%d:%s][%s] Failed clock recovery %d times, giving up!\n",
835 		encoder->base.base.id, encoder->base.name, phy_name, max_cr_tries);
836 
837 	return false;
838 }
839 
840 /*
841  * Pick Training Pattern Sequence (TPS) for channel equalization. 128b/132b TPS2
842  * for UHBR+, TPS4 for HBR3 or for 1.4 devices that support it, TPS3 for HBR2 or
843  * 1.2 devices that support it, TPS2 otherwise.
844  */
845 static u32 intel_dp_training_pattern(struct intel_dp *intel_dp,
846 				     const struct intel_crtc_state *crtc_state,
847 				     enum drm_dp_phy dp_phy)
848 {
849 	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
850 	bool source_tps3, sink_tps3, source_tps4, sink_tps4;
851 
852 	/* UHBR+ use separate 128b/132b TPS2 */
853 	if (intel_dp_is_uhbr(crtc_state))
854 		return DP_TRAINING_PATTERN_2;
855 
856 	/*
857 	 * TPS4 support is mandatory for all downstream devices that
858 	 * support HBR3. There are no known eDP panels that support
859 	 * TPS4 as of Feb 2018 as per VESA eDP_v1.4b_E1 specification.
860 	 * LTTPRs must support TPS4.
861 	 */
862 	source_tps4 = intel_dp_source_supports_tps4(i915);
863 	sink_tps4 = dp_phy != DP_PHY_DPRX ||
864 		    drm_dp_tps4_supported(intel_dp->dpcd);
865 	if (source_tps4 && sink_tps4) {
866 		return DP_TRAINING_PATTERN_4;
867 	} else if (crtc_state->port_clock == 810000) {
868 		if (!source_tps4)
869 			drm_dbg_kms(&i915->drm,
870 				    "8.1 Gbps link rate without source TPS4 support\n");
871 		if (!sink_tps4)
872 			drm_dbg_kms(&i915->drm,
873 				    "8.1 Gbps link rate without sink TPS4 support\n");
874 	}
875 
876 	/*
877 	 * TPS3 support is mandatory for downstream devices that
878 	 * support HBR2. However, not all sinks follow the spec.
879 	 */
880 	source_tps3 = intel_dp_source_supports_tps3(i915);
881 	sink_tps3 = dp_phy != DP_PHY_DPRX ||
882 		    drm_dp_tps3_supported(intel_dp->dpcd);
883 	if (source_tps3 && sink_tps3) {
884 		return  DP_TRAINING_PATTERN_3;
885 	} else if (crtc_state->port_clock >= 540000) {
886 		if (!source_tps3)
887 			drm_dbg_kms(&i915->drm,
888 				    ">=5.4/6.48 Gbps link rate without source TPS3 support\n");
889 		if (!sink_tps3)
890 			drm_dbg_kms(&i915->drm,
891 				    ">=5.4/6.48 Gbps link rate without sink TPS3 support\n");
892 	}
893 
894 	return DP_TRAINING_PATTERN_2;
895 }
896 
897 /*
898  * Perform the link training channel equalization phase on the given DP PHY
899  * using one of training pattern 2, 3 or 4 depending on the source and
900  * sink capabilities.
901  */
902 static bool
903 intel_dp_link_training_channel_equalization(struct intel_dp *intel_dp,
904 					    const struct intel_crtc_state *crtc_state,
905 					    enum drm_dp_phy dp_phy)
906 {
907 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
908 	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
909 	int tries;
910 	u32 training_pattern;
911 	u8 link_status[DP_LINK_STATUS_SIZE];
912 	bool channel_eq = false;
913 	char phy_name[10];
914 	int delay_us;
915 
916 	delay_us = drm_dp_read_channel_eq_delay(&intel_dp->aux,
917 						intel_dp->dpcd, dp_phy,
918 						intel_dp_is_uhbr(crtc_state));
919 
920 	intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name));
921 
922 	training_pattern = intel_dp_training_pattern(intel_dp, crtc_state, dp_phy);
923 	/* Scrambling is disabled for TPS2/3 and enabled for TPS4 */
924 	if (training_pattern != DP_TRAINING_PATTERN_4)
925 		training_pattern |= DP_LINK_SCRAMBLING_DISABLE;
926 
927 	/* channel equalization */
928 	if (!intel_dp_set_link_train(intel_dp, crtc_state, dp_phy,
929 				     training_pattern)) {
930 		drm_err(&i915->drm,
931 			"[ENCODER:%d:%s][%s] Failed to start channel equalization\n",
932 			encoder->base.base.id, encoder->base.name,
933 			phy_name);
934 		return false;
935 	}
936 
937 	for (tries = 0; tries < 5; tries++) {
938 		usleep_range(delay_us, 2 * delay_us);
939 
940 		if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, dp_phy,
941 						     link_status) < 0) {
942 			drm_err(&i915->drm,
943 				"[ENCODER:%d:%s][%s] Failed to get link status\n",
944 				encoder->base.base.id, encoder->base.name, phy_name);
945 			break;
946 		}
947 
948 		/* Make sure clock is still ok */
949 		if (!drm_dp_clock_recovery_ok(link_status,
950 					      crtc_state->lane_count)) {
951 			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
952 			drm_dbg_kms(&i915->drm,
953 				    "[ENCODER:%d:%s][%s] Clock recovery check failed, cannot "
954 				    "continue channel equalization\n",
955 				    encoder->base.base.id, encoder->base.name, phy_name);
956 			break;
957 		}
958 
959 		if (drm_dp_channel_eq_ok(link_status,
960 					 crtc_state->lane_count)) {
961 			channel_eq = true;
962 			drm_dbg_kms(&i915->drm,
963 				    "[ENCODER:%d:%s][%s] Channel EQ done. DP Training successful\n",
964 				    encoder->base.base.id, encoder->base.name, phy_name);
965 			break;
966 		}
967 
968 		/* Update training set as requested by target */
969 		intel_dp_get_adjust_train(intel_dp, crtc_state, dp_phy,
970 					  link_status);
971 		if (!intel_dp_update_link_train(intel_dp, crtc_state, dp_phy)) {
972 			drm_err(&i915->drm,
973 				"[ENCODER:%d:%s][%s] Failed to update link training\n",
974 				encoder->base.base.id, encoder->base.name, phy_name);
975 			break;
976 		}
977 	}
978 
979 	/* Try 5 times, else fail and try at lower BW */
980 	if (tries == 5) {
981 		intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
982 		drm_dbg_kms(&i915->drm,
983 			    "[ENCODER:%d:%s][%s] Channel equalization failed 5 times\n",
984 			    encoder->base.base.id, encoder->base.name, phy_name);
985 	}
986 
987 	return channel_eq;
988 }
989 
990 static bool intel_dp_disable_dpcd_training_pattern(struct intel_dp *intel_dp,
991 						   enum drm_dp_phy dp_phy)
992 {
993 	int reg = intel_dp_training_pattern_set_reg(intel_dp, dp_phy);
994 	u8 val = DP_TRAINING_PATTERN_DISABLE;
995 
996 	return drm_dp_dpcd_write(&intel_dp->aux, reg, &val, 1) == 1;
997 }
998 
999 static int
1000 intel_dp_128b132b_intra_hop(struct intel_dp *intel_dp,
1001 			    const struct intel_crtc_state *crtc_state)
1002 {
1003 	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1004 	u8 sink_status;
1005 	int ret;
1006 
1007 	ret = drm_dp_dpcd_readb(&intel_dp->aux, DP_SINK_STATUS, &sink_status);
1008 	if (ret != 1) {
1009 		drm_dbg_kms(&i915->drm, "Failed to read sink status\n");
1010 		return ret < 0 ? ret : -EIO;
1011 	}
1012 
1013 	return sink_status & DP_INTRA_HOP_AUX_REPLY_INDICATION ? 1 : 0;
1014 }
1015 
1016 /**
1017  * intel_dp_stop_link_train - stop link training
1018  * @intel_dp: DP struct
1019  * @crtc_state: state for CRTC attached to the encoder
1020  *
1021  * Stop the link training of the @intel_dp port, disabling the training
1022  * pattern in the sink's DPCD, and disabling the test pattern symbol
1023  * generation on the port.
1024  *
1025  * What symbols are output on the port after this point is
1026  * platform specific: On DDI/VLV/CHV platforms it will be the idle pattern
1027  * with the pipe being disabled, on older platforms it's HW specific if/how an
1028  * idle pattern is generated, as the pipe is already enabled here for those.
1029  *
1030  * This function must be called after intel_dp_start_link_train().
1031  */
1032 void intel_dp_stop_link_train(struct intel_dp *intel_dp,
1033 			      const struct intel_crtc_state *crtc_state)
1034 {
1035 	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1036 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1037 
1038 	intel_dp->link_trained = true;
1039 
1040 	intel_dp_disable_dpcd_training_pattern(intel_dp, DP_PHY_DPRX);
1041 	intel_dp_program_link_training_pattern(intel_dp, crtc_state, DP_PHY_DPRX,
1042 					       DP_TRAINING_PATTERN_DISABLE);
1043 
1044 	if (intel_dp_is_uhbr(crtc_state) &&
1045 	    wait_for(intel_dp_128b132b_intra_hop(intel_dp, crtc_state) == 0, 500)) {
1046 		drm_dbg_kms(&i915->drm,
1047 			    "[ENCODER:%d:%s] 128b/132b intra-hop not clearing\n",
1048 			    encoder->base.base.id, encoder->base.name);
1049 	}
1050 }
1051 
1052 static bool
1053 intel_dp_link_train_phy(struct intel_dp *intel_dp,
1054 			const struct intel_crtc_state *crtc_state,
1055 			enum drm_dp_phy dp_phy)
1056 {
1057 	struct intel_connector *connector = intel_dp->attached_connector;
1058 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1059 	char phy_name[10];
1060 	bool ret = false;
1061 
1062 	if (!intel_dp_link_training_clock_recovery(intel_dp, crtc_state, dp_phy))
1063 		goto out;
1064 
1065 	if (!intel_dp_link_training_channel_equalization(intel_dp, crtc_state, dp_phy))
1066 		goto out;
1067 
1068 	ret = true;
1069 
1070 out:
1071 	drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
1072 		    "[CONNECTOR:%d:%s][ENCODER:%d:%s][%s] Link Training %s at link rate = %d, lane count = %d\n",
1073 		    connector->base.base.id, connector->base.name,
1074 		    encoder->base.base.id, encoder->base.name,
1075 		    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
1076 		    ret ? "passed" : "failed",
1077 		    crtc_state->port_clock, crtc_state->lane_count);
1078 
1079 	return ret;
1080 }
1081 
1082 static void intel_dp_schedule_fallback_link_training(struct intel_dp *intel_dp,
1083 						     const struct intel_crtc_state *crtc_state)
1084 {
1085 	struct intel_connector *intel_connector = intel_dp->attached_connector;
1086 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1087 
1088 	if (intel_dp->hobl_active) {
1089 		drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
1090 			    "[ENCODER:%d:%s] Link Training failed with HOBL active, "
1091 			    "not enabling it from now on",
1092 			    encoder->base.base.id, encoder->base.name);
1093 		intel_dp->hobl_failed = true;
1094 	} else if (intel_dp_get_link_train_fallback_values(intel_dp,
1095 							   crtc_state->port_clock,
1096 							   crtc_state->lane_count)) {
1097 		return;
1098 	}
1099 
1100 	/* Schedule a Hotplug Uevent to userspace to start modeset */
1101 	schedule_work(&intel_connector->modeset_retry_work);
1102 }
1103 
1104 /* Perform the link training on all LTTPRs and the DPRX on a link. */
1105 static bool
1106 intel_dp_link_train_all_phys(struct intel_dp *intel_dp,
1107 			     const struct intel_crtc_state *crtc_state,
1108 			     int lttpr_count)
1109 {
1110 	bool ret = true;
1111 	int i;
1112 
1113 	for (i = lttpr_count - 1; i >= 0; i--) {
1114 		enum drm_dp_phy dp_phy = DP_PHY_LTTPR(i);
1115 
1116 		ret = intel_dp_link_train_phy(intel_dp, crtc_state, dp_phy);
1117 		intel_dp_disable_dpcd_training_pattern(intel_dp, dp_phy);
1118 
1119 		if (!ret)
1120 			break;
1121 	}
1122 
1123 	if (ret)
1124 		ret = intel_dp_link_train_phy(intel_dp, crtc_state, DP_PHY_DPRX);
1125 
1126 	if (intel_dp->set_idle_link_train)
1127 		intel_dp->set_idle_link_train(intel_dp, crtc_state);
1128 
1129 	return ret;
1130 }
1131 
1132 /*
1133  * 128b/132b DP LANEx_EQ_DONE Sequence (DP 2.0 E11 3.5.2.16.1)
1134  */
1135 static bool
1136 intel_dp_128b132b_lane_eq(struct intel_dp *intel_dp,
1137 			  const struct intel_crtc_state *crtc_state)
1138 {
1139 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1140 	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
1141 	u8 link_status[DP_LINK_STATUS_SIZE];
1142 	int delay_us;
1143 	int try, max_tries = 20;
1144 	unsigned long deadline;
1145 	bool timeout = false;
1146 
1147 	/*
1148 	 * Reset signal levels. Start transmitting 128b/132b TPS1.
1149 	 *
1150 	 * Put DPRX and LTTPRs (if any) into intra-hop AUX mode by writing TPS1
1151 	 * in DP_TRAINING_PATTERN_SET.
1152 	 */
1153 	if (!intel_dp_reset_link_train(intel_dp, crtc_state, DP_PHY_DPRX,
1154 				       DP_TRAINING_PATTERN_1)) {
1155 		drm_err(&i915->drm,
1156 			"[ENCODER:%d:%s] Failed to start 128b/132b TPS1\n",
1157 			encoder->base.base.id, encoder->base.name);
1158 		return false;
1159 	}
1160 
1161 	delay_us = drm_dp_128b132b_read_aux_rd_interval(&intel_dp->aux);
1162 
1163 	/* Read the initial TX FFE settings. */
1164 	if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
1165 		drm_err(&i915->drm,
1166 			"[ENCODER:%d:%s] Failed to read TX FFE presets\n",
1167 			encoder->base.base.id, encoder->base.name);
1168 		return false;
1169 	}
1170 
1171 	/* Update signal levels and training set as requested. */
1172 	intel_dp_get_adjust_train(intel_dp, crtc_state, DP_PHY_DPRX, link_status);
1173 	if (!intel_dp_update_link_train(intel_dp, crtc_state, DP_PHY_DPRX)) {
1174 		drm_err(&i915->drm,
1175 			"[ENCODER:%d:%s] Failed to set initial TX FFE settings\n",
1176 			encoder->base.base.id, encoder->base.name);
1177 		return false;
1178 	}
1179 
1180 	/* Start transmitting 128b/132b TPS2. */
1181 	if (!intel_dp_set_link_train(intel_dp, crtc_state, DP_PHY_DPRX,
1182 				     DP_TRAINING_PATTERN_2)) {
1183 		drm_err(&i915->drm,
1184 			"[ENCODER:%d:%s] Failed to start 128b/132b TPS2\n",
1185 			encoder->base.base.id, encoder->base.name);
1186 		return false;
1187 	}
1188 
1189 	/* Time budget for the LANEx_EQ_DONE Sequence */
1190 	deadline = jiffies + msecs_to_jiffies_timeout(400);
1191 
1192 	for (try = 0; try < max_tries; try++) {
1193 		usleep_range(delay_us, 2 * delay_us);
1194 
1195 		/*
1196 		 * The delay may get updated. The transmitter shall read the
1197 		 * delay before link status during link training.
1198 		 */
1199 		delay_us = drm_dp_128b132b_read_aux_rd_interval(&intel_dp->aux);
1200 
1201 		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
1202 			drm_err(&i915->drm,
1203 				"[ENCODER:%d:%s] Failed to read link status\n",
1204 				encoder->base.base.id, encoder->base.name);
1205 			return false;
1206 		}
1207 
1208 		if (drm_dp_128b132b_link_training_failed(link_status)) {
1209 			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1210 			drm_err(&i915->drm,
1211 				"[ENCODER:%d:%s] Downstream link training failure\n",
1212 				encoder->base.base.id, encoder->base.name);
1213 			return false;
1214 		}
1215 
1216 		if (drm_dp_128b132b_lane_channel_eq_done(link_status, crtc_state->lane_count)) {
1217 			drm_dbg_kms(&i915->drm,
1218 				    "[ENCODER:%d:%s] Lane channel eq done\n",
1219 				    encoder->base.base.id, encoder->base.name);
1220 			break;
1221 		}
1222 
1223 		if (timeout) {
1224 			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1225 			drm_err(&i915->drm,
1226 				"[ENCODER:%d:%s] Lane channel eq timeout\n",
1227 				encoder->base.base.id, encoder->base.name);
1228 			return false;
1229 		}
1230 
1231 		if (time_after(jiffies, deadline))
1232 			timeout = true; /* try one last time after deadline */
1233 
1234 		/* Update signal levels and training set as requested. */
1235 		intel_dp_get_adjust_train(intel_dp, crtc_state, DP_PHY_DPRX, link_status);
1236 		if (!intel_dp_update_link_train(intel_dp, crtc_state, DP_PHY_DPRX)) {
1237 			drm_err(&i915->drm,
1238 				"[ENCODER:%d:%s] Failed to update TX FFE settings\n",
1239 				encoder->base.base.id, encoder->base.name);
1240 			return false;
1241 		}
1242 	}
1243 
1244 	if (try == max_tries) {
1245 		intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1246 		drm_err(&i915->drm,
1247 			"[ENCODER:%d:%s] Max loop count reached\n",
1248 			encoder->base.base.id, encoder->base.name);
1249 		return false;
1250 	}
1251 
1252 	for (;;) {
1253 		if (time_after(jiffies, deadline))
1254 			timeout = true; /* try one last time after deadline */
1255 
1256 		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
1257 			drm_err(&i915->drm,
1258 				"[ENCODER:%d:%s] Failed to read link status\n",
1259 				encoder->base.base.id, encoder->base.name);
1260 			return false;
1261 		}
1262 
1263 		if (drm_dp_128b132b_link_training_failed(link_status)) {
1264 			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1265 			drm_err(&i915->drm,
1266 				"[ENCODER:%d:%s] Downstream link training failure\n",
1267 				encoder->base.base.id, encoder->base.name);
1268 			return false;
1269 		}
1270 
1271 		if (drm_dp_128b132b_eq_interlane_align_done(link_status)) {
1272 			drm_dbg_kms(&i915->drm,
1273 				    "[ENCODER:%d:%s] Interlane align done\n",
1274 				    encoder->base.base.id, encoder->base.name);
1275 			break;
1276 		}
1277 
1278 		if (timeout) {
1279 			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1280 			drm_err(&i915->drm,
1281 				"[ENCODER:%d:%s] Interlane align timeout\n",
1282 				encoder->base.base.id, encoder->base.name);
1283 			return false;
1284 		}
1285 
1286 		usleep_range(2000, 3000);
1287 	}
1288 
1289 	return true;
1290 }
1291 
1292 /*
1293  * 128b/132b DP LANEx_CDS_DONE Sequence (DP 2.0 E11 3.5.2.16.2)
1294  */
1295 static bool
1296 intel_dp_128b132b_lane_cds(struct intel_dp *intel_dp,
1297 			   const struct intel_crtc_state *crtc_state,
1298 			   int lttpr_count)
1299 {
1300 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1301 	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
1302 	u8 link_status[DP_LINK_STATUS_SIZE];
1303 	unsigned long deadline;
1304 
1305 	if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_TRAINING_PATTERN_SET,
1306 			       DP_TRAINING_PATTERN_2_CDS) != 1) {
1307 		drm_err(&i915->drm,
1308 			"[ENCODER:%d:%s] Failed to start 128b/132b TPS2 CDS\n",
1309 			encoder->base.base.id, encoder->base.name);
1310 		return false;
1311 	}
1312 
1313 	/* Time budget for the LANEx_CDS_DONE Sequence */
1314 	deadline = jiffies + msecs_to_jiffies_timeout((lttpr_count + 1) * 20);
1315 
1316 	for (;;) {
1317 		bool timeout = false;
1318 
1319 		if (time_after(jiffies, deadline))
1320 			timeout = true; /* try one last time after deadline */
1321 
1322 		usleep_range(2000, 3000);
1323 
1324 		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
1325 			drm_err(&i915->drm,
1326 				"[ENCODER:%d:%s] Failed to read link status\n",
1327 				encoder->base.base.id, encoder->base.name);
1328 			return false;
1329 		}
1330 
1331 		if (drm_dp_128b132b_eq_interlane_align_done(link_status) &&
1332 		    drm_dp_128b132b_cds_interlane_align_done(link_status) &&
1333 		    drm_dp_128b132b_lane_symbol_locked(link_status, crtc_state->lane_count)) {
1334 			drm_dbg_kms(&i915->drm,
1335 				    "[ENCODER:%d:%s] CDS interlane align done\n",
1336 				    encoder->base.base.id, encoder->base.name);
1337 			break;
1338 		}
1339 
1340 		if (drm_dp_128b132b_link_training_failed(link_status)) {
1341 			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1342 			drm_err(&i915->drm,
1343 				"[ENCODER:%d:%s] Downstream link training failure\n",
1344 				encoder->base.base.id, encoder->base.name);
1345 			return false;
1346 		}
1347 
1348 		if (timeout) {
1349 			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
1350 			drm_err(&i915->drm,
1351 				"[ENCODER:%d:%s] CDS timeout\n",
1352 				encoder->base.base.id, encoder->base.name);
1353 			return false;
1354 		}
1355 	}
1356 
1357 	/* FIXME: Should DP_TRAINING_PATTERN_DISABLE be written first? */
1358 	if (intel_dp->set_idle_link_train)
1359 		intel_dp->set_idle_link_train(intel_dp, crtc_state);
1360 
1361 	return true;
1362 }
1363 
1364 /*
1365  * 128b/132b link training sequence. (DP 2.0 E11 SCR on link training.)
1366  */
1367 static bool
1368 intel_dp_128b132b_link_train(struct intel_dp *intel_dp,
1369 			     const struct intel_crtc_state *crtc_state,
1370 			     int lttpr_count)
1371 {
1372 	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1373 	struct intel_connector *connector = intel_dp->attached_connector;
1374 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1375 	bool passed = false;
1376 
1377 	if (wait_for(intel_dp_128b132b_intra_hop(intel_dp, crtc_state) == 0, 500)) {
1378 		drm_err(&i915->drm,
1379 			"[ENCODER:%d:%s] 128b/132b intra-hop not clear\n",
1380 			encoder->base.base.id, encoder->base.name);
1381 		return false;
1382 	}
1383 
1384 	if (intel_dp_128b132b_lane_eq(intel_dp, crtc_state) &&
1385 	    intel_dp_128b132b_lane_cds(intel_dp, crtc_state, lttpr_count))
1386 		passed = true;
1387 
1388 	drm_dbg_kms(&i915->drm,
1389 		    "[CONNECTOR:%d:%s][ENCODER:%d:%s] 128b/132b Link Training %s at link rate = %d, lane count = %d\n",
1390 		    connector->base.base.id, connector->base.name,
1391 		    encoder->base.base.id, encoder->base.name,
1392 		    passed ? "passed" : "failed",
1393 		    crtc_state->port_clock, crtc_state->lane_count);
1394 
1395 	return passed;
1396 }
1397 
1398 /**
1399  * intel_dp_start_link_train - start link training
1400  * @intel_dp: DP struct
1401  * @crtc_state: state for CRTC attached to the encoder
1402  *
1403  * Start the link training of the @intel_dp port, scheduling a fallback
1404  * retraining with reduced link rate/lane parameters if the link training
1405  * fails.
1406  * After calling this function intel_dp_stop_link_train() must be called.
1407  */
1408 void intel_dp_start_link_train(struct intel_dp *intel_dp,
1409 			       const struct intel_crtc_state *crtc_state)
1410 {
1411 	bool passed;
1412 	/*
1413 	 * TODO: Reiniting LTTPRs here won't be needed once proper connector
1414 	 * HW state readout is added.
1415 	 */
1416 	int lttpr_count = intel_dp_init_lttpr_and_dprx_caps(intel_dp);
1417 
1418 	if (lttpr_count < 0)
1419 		/* Still continue with enabling the port and link training. */
1420 		lttpr_count = 0;
1421 
1422 	intel_dp_prepare_link_train(intel_dp, crtc_state);
1423 
1424 	if (intel_dp_is_uhbr(crtc_state))
1425 		passed = intel_dp_128b132b_link_train(intel_dp, crtc_state, lttpr_count);
1426 	else
1427 		passed = intel_dp_link_train_all_phys(intel_dp, crtc_state, lttpr_count);
1428 
1429 	if (!passed)
1430 		intel_dp_schedule_fallback_link_training(intel_dp, crtc_state);
1431 }
1432