/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include "drmP.h"
#include "radeon_drm.h"
#include "radeon.h"

#include "atom.h"
#include "atom-bits.h"
#include "drm_dp_helper.h"

/* move these to drm_dp_helper.c/h */
#define DP_LINK_CONFIGURATION_SIZE 9
#define DP_LINK_STATUS_SIZE	   6
#define DP_DPCD_SIZE	           8

static char *voltage_names[] = {
        "0.4V", "0.6V", "0.8V", "1.2V"
};
static char *pre_emph_names[] = {
        "0dB", "3.5dB", "6dB", "9.5dB"
};

/***** radeon AUX functions *****/
union aux_channel_transaction {
	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
};

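/* Carry out a single AUX channel transaction through the AtomBIOS
 * ProcessAuxChannelTransaction command table.  The request is copied into
 * the ATOM scratch buffer, the table is executed, the raw reply status is
 * returned through @ack, and any reply payload is copied back out of the
 * scratch buffer at offset 16.  Returns the number of bytes received on
 * success or a negative error code on timeout/busy/error replies.
 */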
static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
				 u8 *send, int send_bytes,
				 u8 *recv, int recv_size,
				 u8 delay, u8 *ack)
{
	struct drm_device *dev = chan->dev;
	struct radeon_device *rdev = dev->dev_private;
	union aux_channel_transaction args;
	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
	unsigned char *base;
	int recv_bytes;

	memset(&args, 0, sizeof(args));

	base = (unsigned char *)rdev->mode_info.atom_context->scratch;

	memcpy(base, send, send_bytes);

	args.v1.lpAuxRequest = 0;
	args.v1.lpDataOut = 16;
	args.v1.ucDataOutLen = 0;
	args.v1.ucChannelID = chan->rec.i2c_id;
	args.v1.ucDelay = delay / 10;
	if (ASIC_IS_DCE4(rdev))
		args.v2.ucHPD_ID = chan->rec.hpd;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);

	*ack = args.v1.ucReplyStatus;

	/* timeout */
	if (args.v1.ucReplyStatus == 1) {
		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
		return -ETIMEDOUT;
	}

	/* flags not zero */
	if (args.v1.ucReplyStatus == 2) {
		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
		return -EBUSY;
	}

	/* error */
	if (args.v1.ucReplyStatus == 3) {
		DRM_DEBUG_KMS("dp_aux_ch error\n");
		return -EIO;
	}

	recv_bytes = args.v1.ucDataOutLen;
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	if (recv && recv_size)
		memcpy(recv, base + 16, recv_bytes);

	return recv_bytes;
}

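/* Native AUX write: a 4-byte request header (DPCD address in bytes 0-1,
 * command in the high nibble of byte 2, length encoding in byte 3) is
 * followed by up to 16 bytes of payload.  The transaction is retried for as
 * long as the sink replies with AUX_NATIVE_REPLY_DEFER.
 */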
static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector,
				      u16 address, u8 *send, u8 send_bytes, u8 delay)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	int ret;
	u8 msg[20];
	int msg_bytes = send_bytes + 4;
	u8 ack;

	if (send_bytes > 16)
		return -1;

	msg[0] = address;
	msg[1] = address >> 8;
	msg[2] = AUX_NATIVE_WRITE << 4;
	msg[3] = (msg_bytes << 4) | (send_bytes - 1);
	memcpy(&msg[4], send, send_bytes);

	while (1) {
		ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
					    msg, msg_bytes, NULL, 0, delay, &ack);
		if (ret < 0)
			return ret;
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			break;
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(400);
		else
			return -EIO;
	}

	return send_bytes;
}

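/* Native AUX read: same 4-byte request header as the write path but with no
 * payload; the reply data is copied straight into @recv.  A zero-length
 * reply is treated as a protocol error (-EPROTO), and deferred replies are
 * retried as above.
 */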
static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector,
				     u16 address, u8 *recv, int recv_bytes, u8 delay)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	u8 msg[4];
	int msg_bytes = 4;
	u8 ack;
	int ret;

	msg[0] = address;
	msg[1] = address >> 8;
	msg[2] = AUX_NATIVE_READ << 4;
	msg[3] = (msg_bytes << 4) | (recv_bytes - 1);

	while (1) {
		ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
					    msg, msg_bytes, recv, recv_bytes, delay, &ack);
		if (ret == 0)
			return -EPROTO;
		if (ret < 0)
			return ret;
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			return ret;
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(400);
		else
			return -EIO;
	}
}

static void radeon_write_dpcd_reg(struct radeon_connector *radeon_connector,
				 u16 reg, u8 val)
{
	radeon_dp_aux_native_write(radeon_connector, reg, &val, 1, 0);
}

static u8 radeon_read_dpcd_reg(struct radeon_connector *radeon_connector,
			       u16 reg)
{
	u8 val = 0;

	radeon_dp_aux_native_read(radeon_connector, reg, &val, 1, 0);

	return val;
}

int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
			 u8 write_byte, u8 *read_byte)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
	struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter;
	u16 address = algo_data->address;
	u8 msg[5];
	u8 reply[2];
	unsigned retry;
	int msg_bytes;
	int reply_bytes = 1;
	int ret;
	u8 ack;

	/* Set up the command byte */
	if (mode & MODE_I2C_READ)
		msg[2] = AUX_I2C_READ << 4;
	else
		msg[2] = AUX_I2C_WRITE << 4;

	if (!(mode & MODE_I2C_STOP))
		msg[2] |= AUX_I2C_MOT << 4;

	msg[0] = address;
	msg[1] = address >> 8;

	switch (mode) {
	case MODE_I2C_WRITE:
		msg_bytes = 5;
		msg[3] = msg_bytes << 4;
		msg[4] = write_byte;
		break;
	case MODE_I2C_READ:
		msg_bytes = 4;
		msg[3] = msg_bytes << 4;
		break;
	default:
		msg_bytes = 4;
		msg[3] = 3 << 4;
		break;
	}

	for (retry = 0; retry < 4; retry++) {
		ret = radeon_process_aux_ch(auxch,
					    msg, msg_bytes, reply, reply_bytes, 0, &ack);
		if (ret < 0) {
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
			return ret;
		}

		switch (ack & AUX_NATIVE_REPLY_MASK) {
		case AUX_NATIVE_REPLY_ACK:
			/* I2C-over-AUX Reply field is only valid
			 * when paired with AUX ACK.
			 */
			break;
		case AUX_NATIVE_REPLY_NACK:
			DRM_DEBUG_KMS("aux_ch native nack\n");
			return -EREMOTEIO;
		case AUX_NATIVE_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_ch native defer\n");
			udelay(400);
			continue;
		default:
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack);
			return -EREMOTEIO;
		}

		switch (ack & AUX_I2C_REPLY_MASK) {
		case AUX_I2C_REPLY_ACK:
			if (mode == MODE_I2C_READ)
				*read_byte = reply[0];
			return ret;
		case AUX_I2C_REPLY_NACK:
			DRM_DEBUG_KMS("aux_i2c nack\n");
			return -EREMOTEIO;
		case AUX_I2C_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_i2c defer\n");
			udelay(400);
			break;
		default:
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack);
			return -EREMOTEIO;
		}
	}

	DRM_ERROR("aux i2c too many retries, giving up\n");
	return -EREMOTEIO;
}

/***** general DP utility functions *****/

static u8 dp_link_status(u8 link_status[DP_LINK_STATUS_SIZE], int r)
{
	return link_status[r - DP_LANE0_1_STATUS];
}

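/* Each DPCD lane status register packs two lanes: four status bits for the
 * even lane in the low nibble and four for the odd lane in the high nibble.
 */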
static u8 dp_get_lane_status(u8 link_status[DP_LINK_STATUS_SIZE],
			     int lane)
{
	int i = DP_LANE0_1_STATUS + (lane >> 1);
	int s = (lane & 1) * 4;
	u8 l = dp_link_status(link_status, i);
	return (l >> s) & 0xf;
}

static bool dp_clock_recovery_ok(u8 link_status[DP_LINK_STATUS_SIZE],
				 int lane_count)
{
	int lane;
	u8 lane_status;

	for (lane = 0; lane < lane_count; lane++) {
		lane_status = dp_get_lane_status(link_status, lane);
		if ((lane_status & DP_LANE_CR_DONE) == 0)
			return false;
	}
	return true;
}

static bool dp_channel_eq_ok(u8 link_status[DP_LINK_STATUS_SIZE],
			     int lane_count)
{
	u8 lane_align;
	u8 lane_status;
	int lane;

	lane_align = dp_link_status(link_status,
				    DP_LANE_ALIGN_STATUS_UPDATED);
	if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
		return false;
	for (lane = 0; lane < lane_count; lane++) {
		lane_status = dp_get_lane_status(link_status, lane);
		if ((lane_status & DP_CHANNEL_EQ_BITS) != DP_CHANNEL_EQ_BITS)
			return false;
	}
	return true;
}

static u8 dp_get_adjust_request_voltage(u8 link_status[DP_LINK_STATUS_SIZE],
					int lane)
{
	int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
	int s = ((lane & 1) ?
		 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
		 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
	u8 l = dp_link_status(link_status, i);

	return ((l >> s) & 0x3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
}

static u8 dp_get_adjust_request_pre_emphasis(u8 link_status[DP_LINK_STATUS_SIZE],
					     int lane)
{
	int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
	int s = ((lane & 1) ?
		 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
		 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
	u8 l = dp_link_status(link_status, i);

	return ((l >> s) & 0x3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
}

#define DP_VOLTAGE_MAX         DP_TRAIN_VOLTAGE_SWING_1200
#define DP_PRE_EMPHASIS_MAX    DP_TRAIN_PRE_EMPHASIS_9_5

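/* Scan the sink's per-lane adjust requests and take the highest voltage
 * swing and pre-emphasis level requested by any lane; the same drive
 * settings are then applied to all lanes, with the MAX_*_REACHED flags set
 * once the caps above are hit.
 */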
static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
				int lane_count,
				u8 train_set[4])
{
	u8 v = 0;
	u8 p = 0;
	int lane;

	for (lane = 0; lane < lane_count; lane++) {
		u8 this_v = dp_get_adjust_request_voltage(link_status, lane);
		u8 this_p = dp_get_adjust_request_pre_emphasis(link_status, lane);

		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
			  lane,
			  voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
			  pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);

		if (this_v > v)
			v = this_v;
		if (this_p > p)
			p = this_p;
	}

	if (v >= DP_VOLTAGE_MAX)
		v |= DP_TRAIN_MAX_SWING_REACHED;

	if (p >= DP_PRE_EMPHASIS_MAX)
		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
		  voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
		  pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);

	for (lane = 0; lane < 4; lane++)
		train_set[lane] = v | p;
}

/* convert bits per color (as reported by the EDID) to bits per pixel;
 * a bpc of 0 (not reported) is treated as 8 bpc / 24 bpp
 */
static int convert_bpc_to_bpp(int bpc)
{
	if (bpc == 0)
		return 24;
	else
		return bpc * 3;
}

/* get the max pix clock supported by the link rate and lane num */
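/* (link_rate is the link symbol clock in kHz; after 8b/10b coding each
 * symbol carries 8 data bits per lane, e.g. 270000 kHz * 4 lanes * 8 / 24 bpp
 * gives a 360000 kHz maximum pixel clock)
 */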
static int dp_get_max_dp_pix_clock(int link_rate,
				   int lane_num,
				   int bpp)
{
	return (link_rate * lane_num * 8) / bpp;
}

static int dp_get_max_link_rate(u8 dpcd[DP_DPCD_SIZE])
{
	switch (dpcd[DP_MAX_LINK_RATE]) {
	case DP_LINK_BW_1_62:
	default:
		return 162000;
	case DP_LINK_BW_2_7:
		return 270000;
	case DP_LINK_BW_5_4:
		return 540000;
	}
}

static u8 dp_get_max_lane_number(u8 dpcd[DP_DPCD_SIZE])
{
	return dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
}

static u8 dp_get_dp_link_rate_coded(int link_rate)
{
	switch (link_rate) {
	case 162000:
	default:
		return DP_LINK_BW_1_62;
	case 270000:
		return DP_LINK_BW_2_7;
	case 540000:
		return DP_LINK_BW_5_4;
	}
}

/***** radeon specific DP functions *****/

/* Pick the smallest lane count (1, 2 or 4) that can carry the requested
 * pixel clock at the sink's maximum link rate, capped at the maximum lane
 * count reported in the DPCD.
 */
static int radeon_dp_get_dp_lane_number(struct drm_connector *connector,
					u8 dpcd[DP_DPCD_SIZE],
					int pix_clock)
{
	int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
	int max_link_rate = dp_get_max_link_rate(dpcd);
	int max_lane_num = dp_get_max_lane_number(dpcd);
	int lane_num;
	int max_dp_pix_clock;

	for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1) {
		max_dp_pix_clock = dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp);
		if (pix_clock <= max_dp_pix_clock)
			break;
	}

	return lane_num;
}

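/* Pick the lowest standard link rate (1.62, 2.7, or 5.4 Gbps per lane, the
 * last only on DP 1.2 capable configurations) that can carry the pixel
 * clock with the lane count chosen above; DP bridge encoders are always
 * given a 2.7 Gbps link clock.
 */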
static int radeon_dp_get_dp_link_clock(struct drm_connector *connector,
				       u8 dpcd[DP_DPCD_SIZE],
				       int pix_clock)
{
	int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
	int lane_num, max_pix_clock;

	if (radeon_connector_encoder_is_dp_bridge(connector))
		return 270000;

	lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock);
	max_pix_clock = dp_get_max_dp_pix_clock(162000, lane_num, bpp);
	if (pix_clock <= max_pix_clock)
		return 162000;
	max_pix_clock = dp_get_max_dp_pix_clock(270000, lane_num, bpp);
	if (pix_clock <= max_pix_clock)
		return 270000;
	if (radeon_connector_is_dp12_capable(connector)) {
		max_pix_clock = dp_get_max_dp_pix_clock(540000, lane_num, bpp);
		if (pix_clock <= max_pix_clock)
			return 540000;
	}

	return dp_get_max_link_rate(dpcd);
}

static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
				    int action, int dp_clock,
				    u8 ucconfig, u8 lane_num)
{
	DP_ENCODER_SERVICE_PARAMETERS args;
	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);

	memset(&args, 0, sizeof(args));
	args.ucLinkClock = dp_clock / 10;
	args.ucConfig = ucconfig;
	args.ucAction = action;
	args.ucLaneNum = lane_num;
	args.ucStatus = 0;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
	return args.ucStatus;
}

u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	struct drm_device *dev = radeon_connector->base.dev;
	struct radeon_device *rdev = dev->dev_private;

	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
					 dig_connector->dp_i2c_bus->rec.i2c_id, 0);
}

bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	u8 msg[25];
	int ret, i;

	ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, msg, 8, 0);
	if (ret > 0) {
		memcpy(dig_connector->dpcd, msg, 8);
		DRM_DEBUG_KMS("DPCD: ");
		for (i = 0; i < 8; i++)
			DRM_DEBUG_KMS("%02x ", msg[i]);
		DRM_DEBUG_KMS("\n");
		return true;
	}
	dig_connector->dpcd[0] = 0;
	return false;
}

static void radeon_dp_set_panel_mode(struct drm_encoder *encoder,
				     struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;

	if (!ASIC_IS_DCE4(rdev))
		return;

	if (radeon_connector_encoder_is_dp_bridge(connector))
		panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;

	atombios_dig_encoder_setup(encoder,
				   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
				   panel_mode);
}

void radeon_dp_set_link_config(struct drm_connector *connector,
			       struct drm_display_mode *mode)
{
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;

	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
		dig_connector->dp_clock =
			radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
		dig_connector->dp_lane_count =
			radeon_dp_get_dp_lane_number(connector, dig_connector->dpcd, mode->clock);
	}
}

int radeon_dp_mode_valid_helper(struct drm_connector *connector,
				struct drm_display_mode *mode)
{
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;
	int dp_clock;

	if (!radeon_connector->con_priv)
		return MODE_CLOCK_HIGH;
	dig_connector = radeon_connector->con_priv;

	dp_clock =
		radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);

	if ((dp_clock == 540000) &&
	    (!radeon_connector_is_dp12_capable(connector)))
		return MODE_CLOCK_HIGH;

	return MODE_OK;
}

static bool radeon_dp_get_link_status(struct radeon_connector *radeon_connector,
				      u8 link_status[DP_LINK_STATUS_SIZE])
{
	int ret;
	ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS,
					link_status, DP_LINK_STATUS_SIZE, 100);
	if (ret <= 0) {
		DRM_ERROR("displayport link status failed\n");
		return false;
	}

	DRM_DEBUG_KMS("link status %02x %02x %02x %02x %02x %02x\n",
		  link_status[0], link_status[1], link_status[2],
		  link_status[3], link_status[4], link_status[5]);
	return true;
}

struct radeon_dp_link_train_info {
	struct radeon_device *rdev;
	struct drm_encoder *encoder;
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	int enc_id;
	int dp_clock;
	int dp_lane_count;
	int rd_interval;
	bool tp3_supported;
	u8 dpcd[8];
	u8 train_set[4];
	u8 link_status[DP_LINK_STATUS_SIZE];
	u8 tries;
};

static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
{
	/* set the initial vs/emph on the source */
	atombios_dig_transmitter_setup(dp_info->encoder,
				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
				       0, dp_info->train_set[0]); /* sets all lanes at once */

	/* set the vs/emph on the sink */
	radeon_dp_aux_native_write(dp_info->radeon_connector, DP_TRAINING_LANE0_SET,
				   dp_info->train_set, dp_info->dp_lane_count, 0);
}

static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
{
	int rtp = 0;

	/* set training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev)) {
		switch (tp) {
		case DP_TRAINING_PATTERN_1:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
			break;
		case DP_TRAINING_PATTERN_2:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
			break;
		case DP_TRAINING_PATTERN_3:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
			break;
		}
		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
	} else {
		switch (tp) {
		case DP_TRAINING_PATTERN_1:
			rtp = 0;
			break;
		case DP_TRAINING_PATTERN_2:
			rtp = 1;
			break;
		}
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
					  dp_info->dp_clock, dp_info->enc_id, rtp);
	}

	/* enable training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_TRAINING_PATTERN_SET, tp);
}

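/* Prepare both ends for link training: power up the sink (DPCD 1.1+), set
 * downspread and panel mode, program the lane count (with enhanced framing
 * on DPCD 1.1+ sinks) and link rate, start training on the encoder, and
 * make sure no training pattern is active on the sink yet.
 */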
static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
{
	u8 tmp;

	/* power up the sink */
	if (dp_info->dpcd[0] >= 0x11)
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_SET_POWER, DP_SET_POWER_D0);

	/* possibly enable downspread on the sink */
	if (dp_info->dpcd[3] & 0x1)
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
	else
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_DOWNSPREAD_CTRL, 0);

	radeon_dp_set_panel_mode(dp_info->encoder, dp_info->connector);

	/* set the lane count on the sink */
	tmp = dp_info->dp_lane_count;
	if (dp_info->dpcd[0] >= 0x11)
		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LANE_COUNT_SET, tmp);

	/* set the link rate on the sink */
	tmp = dp_get_dp_link_rate_coded(dp_info->dp_clock);
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LINK_BW_SET, tmp);

	/* start training on the source */
	if (ASIC_IS_DCE4(dp_info->rdev))
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	/* disable the training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector,
			      DP_TRAINING_PATTERN_SET,
			      DP_TRAINING_PATTERN_DISABLE);

	return 0;
}

static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
{
	udelay(400);

	/* disable the training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector,
			      DP_TRAINING_PATTERN_SET,
			      DP_TRAINING_PATTERN_DISABLE);

	/* disable the training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev))
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	return 0;
}

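/* Clock recovery: train with pattern 1, poll the link status at the
 * DPCD-specified read interval, and bump the drive settings as requested by
 * the sink until every lane reports CR_DONE.  Give up once the maximum
 * voltage swing is reached on all lanes or the same voltage has been tried
 * five times in a row.
 */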
static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
{
	bool clock_recovery;
	u8 voltage;
	int i;

	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
	memset(dp_info->train_set, 0, 4);
	radeon_dp_update_vs_emph(dp_info);

	udelay(400);

	/* clock recovery loop */
	clock_recovery = false;
	dp_info->tries = 0;
	voltage = 0xff;
	while (1) {
		if (dp_info->rd_interval == 0)
			udelay(100);
		else
			mdelay(dp_info->rd_interval * 4);

		if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
			break;

		if (dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			clock_recovery = true;
			break;
		}

		for (i = 0; i < dp_info->dp_lane_count; i++) {
			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		}
		if (i == dp_info->dp_lane_count) {
			DRM_ERROR("clock recovery reached max voltage\n");
			break;
		}

		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++dp_info->tries;
			if (dp_info->tries == 5) {
				DRM_ERROR("clock recovery tried 5 times\n");
				break;
			}
		} else
			dp_info->tries = 0;

		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
	}
	if (!clock_recovery) {
		DRM_ERROR("clock recovery failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
			  DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}

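/* Channel equalization: switch to training pattern 3 when both the ASIC and
 * the sink support it, otherwise pattern 2, then loop until every lane
 * reports symbol lock, channel EQ done and interlane alignment, adjusting
 * the drive settings on each pass.
 */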
static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
{
	bool channel_eq;

	if (dp_info->tp3_supported)
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
	else
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);

	/* channel equalization loop */
	dp_info->tries = 0;
	channel_eq = false;
	while (1) {
		if (dp_info->rd_interval == 0)
			udelay(400);
		else
			mdelay(dp_info->rd_interval * 4);

		if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
			break;

		if (dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times */
		if (dp_info->tries > 5) {
			DRM_ERROR("channel eq failed: 5 tries\n");
			break;
		}

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
		dp_info->tries++;
	}

	if (!channel_eq) {
		DRM_ERROR("channel eq failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
			  >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}

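/* Full link training sequence: gather the encoder/connector state, read the
 * AUX read interval and TPS3 capability from the DPCD, then run init ->
 * clock recovery -> channel equalization, and finish up (training pattern
 * off, encoder told training is complete) whether or not training succeeded.
 */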
void radeon_dp_link_train(struct drm_encoder *encoder,
			  struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_encoder_atom_dig *dig;
	struct radeon_connector *radeon_connector;
	struct radeon_connector_atom_dig *dig_connector;
	struct radeon_dp_link_train_info dp_info;
	u8 tmp;

	if (!radeon_encoder->enc_priv)
		return;
	dig = radeon_encoder->enc_priv;

	radeon_connector = to_radeon_connector(connector);
	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
		return;

	dp_info.enc_id = 0;
	if (dig->dig_encoder)
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
	if (dig->linkb)
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;

	dp_info.rd_interval = radeon_read_dpcd_reg(radeon_connector, DP_TRAINING_AUX_RD_INTERVAL);
	tmp = radeon_read_dpcd_reg(radeon_connector, DP_MAX_LANE_COUNT);
	if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
		dp_info.tp3_supported = true;
	else
		dp_info.tp3_supported = false;

	memcpy(dp_info.dpcd, dig_connector->dpcd, 8);
	dp_info.rdev = rdev;
	dp_info.encoder = encoder;
	dp_info.connector = connector;
	dp_info.radeon_connector = radeon_connector;
	dp_info.dp_lane_count = dig_connector->dp_lane_count;
	dp_info.dp_clock = dig_connector->dp_clock;

	if (radeon_dp_link_train_init(&dp_info))
		goto done;
	if (radeon_dp_link_train_cr(&dp_info))
		goto done;
	if (radeon_dp_link_train_ce(&dp_info))
		goto done;
done:
	if (radeon_dp_link_train_finish(&dp_info))
		return;
}