1 /*
2  * Copyright 2007-11 Advanced Micro Devices, Inc.
3  * Copyright 2008 Red Hat Inc.
4  *
5  * Permission is hereby granted, free of charge, to any person obtaining a
6  * copy of this software and associated documentation files (the "Software"),
7  * to deal in the Software without restriction, including without limitation
8  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9  * and/or sell copies of the Software, and to permit persons to whom the
10  * Software is furnished to do so, subject to the following conditions:
11  *
12  * The above copyright notice and this permission notice shall be included in
13  * all copies or substantial portions of the Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21  * OTHER DEALINGS IN THE SOFTWARE.
22  *
23  * Authors: Dave Airlie
24  *          Alex Deucher
25  */
26 #include <drm/drmP.h>
27 #include <drm/drm_crtc_helper.h>
28 #include <drm/amdgpu_drm.h>
29 #include "amdgpu.h"
30 #include "amdgpu_connectors.h"
31 #include "atom.h"
32 #include "atombios_encoders.h"
33 #include "atombios_dp.h"
34 #include <linux/backlight.h>
35 #include "bif/bif_4_1_d.h"
36 
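/* The VBIOS caches the current panel backlight level in BIOS scratch
 * register 2; read it back and extract the ATOM_S2_CURRENT_BL_LEVEL field.
 */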
37 static u8
38 amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
39 {
40 	u8 backlight_level;
41 	u32 bios_2_scratch;
42 
43 	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
44 
45 	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
46 			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
47 
48 	return backlight_level;
49 }
50 
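/* Mirror the requested level into BIOS scratch register 2 with a
 * read-modify-write of the ATOM_S2_CURRENT_BL_LEVEL field.
 */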
51 static void
52 amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
53 					    u8 backlight_level)
54 {
55 	u32 bios_2_scratch;
56 
57 	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
58 
59 	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
60 	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
61 			   ATOM_S2_CURRENT_BL_LEVEL_MASK);
62 
63 	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
64 }
65 
66 u8
67 amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
68 {
69 	struct drm_device *dev = amdgpu_encoder->base.dev;
70 	struct amdgpu_device *adev = dev->dev_private;
71 
72 	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
73 		return 0;
74 
75 	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
76 }
77 
78 void
79 amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
80 				     u8 level)
81 {
82 	struct drm_encoder *encoder = &amdgpu_encoder->base;
83 	struct drm_device *dev = amdgpu_encoder->base.dev;
84 	struct amdgpu_device *adev = dev->dev_private;
85 	struct amdgpu_encoder_atom_dig *dig;
86 
87 	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
88 		return;
89 
90 	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
91 	    amdgpu_encoder->enc_priv) {
92 		dig = amdgpu_encoder->enc_priv;
93 		dig->backlight_level = level;
94 		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
95 
96 		switch (amdgpu_encoder->encoder_id) {
97 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
98 		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
99 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
100 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
101 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
102 			if (dig->backlight_level == 0)
103 				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
104 								       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
105 			else {
106 				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
107 								       ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
108 				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
109 								       ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
110 			}
111 			break;
112 		default:
113 			break;
114 		}
115 	}
116 }
117 
118 #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
119 
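/* Clamp the brightness requested by the backlight core to the range the
 * hardware accepts (0..AMDGPU_MAX_BL_LEVEL).
 */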
120 static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
121 {
122 	u8 level;
123 
124 	/* Convert brightness to hardware level */
125 	if (bd->props.brightness < 0)
126 		level = 0;
127 	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
128 		level = AMDGPU_MAX_BL_LEVEL;
129 	else
130 		level = bd->props.brightness;
131 
132 	return level;
133 }
134 
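/* backlight_ops.update_status callback: invoked by the backlight class when
 * the brightness property changes; program the clamped level into the encoder.
 */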
135 static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
136 {
137 	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
138 	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
139 
140 	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
141 					     amdgpu_atombios_encoder_backlight_level(bd));
142 
143 	return 0;
144 }
145 
146 static int
147 amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
148 {
149 	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
150 	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
151 	struct drm_device *dev = amdgpu_encoder->base.dev;
152 	struct amdgpu_device *adev = dev->dev_private;
153 
154 	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
155 }
156 
157 static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
158 	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
159 	.update_status	= amdgpu_atombios_encoder_update_backlight_status,
160 };
161 
162 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
163 				     struct drm_connector *drm_connector)
164 {
165 	struct drm_device *dev = amdgpu_encoder->base.dev;
166 	struct amdgpu_device *adev = dev->dev_private;
167 	struct backlight_device *bd;
168 	struct backlight_properties props;
169 	struct amdgpu_backlight_privdata *pdata;
170 	struct amdgpu_encoder_atom_dig *dig;
171 	u8 backlight_level;
172 	char bl_name[16];
173 
174 	/* Mac laptops with multiple GPUs use the gmux driver for backlight
175 	 * so don't register a backlight device
176 	 */
177 	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
178 	    (adev->pdev->device == 0x6741))
179 		return;
180 
181 	if (!amdgpu_encoder->enc_priv)
182 		return;
183 
184 	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
185 		return;
186 
187 	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
188 	if (!pdata) {
189 		DRM_ERROR("Memory allocation failed\n");
190 		goto error;
191 	}
192 
193 	memset(&props, 0, sizeof(props));
194 	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
195 	props.type = BACKLIGHT_RAW;
196 	snprintf(bl_name, sizeof(bl_name),
197 		 "amdgpu_bl%d", dev->primary->index);
198 	bd = backlight_device_register(bl_name, drm_connector->kdev,
199 				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
200 	if (IS_ERR(bd)) {
201 		DRM_ERROR("Backlight registration failed\n");
202 		goto error;
203 	}
204 
205 	pdata->encoder = amdgpu_encoder;
206 
207 	backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
208 
209 	dig = amdgpu_encoder->enc_priv;
210 	dig->bl_dev = bd;
211 
212 	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
213 	bd->props.power = FB_BLANK_UNBLANK;
214 	backlight_update_status(bd);
215 
216 	DRM_INFO("amdgpu atom DIG backlight initialized\n");
217 
218 	return;
219 
220 error:
221 	kfree(pdata);
222 	return;
223 }
224 
225 void
226 amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
227 {
228 	struct drm_device *dev = amdgpu_encoder->base.dev;
229 	struct amdgpu_device *adev = dev->dev_private;
230 	struct backlight_device *bd = NULL;
231 	struct amdgpu_encoder_atom_dig *dig;
232 
233 	if (!amdgpu_encoder->enc_priv)
234 		return;
235 
236 	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
237 		return;
238 
239 	dig = amdgpu_encoder->enc_priv;
240 	bd = dig->bl_dev;
241 	dig->bl_dev = NULL;
242 
243 	if (bd) {
		struct amdgpu_backlight_privdata *pdata;
245 
246 		pdata = bl_get_data(bd);
247 		backlight_device_unregister(bd);
248 		kfree(pdata);
249 
250 		DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
251 	}
252 }
253 
254 #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
255 
void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder,
					    struct drm_connector *drm_connector)
{
}
259 
260 void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
261 {
262 }
263 
264 #endif
265 
266 bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
267 {
268 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
269 	switch (amdgpu_encoder->encoder_id) {
270 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
271 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
272 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
273 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
274 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
275 		return true;
276 	default:
277 		return false;
278 	}
279 }
280 
281 bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
282 				 const struct drm_display_mode *mode,
283 				 struct drm_display_mode *adjusted_mode)
284 {
285 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
286 
287 	/* set the active encoder to connector routing */
288 	amdgpu_encoder_set_active_device(encoder);
289 	drm_mode_set_crtcinfo(adjusted_mode, 0);
290 
291 	/* hw bug */
292 	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
293 	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
294 		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
295 
296 	/* vertical FP must be at least 1 */
297 	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
298 		adjusted_mode->crtc_vsync_start++;
299 
300 	/* get the native mode for scaling */
301 	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
302 		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
303 	else if (amdgpu_encoder->rmx_type != RMX_OFF)
304 		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
305 
306 	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
307 	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
308 		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
309 		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
310 	}
311 
312 	return true;
313 }
314 
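/* Program the DAC1/DAC2 encoder control table for the analog DAC outputs. */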
315 static void
316 amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
317 {
318 	struct drm_device *dev = encoder->dev;
319 	struct amdgpu_device *adev = dev->dev_private;
320 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
321 	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
322 	int index = 0;
323 
324 	memset(&args, 0, sizeof(args));
325 
326 	switch (amdgpu_encoder->encoder_id) {
327 	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
328 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
329 		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
330 		break;
331 	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
332 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
333 		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
334 		break;
335 	}
336 
337 	args.ucAction = action;
338 	args.ucDacStandard = ATOM_DAC1_PS2;
339 	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
340 
341 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
342 
343 }
344 
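/* Translate the bound CRTC's bits-per-color into the ATOM
 * PANEL_*BIT_PER_COLOR encoding; with no CRTC attached, fall back to 8 bpc.
 */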
345 static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
346 {
347 	int bpc = 8;
348 
349 	if (encoder->crtc) {
350 		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
351 		bpc = amdgpu_crtc->bpc;
352 	}
353 
354 	switch (bpc) {
355 	case 0:
356 		return PANEL_BPC_UNDEFINE;
357 	case 6:
358 		return PANEL_6BIT_PER_COLOR;
359 	case 8:
360 	default:
361 		return PANEL_8BIT_PER_COLOR;
362 	case 10:
363 		return PANEL_10BIT_PER_COLOR;
364 	case 12:
365 		return PANEL_12BIT_PER_COLOR;
366 	case 16:
367 		return PANEL_16BIT_PER_COLOR;
368 	}
369 }
370 
371 union dvo_encoder_control {
372 	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
373 	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
374 	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
375 	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
376 };
377 
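/* DVOEncoderControl: the parameter layout depends on the table revision
 * (external TMDS on R4xx/R5xx, DVO on RS600/690/740, v3 on R6xx, v4 on DCE8),
 * so fill the matching union member before executing the table.
 */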
378 static void
379 amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
380 {
381 	struct drm_device *dev = encoder->dev;
382 	struct amdgpu_device *adev = dev->dev_private;
383 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
384 	union dvo_encoder_control args;
385 	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
386 	uint8_t frev, crev;
387 
388 	memset(&args, 0, sizeof(args));
389 
390 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
391 		return;
392 
393 	switch (frev) {
394 	case 1:
395 		switch (crev) {
396 		case 1:
397 			/* R4xx, R5xx */
398 			args.ext_tmds.sXTmdsEncoder.ucEnable = action;
399 
400 			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
401 				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
402 
403 			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
404 			break;
405 		case 2:
406 			/* RS600/690/740 */
407 			args.dvo.sDVOEncoder.ucAction = action;
408 			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
409 			/* DFP1, CRT1, TV1 depending on the type of port */
410 			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
411 
412 			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
413 				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
414 			break;
415 		case 3:
416 			/* R6xx */
417 			args.dvo_v3.ucAction = action;
418 			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
419 			args.dvo_v3.ucDVOConfig = 0; /* XXX */
420 			break;
421 		case 4:
422 			/* DCE8 */
423 			args.dvo_v4.ucAction = action;
424 			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
425 			args.dvo_v4.ucDVOConfig = 0; /* XXX */
426 			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
427 			break;
428 		default:
429 			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
430 			break;
431 		}
432 		break;
433 	default:
434 		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
435 		break;
436 	}
437 
438 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
439 }
440 
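/* Pick the ATOM encoder mode (DP, HDMI, DVI, LVDS, CRT, TV) for this encoder
 * based on the connector type, the amdgpu_audio module option, the connector's
 * audio property, and whether the EDID reports an HDMI sink.
 */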
441 int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
442 {
443 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
444 	struct drm_connector *connector;
445 	struct amdgpu_connector *amdgpu_connector;
446 	struct amdgpu_connector_atom_dig *dig_connector;
447 
448 	/* dp bridges are always DP */
449 	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
450 		return ATOM_ENCODER_MODE_DP;
451 
452 	/* DVO is always DVO */
453 	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
454 	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
455 		return ATOM_ENCODER_MODE_DVO;
456 
457 	connector = amdgpu_get_connector_for_encoder(encoder);
458 	/* if we don't have an active device yet, just use one of
459 	 * the connectors tied to the encoder.
460 	 */
461 	if (!connector)
462 		connector = amdgpu_get_connector_for_encoder_init(encoder);
463 	amdgpu_connector = to_amdgpu_connector(connector);
464 
465 	switch (connector->connector_type) {
466 	case DRM_MODE_CONNECTOR_DVII:
467 	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
468 		if (amdgpu_audio != 0) {
469 			if (amdgpu_connector->use_digital &&
470 			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
471 				return ATOM_ENCODER_MODE_HDMI;
472 			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
473 				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
474 				return ATOM_ENCODER_MODE_HDMI;
475 			else if (amdgpu_connector->use_digital)
476 				return ATOM_ENCODER_MODE_DVI;
477 			else
478 				return ATOM_ENCODER_MODE_CRT;
479 		} else if (amdgpu_connector->use_digital) {
480 			return ATOM_ENCODER_MODE_DVI;
481 		} else {
482 			return ATOM_ENCODER_MODE_CRT;
483 		}
484 		break;
485 	case DRM_MODE_CONNECTOR_DVID:
486 	case DRM_MODE_CONNECTOR_HDMIA:
487 	default:
488 		if (amdgpu_audio != 0) {
489 			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
490 				return ATOM_ENCODER_MODE_HDMI;
491 			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
492 				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
493 				return ATOM_ENCODER_MODE_HDMI;
494 			else
495 				return ATOM_ENCODER_MODE_DVI;
496 		} else {
497 			return ATOM_ENCODER_MODE_DVI;
498 		}
499 		break;
500 	case DRM_MODE_CONNECTOR_LVDS:
501 		return ATOM_ENCODER_MODE_LVDS;
502 		break;
503 	case DRM_MODE_CONNECTOR_DisplayPort:
504 		dig_connector = amdgpu_connector->con_priv;
505 		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
506 		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
507 			return ATOM_ENCODER_MODE_DP;
508 		} else if (amdgpu_audio != 0) {
509 			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
510 				return ATOM_ENCODER_MODE_HDMI;
511 			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
512 				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
513 				return ATOM_ENCODER_MODE_HDMI;
514 			else
515 				return ATOM_ENCODER_MODE_DVI;
516 		} else {
517 			return ATOM_ENCODER_MODE_DVI;
518 		}
519 		break;
520 	case DRM_MODE_CONNECTOR_eDP:
521 		return ATOM_ENCODER_MODE_DP;
522 	case DRM_MODE_CONNECTOR_DVIA:
523 	case DRM_MODE_CONNECTOR_VGA:
524 		return ATOM_ENCODER_MODE_CRT;
525 		break;
526 	case DRM_MODE_CONNECTOR_Composite:
527 	case DRM_MODE_CONNECTOR_SVIDEO:
528 	case DRM_MODE_CONNECTOR_9PinDIN:
529 		/* fix me */
530 		return ATOM_ENCODER_MODE_TV;
531 		/*return ATOM_ENCODER_MODE_CV;*/
532 		break;
533 	}
534 }
535 
536 /*
537  * DIG Encoder/Transmitter Setup
538  *
539  * DCE 6.0
 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B);
 *   supports up to 6 digital outputs.
 * - 6 DIG encoder blocks.
 * - DIG to PHY mapping is hardcoded:
544  * DIG1 drives UNIPHY0 link A, A+B
545  * DIG2 drives UNIPHY0 link B
546  * DIG3 drives UNIPHY1 link A, A+B
547  * DIG4 drives UNIPHY1 link B
548  * DIG5 drives UNIPHY2 link A, A+B
549  * DIG6 drives UNIPHY2 link B
550  *
551  * Routing
552  * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
553  * Examples:
554  * crtc0 -> dig2 -> LVTMA   links A+B -> TMDS/HDMI
555  * crtc1 -> dig1 -> UNIPHY0 link  B   -> DP
556  * crtc0 -> dig1 -> UNIPHY2 link  A   -> LVDS
557  * crtc1 -> dig2 -> UNIPHY1 link  B+A -> TMDS/HDMI
558  */
559 
560 union dig_encoder_control {
561 	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
562 	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
563 	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
564 	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
565 	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
566 };
567 
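/* Build the revision-specific DIGxEncoderControl parameters (action, pixel
 * clock, lane count, DP link rate, panel mode, DIG block selection) and
 * execute the table to configure the assigned DIG encoder.
 */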
568 void
569 amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
570 				   int action, int panel_mode)
571 {
572 	struct drm_device *dev = encoder->dev;
573 	struct amdgpu_device *adev = dev->dev_private;
574 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
575 	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
576 	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
577 	union dig_encoder_control args;
578 	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
579 	uint8_t frev, crev;
580 	int dp_clock = 0;
581 	int dp_lane_count = 0;
582 	int hpd_id = AMDGPU_HPD_NONE;
583 
584 	if (connector) {
585 		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
586 		struct amdgpu_connector_atom_dig *dig_connector =
587 			amdgpu_connector->con_priv;
588 
589 		dp_clock = dig_connector->dp_clock;
590 		dp_lane_count = dig_connector->dp_lane_count;
591 		hpd_id = amdgpu_connector->hpd.hpd;
592 	}
593 
594 	/* no dig encoder assigned */
595 	if (dig->dig_encoder == -1)
596 		return;
597 
598 	memset(&args, 0, sizeof(args));
599 
600 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
601 		return;
602 
603 	switch (frev) {
604 	case 1:
605 		switch (crev) {
606 		case 1:
607 			args.v1.ucAction = action;
608 			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
609 			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
610 				args.v3.ucPanelMode = panel_mode;
611 			else
612 				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
613 
614 			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
615 				args.v1.ucLaneNum = dp_lane_count;
616 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
617 				args.v1.ucLaneNum = 8;
618 			else
619 				args.v1.ucLaneNum = 4;
620 
621 			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
622 				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
623 			switch (amdgpu_encoder->encoder_id) {
624 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
625 				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
626 				break;
627 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
628 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
629 				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
630 				break;
631 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
632 				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
633 				break;
634 			}
635 			if (dig->linkb)
636 				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
637 			else
638 				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
639 			break;
640 		case 2:
641 		case 3:
642 			args.v3.ucAction = action;
643 			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
644 			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
645 				args.v3.ucPanelMode = panel_mode;
646 			else
647 				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
648 
649 			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
650 				args.v3.ucLaneNum = dp_lane_count;
651 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
652 				args.v3.ucLaneNum = 8;
653 			else
654 				args.v3.ucLaneNum = 4;
655 
656 			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
657 				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
658 			args.v3.acConfig.ucDigSel = dig->dig_encoder;
659 			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
660 			break;
661 		case 4:
662 			args.v4.ucAction = action;
663 			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
664 			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
665 				args.v4.ucPanelMode = panel_mode;
666 			else
667 				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
668 
669 			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
670 				args.v4.ucLaneNum = dp_lane_count;
671 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
672 				args.v4.ucLaneNum = 8;
673 			else
674 				args.v4.ucLaneNum = 4;
675 
676 			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
677 				if (dp_clock == 540000)
678 					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
679 				else if (dp_clock == 324000)
680 					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
681 				else if (dp_clock == 270000)
682 					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
683 				else
684 					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
685 			}
686 			args.v4.acConfig.ucDigSel = dig->dig_encoder;
687 			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
688 			if (hpd_id == AMDGPU_HPD_NONE)
689 				args.v4.ucHPD_ID = 0;
690 			else
691 				args.v4.ucHPD_ID = hpd_id + 1;
692 			break;
693 		case 5:
694 			switch (action) {
695 			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
696 				args.v5.asDPPanelModeParam.ucAction = action;
697 				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
698 				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
699 				break;
700 			case ATOM_ENCODER_CMD_STREAM_SETUP:
701 				args.v5.asStreamParam.ucAction = action;
702 				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
703 				args.v5.asStreamParam.ucDigMode =
704 					amdgpu_atombios_encoder_get_encoder_mode(encoder);
705 				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
706 					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
707 				else if (amdgpu_dig_monitor_is_duallink(encoder,
708 									amdgpu_encoder->pixel_clock))
709 					args.v5.asStreamParam.ucLaneNum = 8;
710 				else
711 					args.v5.asStreamParam.ucLaneNum = 4;
712 				args.v5.asStreamParam.ulPixelClock =
713 					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
714 				args.v5.asStreamParam.ucBitPerColor =
715 					amdgpu_atombios_encoder_get_bpc(encoder);
716 				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
717 				break;
718 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
719 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
720 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
721 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
722 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
723 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
724 			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
725 			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
726 				args.v5.asCmdParam.ucAction = action;
727 				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
728 				break;
729 			default:
730 				DRM_ERROR("Unsupported action 0x%x\n", action);
731 				break;
732 			}
733 			break;
734 		default:
735 			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
736 			break;
737 		}
738 		break;
739 	default:
740 		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
741 		break;
742 	}
743 
744 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
745 
746 }
747 
748 union dig_transmitter_control {
749 	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
750 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
751 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
752 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
753 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
754 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
755 };
756 
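/* Configure the UNIPHY/LVTMA transmitter through the revision-specific
 * TransmitterControl table: clock source, link (A/B), lane count, coherent
 * mode and, for DP, the symbol clock derived from the link rate.
 */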
757 void
758 amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
759 					      uint8_t lane_num, uint8_t lane_set)
760 {
761 	struct drm_device *dev = encoder->dev;
762 	struct amdgpu_device *adev = dev->dev_private;
763 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
764 	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
765 	struct drm_connector *connector;
766 	union dig_transmitter_control args;
767 	int index = 0;
768 	uint8_t frev, crev;
769 	bool is_dp = false;
770 	int pll_id = 0;
771 	int dp_clock = 0;
772 	int dp_lane_count = 0;
773 	int connector_object_id = 0;
774 	int igp_lane_info = 0;
775 	int dig_encoder = dig->dig_encoder;
776 	int hpd_id = AMDGPU_HPD_NONE;
777 
778 	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
779 		connector = amdgpu_get_connector_for_encoder_init(encoder);
780 		/* just needed to avoid bailing in the encoder check.  the encoder
781 		 * isn't used for init
782 		 */
783 		dig_encoder = 0;
784 	} else
785 		connector = amdgpu_get_connector_for_encoder(encoder);
786 
787 	if (connector) {
788 		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
789 		struct amdgpu_connector_atom_dig *dig_connector =
790 			amdgpu_connector->con_priv;
791 
792 		hpd_id = amdgpu_connector->hpd.hpd;
793 		dp_clock = dig_connector->dp_clock;
794 		dp_lane_count = dig_connector->dp_lane_count;
795 		connector_object_id =
796 			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
797 	}
798 
799 	if (encoder->crtc) {
800 		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
801 		pll_id = amdgpu_crtc->pll_id;
802 	}
803 
804 	/* no dig encoder assigned */
805 	if (dig_encoder == -1)
806 		return;
807 
808 	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
809 		is_dp = true;
810 
811 	memset(&args, 0, sizeof(args));
812 
813 	switch (amdgpu_encoder->encoder_id) {
814 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
815 		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
816 		break;
817 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
818 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
819 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
820 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
821 		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
822 		break;
823 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
824 		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
825 		break;
826 	}
827 
828 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
829 		return;
830 
831 	switch (frev) {
832 	case 1:
833 		switch (crev) {
834 		case 1:
835 			args.v1.ucAction = action;
836 			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
837 				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
838 			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
839 				args.v1.asMode.ucLaneSel = lane_num;
840 				args.v1.asMode.ucLaneSet = lane_set;
841 			} else {
842 				if (is_dp)
843 					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
844 				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
845 					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
846 				else
847 					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
848 			}
849 
850 			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
851 
852 			if (dig_encoder)
853 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
854 			else
855 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
856 
857 			if ((adev->flags & AMD_IS_APU) &&
858 			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
859 				if (is_dp ||
860 				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
861 					if (igp_lane_info & 0x1)
862 						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
863 					else if (igp_lane_info & 0x2)
864 						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
865 					else if (igp_lane_info & 0x4)
866 						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
867 					else if (igp_lane_info & 0x8)
868 						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
869 				} else {
870 					if (igp_lane_info & 0x3)
871 						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
872 					else if (igp_lane_info & 0xc)
873 						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
874 				}
875 			}
876 
877 			if (dig->linkb)
878 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
879 			else
880 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
881 
882 			if (is_dp)
883 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
884 			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
885 				if (dig->coherent_mode)
886 					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
887 				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
888 					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
889 			}
890 			break;
891 		case 2:
892 			args.v2.ucAction = action;
893 			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
894 				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
895 			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
896 				args.v2.asMode.ucLaneSel = lane_num;
897 				args.v2.asMode.ucLaneSet = lane_set;
898 			} else {
899 				if (is_dp)
900 					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
901 				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
902 					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
903 				else
904 					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
905 			}
906 
907 			args.v2.acConfig.ucEncoderSel = dig_encoder;
908 			if (dig->linkb)
909 				args.v2.acConfig.ucLinkSel = 1;
910 
911 			switch (amdgpu_encoder->encoder_id) {
912 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
913 				args.v2.acConfig.ucTransmitterSel = 0;
914 				break;
915 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
916 				args.v2.acConfig.ucTransmitterSel = 1;
917 				break;
918 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
919 				args.v2.acConfig.ucTransmitterSel = 2;
920 				break;
921 			}
922 
923 			if (is_dp) {
924 				args.v2.acConfig.fCoherentMode = 1;
925 				args.v2.acConfig.fDPConnector = 1;
926 			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
927 				if (dig->coherent_mode)
928 					args.v2.acConfig.fCoherentMode = 1;
929 				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
930 					args.v2.acConfig.fDualLinkConnector = 1;
931 			}
932 			break;
933 		case 3:
934 			args.v3.ucAction = action;
935 			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
936 				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
937 			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
938 				args.v3.asMode.ucLaneSel = lane_num;
939 				args.v3.asMode.ucLaneSet = lane_set;
940 			} else {
941 				if (is_dp)
942 					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
943 				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
944 					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
945 				else
946 					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
947 			}
948 
949 			if (is_dp)
950 				args.v3.ucLaneNum = dp_lane_count;
951 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
952 				args.v3.ucLaneNum = 8;
953 			else
954 				args.v3.ucLaneNum = 4;
955 
956 			if (dig->linkb)
957 				args.v3.acConfig.ucLinkSel = 1;
958 			if (dig_encoder & 1)
959 				args.v3.acConfig.ucEncoderSel = 1;
960 
961 			/* Select the PLL for the PHY
962 			 * DP PHY should be clocked from external src if there is
963 			 * one.
964 			 */
965 			/* On DCE4, if there is an external clock, it generates the DP ref clock */
966 			if (is_dp && adev->clock.dp_extclk)
967 				args.v3.acConfig.ucRefClkSource = 2; /* external src */
968 			else
969 				args.v3.acConfig.ucRefClkSource = pll_id;
970 
971 			switch (amdgpu_encoder->encoder_id) {
972 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
973 				args.v3.acConfig.ucTransmitterSel = 0;
974 				break;
975 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
976 				args.v3.acConfig.ucTransmitterSel = 1;
977 				break;
978 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
979 				args.v3.acConfig.ucTransmitterSel = 2;
980 				break;
981 			}
982 
983 			if (is_dp)
984 				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
985 			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
986 				if (dig->coherent_mode)
987 					args.v3.acConfig.fCoherentMode = 1;
988 				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
989 					args.v3.acConfig.fDualLinkConnector = 1;
990 			}
991 			break;
992 		case 4:
993 			args.v4.ucAction = action;
994 			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
995 				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
996 			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
997 				args.v4.asMode.ucLaneSel = lane_num;
998 				args.v4.asMode.ucLaneSet = lane_set;
999 			} else {
1000 				if (is_dp)
1001 					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
1002 				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1003 					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
1004 				else
1005 					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1006 			}
1007 
1008 			if (is_dp)
1009 				args.v4.ucLaneNum = dp_lane_count;
1010 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1011 				args.v4.ucLaneNum = 8;
1012 			else
1013 				args.v4.ucLaneNum = 4;
1014 
1015 			if (dig->linkb)
1016 				args.v4.acConfig.ucLinkSel = 1;
1017 			if (dig_encoder & 1)
1018 				args.v4.acConfig.ucEncoderSel = 1;
1019 
1020 			/* Select the PLL for the PHY
1021 			 * DP PHY should be clocked from external src if there is
1022 			 * one.
1023 			 */
1024 			/* On DCE5 DCPLL usually generates the DP ref clock */
1025 			if (is_dp) {
1026 				if (adev->clock.dp_extclk)
1027 					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
1028 				else
1029 					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
1030 			} else
1031 				args.v4.acConfig.ucRefClkSource = pll_id;
1032 
1033 			switch (amdgpu_encoder->encoder_id) {
1034 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1035 				args.v4.acConfig.ucTransmitterSel = 0;
1036 				break;
1037 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1038 				args.v4.acConfig.ucTransmitterSel = 1;
1039 				break;
1040 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1041 				args.v4.acConfig.ucTransmitterSel = 2;
1042 				break;
1043 			}
1044 
1045 			if (is_dp)
1046 				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1047 			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1048 				if (dig->coherent_mode)
1049 					args.v4.acConfig.fCoherentMode = 1;
1050 				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1051 					args.v4.acConfig.fDualLinkConnector = 1;
1052 			}
1053 			break;
1054 		case 5:
1055 			args.v5.ucAction = action;
1056 			if (is_dp)
1057 				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1058 			else
1059 				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1060 
1061 			switch (amdgpu_encoder->encoder_id) {
1062 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1063 				if (dig->linkb)
1064 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1065 				else
1066 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1067 				break;
1068 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1069 				if (dig->linkb)
1070 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1071 				else
1072 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1073 				break;
1074 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1075 				if (dig->linkb)
1076 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1077 				else
1078 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1079 				break;
1080 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1081 				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1082 				break;
1083 			}
1084 			if (is_dp)
1085 				args.v5.ucLaneNum = dp_lane_count;
1086 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1087 				args.v5.ucLaneNum = 8;
1088 			else
1089 				args.v5.ucLaneNum = 4;
1090 			args.v5.ucConnObjId = connector_object_id;
1091 			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1092 
1093 			if (is_dp && adev->clock.dp_extclk)
1094 				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1095 			else
1096 				args.v5.asConfig.ucPhyClkSrcId = pll_id;
1097 
1098 			if (is_dp)
1099 				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1100 			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1101 				if (dig->coherent_mode)
1102 					args.v5.asConfig.ucCoherentMode = 1;
1103 			}
1104 			if (hpd_id == AMDGPU_HPD_NONE)
1105 				args.v5.asConfig.ucHPDSel = 0;
1106 			else
1107 				args.v5.asConfig.ucHPDSel = hpd_id + 1;
1108 			args.v5.ucDigEncoderSel = 1 << dig_encoder;
1109 			args.v5.ucDPLaneSet = lane_set;
1110 			break;
1111 		case 6:
1112 			args.v6.ucAction = action;
1113 			if (is_dp)
1114 				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
1115 			else
1116 				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
1117 
1118 			switch (amdgpu_encoder->encoder_id) {
1119 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1120 				if (dig->linkb)
1121 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1122 				else
1123 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1124 				break;
1125 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1126 				if (dig->linkb)
1127 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1128 				else
1129 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1130 				break;
1131 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1132 				if (dig->linkb)
1133 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1134 				else
1135 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1136 				break;
1137 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1138 				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1139 				break;
1140 			}
1141 			if (is_dp)
1142 				args.v6.ucLaneNum = dp_lane_count;
1143 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1144 				args.v6.ucLaneNum = 8;
1145 			else
1146 				args.v6.ucLaneNum = 4;
1147 			args.v6.ucConnObjId = connector_object_id;
1148 			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
1149 				args.v6.ucDPLaneSet = lane_set;
1150 			else
1151 				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1152 
1153 			if (hpd_id == AMDGPU_HPD_NONE)
1154 				args.v6.ucHPDSel = 0;
1155 			else
1156 				args.v6.ucHPDSel = hpd_id + 1;
1157 			args.v6.ucDigEncoderSel = 1 << dig_encoder;
1158 			break;
1159 		default:
1160 			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1161 			break;
1162 		}
1163 		break;
1164 	default:
1165 		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1166 		break;
1167 	}
1168 
1169 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1170 }
1171 
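/* Toggle eDP panel power through the UNIPHY transmitter table.  After a
 * power-on request, poll HPD for up to 300 ms so callers know the panel is
 * ready before link training starts.
 */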
1172 bool
1173 amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1174 				     int action)
1175 {
1176 	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1177 	struct drm_device *dev = amdgpu_connector->base.dev;
1178 	struct amdgpu_device *adev = dev->dev_private;
1179 	union dig_transmitter_control args;
1180 	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1181 	uint8_t frev, crev;
1182 
1183 	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1184 		goto done;
1185 
1186 	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1187 	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1188 		goto done;
1189 
1190 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1191 		goto done;
1192 
1193 	memset(&args, 0, sizeof(args));
1194 
1195 	args.v1.ucAction = action;
1196 
1197 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1198 
1199 	/* wait for the panel to power up */
1200 	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1201 		int i;
1202 
1203 		for (i = 0; i < 300; i++) {
1204 			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1205 				return true;
1206 			mdelay(1);
1207 		}
1208 		return false;
1209 	}
1210 done:
1211 	return true;
1212 }
1213 
1214 union external_encoder_control {
1215 	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
1216 	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
1217 };
1218 
1219 static void
1220 amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1221 					struct drm_encoder *ext_encoder,
1222 					int action)
1223 {
1224 	struct drm_device *dev = encoder->dev;
1225 	struct amdgpu_device *adev = dev->dev_private;
1226 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1227 	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
1228 	union external_encoder_control args;
1229 	struct drm_connector *connector;
1230 	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
1231 	u8 frev, crev;
1232 	int dp_clock = 0;
1233 	int dp_lane_count = 0;
1234 	int connector_object_id = 0;
1235 	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1236 
1237 	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1238 		connector = amdgpu_get_connector_for_encoder_init(encoder);
1239 	else
1240 		connector = amdgpu_get_connector_for_encoder(encoder);
1241 
1242 	if (connector) {
1243 		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1244 		struct amdgpu_connector_atom_dig *dig_connector =
1245 			amdgpu_connector->con_priv;
1246 
1247 		dp_clock = dig_connector->dp_clock;
1248 		dp_lane_count = dig_connector->dp_lane_count;
1249 		connector_object_id =
1250 			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1251 	}
1252 
1253 	memset(&args, 0, sizeof(args));
1254 
1255 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1256 		return;
1257 
1258 	switch (frev) {
1259 	case 1:
1260 		/* no params on frev 1 */
1261 		break;
1262 	case 2:
1263 		switch (crev) {
1264 		case 1:
1265 		case 2:
1266 			args.v1.sDigEncoder.ucAction = action;
1267 			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1268 			args.v1.sDigEncoder.ucEncoderMode =
1269 				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1270 
1271 			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
1272 				if (dp_clock == 270000)
1273 					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
1274 				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1275 			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1276 				args.v1.sDigEncoder.ucLaneNum = 8;
1277 			else
1278 				args.v1.sDigEncoder.ucLaneNum = 4;
1279 			break;
1280 		case 3:
1281 			args.v3.sExtEncoder.ucAction = action;
1282 			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1283 				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
1284 			else
1285 				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1286 			args.v3.sExtEncoder.ucEncoderMode =
1287 				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1288 
1289 			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
1290 				if (dp_clock == 270000)
1291 					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
1292 				else if (dp_clock == 540000)
1293 					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
1294 				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1295 			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1296 				args.v3.sExtEncoder.ucLaneNum = 8;
1297 			else
1298 				args.v3.sExtEncoder.ucLaneNum = 4;
1299 			switch (ext_enum) {
1300 			case GRAPH_OBJECT_ENUM_ID1:
1301 				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
1302 				break;
1303 			case GRAPH_OBJECT_ENUM_ID2:
1304 				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
1305 				break;
1306 			case GRAPH_OBJECT_ENUM_ID3:
1307 				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
1308 				break;
1309 			}
1310 			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1311 			break;
1312 		default:
1313 			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1314 			return;
1315 		}
1316 		break;
1317 	default:
1318 		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1319 		return;
1320 	}
1321 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1322 }
1323 
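/* Enable/disable sequencing for a DIG encoder:
 * enable:  set the panel mode, set up the DIG encoder (and any external
 *          encoder), power up an eDP panel, enable the transmitter, link
 *          train DP and turn video on, then restore the LCD backlight level.
 * disable: DP video off, external encoder off, blank the LCD, put the DP
 *          receiver in D3, disable the transmitter and power off an eDP panel.
 */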
1324 static void
1325 amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1326 {
1327 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1328 	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1329 	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1330 	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1331 	struct amdgpu_connector *amdgpu_connector = NULL;
1332 	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;
1333 
1334 	if (connector) {
1335 		amdgpu_connector = to_amdgpu_connector(connector);
1336 		amdgpu_dig_connector = amdgpu_connector->con_priv;
1337 	}
1338 
1339 	if (action == ATOM_ENABLE) {
1340 		if (!connector)
1341 			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
1342 		else
1343 			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1344 
1345 		/* setup and enable the encoder */
1346 		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
1347 		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1348 						   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
1349 						   dig->panel_mode);
1350 		if (ext_encoder)
1351 			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1352 								EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
1353 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1354 		    connector) {
1355 			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1356 				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1357 								     ATOM_TRANSMITTER_ACTION_POWER_ON);
1358 				amdgpu_dig_connector->edp_on = true;
1359 			}
1360 		}
1361 		/* enable the transmitter */
1362 		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1363 						       ATOM_TRANSMITTER_ACTION_ENABLE,
1364 						       0, 0);
1365 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1366 		    connector) {
1367 			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
1368 			amdgpu_atombios_dp_link_train(encoder, connector);
1369 			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1370 		}
1371 		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1372 			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1373 		if (ext_encoder)
1374 			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
1375 	} else {
1376 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1377 		    connector)
1378 			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1379 							   ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
1380 		if (ext_encoder)
1381 			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
1382 		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1383 			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1384 							       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
1385 
1386 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1387 		    connector)
1388 			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
1389 		/* disable the transmitter */
1390 		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1391 						       ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1392 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1393 		    connector) {
1394 			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1395 				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1396 								     ATOM_TRANSMITTER_ACTION_POWER_OFF);
1397 				amdgpu_dig_connector->edp_on = false;
1398 			}
1399 		}
1400 	}
1401 }
1402 
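/* Map DRM DPMS states onto the ATOM tables: DPMS_ON enables the DIG/DVO/DAC
 * path, every other state disables it.
 */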
1403 void
1404 amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1405 {
1406 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1407 
1408 	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1409 		  amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1410 		  amdgpu_encoder->active_device);
1411 	switch (amdgpu_encoder->encoder_id) {
1412 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1413 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1414 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1415 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1416 		switch (mode) {
1417 		case DRM_MODE_DPMS_ON:
1418 			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1419 			break;
1420 		case DRM_MODE_DPMS_STANDBY:
1421 		case DRM_MODE_DPMS_SUSPEND:
1422 		case DRM_MODE_DPMS_OFF:
1423 			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1424 			break;
1425 		}
1426 		break;
1427 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1428 		switch (mode) {
1429 		case DRM_MODE_DPMS_ON:
1430 			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1431 			break;
1432 		case DRM_MODE_DPMS_STANDBY:
1433 		case DRM_MODE_DPMS_SUSPEND:
1434 		case DRM_MODE_DPMS_OFF:
1435 			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1436 			break;
1437 		}
1438 		break;
1439 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1440 		switch (mode) {
1441 		case DRM_MODE_DPMS_ON:
1442 			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1443 			break;
1444 		case DRM_MODE_DPMS_STANDBY:
1445 		case DRM_MODE_DPMS_SUSPEND:
1446 		case DRM_MODE_DPMS_OFF:
1447 			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1448 			break;
1449 		}
1450 		break;
1451 	default:
1452 		return;
1453 	}
1454 }
1455 
1456 union crtc_source_param {
1457 	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
1458 	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
1459 	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
1460 };
1461 
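/* Execute SelectCRTC_Source to route this encoder's active device or DIG
 * block to the CRTC it is attached to.
 */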
1462 void
1463 amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1464 {
1465 	struct drm_device *dev = encoder->dev;
1466 	struct amdgpu_device *adev = dev->dev_private;
1467 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1468 	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1469 	union crtc_source_param args;
1470 	int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1471 	uint8_t frev, crev;
1472 	struct amdgpu_encoder_atom_dig *dig;
1473 
1474 	memset(&args, 0, sizeof(args));
1475 
1476 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1477 		return;
1478 
	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
		default:
			args.v1.ucCRTC = amdgpu_crtc->crtc_id;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
				args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_LVDS:
			case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
				if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
					args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
				else
					args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_DVO1:
			case ENCODER_OBJECT_ID_INTERNAL_DDI:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
				args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_DAC1:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
				else
					args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_DAC2:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
				else
					args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
				break;
			}
			break;
		case 2:
			args.v2.ucCRTC = amdgpu_crtc->crtc_id;
			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);

				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
				else
					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
			} else {
				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
			}
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
				dig = amdgpu_encoder->enc_priv;
				switch (dig->dig_encoder) {
				case 0:
					args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
					break;
				case 1:
					args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
					break;
				case 2:
					args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
					break;
				case 3:
					args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
					break;
				case 4:
					args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
					break;
				case 5:
					args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
					break;
				case 6:
					args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
					break;
				}
				break;
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
				args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
				else
					args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
				else
					args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
				break;
			}
			break;
		case 3:
			args.v3.ucCRTC = amdgpu_crtc->crtc_id;
			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);

				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
					args.v3.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
					args.v3.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
				else
					args.v3.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
				args.v3.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
			} else {
				args.v3.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
			}
			args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
				dig = amdgpu_encoder->enc_priv;
				switch (dig->dig_encoder) {
				case 0:
					args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
					break;
				case 1:
					args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
					break;
				case 2:
					args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
					break;
				case 3:
					args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
					break;
				case 4:
					args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
					break;
				case 5:
					args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
					break;
				case 6:
					args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
					break;
				}
				break;
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
				args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
				else
					args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
				else
					args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
				break;
			}
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

/* This only needs to be called once at startup */
void
amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
{
	struct drm_device *dev = adev->ddev;
	struct drm_encoder *encoder;

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
		struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);

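		/* only the internal UNIPHY transmitters get the one-time INIT action */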
		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
							       0, 0);
			break;
		}

		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
	}
}

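/*
 * Kick off the DAC_LoadDetection command table for TV/CV/CRT devices.
 * The result is latched into the BIOS scratch registers, which the
 * caller reads back to determine the connector status.
 */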
static bool
amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
				 struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);

	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
				       ATOM_DEVICE_CV_SUPPORT |
				       ATOM_DEVICE_CRT_SUPPORT)) {
		DAC_LOAD_DETECTION_PS_ALLOCATION args;
		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
		uint8_t frev, crev;

		memset(&args, 0, sizeof(args));

		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
			return false;

		args.sDacload.ucMisc = 0;

		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
			args.sDacload.ucDacType = ATOM_DAC_A;
		else
			args.sDacload.ucDacType = ATOM_DAC_B;

		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
			if (crev >= 3)
				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
			if (crev >= 3)
				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
		}

		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

		return true;
	} else {
		return false;
	}
}

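/*
 * Detect an analog monitor on a DAC: run load detection, then check the
 * BIOS scratch 0 bits that correspond to the connector's devices.
 */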
enum drm_connector_status
amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
			    struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
	uint32_t bios_0_scratch;

	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
		DRM_DEBUG_KMS("detect returned false\n");
		return connector_status_unknown;
	}

	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);

	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
			return connector_status_connected; /* CTV */
		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
			return connector_status_connected; /* STV */
	}
	return connector_status_disconnected;
}

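/*
 * Load detection through an external DP bridge: ask the bridge encoder
 * to run DAC load detection and read the result from BIOS scratch 0.
 */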
enum drm_connector_status
amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
			    struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	u32 bios_0_scratch;

	if (!ext_encoder)
		return connector_status_unknown;

	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
		return connector_status_unknown;

	/* load detect on the dp bridge */
	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
						EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);

	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);

	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
			return connector_status_connected; /* CTV */
		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
			return connector_status_connected; /* STV */
	}
	return connector_status_disconnected;
}

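/* Set up DDC routing on the external DP bridge, if one is present. */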
void
amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
{
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);

	if (ext_encoder)
		/* ddc_setup on the dp bridge */
		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
							EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
}

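/*
 * Mirror the connector state into the BIOS scratch registers: connected
 * bits in scratch 0, active bits in scratch 3 and request bits in
 * scratch 6, so the firmware sees the same display state as the driver.
 */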
void
amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
				       struct drm_encoder *encoder,
				       bool connected)
{
	struct drm_device *dev = connector->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_connector *amdgpu_connector =
	    to_amdgpu_connector(connector);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;

	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);

	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("LCD1 connected\n");
			bios_0_scratch |= ATOM_S0_LCD1;
			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
		} else {
			DRM_DEBUG_KMS("LCD1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_LCD1;
			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("CRT1 connected\n");
			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
		} else {
			DRM_DEBUG_KMS("CRT1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("CRT2 connected\n");
			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
		} else {
			DRM_DEBUG_KMS("CRT2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP1 connected\n");
			bios_0_scratch |= ATOM_S0_DFP1;
			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
		} else {
			DRM_DEBUG_KMS("DFP1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP1;
			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP2 connected\n");
			bios_0_scratch |= ATOM_S0_DFP2;
			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
		} else {
			DRM_DEBUG_KMS("DFP2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP2;
			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP3 connected\n");
			bios_0_scratch |= ATOM_S0_DFP3;
			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
		} else {
			DRM_DEBUG_KMS("DFP3 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP3;
			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP4 connected\n");
			bios_0_scratch |= ATOM_S0_DFP4;
			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
		} else {
			DRM_DEBUG_KMS("DFP4 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP4;
			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP5 connected\n");
			bios_0_scratch |= ATOM_S0_DFP5;
			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
		} else {
			DRM_DEBUG_KMS("DFP5 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP5;
			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP6 connected\n");
			bios_0_scratch |= ATOM_S0_DFP6;
			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
		} else {
			DRM_DEBUG_KMS("DFP6 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP6;
			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
		}
	}

	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
}

union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct _ATOM_LVDS_INFO_V12 info_12;
};

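/*
 * Build the panel's native mode and related parameters from the
 * LVDS_Info data table, including any mode patch records appended to it.
 */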
struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_mode_info *mode_info = &adev->mode_info;
	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
	uint16_t data_offset, misc;
	union lvds_info *lvds_info;
	uint8_t frev, crev;
	struct amdgpu_encoder_atom_dig *lvds = NULL;
	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
				   &frev, &crev, &data_offset)) {
		lvds_info =
			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
		lvds =
		    kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);

		if (!lvds)
			return NULL;

		lvds->native_mode.clock =
		    le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
		lvds->native_mode.hdisplay =
		    le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
		lvds->native_mode.vdisplay =
		    le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
		lvds->panel_pwr_delay =
		    le16_to_cpu(lvds_info->info.usOffDelayInMs);
		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;

		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
		if (misc & ATOM_VSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
		if (misc & ATOM_HSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
		if (misc & ATOM_COMPOSITESYNC)
			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
		if (misc & ATOM_INTERLACE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
		if (misc & ATOM_DOUBLE_CLOCK_MODE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;

		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);

		/* set crtc values */
		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);

		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;

		encoder->native_mode = lvds->native_mode;

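		/* enum id 2 indicates the second link (link B) of the transmitter */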
		if (encoder_enum == 2)
			lvds->linkb = true;
		else
			lvds->linkb = false;

		/* parse the lcd record table */
		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
			bool bad_record = false;
			u8 *record;

			if ((frev == 1) && (crev < 2))
				/* absolute */
				record = (u8 *)(mode_info->atom_context->bios +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			else
				/* relative */
				record = (u8 *)(mode_info->atom_context->bios +
						data_offset +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			while (*record != ATOM_RECORD_END_TYPE) {
				switch (*record) {
				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
					record += sizeof(ATOM_PATCH_RECORD_MODE);
					break;
				case LCD_RTS_RECORD_TYPE:
					record += sizeof(ATOM_LCD_RTS_RECORD);
					break;
				case LCD_CAP_RECORD_TYPE:
					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
					break;
				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
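					/*
					 * The VBIOS may supply a hard-coded EDID for
					 * panels that cannot provide one; validate it
					 * and cache it for later use.
					 */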
					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
					if (fake_edid_record->ucFakeEDIDLength) {
						struct edid *edid;
						int edid_size =
							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
						edid = kmalloc(edid_size, GFP_KERNEL);
						if (edid) {
							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
							       fake_edid_record->ucFakeEDIDLength);

							if (drm_edid_is_valid(edid)) {
								adev->mode_info.bios_hardcoded_edid = edid;
								adev->mode_info.bios_hardcoded_edid_size = edid_size;
							} else
								kfree(edid);
						}
					}
					record += fake_edid_record->ucFakeEDIDLength ?
						fake_edid_record->ucFakeEDIDLength + 2 :
						sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
					break;
				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
					lvds->native_mode.width_mm = le16_to_cpu(panel_res_record->usHSize);
					lvds->native_mode.height_mm = le16_to_cpu(panel_res_record->usVSize);
					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
					break;
				default:
					DRM_ERROR("Bad LCD record %d\n", *record);
					bad_record = true;
					break;
				}
				if (bad_record)
					break;
			}
		}
	}
	return lvds;
}

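/*
 * Allocate the DIG encoder private data; coherent mode is enabled by
 * default and the DIG block assignment is filled in later.
 */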
struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
{
	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);

	if (!dig)
		return NULL;

	/* coherent mode by default */
	dig->coherent_mode = true;
	dig->dig_encoder = -1;

	if (encoder_enum == 2)
		dig->linkb = true;
	else
		dig->linkb = false;

	return dig;
}
