1 /*
2  * Copyright 2007-8 Advanced Micro Devices, Inc.
3  * Copyright 2008 Red Hat Inc.
4  *
5  * Permission is hereby granted, free of charge, to any person obtaining a
6  * copy of this software and associated documentation files (the "Software"),
7  * to deal in the Software without restriction, including without limitation
8  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9  * and/or sell copies of the Software, and to permit persons to whom the
10  * Software is furnished to do so, subject to the following conditions:
11  *
12  * The above copyright notice and this permission notice shall be included in
13  * all copies or substantial portions of the Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21  * OTHER DEALINGS IN THE SOFTWARE.
22  *
23  * Authors: Dave Airlie
24  *          Alex Deucher
25  */
26 #include <drm/drmP.h>
27 #include <drm/drm_crtc_helper.h>
28 #include <drm/radeon_drm.h>
29 #include <drm/drm_fixed.h>
30 #include "radeon.h"
31 #include "atom.h"
32 #include "atom-bits.h"
33 
34 static void atombios_overscan_setup(struct drm_crtc *crtc,
35 				    struct drm_display_mode *mode,
36 				    struct drm_display_mode *adjusted_mode)
37 {
38 	struct drm_device *dev = crtc->dev;
39 	struct radeon_device *rdev = dev->dev_private;
40 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
41 	SET_CRTC_OVERSCAN_PS_ALLOCATION args;
42 	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan);
43 	int a1, a2;
44 
45 	memset(&args, 0, sizeof(args));
46 
47 	args.ucCRTC = radeon_crtc->crtc_id;
48 
49 	switch (radeon_crtc->rmx_type) {
50 	case RMX_CENTER:
51 		args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
52 		args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
53 		args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
54 		args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
55 		break;
56 	case RMX_ASPECT:
57 		a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay;
58 		a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay;
59 
60 		if (a1 > a2) {
61 			args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
62 			args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
63 		} else if (a2 > a1) {
64 			args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
65 			args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
66 		}
67 		break;
68 	case RMX_FULL:
69 	default:
70 		args.usOverscanRight = cpu_to_le16(radeon_crtc->h_border);
71 		args.usOverscanLeft = cpu_to_le16(radeon_crtc->h_border);
72 		args.usOverscanBottom = cpu_to_le16(radeon_crtc->v_border);
73 		args.usOverscanTop = cpu_to_le16(radeon_crtc->v_border);
74 		break;
75 	}
76 	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
77 }
78 
79 static void atombios_scaler_setup(struct drm_crtc *crtc)
80 {
81 	struct drm_device *dev = crtc->dev;
82 	struct radeon_device *rdev = dev->dev_private;
83 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
84 	ENABLE_SCALER_PS_ALLOCATION args;
85 	int index = GetIndexIntoMasterTable(COMMAND, EnableScaler);
86 
87 	/* fixme - fill in enc_priv for atom dac */
88 	enum radeon_tv_std tv_std = TV_STD_NTSC;
89 	bool is_tv = false, is_cv = false;
90 	struct drm_encoder *encoder;
91 
92 	if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id)
93 		return;
94 
95 	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
96 		/* find tv std */
97 		if (encoder->crtc == crtc) {
98 			struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
99 			if (radeon_encoder->active_device & ATOM_DEVICE_TV_SUPPORT) {
100 				struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv;
101 				tv_std = tv_dac->tv_std;
102 				is_tv = true;
103 			}
104 		}
105 	}
106 
107 	memset(&args, 0, sizeof(args));
108 
109 	args.ucScaler = radeon_crtc->crtc_id;
110 
111 	if (is_tv) {
112 		switch (tv_std) {
113 		case TV_STD_NTSC:
114 		default:
115 			args.ucTVStandard = ATOM_TV_NTSC;
116 			break;
117 		case TV_STD_PAL:
118 			args.ucTVStandard = ATOM_TV_PAL;
119 			break;
120 		case TV_STD_PAL_M:
121 			args.ucTVStandard = ATOM_TV_PALM;
122 			break;
123 		case TV_STD_PAL_60:
124 			args.ucTVStandard = ATOM_TV_PAL60;
125 			break;
126 		case TV_STD_NTSC_J:
127 			args.ucTVStandard = ATOM_TV_NTSCJ;
128 			break;
129 		case TV_STD_SCART_PAL:
130 			args.ucTVStandard = ATOM_TV_PAL; /* ??? */
131 			break;
132 		case TV_STD_SECAM:
133 			args.ucTVStandard = ATOM_TV_SECAM;
134 			break;
135 		case TV_STD_PAL_CN:
136 			args.ucTVStandard = ATOM_TV_PALCN;
137 			break;
138 		}
139 		args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
140 	} else if (is_cv) {
141 		args.ucTVStandard = ATOM_TV_CV;
142 		args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
143 	} else {
144 		switch (radeon_crtc->rmx_type) {
145 		case RMX_FULL:
146 			args.ucEnable = ATOM_SCALER_EXPANSION;
147 			break;
148 		case RMX_CENTER:
149 			args.ucEnable = ATOM_SCALER_CENTER;
150 			break;
151 		case RMX_ASPECT:
152 			args.ucEnable = ATOM_SCALER_EXPANSION;
153 			break;
154 		default:
155 			if (ASIC_IS_AVIVO(rdev))
156 				args.ucEnable = ATOM_SCALER_DISABLE;
157 			else
158 				args.ucEnable = ATOM_SCALER_CENTER;
159 			break;
160 		}
161 	}
162 	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
163 	if ((is_tv || is_cv)
164 	    && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) {
165 		atom_rv515_force_tv_scaler(rdev, radeon_crtc);
166 	}
167 }
168 
169 static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
170 {
171 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
172 	struct drm_device *dev = crtc->dev;
173 	struct radeon_device *rdev = dev->dev_private;
174 	int index =
175 	    GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters);
176 	ENABLE_CRTC_PS_ALLOCATION args;
177 
178 	memset(&args, 0, sizeof(args));
179 
180 	args.ucCRTC = radeon_crtc->crtc_id;
181 	args.ucEnable = lock;
182 
183 	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
184 }
185 
186 static void atombios_enable_crtc(struct drm_crtc *crtc, int state)
187 {
188 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
189 	struct drm_device *dev = crtc->dev;
190 	struct radeon_device *rdev = dev->dev_private;
191 	int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
192 	ENABLE_CRTC_PS_ALLOCATION args;
193 
194 	memset(&args, 0, sizeof(args));
195 
196 	args.ucCRTC = radeon_crtc->crtc_id;
197 	args.ucEnable = state;
198 
199 	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
200 }
201 
202 static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state)
203 {
204 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
205 	struct drm_device *dev = crtc->dev;
206 	struct radeon_device *rdev = dev->dev_private;
207 	int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq);
208 	ENABLE_CRTC_PS_ALLOCATION args;
209 
210 	memset(&args, 0, sizeof(args));
211 
212 	args.ucCRTC = radeon_crtc->crtc_id;
213 	args.ucEnable = state;
214 
215 	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
216 }
217 
218 static void atombios_blank_crtc(struct drm_crtc *crtc, int state)
219 {
220 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
221 	struct drm_device *dev = crtc->dev;
222 	struct radeon_device *rdev = dev->dev_private;
223 	int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC);
224 	BLANK_CRTC_PS_ALLOCATION args;
225 
226 	memset(&args, 0, sizeof(args));
227 
228 	args.ucCRTC = radeon_crtc->crtc_id;
229 	args.ucBlanking = state;
230 
231 	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
232 }
233 
234 static void atombios_powergate_crtc(struct drm_crtc *crtc, int state)
235 {
236 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
237 	struct drm_device *dev = crtc->dev;
238 	struct radeon_device *rdev = dev->dev_private;
239 	int index = GetIndexIntoMasterTable(COMMAND, EnableDispPowerGating);
240 	ENABLE_DISP_POWER_GATING_PARAMETERS_V2_1 args;
241 
242 	memset(&args, 0, sizeof(args));
243 
244 	args.ucDispPipeId = radeon_crtc->crtc_id;
245 	args.ucEnable = state;
246 
247 	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
248 }
249 
/* DPMS handler for atom CRTCs.  The sequencing below is deliberate:
 * power management is recomputed before enabling and after disabling,
 * and DCE6 pipes are ungated before enable / gated after disable.
 */
void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		radeon_crtc->enabled = true;
		/* adjust pm to dpms changes BEFORE enabling crtcs */
		radeon_pm_compute_clocks(rdev);
		/* DCE6: ungate the pipe, but not while a mode set is in
		 * progress */
		if (ASIC_IS_DCE6(rdev) && !radeon_crtc->in_mode_set)
			atombios_powergate_crtc(crtc, ATOM_DISABLE);
		atombios_enable_crtc(crtc, ATOM_ENABLE);
		/* DCE3-DCE5 have a separate memory request enable */
		if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
			atombios_enable_crtc_memreq(crtc, ATOM_ENABLE);
		atombios_blank_crtc(crtc, ATOM_DISABLE);
		drm_vblank_post_modeset(dev, radeon_crtc->crtc_id);
		radeon_crtc_load_lut(crtc);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		drm_vblank_pre_modeset(dev, radeon_crtc->crtc_id);
		/* only blank if the crtc was actually running */
		if (radeon_crtc->enabled)
			atombios_blank_crtc(crtc, ATOM_ENABLE);
		if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
			atombios_enable_crtc_memreq(crtc, ATOM_DISABLE);
		atombios_enable_crtc(crtc, ATOM_DISABLE);
		radeon_crtc->enabled = false;
		if (ASIC_IS_DCE6(rdev) && !radeon_crtc->in_mode_set)
			atombios_powergate_crtc(crtc, ATOM_ENABLE);
		/* adjust pm to dpms changes AFTER disabling crtcs */
		radeon_pm_compute_clocks(rdev);
		break;
	}
}
287 
288 static void
289 atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
290 			     struct drm_display_mode *mode)
291 {
292 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
293 	struct drm_device *dev = crtc->dev;
294 	struct radeon_device *rdev = dev->dev_private;
295 	SET_CRTC_USING_DTD_TIMING_PARAMETERS args;
296 	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming);
297 	u16 misc = 0;
298 
299 	memset(&args, 0, sizeof(args));
300 	args.usH_Size = cpu_to_le16(mode->crtc_hdisplay - (radeon_crtc->h_border * 2));
301 	args.usH_Blanking_Time =
302 		cpu_to_le16(mode->crtc_hblank_end - mode->crtc_hdisplay + (radeon_crtc->h_border * 2));
303 	args.usV_Size = cpu_to_le16(mode->crtc_vdisplay - (radeon_crtc->v_border * 2));
304 	args.usV_Blanking_Time =
305 		cpu_to_le16(mode->crtc_vblank_end - mode->crtc_vdisplay + (radeon_crtc->v_border * 2));
306 	args.usH_SyncOffset =
307 		cpu_to_le16(mode->crtc_hsync_start - mode->crtc_hdisplay + radeon_crtc->h_border);
308 	args.usH_SyncWidth =
309 		cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
310 	args.usV_SyncOffset =
311 		cpu_to_le16(mode->crtc_vsync_start - mode->crtc_vdisplay + radeon_crtc->v_border);
312 	args.usV_SyncWidth =
313 		cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
314 	args.ucH_Border = radeon_crtc->h_border;
315 	args.ucV_Border = radeon_crtc->v_border;
316 
317 	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
318 		misc |= ATOM_VSYNC_POLARITY;
319 	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
320 		misc |= ATOM_HSYNC_POLARITY;
321 	if (mode->flags & DRM_MODE_FLAG_CSYNC)
322 		misc |= ATOM_COMPOSITESYNC;
323 	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
324 		misc |= ATOM_INTERLACE;
325 	if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
326 		misc |= ATOM_DOUBLE_CLOCK_MODE;
327 
328 	args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
329 	args.ucCRTC = radeon_crtc->crtc_id;
330 
331 	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
332 }
333 
334 static void atombios_crtc_set_timing(struct drm_crtc *crtc,
335 				     struct drm_display_mode *mode)
336 {
337 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
338 	struct drm_device *dev = crtc->dev;
339 	struct radeon_device *rdev = dev->dev_private;
340 	SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args;
341 	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing);
342 	u16 misc = 0;
343 
344 	memset(&args, 0, sizeof(args));
345 	args.usH_Total = cpu_to_le16(mode->crtc_htotal);
346 	args.usH_Disp = cpu_to_le16(mode->crtc_hdisplay);
347 	args.usH_SyncStart = cpu_to_le16(mode->crtc_hsync_start);
348 	args.usH_SyncWidth =
349 		cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
350 	args.usV_Total = cpu_to_le16(mode->crtc_vtotal);
351 	args.usV_Disp = cpu_to_le16(mode->crtc_vdisplay);
352 	args.usV_SyncStart = cpu_to_le16(mode->crtc_vsync_start);
353 	args.usV_SyncWidth =
354 		cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
355 
356 	args.ucOverscanRight = radeon_crtc->h_border;
357 	args.ucOverscanLeft = radeon_crtc->h_border;
358 	args.ucOverscanBottom = radeon_crtc->v_border;
359 	args.ucOverscanTop = radeon_crtc->v_border;
360 
361 	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
362 		misc |= ATOM_VSYNC_POLARITY;
363 	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
364 		misc |= ATOM_HSYNC_POLARITY;
365 	if (mode->flags & DRM_MODE_FLAG_CSYNC)
366 		misc |= ATOM_COMPOSITESYNC;
367 	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
368 		misc |= ATOM_INTERLACE;
369 	if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
370 		misc |= ATOM_DOUBLE_CLOCK_MODE;
371 
372 	args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
373 	args.ucCRTC = radeon_crtc->crtc_id;
374 
375 	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
376 }
377 
378 static void atombios_disable_ss(struct radeon_device *rdev, int pll_id)
379 {
380 	u32 ss_cntl;
381 
382 	if (ASIC_IS_DCE4(rdev)) {
383 		switch (pll_id) {
384 		case ATOM_PPLL1:
385 			ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL);
386 			ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
387 			WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl);
388 			break;
389 		case ATOM_PPLL2:
390 			ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL);
391 			ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
392 			WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl);
393 			break;
394 		case ATOM_DCPLL:
395 		case ATOM_PPLL_INVALID:
396 			return;
397 		}
398 	} else if (ASIC_IS_AVIVO(rdev)) {
399 		switch (pll_id) {
400 		case ATOM_PPLL1:
401 			ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
402 			ss_cntl &= ~1;
403 			WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl);
404 			break;
405 		case ATOM_PPLL2:
406 			ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
407 			ss_cntl &= ~1;
408 			WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl);
409 			break;
410 		case ATOM_DCPLL:
411 		case ATOM_PPLL_INVALID:
412 			return;
413 		}
414 	}
415 }
416 
417 
/* Argument block for the EnableSpreadSpectrumOnPPLL table; the member
 * used depends on the display engine generation (see
 * atombios_crtc_program_ss).
 */
union atom_enable_ss {
	ENABLE_LVDS_SS_PARAMETERS lvds_ss;
	ENABLE_LVDS_SS_PARAMETERS_V2 lvds_ss_2;
	ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1;
	ENABLE_SPREAD_SPECTRUM_ON_PPLL_V2 v2;
	ENABLE_SPREAD_SPECTRUM_ON_PPLL_V3 v3;
};
425 
/* Enable or disable spread spectrum on a PLL via the
 * EnableSpreadSpectrumOnPPLL table.  The parameter layout differs per
 * display engine generation, hence the version branches below.
 */
static void atombios_crtc_program_ss(struct radeon_device *rdev,
				     int enable,
				     int pll_id,
				     int crtc_id,
				     struct radeon_atom_ss *ss)
{
	unsigned i;
	int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL);
	union atom_enable_ss args;

	/* before disabling, make sure no other active crtc shares this
	 * pll */
	if (!enable) {
		for (i = 0; i < rdev->num_crtc; i++) {
			if (rdev->mode_info.crtcs[i] &&
			    rdev->mode_info.crtcs[i]->enabled &&
			    i != crtc_id &&
			    pll_id == rdev->mode_info.crtcs[i]->pll_id) {
				/* one other crtc is using this pll don't turn
				 * off spread spectrum as it might turn off
				 * display on active crtc
				 */
				return;
			}
		}
	}

	memset(&args, 0, sizeof(args));

	if (ASIC_IS_DCE5(rdev)) {
		/* v3 layout */
		args.v3.usSpreadSpectrumAmountFrac = cpu_to_le16(0);
		args.v3.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
		switch (pll_id) {
		case ATOM_PPLL1:
			args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P1PLL;
			break;
		case ATOM_PPLL2:
			args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P2PLL;
			break;
		case ATOM_DCPLL:
			args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_DCPLL;
			break;
		case ATOM_PPLL_INVALID:
			return;
		}
		args.v3.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
		args.v3.usSpreadSpectrumStep = cpu_to_le16(ss->step);
		args.v3.ucEnable = enable;
		/* force disable for zero percentage, externally handled
		 * ss, or DCE6.1 */
		if ((ss->percentage == 0) || (ss->type & ATOM_EXTERNAL_SS_MASK) || ASIC_IS_DCE61(rdev))
			args.v3.ucEnable = ATOM_DISABLE;
	} else if (ASIC_IS_DCE4(rdev)) {
		/* v2 layout */
		args.v2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
		args.v2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
		switch (pll_id) {
		case ATOM_PPLL1:
			args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P1PLL;
			break;
		case ATOM_PPLL2:
			args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P2PLL;
			break;
		case ATOM_DCPLL:
			args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_DCPLL;
			break;
		case ATOM_PPLL_INVALID:
			return;
		}
		args.v2.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
		args.v2.usSpreadSpectrumStep = cpu_to_le16(ss->step);
		args.v2.ucEnable = enable;
		/* force disable for zero percentage, externally handled
		 * ss, or DCE4.1 */
		if ((ss->percentage == 0) || (ss->type & ATOM_EXTERNAL_SS_MASK) || ASIC_IS_DCE41(rdev))
			args.v2.ucEnable = ATOM_DISABLE;
	} else if (ASIC_IS_DCE3(rdev)) {
		/* v1 layout; takes the pll id as a table parameter */
		args.v1.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
		args.v1.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
		args.v1.ucSpreadSpectrumStep = ss->step;
		args.v1.ucSpreadSpectrumDelay = ss->delay;
		args.v1.ucSpreadSpectrumRange = ss->range;
		args.v1.ucPpll = pll_id;
		args.v1.ucEnable = enable;
	} else if (ASIC_IS_AVIVO(rdev)) {
		/* lvds_ss_2 layout; disabling is done via direct register
		 * writes instead of the table */
		if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
		    (ss->type & ATOM_EXTERNAL_SS_MASK)) {
			atombios_disable_ss(rdev, pll_id);
			return;
		}
		args.lvds_ss_2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
		args.lvds_ss_2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
		args.lvds_ss_2.ucSpreadSpectrumStep = ss->step;
		args.lvds_ss_2.ucSpreadSpectrumDelay = ss->delay;
		args.lvds_ss_2.ucSpreadSpectrumRange = ss->range;
		args.lvds_ss_2.ucEnable = enable;
	} else {
		/* legacy lvds_ss layout; step and delay are packed into
		 * one byte */
		if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
		    (ss->type & ATOM_EXTERNAL_SS_MASK)) {
			atombios_disable_ss(rdev, pll_id);
			return;
		}
		args.lvds_ss.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
		args.lvds_ss.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
		args.lvds_ss.ucSpreadSpectrumStepSize_Delay = (ss->step & 3) << 2;
		args.lvds_ss.ucSpreadSpectrumStepSize_Delay |= (ss->delay & 7) << 4;
		args.lvds_ss.ucEnable = enable;
	}
	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}
529 
/* Argument block for the AdjustDisplayPll table; the member used
 * depends on the table revision reported by the BIOS.
 */
union adjust_pixel_clock {
	ADJUST_DISPLAY_PLL_PS_ALLOCATION v1;
	ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3;
};
534 
/* Compute pll flag preferences for the mode/asic and, on DCE3+, ask
 * the AdjustDisplayPll command table to adjust the pixel clock for the
 * encoder/transmitter in use.  Returns the (possibly adjusted) clock
 * in the same units as mode->clock.  May also set pll reference and
 * post dividers when the BIOS returns them.
 */
static u32 atombios_adjust_pll(struct drm_crtc *crtc,
			       struct drm_display_mode *mode,
			       struct radeon_pll *pll,
			       bool ss_enabled,
			       struct radeon_atom_ss *ss)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder = NULL;
	struct radeon_encoder *radeon_encoder = NULL;
	struct drm_connector *connector = NULL;
	u32 adjusted_clock = mode->clock;
	int encoder_mode = 0;
	u32 dp_clock = mode->clock;
	int bpc = 8;
	bool is_duallink = false;

	/* reset the pll flags */
	pll->flags = 0;

	/* per-asic divider preferences */
	if (ASIC_IS_AVIVO(rdev)) {
		if ((rdev->family == CHIP_RS600) ||
		    (rdev->family == CHIP_RS690) ||
		    (rdev->family == CHIP_RS740))
			pll->flags |= (/*RADEON_PLL_USE_FRAC_FB_DIV |*/
				       RADEON_PLL_PREFER_CLOSEST_LOWER);

		if (ASIC_IS_DCE32(rdev) && mode->clock > 200000)	/* range limits??? */
			pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
		else
			pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;

		if (rdev->family < CHIP_RV770)
			pll->flags |= RADEON_PLL_PREFER_MINM_OVER_MAXP;
		/* use frac fb div on APUs */
		if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE61(rdev))
			pll->flags |= RADEON_PLL_USE_FRAC_FB_DIV;
	} else {
		pll->flags |= RADEON_PLL_LEGACY;

		if (mode->clock > 200000)	/* range limits??? */
			pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
		else
			pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
	}

	/* find the encoder driving this crtc and pick up per-encoder
	 * parameters (bpc, encoder mode, dp clock, dual link) */
	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc == crtc) {
			radeon_encoder = to_radeon_encoder(encoder);
			connector = radeon_get_connector_for_encoder(encoder);
			bpc = radeon_get_monitor_bpc(connector);
			encoder_mode = atombios_get_encoder_mode(encoder);
			is_duallink = radeon_dig_monitor_is_duallink(encoder, mode->clock);
			if ((radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
			    (radeon_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
				if (connector) {
					struct radeon_connector *radeon_connector = to_radeon_connector(connector);
					struct radeon_connector_atom_dig *dig_connector =
						radeon_connector->con_priv;

					dp_clock = dig_connector->dp_clock;
				}
			}

			/* use recommended ref_div for ss */
			if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
				if (ss_enabled) {
					if (ss->refdiv) {
						pll->flags |= RADEON_PLL_USE_REF_DIV;
						pll->reference_div = ss->refdiv;
						if (ASIC_IS_AVIVO(rdev))
							pll->flags |= RADEON_PLL_USE_FRAC_FB_DIV;
					}
				}
			}

			if (ASIC_IS_AVIVO(rdev)) {
				/* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */
				if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)
					adjusted_clock = mode->clock * 2;
				if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
					pll->flags |= RADEON_PLL_PREFER_CLOSEST_LOWER;
				if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
					pll->flags |= RADEON_PLL_IS_LCD;
			} else {
				if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
					pll->flags |= RADEON_PLL_NO_ODD_POST_DIV;
				if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS)
					pll->flags |= RADEON_PLL_USE_REF_DIV;
			}
			break;
		}
	}

	/* DCE3+ has an AdjustDisplayPll that will adjust the pixel clock
	 * accordingly based on the encoder/transmitter to work around
	 * special hw requirements.
	 */
	/* NOTE(review): if no encoder is attached to this crtc,
	 * radeon_encoder remains NULL and is dereferenced below —
	 * callers appear to guarantee an attached encoder; confirm. */
	if (ASIC_IS_DCE3(rdev)) {
		union adjust_pixel_clock args;
		u8 frev, crev;
		int index;

		index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll);
		if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
					   &crev))
			return adjusted_clock;

		memset(&args, 0, sizeof(args));

		switch (frev) {
		case 1:
			switch (crev) {
			case 1:
			case 2:
				/* table works in 10 kHz units */
				args.v1.usPixelClock = cpu_to_le16(mode->clock / 10);
				args.v1.ucTransmitterID = radeon_encoder->encoder_id;
				args.v1.ucEncodeMode = encoder_mode;
				if (ss_enabled && ss->percentage)
					args.v1.ucConfig |=
						ADJUST_DISPLAY_CONFIG_SS_ENABLE;

				atom_execute_table(rdev->mode_info.atom_context,
						   index, (uint32_t *)&args);
				adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10;
				break;
			case 3:
				args.v3.sInput.usPixelClock = cpu_to_le16(mode->clock / 10);
				args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id;
				args.v3.sInput.ucEncodeMode = encoder_mode;
				args.v3.sInput.ucDispPllConfig = 0;
				if (ss_enabled && ss->percentage)
					args.v3.sInput.ucDispPllConfig |=
						DISPPLL_CONFIG_SS_ENABLE;
				if (ENCODER_MODE_IS_DP(encoder_mode)) {
					args.v3.sInput.ucDispPllConfig |=
						DISPPLL_CONFIG_COHERENT_MODE;
					/* 16200 or 27000 */
					args.v3.sInput.usPixelClock = cpu_to_le16(dp_clock / 10);
				} else if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
					/* NOTE(review): enc_priv is dereferenced
					 * without a NULL check here; confirm it is
					 * always populated for DFP encoders. */
					struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
					if (encoder_mode == ATOM_ENCODER_MODE_HDMI)
						/* deep color support */
						args.v3.sInput.usPixelClock =
							cpu_to_le16((mode->clock * bpc / 8) / 10);
					if (dig->coherent_mode)
						args.v3.sInput.ucDispPllConfig |=
							DISPPLL_CONFIG_COHERENT_MODE;
					if (is_duallink)
						args.v3.sInput.ucDispPllConfig |=
							DISPPLL_CONFIG_DUAL_LINK;
				}
				if (radeon_encoder_get_dp_bridge_encoder_id(encoder) !=
				    ENCODER_OBJECT_ID_NONE)
					args.v3.sInput.ucExtTransmitterID =
						radeon_encoder_get_dp_bridge_encoder_id(encoder);
				else
					args.v3.sInput.ucExtTransmitterID = 0;

				atom_execute_table(rdev->mode_info.atom_context,
						   index, (uint32_t *)&args);
				adjusted_clock = le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10;
				/* honor dividers the BIOS chose for us */
				if (args.v3.sOutput.ucRefDiv) {
					pll->flags |= RADEON_PLL_USE_FRAC_FB_DIV;
					pll->flags |= RADEON_PLL_USE_REF_DIV;
					pll->reference_div = args.v3.sOutput.ucRefDiv;
				}
				if (args.v3.sOutput.ucPostDiv) {
					pll->flags |= RADEON_PLL_USE_FRAC_FB_DIV;
					pll->flags |= RADEON_PLL_USE_POST_DIV;
					pll->post_div = args.v3.sOutput.ucPostDiv;
				}
				break;
			default:
				DRM_ERROR("Unknown table version %d %d\n", frev, crev);
				return adjusted_clock;
			}
			break;
		default:
			DRM_ERROR("Unknown table version %d %d\n", frev, crev);
			return adjusted_clock;
		}
	}
	return adjusted_clock;
}
720 
/* Argument block for the SetPixelClock table; the member used depends
 * on the table revision reported by the BIOS.
 */
union set_pixel_clock {
	SET_PIXEL_CLOCK_PS_ALLOCATION base;
	PIXEL_CLOCK_PARAMETERS v1;
	PIXEL_CLOCK_PARAMETERS_V2 v2;
	PIXEL_CLOCK_PARAMETERS_V3 v3;
	PIXEL_CLOCK_PARAMETERS_V5 v5;
	PIXEL_CLOCK_PARAMETERS_V6 v6;
};
729 
730 /* on DCE5, make sure the voltage is high enough to support the
731  * required disp clk.
732  */
733 static void atombios_crtc_set_disp_eng_pll(struct radeon_device *rdev,
734 				    u32 dispclk)
735 {
736 	u8 frev, crev;
737 	int index;
738 	union set_pixel_clock args;
739 
740 	memset(&args, 0, sizeof(args));
741 
742 	index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
743 	if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
744 				   &crev))
745 		return;
746 
747 	switch (frev) {
748 	case 1:
749 		switch (crev) {
750 		case 5:
751 			/* if the default dcpll clock is specified,
752 			 * SetPixelClock provides the dividers
753 			 */
754 			args.v5.ucCRTC = ATOM_CRTC_INVALID;
755 			args.v5.usPixelClock = cpu_to_le16(dispclk);
756 			args.v5.ucPpll = ATOM_DCPLL;
757 			break;
758 		case 6:
759 			/* if the default dcpll clock is specified,
760 			 * SetPixelClock provides the dividers
761 			 */
762 			args.v6.ulDispEngClkFreq = cpu_to_le32(dispclk);
763 			if (ASIC_IS_DCE61(rdev))
764 				args.v6.ucPpll = ATOM_EXT_PLL1;
765 			else if (ASIC_IS_DCE6(rdev))
766 				args.v6.ucPpll = ATOM_PPLL0;
767 			else
768 				args.v6.ucPpll = ATOM_DCPLL;
769 			break;
770 		default:
771 			DRM_ERROR("Unknown table version %d %d\n", frev, crev);
772 			return;
773 		}
774 		break;
775 	default:
776 		DRM_ERROR("Unknown table version %d %d\n", frev, crev);
777 		return;
778 	}
779 	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
780 }
781 
/* Program a pixel PLL via the SetPixelClock table with pre-computed
 * divider values.  The parameter layout depends on the table revision
 * reported by the BIOS; all revisions take the clock in 10 kHz units.
 */
static void atombios_crtc_program_pll(struct drm_crtc *crtc,
				      u32 crtc_id,
				      int pll_id,
				      u32 encoder_mode,
				      u32 encoder_id,
				      u32 clock,
				      u32 ref_div,
				      u32 fb_div,
				      u32 frac_fb_div,
				      u32 post_div,
				      int bpc,
				      bool ss_enabled,
				      struct radeon_atom_ss *ss)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	u8 frev, crev;
	int index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
	union set_pixel_clock args;

	memset(&args, 0, sizeof(args));

	if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
				   &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* v1 cannot express "disable"; skip the call */
			if (clock == ATOM_DISABLE)
				return;
			args.v1.usPixelClock = cpu_to_le16(clock / 10);
			args.v1.usRefDiv = cpu_to_le16(ref_div);
			args.v1.usFbDiv = cpu_to_le16(fb_div);
			args.v1.ucFracFbDiv = frac_fb_div;
			args.v1.ucPostDiv = post_div;
			args.v1.ucPpll = pll_id;
			args.v1.ucCRTC = crtc_id;
			args.v1.ucRefDivSrc = 1;
			break;
		case 2:
			args.v2.usPixelClock = cpu_to_le16(clock / 10);
			args.v2.usRefDiv = cpu_to_le16(ref_div);
			args.v2.usFbDiv = cpu_to_le16(fb_div);
			args.v2.ucFracFbDiv = frac_fb_div;
			args.v2.ucPostDiv = post_div;
			args.v2.ucPpll = pll_id;
			args.v2.ucCRTC = crtc_id;
			args.v2.ucRefDivSrc = 1;
			break;
		case 3:
			args.v3.usPixelClock = cpu_to_le16(clock / 10);
			args.v3.usRefDiv = cpu_to_le16(ref_div);
			args.v3.usFbDiv = cpu_to_le16(fb_div);
			args.v3.ucFracFbDiv = frac_fb_div;
			args.v3.ucPostDiv = post_div;
			args.v3.ucPpll = pll_id;
			/* pll id is also encoded in the misc field */
			args.v3.ucMiscInfo = (pll_id << 2);
			if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
				args.v3.ucMiscInfo |= PIXEL_CLOCK_MISC_REF_DIV_SRC;
			args.v3.ucTransmitterId = encoder_id;
			args.v3.ucEncoderMode = encoder_mode;
			break;
		case 5:
			args.v5.ucCRTC = crtc_id;
			args.v5.usPixelClock = cpu_to_le16(clock / 10);
			args.v5.ucRefDiv = ref_div;
			args.v5.usFbDiv = cpu_to_le16(fb_div);
			/* fractional fb div is passed as a decimal fraction */
			args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
			args.v5.ucPostDiv = post_div;
			args.v5.ucMiscInfo = 0; /* HDMI depth, etc. */
			if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
				args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_REF_DIV_SRC;
			/* v5 only knows 24/30 bpp HDMI deep color */
			switch (bpc) {
			case 8:
			default:
				args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_24BPP;
				break;
			case 10:
				args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_30BPP;
				break;
			}
			args.v5.ucTransmitterID = encoder_id;
			args.v5.ucEncoderMode = encoder_mode;
			args.v5.ucPpll = pll_id;
			break;
		case 6:
			/* crtc id is packed into the top byte of the clock */
			args.v6.ulDispEngClkFreq = cpu_to_le32(crtc_id << 24 | clock / 10);
			args.v6.ucRefDiv = ref_div;
			args.v6.usFbDiv = cpu_to_le16(fb_div);
			args.v6.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
			args.v6.ucPostDiv = post_div;
			args.v6.ucMiscInfo = 0; /* HDMI depth, etc. */
			if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
				args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_REF_DIV_SRC;
			/* v6 supports up to 48 bpp HDMI deep color */
			switch (bpc) {
			case 8:
			default:
				args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_24BPP;
				break;
			case 10:
				args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_30BPP;
				break;
			case 12:
				args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_36BPP;
				break;
			case 16:
				args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_48BPP;
				break;
			}
			args.v6.ucTransmitterID = encoder_id;
			args.v6.ucEncoderMode = encoder_mode;
			args.v6.ucPpll = pll_id;
			break;
		default:
			DRM_ERROR("Unknown table version %d %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d %d\n", frev, crev);
		return;
	}

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}
909 
/*
 * atombios_crtc_set_pll - compute and program the pixel PLL for a CRTC.
 *
 * Finds the encoder attached to @crtc, looks up spread spectrum (ss)
 * parameters appropriate for the encoder mode from the vbios tables,
 * adjusts the requested pixel clock, computes the PLL dividers and
 * programs them via the SetPixelClock atom table.  Spread spectrum is
 * disabled while the PLL is reprogrammed and re-enabled afterwards.
 */
static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder = NULL;
	struct radeon_encoder *radeon_encoder = NULL;
	u32 pll_clock = mode->clock;
	u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
	struct radeon_pll *pll;
	u32 adjusted_clock;
	int encoder_mode = 0;
	struct radeon_atom_ss ss;
	bool ss_enabled = false;
	int bpc = 8;

	/* find the (first) encoder driving this crtc */
	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc == crtc) {
			radeon_encoder = to_radeon_encoder(encoder);
			encoder_mode = atombios_get_encoder_mode(encoder);
			break;
		}
	}

	/* nothing to program without an attached encoder */
	if (!radeon_encoder)
		return;

	/* map the pll id chosen in atombios_crtc_prepare() to its clock info */
	switch (radeon_crtc->pll_id) {
	case ATOM_PPLL1:
		pll = &rdev->clock.p1pll;
		break;
	case ATOM_PPLL2:
		pll = &rdev->clock.p2pll;
		break;
	case ATOM_DCPLL:
	case ATOM_PPLL_INVALID:
	default:
		pll = &rdev->clock.dcpll;
		break;
	}

	/* for digital outputs (LCD/DFP, or anything behind a DP bridge),
	 * fetch the monitor bpc and mode-specific spread spectrum info
	 */
	if ((radeon_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
	    (radeon_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
		struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
		struct drm_connector *connector =
			radeon_get_connector_for_encoder(encoder);
		struct radeon_connector *radeon_connector =
			to_radeon_connector(connector);
		struct radeon_connector_atom_dig *dig_connector =
			radeon_connector->con_priv;
		int dp_clock;
		bpc = radeon_get_monitor_bpc(connector);

		switch (encoder_mode) {
		case ATOM_ENCODER_MODE_DP_MST:
		case ATOM_ENCODER_MODE_DP:
			/* DP/eDP */
			dp_clock = dig_connector->dp_clock / 10;
			if (ASIC_IS_DCE4(rdev))
				ss_enabled =
					radeon_atombios_get_asic_ss_info(rdev, &ss,
									 ASIC_INTERNAL_SS_ON_DP,
									 dp_clock);
			else {
				/* 1.62 GHz link: try the dedicated ss entry
				 * first, then fall back to the generic one
				 */
				if (dp_clock == 16200) {
					ss_enabled =
						radeon_atombios_get_ppll_ss_info(rdev, &ss,
										 ATOM_DP_SS_ID2);
					if (!ss_enabled)
						ss_enabled =
							radeon_atombios_get_ppll_ss_info(rdev, &ss,
											 ATOM_DP_SS_ID1);
				} else
					ss_enabled =
						radeon_atombios_get_ppll_ss_info(rdev, &ss,
										 ATOM_DP_SS_ID1);
			}
			break;
		case ATOM_ENCODER_MODE_LVDS:
			if (ASIC_IS_DCE4(rdev))
				ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss,
									      dig->lcd_ss_id,
									      mode->clock / 10);
			else
				ss_enabled = radeon_atombios_get_ppll_ss_info(rdev, &ss,
									      dig->lcd_ss_id);
			break;
		case ATOM_ENCODER_MODE_DVI:
			if (ASIC_IS_DCE4(rdev))
				ss_enabled =
					radeon_atombios_get_asic_ss_info(rdev, &ss,
									 ASIC_INTERNAL_SS_ON_TMDS,
									 mode->clock / 10);
			break;
		case ATOM_ENCODER_MODE_HDMI:
			if (ASIC_IS_DCE4(rdev))
				ss_enabled =
					radeon_atombios_get_asic_ss_info(rdev, &ss,
									 ASIC_INTERNAL_SS_ON_HDMI,
									 mode->clock / 10);
			break;
		default:
			break;
		}
	}

	/* adjust pixel clock as needed */
	adjusted_clock = atombios_adjust_pll(crtc, mode, pll, ss_enabled, &ss);

	if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
		/* TV seems to prefer the legacy algo on some boards */
		radeon_compute_pll_legacy(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
					  &ref_div, &post_div);
	else if (ASIC_IS_AVIVO(rdev))
		radeon_compute_pll_avivo(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
					 &ref_div, &post_div);
	else
		radeon_compute_pll_legacy(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
					  &ref_div, &post_div);

	/* quiesce spread spectrum while the pll is reprogrammed */
	atombios_crtc_program_ss(rdev, ATOM_DISABLE, radeon_crtc->pll_id, radeon_crtc->crtc_id, &ss);

	atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
				  encoder_mode, radeon_encoder->encoder_id, mode->clock,
				  ref_div, fb_div, frac_fb_div, post_div, bpc, ss_enabled, &ss);

	if (ss_enabled) {
		/* calculate ss amount and step size */
		if (ASIC_IS_DCE4(rdev)) {
			u32 step_size;
			/* NOTE(review): amount/step packing follows the
			 * ATOM_PPLL_SS_AMOUNT_V2 field layout — confirm
			 * against atombios.h if touching this math
			 */
			u32 amount = (((fb_div * 10) + frac_fb_div) * ss.percentage) / 10000;
			ss.amount = (amount / 10) & ATOM_PPLL_SS_AMOUNT_V2_FBDIV_MASK;
			ss.amount |= ((amount - (amount / 10)) << ATOM_PPLL_SS_AMOUNT_V2_NFRAC_SHIFT) &
				ATOM_PPLL_SS_AMOUNT_V2_NFRAC_MASK;
			if (ss.type & ATOM_PPLL_SS_TYPE_V2_CENTRE_SPREAD)
				step_size = (4 * amount * ref_div * (ss.rate * 2048)) /
					(125 * 25 * pll->reference_freq / 100);
			else
				step_size = (2 * amount * ref_div * (ss.rate * 2048)) /
					(125 * 25 * pll->reference_freq / 100);
			ss.step = step_size;
		}

		atombios_crtc_program_ss(rdev, ATOM_ENABLE, radeon_crtc->pll_id, radeon_crtc->crtc_id, &ss);
	}
}
1056 
/*
 * dce4_crtc_do_set_base - program the primary surface for a DCE4+ CRTC.
 *
 * Pins the framebuffer BO in VRAM (unless @atomic, in which case it is
 * assumed already pinned/idle/fenced), translates the fb format and
 * tiling into GRPH_CONTROL bits, programs the surface address, pitch
 * and viewport registers, and unpins the previous fb if it changed.
 * Returns 0 on success or a negative error code.
 */
static int dce4_crtc_do_set_base(struct drm_crtc *crtc,
				 struct drm_framebuffer *fb,
				 int x, int y, int atomic)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_framebuffer *radeon_fb;
	struct drm_framebuffer *target_fb;
	struct drm_gem_object *obj;
	struct radeon_bo *rbo;
	uint64_t fb_location;
	uint32_t fb_format, fb_pitch_pixels, tiling_flags;
	unsigned bankw, bankh, mtaspect, tile_split;
	u32 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_NONE);
	u32 tmp, viewport_w, viewport_h;
	int r;

	/* no fb bound */
	if (!atomic && !crtc->fb) {
		DRM_DEBUG_KMS("No FB bound\n");
		return 0;
	}

	if (atomic) {
		radeon_fb = to_radeon_framebuffer(fb);
		target_fb = fb;
	}
	else {
		radeon_fb = to_radeon_framebuffer(crtc->fb);
		target_fb = crtc->fb;
	}

	/* If atomic, assume fb object is pinned & idle & fenced and
	 * just update base pointers
	 */
	obj = radeon_fb->obj;
	rbo = gem_to_radeon_bo(obj);
	r = radeon_bo_reserve(rbo, false);
	if (unlikely(r != 0))
		return r;

	if (atomic)
		fb_location = radeon_bo_gpu_offset(rbo);
	else {
		r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
		if (unlikely(r != 0)) {
			radeon_bo_unreserve(rbo);
			return -EINVAL;
		}
	}

	radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
	radeon_bo_unreserve(rbo);

	/* translate fb depth into GRPH depth/format bits */
	switch (target_fb->bits_per_pixel) {
	case 8:
		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) |
			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED));
		break;
	case 15:
		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555));
		break;
	case 16:
		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565));
#ifdef __BIG_ENDIAN
		fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
#endif
		break;
	case 24:
	case 32:
		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
#ifdef __BIG_ENDIAN
		fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
#endif
		break;
	default:
		/* NOTE(review): in the non-atomic path the bo was pinned
		 * above and is not unpinned before this error return —
		 * looks like a pin leak; confirm and fix separately
		 */
		DRM_ERROR("Unsupported screen depth %d\n",
			  target_fb->bits_per_pixel);
		return -EINVAL;
	}

	if (tiling_flags & RADEON_TILING_MACRO) {
		/* tile_config layout differs per family */
		if (rdev->family >= CHIP_TAHITI)
			tmp = rdev->config.si.tile_config;
		else if (rdev->family >= CHIP_CAYMAN)
			tmp = rdev->config.cayman.tile_config;
		else
			tmp = rdev->config.evergreen.tile_config;

		switch ((tmp & 0xf0) >> 4) {
		case 0: /* 4 banks */
			fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_4_BANK);
			break;
		case 1: /* 8 banks */
		default:
			fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_8_BANK);
			break;
		case 2: /* 16 banks */
			fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_16_BANK);
			break;
		}

		fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1);

		evergreen_tiling_fields(tiling_flags, &bankw, &bankh, &mtaspect, &tile_split);
		fb_format |= EVERGREEN_GRPH_TILE_SPLIT(tile_split);
		fb_format |= EVERGREEN_GRPH_BANK_WIDTH(bankw);
		fb_format |= EVERGREEN_GRPH_BANK_HEIGHT(bankh);
		fb_format |= EVERGREEN_GRPH_MACRO_TILE_ASPECT(mtaspect);
	} else if (tiling_flags & RADEON_TILING_MICRO)
		fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1);

	/* SI parts additionally need the pipe config in GRPH_CONTROL */
	if ((rdev->family == CHIP_TAHITI) ||
	    (rdev->family == CHIP_PITCAIRN))
		fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P8_32x32_8x16);
	else if (rdev->family == CHIP_VERDE)
		fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P4_8x16);

	/* turn off the VGA emulation for this crtc */
	switch (radeon_crtc->crtc_id) {
	case 0:
		WREG32(AVIVO_D1VGA_CONTROL, 0);
		break;
	case 1:
		WREG32(AVIVO_D2VGA_CONTROL, 0);
		break;
	case 2:
		WREG32(EVERGREEN_D3VGA_CONTROL, 0);
		break;
	case 3:
		WREG32(EVERGREEN_D4VGA_CONTROL, 0);
		break;
	case 4:
		WREG32(EVERGREEN_D5VGA_CONTROL, 0);
		break;
	case 5:
		WREG32(EVERGREEN_D6VGA_CONTROL, 0);
		break;
	default:
		break;
	}

	/* program surface address (primary and secondary get the same base) */
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(fb_location));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(fb_location));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
	WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
	WREG32(EVERGREEN_GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);

	WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
	WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);

	fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8);
	WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
	WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1);

	WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
	       target_fb->height);
	/* align viewport start: x to a multiple of 4, y to 2
	 * (presumably a hw alignment constraint)
	 */
	x &= ~3;
	y &= ~1;
	WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset,
	       (x << 16) | y);
	viewport_w = crtc->mode.hdisplay;
	viewport_h = (crtc->mode.vdisplay + 1) & ~1; /* round height up to even */
	WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
	       (viewport_w << 16) | viewport_h);

	/* pageflip setup */
	/* make sure flip is at vb rather than hb */
	tmp = RREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset);
	tmp &= ~EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN;
	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp);

	/* set pageflip to happen anywhere in vblank interval */
	WREG32(EVERGREEN_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);

	/* unpin the previous fb now that the new one is scanned out */
	if (!atomic && fb && fb != crtc->fb) {
		radeon_fb = to_radeon_framebuffer(fb);
		rbo = gem_to_radeon_bo(radeon_fb->obj);
		r = radeon_bo_reserve(rbo, false);
		if (unlikely(r != 0))
			return r;
		radeon_bo_unpin(rbo);
		radeon_bo_unreserve(rbo);
	}

	/* Bytes per pixel may have changed */
	radeon_bandwidth_update(rdev);

	return 0;
}
1259 
/*
 * avivo_crtc_do_set_base - program the primary surface for an AVIVO
 * (pre-DCE4) CRTC.
 *
 * Same flow as dce4_crtc_do_set_base() but using the AVIVO/R600/R700
 * register set: pin the fb BO (unless @atomic), translate depth and
 * tiling into D1GRPH_CONTROL bits, program surface address, pitch and
 * viewport, then unpin the previous fb if it changed.
 * Returns 0 on success or a negative error code.
 */
static int avivo_crtc_do_set_base(struct drm_crtc *crtc,
				  struct drm_framebuffer *fb,
				  int x, int y, int atomic)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_framebuffer *radeon_fb;
	struct drm_gem_object *obj;
	struct radeon_bo *rbo;
	struct drm_framebuffer *target_fb;
	uint64_t fb_location;
	uint32_t fb_format, fb_pitch_pixels, tiling_flags;
	u32 fb_swap = R600_D1GRPH_SWAP_ENDIAN_NONE;
	u32 tmp, viewport_w, viewport_h;
	int r;

	/* no fb bound */
	if (!atomic && !crtc->fb) {
		DRM_DEBUG_KMS("No FB bound\n");
		return 0;
	}

	if (atomic) {
		radeon_fb = to_radeon_framebuffer(fb);
		target_fb = fb;
	}
	else {
		radeon_fb = to_radeon_framebuffer(crtc->fb);
		target_fb = crtc->fb;
	}

	obj = radeon_fb->obj;
	rbo = gem_to_radeon_bo(obj);
	r = radeon_bo_reserve(rbo, false);
	if (unlikely(r != 0))
		return r;

	/* If atomic, assume fb object is pinned & idle & fenced and
	 * just update base pointers
	 */
	if (atomic)
		fb_location = radeon_bo_gpu_offset(rbo);
	else {
		r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
		if (unlikely(r != 0)) {
			radeon_bo_unreserve(rbo);
			return -EINVAL;
		}
	}
	radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
	radeon_bo_unreserve(rbo);

	/* translate fb depth into D1GRPH depth/format bits */
	switch (target_fb->bits_per_pixel) {
	case 8:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_8BPP |
		    AVIVO_D1GRPH_CONTROL_8BPP_INDEXED;
		break;
	case 15:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
		    AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
		break;
	case 16:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
		    AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
#ifdef __BIG_ENDIAN
		fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
#endif
		break;
	case 24:
	case 32:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
		    AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
#ifdef __BIG_ENDIAN
		fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
#endif
		break;
	default:
		/* NOTE(review): non-atomic path returns without unpinning
		 * the bo pinned above — potential pin leak; confirm
		 */
		DRM_ERROR("Unsupported screen depth %d\n",
			  target_fb->bits_per_pixel);
		return -EINVAL;
	}

	/* tiling bits differ between R600+ and older avivo parts */
	if (rdev->family >= CHIP_R600) {
		if (tiling_flags & RADEON_TILING_MACRO)
			fb_format |= R600_D1GRPH_ARRAY_MODE_2D_TILED_THIN1;
		else if (tiling_flags & RADEON_TILING_MICRO)
			fb_format |= R600_D1GRPH_ARRAY_MODE_1D_TILED_THIN1;
	} else {
		if (tiling_flags & RADEON_TILING_MACRO)
			fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE;

		if (tiling_flags & RADEON_TILING_MICRO)
			fb_format |= AVIVO_D1GRPH_TILED;
	}

	/* turn off the VGA emulation for this crtc */
	if (radeon_crtc->crtc_id == 0)
		WREG32(AVIVO_D1VGA_CONTROL, 0);
	else
		WREG32(AVIVO_D2VGA_CONTROL, 0);

	/* RV770+ have 64-bit surface addresses; program the high dwords */
	if (rdev->family >= CHIP_RV770) {
		if (radeon_crtc->crtc_id) {
			WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
			WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
		} else {
			WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
			WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
		}
	}
	WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32) fb_location);
	WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
	       radeon_crtc->crtc_offset, (u32) fb_location);
	WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
	if (rdev->family >= CHIP_R600)
		WREG32(R600_D1GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);

	WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
	WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);

	fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8);
	WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
	WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);

	WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
	       target_fb->height);
	/* align viewport start: x to a multiple of 4, y to 2
	 * (presumably a hw alignment constraint)
	 */
	x &= ~3;
	y &= ~1;
	WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
	       (x << 16) | y);
	viewport_w = crtc->mode.hdisplay;
	viewport_h = (crtc->mode.vdisplay + 1) & ~1; /* round height up to even */
	WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
	       (viewport_w << 16) | viewport_h);

	/* pageflip setup */
	/* make sure flip is at vb rather than hb */
	tmp = RREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset);
	tmp &= ~AVIVO_D1GRPH_SURFACE_UPDATE_H_RETRACE_EN;
	WREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp);

	/* set pageflip to happen anywhere in vblank interval */
	WREG32(AVIVO_D1MODE_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);

	/* unpin the previous fb now that the new one is scanned out */
	if (!atomic && fb && fb != crtc->fb) {
		radeon_fb = to_radeon_framebuffer(fb);
		rbo = gem_to_radeon_bo(radeon_fb->obj);
		r = radeon_bo_reserve(rbo, false);
		if (unlikely(r != 0))
			return r;
		radeon_bo_unpin(rbo);
		radeon_bo_unreserve(rbo);
	}

	/* Bytes per pixel may have changed */
	radeon_bandwidth_update(rdev);

	return 0;
}
1428 
1429 int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
1430 			   struct drm_framebuffer *old_fb)
1431 {
1432 	struct drm_device *dev = crtc->dev;
1433 	struct radeon_device *rdev = dev->dev_private;
1434 
1435 	if (ASIC_IS_DCE4(rdev))
1436 		return dce4_crtc_do_set_base(crtc, old_fb, x, y, 0);
1437 	else if (ASIC_IS_AVIVO(rdev))
1438 		return avivo_crtc_do_set_base(crtc, old_fb, x, y, 0);
1439 	else
1440 		return radeon_crtc_do_set_base(crtc, old_fb, x, y, 0);
1441 }
1442 
1443 int atombios_crtc_set_base_atomic(struct drm_crtc *crtc,
1444                                   struct drm_framebuffer *fb,
1445 				  int x, int y, enum mode_set_atomic state)
1446 {
1447        struct drm_device *dev = crtc->dev;
1448        struct radeon_device *rdev = dev->dev_private;
1449 
1450 	if (ASIC_IS_DCE4(rdev))
1451 		return dce4_crtc_do_set_base(crtc, fb, x, y, 1);
1452 	else if (ASIC_IS_AVIVO(rdev))
1453 		return avivo_crtc_do_set_base(crtc, fb, x, y, 1);
1454 	else
1455 		return radeon_crtc_do_set_base(crtc, fb, x, y, 1);
1456 }
1457 
/* properly set additional regs when using atombios */
/*
 * radeon_legacy_atom_fixup - post-modeset register fixups for legacy
 * (pre-AVIVO) crtcs driven through atombios.  Clears the RGB offset
 * enable in the merge control registers, and for CRTC2 mirrors the
 * sync start/width values into the FP2 registers.
 */
static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	u32 disp_merge_cntl;

	switch (radeon_crtc->crtc_id) {
	case 0:
		disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
		disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
		WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
		break;
	case 1:
		disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
		disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
		WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
		/* keep the FP2 sync timing in step with CRTC2 */
		WREG32(RADEON_FP_H2_SYNC_STRT_WID,   RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
		WREG32(RADEON_FP_V2_SYNC_STRT_WID,   RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
		break;
	}
}
1481 
/*
 * radeon_atom_pick_pll - choose a PPLL for a crtc.
 *
 * DCE6.1: UNIPHY A is hard-wired to PPLL2; the remaining transmitters
 * share PPLL0/PPLL1, preferring whichever is not already claimed by
 * another crtc.
 * DCE4/5/6: DP can use an external ref clock (no PPLL programming,
 * signalled by ATOM_PPLL_INVALID) or a dedicated PLL depending on the
 * asic; otherwise PPLL1/PPLL2 are shared on a first-free basis.
 * Older asics: fixed 1:1 crtc-to-pll mapping.
 *
 * Returns the ATOM_PPLL* id to use for this crtc.
 */
static int radeon_atom_pick_pll(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *test_encoder;
	struct drm_crtc *test_crtc;
	uint32_t pll_in_use = 0;

	if (ASIC_IS_DCE61(rdev)) {
		list_for_each_entry(test_encoder, &dev->mode_config.encoder_list, head) {
			if (test_encoder->crtc && (test_encoder->crtc == crtc)) {
				struct radeon_encoder *test_radeon_encoder =
					to_radeon_encoder(test_encoder);
				struct radeon_encoder_atom_dig *dig =
					test_radeon_encoder->enc_priv;

				if ((test_radeon_encoder->encoder_id ==
				     ENCODER_OBJECT_ID_INTERNAL_UNIPHY) &&
				    (dig->linkb == false)) /* UNIPHY A uses PPLL2 */
					return ATOM_PPLL2;
			}
		}
		/* UNIPHY B/C/D/E/F */
		list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
			struct radeon_crtc *radeon_test_crtc;

			if (crtc == test_crtc)
				continue;

			radeon_test_crtc = to_radeon_crtc(test_crtc);
			if ((radeon_test_crtc->pll_id == ATOM_PPLL0) ||
			    (radeon_test_crtc->pll_id == ATOM_PPLL1))
				pll_in_use |= (1 << radeon_test_crtc->pll_id);
		}
		/* NOTE(review): the literal 4 assumes (1 << ATOM_PPLL0) == 4,
		 * i.e. ATOM_PPLL0 == 2 — confirm against the atom headers
		 */
		if (!(pll_in_use & 4))
			return ATOM_PPLL0;
		return ATOM_PPLL1;
	} else if (ASIC_IS_DCE4(rdev)) {
		list_for_each_entry(test_encoder, &dev->mode_config.encoder_list, head) {
			if (test_encoder->crtc && (test_encoder->crtc == crtc)) {
				/* in DP mode, the DP ref clock can come from PPLL, DCPLL, or ext clock,
				 * depending on the asic:
				 * DCE4: PPLL or ext clock
				 * DCE5: DCPLL or ext clock
				 *
				 * Setting ATOM_PPLL_INVALID will cause SetPixelClock to skip
				 * PPLL/DCPLL programming and only program the DP DTO for the
				 * crtc virtual pixel clock.
				 */
				if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_encoder))) {
					if (rdev->clock.dp_extclk)
						return ATOM_PPLL_INVALID;
					else if (ASIC_IS_DCE6(rdev))
						return ATOM_PPLL0;
					else if (ASIC_IS_DCE5(rdev))
						return ATOM_DCPLL;
				}
			}
		}

		/* otherwise, pick one of the plls */
		list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
			struct radeon_crtc *radeon_test_crtc;

			if (crtc == test_crtc)
				continue;

			radeon_test_crtc = to_radeon_crtc(test_crtc);
			if ((radeon_test_crtc->pll_id >= ATOM_PPLL1) &&
			    (radeon_test_crtc->pll_id <= ATOM_PPLL2))
				pll_in_use |= (1 << radeon_test_crtc->pll_id);
		}
		/* prefer PPLL1 if free (bit 0 assumes ATOM_PPLL1 == 0) */
		if (!(pll_in_use & 1))
			return ATOM_PPLL1;
		return ATOM_PPLL2;
	} else
		/* pre-DCE4: fixed crtc-to-pll mapping */
		return radeon_crtc->crtc_id;

}
1562 
/*
 * radeon_atom_disp_eng_pll_init - set the display engine clock (DCPLL /
 * display PLL) to the vbios default at init time.
 *
 * On DCE4/5 any spread spectrum on the DCPLL is disabled while the
 * clock is set and re-enabled afterwards.
 */
void radeon_atom_disp_eng_pll_init(struct radeon_device *rdev)
{
	/* always set DCPLL */
	if (ASIC_IS_DCE6(rdev))
		atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
	else if (ASIC_IS_DCE4(rdev)) {
		struct radeon_atom_ss ss;
		bool ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss,
								   ASIC_INTERNAL_SS_ON_DCPLL,
								   rdev->clock.default_dispclk);
		/* crtc id -1: the DCPLL is not tied to a particular crtc */
		if (ss_enabled)
			atombios_crtc_program_ss(rdev, ATOM_DISABLE, ATOM_DCPLL, -1, &ss);
		/* XXX: DCE5, make sure voltage, dispclk is high enough */
		atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
		if (ss_enabled)
			atombios_crtc_program_ss(rdev, ATOM_ENABLE, ATOM_DCPLL, -1, &ss);
	}

}
1582 
/*
 * atombios_crtc_mode_set - mode_set helper callback.
 *
 * Programs the pixel PLL, the crtc timing (using the DTD or the crtc
 * timing atom table depending on the asic generation and whether a
 * TV/CV output is attached), the scanout base, overscan and scaler.
 * Always returns 0.
 */
int atombios_crtc_mode_set(struct drm_crtc *crtc,
			   struct drm_display_mode *mode,
			   struct drm_display_mode *adjusted_mode,
			   int x, int y, struct drm_framebuffer *old_fb)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder;
	bool is_tvcv = false;

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		/* find tv std */
		if (encoder->crtc == crtc) {
			struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
			if (radeon_encoder->active_device &
			    (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT))
				is_tvcv = true;
		}
	}

	atombios_crtc_set_pll(crtc, adjusted_mode);

	if (ASIC_IS_DCE4(rdev))
		atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	else if (ASIC_IS_AVIVO(rdev)) {
		/* TV/CV prefers the crtc timing table on avivo */
		if (is_tvcv)
			atombios_crtc_set_timing(crtc, adjusted_mode);
		else
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	} else {
		atombios_crtc_set_timing(crtc, adjusted_mode);
		if (radeon_crtc->crtc_id == 0)
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
		radeon_legacy_atom_fixup(crtc);
	}
	atombios_crtc_set_base(crtc, x, y, old_fb);
	atombios_overscan_setup(crtc, mode, adjusted_mode);
	atombios_scaler_setup(crtc);
	return 0;
}
1624 
1625 static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
1626 				     const struct drm_display_mode *mode,
1627 				     struct drm_display_mode *adjusted_mode)
1628 {
1629 	if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
1630 		return false;
1631 	return true;
1632 }
1633 
/*
 * atombios_crtc_prepare - prepare helper callback, run before mode_set.
 *
 * Marks the crtc as in a mode set, picks the PPLL that mode_set will
 * program, un-gates crtc pair power (DCE6), then locks the crtc and
 * turns it off so it can be reprogrammed safely.
 */
static void atombios_crtc_prepare(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	radeon_crtc->in_mode_set = true;
	/* pick pll */
	radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);

	/* disable crtc pair power gating before programming */
	if (ASIC_IS_DCE6(rdev))
		atombios_powergate_crtc(crtc, ATOM_DISABLE);

	/* lock first, then power down — commit reverses this order */
	atombios_lock_crtc(crtc, ATOM_ENABLE);
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}
1651 
/*
 * atombios_crtc_commit - commit helper callback, run after mode_set.
 *
 * Powers the crtc back on, releases the lock taken in prepare() and
 * clears the in_mode_set flag.
 */
static void atombios_crtc_commit(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
	atombios_lock_crtc(crtc, ATOM_DISABLE);
	radeon_crtc->in_mode_set = false;
}
1660 
1661 static void atombios_crtc_disable(struct drm_crtc *crtc)
1662 {
1663 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1664 	struct drm_device *dev = crtc->dev;
1665 	struct radeon_device *rdev = dev->dev_private;
1666 	struct radeon_atom_ss ss;
1667 	int i;
1668 
1669 	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
1670 
1671 	for (i = 0; i < rdev->num_crtc; i++) {
1672 		if (rdev->mode_info.crtcs[i] &&
1673 		    rdev->mode_info.crtcs[i]->enabled &&
1674 		    i != radeon_crtc->crtc_id &&
1675 		    radeon_crtc->pll_id == rdev->mode_info.crtcs[i]->pll_id) {
1676 			/* one other crtc is using this pll don't turn
1677 			 * off the pll
1678 			 */
1679 			goto done;
1680 		}
1681 	}
1682 
1683 	switch (radeon_crtc->pll_id) {
1684 	case ATOM_PPLL1:
1685 	case ATOM_PPLL2:
1686 		/* disable the ppll */
1687 		atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1688 					  0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
1689 		break;
1690 	case ATOM_PPLL0:
1691 		/* disable the ppll */
1692 		if (ASIC_IS_DCE61(rdev))
1693 			atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1694 						  0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
1695 		break;
1696 	default:
1697 		break;
1698 	}
1699 done:
1700 	radeon_crtc->pll_id = -1;
1701 }
1702 
/* crtc helper vtable wiring the atombios implementations into the DRM
 * mode-setting helper framework (load_lut is shared with the legacy path)
 */
static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
	.dpms = atombios_crtc_dpms,
	.mode_fixup = atombios_crtc_mode_fixup,
	.mode_set = atombios_crtc_mode_set,
	.mode_set_base = atombios_crtc_set_base,
	.mode_set_base_atomic = atombios_crtc_set_base_atomic,
	.prepare = atombios_crtc_prepare,
	.commit = atombios_crtc_commit,
	.load_lut = radeon_crtc_load_lut,
	.disable = atombios_crtc_disable,
};
1714 
1715 void radeon_atombios_init_crtc(struct drm_device *dev,
1716 			       struct radeon_crtc *radeon_crtc)
1717 {
1718 	struct radeon_device *rdev = dev->dev_private;
1719 
1720 	if (ASIC_IS_DCE4(rdev)) {
1721 		switch (radeon_crtc->crtc_id) {
1722 		case 0:
1723 		default:
1724 			radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
1725 			break;
1726 		case 1:
1727 			radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
1728 			break;
1729 		case 2:
1730 			radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
1731 			break;
1732 		case 3:
1733 			radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
1734 			break;
1735 		case 4:
1736 			radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
1737 			break;
1738 		case 5:
1739 			radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
1740 			break;
1741 		}
1742 	} else {
1743 		if (radeon_crtc->crtc_id == 1)
1744 			radeon_crtc->crtc_offset =
1745 				AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
1746 		else
1747 			radeon_crtc->crtc_offset = 0;
1748 	}
1749 	radeon_crtc->pll_id = -1;
1750 	drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
1751 }
1752