1 /*
2  * Copyright © 2006-2017 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23 
24 #include "intel_atomic.h"
25 #include "intel_cdclk.h"
26 #include "intel_display_types.h"
27 #include "intel_sideband.h"
28 
29 /**
30  * DOC: CDCLK / RAWCLK
31  *
32  * The display engine uses several different clocks to do its work. There
33  * are two main clocks involved that aren't directly related to the actual
34  * pixel clock or any symbol/bit clock of the actual output port. These
35  * are the core display clock (CDCLK) and RAWCLK.
36  *
37  * CDCLK clocks most of the display pipe logic, and thus its frequency
38  * must be high enough to support the rate at which pixels are flowing
 * through the pipes. Downscaling must also be accounted for, as it
 * increases the effective pixel rate.
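 *
 * For example, on platforms where CDCLK must be at least the pixel rate
 * (see intel_pixel_rate_to_cdclk()), a mode with a 500 MHz pixel clock
 * needs a CDCLK of at least 500 MHz, whereas platforms that only need
 * half the pixel rate can get by with 250 MHz.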
41  *
42  * On several platforms the CDCLK frequency can be changed dynamically
43  * to minimize power consumption for a given display configuration.
44  * Typically changes to the CDCLK frequency require all the display pipes
45  * to be shut down while the frequency is being changed.
46  *
47  * On SKL+ the DMC will toggle the CDCLK off/on during DC5/6 entry/exit.
 * The DMC will not change the active CDCLK frequency, however, so that
 * part is still performed directly by the driver.
50  *
51  * RAWCLK is a fixed frequency clock, often used by various auxiliary
52  * blocks such as AUX CH or backlight PWM. Hence the only thing we
53  * really need to know about RAWCLK is its frequency so that various
54  * dividers can be programmed correctly.
55  */
56 
57 static void fixed_133mhz_get_cdclk(struct drm_i915_private *dev_priv,
58 				   struct intel_cdclk_state *cdclk_state)
59 {
60 	cdclk_state->cdclk = 133333;
61 }
62 
63 static void fixed_200mhz_get_cdclk(struct drm_i915_private *dev_priv,
64 				   struct intel_cdclk_state *cdclk_state)
65 {
66 	cdclk_state->cdclk = 200000;
67 }
68 
69 static void fixed_266mhz_get_cdclk(struct drm_i915_private *dev_priv,
70 				   struct intel_cdclk_state *cdclk_state)
71 {
72 	cdclk_state->cdclk = 266667;
73 }
74 
75 static void fixed_333mhz_get_cdclk(struct drm_i915_private *dev_priv,
76 				   struct intel_cdclk_state *cdclk_state)
77 {
78 	cdclk_state->cdclk = 333333;
79 }
80 
81 static void fixed_400mhz_get_cdclk(struct drm_i915_private *dev_priv,
82 				   struct intel_cdclk_state *cdclk_state)
83 {
84 	cdclk_state->cdclk = 400000;
85 }
86 
87 static void fixed_450mhz_get_cdclk(struct drm_i915_private *dev_priv,
88 				   struct intel_cdclk_state *cdclk_state)
89 {
90 	cdclk_state->cdclk = 450000;
91 }
92 
93 static void i85x_get_cdclk(struct drm_i915_private *dev_priv,
94 			   struct intel_cdclk_state *cdclk_state)
95 {
96 	struct pci_dev *pdev = dev_priv->drm.pdev;
97 	u16 hpllcc = 0;
98 
99 	/*
	 * 852GM/852GMV only support 133 MHz and the HPLLCC
101 	 * encoding is different :(
102 	 * FIXME is this the right way to detect 852GM/852GMV?
103 	 */
104 	if (pdev->revision == 0x1) {
105 		cdclk_state->cdclk = 133333;
106 		return;
107 	}
108 
109 	pci_bus_read_config_word(pdev->bus,
110 				 PCI_DEVFN(0, 3), HPLLCC, &hpllcc);
111 
112 	/* Assume that the hardware is in the high speed state.  This
113 	 * should be the default.
114 	 */
115 	switch (hpllcc & GC_CLOCK_CONTROL_MASK) {
116 	case GC_CLOCK_133_200:
117 	case GC_CLOCK_133_200_2:
118 	case GC_CLOCK_100_200:
119 		cdclk_state->cdclk = 200000;
120 		break;
121 	case GC_CLOCK_166_250:
122 		cdclk_state->cdclk = 250000;
123 		break;
124 	case GC_CLOCK_100_133:
125 		cdclk_state->cdclk = 133333;
126 		break;
127 	case GC_CLOCK_133_266:
128 	case GC_CLOCK_133_266_2:
129 	case GC_CLOCK_166_266:
130 		cdclk_state->cdclk = 266667;
131 		break;
132 	}
133 }
134 
135 static void i915gm_get_cdclk(struct drm_i915_private *dev_priv,
136 			     struct intel_cdclk_state *cdclk_state)
137 {
138 	struct pci_dev *pdev = dev_priv->drm.pdev;
139 	u16 gcfgc = 0;
140 
141 	pci_read_config_word(pdev, GCFGC, &gcfgc);
142 
143 	if (gcfgc & GC_LOW_FREQUENCY_ENABLE) {
144 		cdclk_state->cdclk = 133333;
145 		return;
146 	}
147 
148 	switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
149 	case GC_DISPLAY_CLOCK_333_320_MHZ:
150 		cdclk_state->cdclk = 333333;
151 		break;
152 	default:
153 	case GC_DISPLAY_CLOCK_190_200_MHZ:
154 		cdclk_state->cdclk = 190000;
155 		break;
156 	}
157 }
158 
159 static void i945gm_get_cdclk(struct drm_i915_private *dev_priv,
160 			     struct intel_cdclk_state *cdclk_state)
161 {
162 	struct pci_dev *pdev = dev_priv->drm.pdev;
163 	u16 gcfgc = 0;
164 
165 	pci_read_config_word(pdev, GCFGC, &gcfgc);
166 
167 	if (gcfgc & GC_LOW_FREQUENCY_ENABLE) {
168 		cdclk_state->cdclk = 133333;
169 		return;
170 	}
171 
172 	switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
173 	case GC_DISPLAY_CLOCK_333_320_MHZ:
174 		cdclk_state->cdclk = 320000;
175 		break;
176 	default:
177 	case GC_DISPLAY_CLOCK_190_200_MHZ:
178 		cdclk_state->cdclk = 200000;
179 		break;
180 	}
181 }
182 
183 static unsigned int intel_hpll_vco(struct drm_i915_private *dev_priv)
184 {
185 	static const unsigned int blb_vco[8] = {
186 		[0] = 3200000,
187 		[1] = 4000000,
188 		[2] = 5333333,
189 		[3] = 4800000,
190 		[4] = 6400000,
191 	};
192 	static const unsigned int pnv_vco[8] = {
193 		[0] = 3200000,
194 		[1] = 4000000,
195 		[2] = 5333333,
196 		[3] = 4800000,
197 		[4] = 2666667,
198 	};
199 	static const unsigned int cl_vco[8] = {
200 		[0] = 3200000,
201 		[1] = 4000000,
202 		[2] = 5333333,
203 		[3] = 6400000,
204 		[4] = 3333333,
205 		[5] = 3566667,
206 		[6] = 4266667,
207 	};
208 	static const unsigned int elk_vco[8] = {
209 		[0] = 3200000,
210 		[1] = 4000000,
211 		[2] = 5333333,
212 		[3] = 4800000,
213 	};
214 	static const unsigned int ctg_vco[8] = {
215 		[0] = 3200000,
216 		[1] = 4000000,
217 		[2] = 5333333,
218 		[3] = 6400000,
219 		[4] = 2666667,
220 		[5] = 4266667,
221 	};
222 	const unsigned int *vco_table;
223 	unsigned int vco;
224 	u8 tmp = 0;
225 
226 	/* FIXME other chipsets? */
227 	if (IS_GM45(dev_priv))
228 		vco_table = ctg_vco;
229 	else if (IS_G45(dev_priv))
230 		vco_table = elk_vco;
231 	else if (IS_I965GM(dev_priv))
232 		vco_table = cl_vco;
233 	else if (IS_PINEVIEW(dev_priv))
234 		vco_table = pnv_vco;
235 	else if (IS_G33(dev_priv))
236 		vco_table = blb_vco;
237 	else
238 		return 0;
239 
240 	tmp = I915_READ(IS_PINEVIEW(dev_priv) || IS_MOBILE(dev_priv) ?
241 			HPLLVCO_MOBILE : HPLLVCO);
242 
243 	vco = vco_table[tmp & 0x7];
244 	if (vco == 0)
245 		DRM_ERROR("Bad HPLL VCO (HPLLVCO=0x%02x)\n", tmp);
246 	else
247 		DRM_DEBUG_KMS("HPLL VCO %u kHz\n", vco);
248 
249 	return vco;
250 }
251 
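/*
 * On these chipsets CDCLK is the HPLL VCO divided down by a divider
 * selected via GCFGC; e.g. a 3200 MHz VCO with a divider of 12 gives
 * 3200000 / 12 = 266667 kHz.
 */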
252 static void g33_get_cdclk(struct drm_i915_private *dev_priv,
253 			  struct intel_cdclk_state *cdclk_state)
254 {
255 	struct pci_dev *pdev = dev_priv->drm.pdev;
256 	static const u8 div_3200[] = { 12, 10,  8,  7, 5, 16 };
257 	static const u8 div_4000[] = { 14, 12, 10,  8, 6, 20 };
258 	static const u8 div_4800[] = { 20, 14, 12, 10, 8, 24 };
259 	static const u8 div_5333[] = { 20, 16, 12, 12, 8, 28 };
260 	const u8 *div_table;
261 	unsigned int cdclk_sel;
262 	u16 tmp = 0;
263 
264 	cdclk_state->vco = intel_hpll_vco(dev_priv);
265 
266 	pci_read_config_word(pdev, GCFGC, &tmp);
267 
268 	cdclk_sel = (tmp >> 4) & 0x7;
269 
270 	if (cdclk_sel >= ARRAY_SIZE(div_3200))
271 		goto fail;
272 
273 	switch (cdclk_state->vco) {
274 	case 3200000:
275 		div_table = div_3200;
276 		break;
277 	case 4000000:
278 		div_table = div_4000;
279 		break;
280 	case 4800000:
281 		div_table = div_4800;
282 		break;
283 	case 5333333:
284 		div_table = div_5333;
285 		break;
286 	default:
287 		goto fail;
288 	}
289 
290 	cdclk_state->cdclk = DIV_ROUND_CLOSEST(cdclk_state->vco,
291 					       div_table[cdclk_sel]);
292 	return;
293 
294 fail:
295 	DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u kHz, CFGC=0x%08x\n",
296 		  cdclk_state->vco, tmp);
297 	cdclk_state->cdclk = 190476;
298 }
299 
300 static void pnv_get_cdclk(struct drm_i915_private *dev_priv,
301 			  struct intel_cdclk_state *cdclk_state)
302 {
303 	struct pci_dev *pdev = dev_priv->drm.pdev;
304 	u16 gcfgc = 0;
305 
306 	pci_read_config_word(pdev, GCFGC, &gcfgc);
307 
308 	switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
309 	case GC_DISPLAY_CLOCK_267_MHZ_PNV:
310 		cdclk_state->cdclk = 266667;
311 		break;
312 	case GC_DISPLAY_CLOCK_333_MHZ_PNV:
313 		cdclk_state->cdclk = 333333;
314 		break;
315 	case GC_DISPLAY_CLOCK_444_MHZ_PNV:
316 		cdclk_state->cdclk = 444444;
317 		break;
318 	case GC_DISPLAY_CLOCK_200_MHZ_PNV:
319 		cdclk_state->cdclk = 200000;
320 		break;
321 	default:
322 		DRM_ERROR("Unknown pnv display core clock 0x%04x\n", gcfgc);
323 		/* fall through */
324 	case GC_DISPLAY_CLOCK_133_MHZ_PNV:
325 		cdclk_state->cdclk = 133333;
326 		break;
327 	case GC_DISPLAY_CLOCK_167_MHZ_PNV:
328 		cdclk_state->cdclk = 166667;
329 		break;
330 	}
331 }
332 
333 static void i965gm_get_cdclk(struct drm_i915_private *dev_priv,
334 			     struct intel_cdclk_state *cdclk_state)
335 {
336 	struct pci_dev *pdev = dev_priv->drm.pdev;
337 	static const u8 div_3200[] = { 16, 10,  8 };
338 	static const u8 div_4000[] = { 20, 12, 10 };
339 	static const u8 div_5333[] = { 24, 16, 14 };
340 	const u8 *div_table;
341 	unsigned int cdclk_sel;
342 	u16 tmp = 0;
343 
344 	cdclk_state->vco = intel_hpll_vco(dev_priv);
345 
346 	pci_read_config_word(pdev, GCFGC, &tmp);
347 
348 	cdclk_sel = ((tmp >> 8) & 0x1f) - 1;
349 
350 	if (cdclk_sel >= ARRAY_SIZE(div_3200))
351 		goto fail;
352 
353 	switch (cdclk_state->vco) {
354 	case 3200000:
355 		div_table = div_3200;
356 		break;
357 	case 4000000:
358 		div_table = div_4000;
359 		break;
360 	case 5333333:
361 		div_table = div_5333;
362 		break;
363 	default:
364 		goto fail;
365 	}
366 
367 	cdclk_state->cdclk = DIV_ROUND_CLOSEST(cdclk_state->vco,
368 					       div_table[cdclk_sel]);
369 	return;
370 
371 fail:
372 	DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u kHz, CFGC=0x%04x\n",
373 		  cdclk_state->vco, tmp);
374 	cdclk_state->cdclk = 200000;
375 }
376 
377 static void gm45_get_cdclk(struct drm_i915_private *dev_priv,
378 			   struct intel_cdclk_state *cdclk_state)
379 {
380 	struct pci_dev *pdev = dev_priv->drm.pdev;
381 	unsigned int cdclk_sel;
382 	u16 tmp = 0;
383 
384 	cdclk_state->vco = intel_hpll_vco(dev_priv);
385 
386 	pci_read_config_word(pdev, GCFGC, &tmp);
387 
388 	cdclk_sel = (tmp >> 12) & 0x1;
389 
390 	switch (cdclk_state->vco) {
391 	case 2666667:
392 	case 4000000:
393 	case 5333333:
394 		cdclk_state->cdclk = cdclk_sel ? 333333 : 222222;
395 		break;
396 	case 3200000:
397 		cdclk_state->cdclk = cdclk_sel ? 320000 : 228571;
398 		break;
399 	default:
400 		DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u, CFGC=0x%04x\n",
401 			  cdclk_state->vco, tmp);
402 		cdclk_state->cdclk = 222222;
403 		break;
404 	}
405 }
406 
407 static void hsw_get_cdclk(struct drm_i915_private *dev_priv,
408 			  struct intel_cdclk_state *cdclk_state)
409 {
410 	u32 lcpll = I915_READ(LCPLL_CTL);
411 	u32 freq = lcpll & LCPLL_CLK_FREQ_MASK;
412 
413 	if (lcpll & LCPLL_CD_SOURCE_FCLK)
414 		cdclk_state->cdclk = 800000;
415 	else if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
416 		cdclk_state->cdclk = 450000;
417 	else if (freq == LCPLL_CLK_FREQ_450)
418 		cdclk_state->cdclk = 450000;
419 	else if (IS_HSW_ULT(dev_priv))
420 		cdclk_state->cdclk = 337500;
421 	else
422 		cdclk_state->cdclk = 540000;
423 }
424 
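/*
 * E.g. a min_cdclk of 300000 kHz selects freq_320, which is 320 MHz only
 * when twice the HPLL frequency is an exact multiple of it, and
 * 333.333 MHz otherwise.
 */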
425 static int vlv_calc_cdclk(struct drm_i915_private *dev_priv, int min_cdclk)
426 {
427 	int freq_320 = (dev_priv->hpll_freq <<  1) % 320000 != 0 ?
428 		333333 : 320000;
429 
430 	/*
431 	 * We seem to get an unstable or solid color picture at 200MHz.
432 	 * Not sure what's wrong. For now use 200MHz only when all pipes
433 	 * are off.
434 	 */
435 	if (IS_VALLEYVIEW(dev_priv) && min_cdclk > freq_320)
436 		return 400000;
437 	else if (min_cdclk > 266667)
438 		return freq_320;
439 	else if (min_cdclk > 0)
440 		return 266667;
441 	else
442 		return 200000;
443 }
444 
445 static u8 vlv_calc_voltage_level(struct drm_i915_private *dev_priv, int cdclk)
446 {
447 	if (IS_VALLEYVIEW(dev_priv)) {
448 		if (cdclk >= 320000) /* jump to highest voltage for 400MHz too */
449 			return 2;
450 		else if (cdclk >= 266667)
451 			return 1;
452 		else
453 			return 0;
454 	} else {
455 		/*
456 		 * Specs are full of misinformation, but testing on actual
457 		 * hardware has shown that we just need to write the desired
458 		 * CCK divider into the Punit register.
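		 *
		 * For example, a CDCLK of exactly (2 * HPLL) / 5 results in
		 * a divider field value of 4.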
459 		 */
460 		return DIV_ROUND_CLOSEST(dev_priv->hpll_freq << 1, cdclk) - 1;
461 	}
462 }
463 
464 static void vlv_get_cdclk(struct drm_i915_private *dev_priv,
465 			  struct intel_cdclk_state *cdclk_state)
466 {
467 	u32 val;
468 
469 	vlv_iosf_sb_get(dev_priv,
470 			BIT(VLV_IOSF_SB_CCK) | BIT(VLV_IOSF_SB_PUNIT));
471 
472 	cdclk_state->vco = vlv_get_hpll_vco(dev_priv);
473 	cdclk_state->cdclk = vlv_get_cck_clock(dev_priv, "cdclk",
474 					       CCK_DISPLAY_CLOCK_CONTROL,
475 					       cdclk_state->vco);
476 
477 	val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
478 
479 	vlv_iosf_sb_put(dev_priv,
480 			BIT(VLV_IOSF_SB_CCK) | BIT(VLV_IOSF_SB_PUNIT));
481 
482 	if (IS_VALLEYVIEW(dev_priv))
483 		cdclk_state->voltage_level = (val & DSPFREQGUAR_MASK) >>
484 			DSPFREQGUAR_SHIFT;
485 	else
486 		cdclk_state->voltage_level = (val & DSPFREQGUAR_MASK_CHV) >>
487 			DSPFREQGUAR_SHIFT_CHV;
488 }
489 
490 static void vlv_program_pfi_credits(struct drm_i915_private *dev_priv)
491 {
492 	unsigned int credits, default_credits;
493 
494 	if (IS_CHERRYVIEW(dev_priv))
495 		default_credits = PFI_CREDIT(12);
496 	else
497 		default_credits = PFI_CREDIT(8);
498 
499 	if (dev_priv->cdclk.hw.cdclk >= dev_priv->czclk_freq) {
500 		/* CHV suggested value is 31 or 63 */
501 		if (IS_CHERRYVIEW(dev_priv))
502 			credits = PFI_CREDIT_63;
503 		else
504 			credits = PFI_CREDIT(15);
505 	} else {
506 		credits = default_credits;
507 	}
508 
509 	/*
510 	 * WA - write default credits before re-programming
511 	 * FIXME: should we also set the resend bit here?
512 	 */
513 	I915_WRITE(GCI_CONTROL, VGA_FAST_MODE_DISABLE |
514 		   default_credits);
515 
516 	I915_WRITE(GCI_CONTROL, VGA_FAST_MODE_DISABLE |
517 		   credits | PFI_CREDIT_RESEND);
518 
519 	/*
520 	 * FIXME is this guaranteed to clear
521 	 * immediately or should we poll for it?
522 	 */
523 	WARN_ON(I915_READ(GCI_CONTROL) & PFI_CREDIT_RESEND);
524 }
525 
526 static void vlv_set_cdclk(struct drm_i915_private *dev_priv,
527 			  const struct intel_cdclk_state *cdclk_state,
528 			  enum pipe pipe)
529 {
530 	int cdclk = cdclk_state->cdclk;
531 	u32 val, cmd = cdclk_state->voltage_level;
532 	intel_wakeref_t wakeref;
533 
534 	switch (cdclk) {
535 	case 400000:
536 	case 333333:
537 	case 320000:
538 	case 266667:
539 	case 200000:
540 		break;
541 	default:
542 		MISSING_CASE(cdclk);
543 		return;
544 	}
545 
546 	/* There are cases where we can end up here with power domains
547 	 * off and a CDCLK frequency other than the minimum, like when
548 	 * issuing a modeset without actually changing any display after
549 	 * a system suspend.  So grab the display core domain, which covers
550 	 * the HW blocks needed for the following programming.
551 	 */
552 	wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_DISPLAY_CORE);
553 
554 	vlv_iosf_sb_get(dev_priv,
555 			BIT(VLV_IOSF_SB_CCK) |
556 			BIT(VLV_IOSF_SB_BUNIT) |
557 			BIT(VLV_IOSF_SB_PUNIT));
558 
559 	val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
560 	val &= ~DSPFREQGUAR_MASK;
561 	val |= (cmd << DSPFREQGUAR_SHIFT);
562 	vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);
563 	if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) &
564 		      DSPFREQSTAT_MASK) == (cmd << DSPFREQSTAT_SHIFT),
565 		     50)) {
566 		DRM_ERROR("timed out waiting for CDclk change\n");
567 	}
568 
569 	if (cdclk == 400000) {
570 		u32 divider;
571 
572 		divider = DIV_ROUND_CLOSEST(dev_priv->hpll_freq << 1,
573 					    cdclk) - 1;
574 
575 		/* adjust cdclk divider */
576 		val = vlv_cck_read(dev_priv, CCK_DISPLAY_CLOCK_CONTROL);
577 		val &= ~CCK_FREQUENCY_VALUES;
578 		val |= divider;
579 		vlv_cck_write(dev_priv, CCK_DISPLAY_CLOCK_CONTROL, val);
580 
581 		if (wait_for((vlv_cck_read(dev_priv, CCK_DISPLAY_CLOCK_CONTROL) &
582 			      CCK_FREQUENCY_STATUS) == (divider << CCK_FREQUENCY_STATUS_SHIFT),
583 			     50))
584 			DRM_ERROR("timed out waiting for CDclk change\n");
585 	}
586 
587 	/* adjust self-refresh exit latency value */
588 	val = vlv_bunit_read(dev_priv, BUNIT_REG_BISOC);
589 	val &= ~0x7f;
590 
591 	/*
592 	 * For high bandwidth configs, we set a higher latency in the bunit
593 	 * so that the core display fetch happens in time to avoid underruns.
594 	 */
595 	if (cdclk == 400000)
596 		val |= 4500 / 250; /* 4.5 usec */
597 	else
598 		val |= 3000 / 250; /* 3.0 usec */
599 	vlv_bunit_write(dev_priv, BUNIT_REG_BISOC, val);
600 
601 	vlv_iosf_sb_put(dev_priv,
602 			BIT(VLV_IOSF_SB_CCK) |
603 			BIT(VLV_IOSF_SB_BUNIT) |
604 			BIT(VLV_IOSF_SB_PUNIT));
605 
606 	intel_update_cdclk(dev_priv);
607 
608 	vlv_program_pfi_credits(dev_priv);
609 
610 	intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
611 }
612 
613 static void chv_set_cdclk(struct drm_i915_private *dev_priv,
614 			  const struct intel_cdclk_state *cdclk_state,
615 			  enum pipe pipe)
616 {
617 	int cdclk = cdclk_state->cdclk;
618 	u32 val, cmd = cdclk_state->voltage_level;
619 	intel_wakeref_t wakeref;
620 
621 	switch (cdclk) {
622 	case 333333:
623 	case 320000:
624 	case 266667:
625 	case 200000:
626 		break;
627 	default:
628 		MISSING_CASE(cdclk);
629 		return;
630 	}
631 
632 	/* There are cases where we can end up here with power domains
633 	 * off and a CDCLK frequency other than the minimum, like when
634 	 * issuing a modeset without actually changing any display after
635 	 * a system suspend.  So grab the display core domain, which covers
636 	 * the HW blocks needed for the following programming.
637 	 */
638 	wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_DISPLAY_CORE);
639 
640 	vlv_punit_get(dev_priv);
641 	val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
642 	val &= ~DSPFREQGUAR_MASK_CHV;
643 	val |= (cmd << DSPFREQGUAR_SHIFT_CHV);
644 	vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);
645 	if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) &
646 		      DSPFREQSTAT_MASK_CHV) == (cmd << DSPFREQSTAT_SHIFT_CHV),
647 		     50)) {
648 		DRM_ERROR("timed out waiting for CDclk change\n");
649 	}
650 
651 	vlv_punit_put(dev_priv);
652 
653 	intel_update_cdclk(dev_priv);
654 
655 	vlv_program_pfi_credits(dev_priv);
656 
657 	intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
658 }
659 
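/*
 * E.g. a 400 MHz minimum CDCLK maps to 450 MHz, and anything above
 * 540 MHz maps to the 675 MHz maximum.
 */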
660 static int bdw_calc_cdclk(int min_cdclk)
661 {
662 	if (min_cdclk > 540000)
663 		return 675000;
664 	else if (min_cdclk > 450000)
665 		return 540000;
666 	else if (min_cdclk > 337500)
667 		return 450000;
668 	else
669 		return 337500;
670 }
671 
672 static u8 bdw_calc_voltage_level(int cdclk)
673 {
674 	switch (cdclk) {
675 	default:
676 	case 337500:
677 		return 2;
678 	case 450000:
679 		return 0;
680 	case 540000:
681 		return 1;
682 	case 675000:
683 		return 3;
684 	}
685 }
686 
687 static void bdw_get_cdclk(struct drm_i915_private *dev_priv,
688 			  struct intel_cdclk_state *cdclk_state)
689 {
690 	u32 lcpll = I915_READ(LCPLL_CTL);
691 	u32 freq = lcpll & LCPLL_CLK_FREQ_MASK;
692 
693 	if (lcpll & LCPLL_CD_SOURCE_FCLK)
694 		cdclk_state->cdclk = 800000;
695 	else if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
696 		cdclk_state->cdclk = 450000;
697 	else if (freq == LCPLL_CLK_FREQ_450)
698 		cdclk_state->cdclk = 450000;
699 	else if (freq == LCPLL_CLK_FREQ_54O_BDW)
700 		cdclk_state->cdclk = 540000;
701 	else if (freq == LCPLL_CLK_FREQ_337_5_BDW)
702 		cdclk_state->cdclk = 337500;
703 	else
704 		cdclk_state->cdclk = 675000;
705 
706 	/*
707 	 * Can't read this out :( Let's assume it's
708 	 * at least what the CDCLK frequency requires.
709 	 */
710 	cdclk_state->voltage_level =
711 		bdw_calc_voltage_level(cdclk_state->cdclk);
712 }
713 
714 static void bdw_set_cdclk(struct drm_i915_private *dev_priv,
715 			  const struct intel_cdclk_state *cdclk_state,
716 			  enum pipe pipe)
717 {
718 	int cdclk = cdclk_state->cdclk;
719 	u32 val;
720 	int ret;
721 
722 	if (WARN((I915_READ(LCPLL_CTL) &
723 		  (LCPLL_PLL_DISABLE | LCPLL_PLL_LOCK |
724 		   LCPLL_CD_CLOCK_DISABLE | LCPLL_ROOT_CD_CLOCK_DISABLE |
725 		   LCPLL_CD2X_CLOCK_DISABLE | LCPLL_POWER_DOWN_ALLOW |
726 		   LCPLL_CD_SOURCE_FCLK)) != LCPLL_PLL_LOCK,
727 		 "trying to change cdclk frequency with cdclk not enabled\n"))
728 		return;
729 
730 	ret = sandybridge_pcode_write(dev_priv,
731 				      BDW_PCODE_DISPLAY_FREQ_CHANGE_REQ, 0x0);
732 	if (ret) {
733 		DRM_ERROR("failed to inform pcode about cdclk change\n");
734 		return;
735 	}
736 
737 	val = I915_READ(LCPLL_CTL);
738 	val |= LCPLL_CD_SOURCE_FCLK;
739 	I915_WRITE(LCPLL_CTL, val);
740 
741 	/*
742 	 * According to the spec, it should be enough to poll for this 1 us.
743 	 * However, extensive testing shows that this can take longer.
744 	 */
745 	if (wait_for_us(I915_READ(LCPLL_CTL) &
746 			LCPLL_CD_SOURCE_FCLK_DONE, 100))
747 		DRM_ERROR("Switching to FCLK failed\n");
748 
749 	val = I915_READ(LCPLL_CTL);
750 	val &= ~LCPLL_CLK_FREQ_MASK;
751 
752 	switch (cdclk) {
753 	default:
754 		MISSING_CASE(cdclk);
755 		/* fall through */
756 	case 337500:
757 		val |= LCPLL_CLK_FREQ_337_5_BDW;
758 		break;
759 	case 450000:
760 		val |= LCPLL_CLK_FREQ_450;
761 		break;
762 	case 540000:
763 		val |= LCPLL_CLK_FREQ_54O_BDW;
764 		break;
765 	case 675000:
766 		val |= LCPLL_CLK_FREQ_675_BDW;
767 		break;
768 	}
769 
770 	I915_WRITE(LCPLL_CTL, val);
771 
772 	val = I915_READ(LCPLL_CTL);
773 	val &= ~LCPLL_CD_SOURCE_FCLK;
774 	I915_WRITE(LCPLL_CTL, val);
775 
776 	if (wait_for_us((I915_READ(LCPLL_CTL) &
777 			LCPLL_CD_SOURCE_FCLK_DONE) == 0, 1))
778 		DRM_ERROR("Switching back to LCPLL failed\n");
779 
780 	sandybridge_pcode_write(dev_priv, HSW_PCODE_DE_WRITE_FREQ_REQ,
781 				cdclk_state->voltage_level);
782 
783 	I915_WRITE(CDCLK_FREQ, DIV_ROUND_CLOSEST(cdclk, 1000) - 1);
784 
785 	intel_update_cdclk(dev_priv);
786 }
787 
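/*
 * E.g. with the 8640 MHz DPLL0 VCO a 400000 kHz minimum CDCLK maps to
 * 432000 kHz, while with the 8100 MHz VCO it maps to 450000 kHz.
 */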
788 static int skl_calc_cdclk(int min_cdclk, int vco)
789 {
790 	if (vco == 8640000) {
791 		if (min_cdclk > 540000)
792 			return 617143;
793 		else if (min_cdclk > 432000)
794 			return 540000;
795 		else if (min_cdclk > 308571)
796 			return 432000;
797 		else
798 			return 308571;
799 	} else {
800 		if (min_cdclk > 540000)
801 			return 675000;
802 		else if (min_cdclk > 450000)
803 			return 540000;
804 		else if (min_cdclk > 337500)
805 			return 450000;
806 		else
807 			return 337500;
808 	}
809 }
810 
811 static u8 skl_calc_voltage_level(int cdclk)
812 {
813 	if (cdclk > 540000)
814 		return 3;
815 	else if (cdclk > 450000)
816 		return 2;
817 	else if (cdclk > 337500)
818 		return 1;
819 	else
820 		return 0;
821 }
822 
823 static void skl_dpll0_update(struct drm_i915_private *dev_priv,
824 			     struct intel_cdclk_state *cdclk_state)
825 {
826 	u32 val;
827 
828 	cdclk_state->ref = 24000;
829 	cdclk_state->vco = 0;
830 
831 	val = I915_READ(LCPLL1_CTL);
832 	if ((val & LCPLL_PLL_ENABLE) == 0)
833 		return;
834 
835 	if (WARN_ON((val & LCPLL_PLL_LOCK) == 0))
836 		return;
837 
838 	val = I915_READ(DPLL_CTRL1);
839 
840 	if (WARN_ON((val & (DPLL_CTRL1_HDMI_MODE(SKL_DPLL0) |
841 			    DPLL_CTRL1_SSC(SKL_DPLL0) |
842 			    DPLL_CTRL1_OVERRIDE(SKL_DPLL0))) !=
843 		    DPLL_CTRL1_OVERRIDE(SKL_DPLL0)))
844 		return;
845 
846 	switch (val & DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0)) {
847 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810, SKL_DPLL0):
848 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1350, SKL_DPLL0):
849 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1620, SKL_DPLL0):
850 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2700, SKL_DPLL0):
851 		cdclk_state->vco = 8100000;
852 		break;
853 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080, SKL_DPLL0):
854 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2160, SKL_DPLL0):
855 		cdclk_state->vco = 8640000;
856 		break;
857 	default:
858 		MISSING_CASE(val & DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0));
859 		break;
860 	}
861 }
862 
863 static void skl_get_cdclk(struct drm_i915_private *dev_priv,
864 			  struct intel_cdclk_state *cdclk_state)
865 {
866 	u32 cdctl;
867 
868 	skl_dpll0_update(dev_priv, cdclk_state);
869 
870 	cdclk_state->cdclk = cdclk_state->bypass = cdclk_state->ref;
871 
872 	if (cdclk_state->vco == 0)
873 		goto out;
874 
875 	cdctl = I915_READ(CDCLK_CTL);
876 
877 	if (cdclk_state->vco == 8640000) {
878 		switch (cdctl & CDCLK_FREQ_SEL_MASK) {
879 		case CDCLK_FREQ_450_432:
880 			cdclk_state->cdclk = 432000;
881 			break;
882 		case CDCLK_FREQ_337_308:
883 			cdclk_state->cdclk = 308571;
884 			break;
885 		case CDCLK_FREQ_540:
886 			cdclk_state->cdclk = 540000;
887 			break;
888 		case CDCLK_FREQ_675_617:
889 			cdclk_state->cdclk = 617143;
890 			break;
891 		default:
892 			MISSING_CASE(cdctl & CDCLK_FREQ_SEL_MASK);
893 			break;
894 		}
895 	} else {
896 		switch (cdctl & CDCLK_FREQ_SEL_MASK) {
897 		case CDCLK_FREQ_450_432:
898 			cdclk_state->cdclk = 450000;
899 			break;
900 		case CDCLK_FREQ_337_308:
901 			cdclk_state->cdclk = 337500;
902 			break;
903 		case CDCLK_FREQ_540:
904 			cdclk_state->cdclk = 540000;
905 			break;
906 		case CDCLK_FREQ_675_617:
907 			cdclk_state->cdclk = 675000;
908 			break;
909 		default:
910 			MISSING_CASE(cdctl & CDCLK_FREQ_SEL_MASK);
911 			break;
912 		}
913 	}
914 
915  out:
916 	/*
917 	 * Can't read this out :( Let's assume it's
918 	 * at least what the CDCLK frequency requires.
919 	 */
920 	cdclk_state->voltage_level =
921 		skl_calc_voltage_level(cdclk_state->cdclk);
922 }
923 
924 /* convert from kHz to .1 fixpoint MHz with -1MHz offset */
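/* e.g. 337500 kHz -> DIV_ROUND_CLOSEST(337500 - 1000, 500) = 673 (0x2a1) */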
925 static int skl_cdclk_decimal(int cdclk)
926 {
927 	return DIV_ROUND_CLOSEST(cdclk - 1000, 500);
928 }
929 
930 static void skl_set_preferred_cdclk_vco(struct drm_i915_private *dev_priv,
931 					int vco)
932 {
933 	bool changed = dev_priv->skl_preferred_vco_freq != vco;
934 
935 	dev_priv->skl_preferred_vco_freq = vco;
936 
937 	if (changed)
938 		intel_update_max_cdclk(dev_priv);
939 }
940 
941 static void skl_dpll0_enable(struct drm_i915_private *dev_priv, int vco)
942 {
943 	u32 val;
944 
945 	WARN_ON(vco != 8100000 && vco != 8640000);
946 
947 	/*
948 	 * We always enable DPLL0 with the lowest link rate possible, but still
949 	 * taking into account the VCO required to operate the eDP panel at the
950 	 * desired frequency. The usual DP link rates operate with a VCO of
	 * 8100 MHz while the eDP 1.4 alternate link rates need a VCO of
	 * 8640 MHz.
952 	 * The modeset code is responsible for the selection of the exact link
953 	 * rate later on, with the constraint of choosing a frequency that
954 	 * works with vco.
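	 *
	 * For example, the 8100 MHz VCO divides down to the 337.5/450/540/675
	 * MHz CDCLK frequencies, while the 8640 MHz VCO yields
	 * 308.571/432/540/617.143 MHz (see skl_calc_cdclk()).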
955 	 */
956 	val = I915_READ(DPLL_CTRL1);
957 
958 	val &= ~(DPLL_CTRL1_HDMI_MODE(SKL_DPLL0) | DPLL_CTRL1_SSC(SKL_DPLL0) |
959 		 DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0));
960 	val |= DPLL_CTRL1_OVERRIDE(SKL_DPLL0);
961 	if (vco == 8640000)
962 		val |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080,
963 					    SKL_DPLL0);
964 	else
965 		val |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810,
966 					    SKL_DPLL0);
967 
968 	I915_WRITE(DPLL_CTRL1, val);
969 	POSTING_READ(DPLL_CTRL1);
970 
971 	I915_WRITE(LCPLL1_CTL, I915_READ(LCPLL1_CTL) | LCPLL_PLL_ENABLE);
972 
973 	if (intel_de_wait_for_set(dev_priv, LCPLL1_CTL, LCPLL_PLL_LOCK, 5))
974 		DRM_ERROR("DPLL0 not locked\n");
975 
976 	dev_priv->cdclk.hw.vco = vco;
977 
978 	/* We'll want to keep using the current vco from now on. */
979 	skl_set_preferred_cdclk_vco(dev_priv, vco);
980 }
981 
982 static void skl_dpll0_disable(struct drm_i915_private *dev_priv)
983 {
984 	I915_WRITE(LCPLL1_CTL, I915_READ(LCPLL1_CTL) & ~LCPLL_PLL_ENABLE);
985 	if (intel_de_wait_for_clear(dev_priv, LCPLL1_CTL, LCPLL_PLL_LOCK, 1))
986 		DRM_ERROR("Couldn't disable DPLL0\n");
987 
988 	dev_priv->cdclk.hw.vco = 0;
989 }
990 
991 static void skl_set_cdclk(struct drm_i915_private *dev_priv,
992 			  const struct intel_cdclk_state *cdclk_state,
993 			  enum pipe pipe)
994 {
995 	int cdclk = cdclk_state->cdclk;
996 	int vco = cdclk_state->vco;
997 	u32 freq_select, cdclk_ctl;
998 	int ret;
999 
1000 	/*
	 * Based on WA#1183, the 308 and 617 MHz CDCLK rates are unsupported
	 * on SKL. In theory this should never happen since only the eDP 1.4
	 * 2.16 and 4.32 Gbps link rates require them, but eDP 1.4 is not
	 * supported on SKL either, see the above WA. WARN whenever trying to
	 * use the corresponding VCO freq as that always leads to using the
	 * minimum 308 MHz CDCLK.
1007 	 */
1008 	WARN_ON_ONCE(IS_SKYLAKE(dev_priv) && vco == 8640000);
1009 
1010 	ret = skl_pcode_request(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1011 				SKL_CDCLK_PREPARE_FOR_CHANGE,
1012 				SKL_CDCLK_READY_FOR_CHANGE,
1013 				SKL_CDCLK_READY_FOR_CHANGE, 3);
1014 	if (ret) {
1015 		DRM_ERROR("Failed to inform PCU about cdclk change (%d)\n",
1016 			  ret);
1017 		return;
1018 	}
1019 
1020 	/* Choose frequency for this cdclk */
1021 	switch (cdclk) {
1022 	default:
1023 		WARN_ON(cdclk != dev_priv->cdclk.hw.bypass);
1024 		WARN_ON(vco != 0);
1025 		/* fall through */
1026 	case 308571:
1027 	case 337500:
1028 		freq_select = CDCLK_FREQ_337_308;
1029 		break;
1030 	case 450000:
1031 	case 432000:
1032 		freq_select = CDCLK_FREQ_450_432;
1033 		break;
1034 	case 540000:
1035 		freq_select = CDCLK_FREQ_540;
1036 		break;
1037 	case 617143:
1038 	case 675000:
1039 		freq_select = CDCLK_FREQ_675_617;
1040 		break;
1041 	}
1042 
1043 	if (dev_priv->cdclk.hw.vco != 0 &&
1044 	    dev_priv->cdclk.hw.vco != vco)
1045 		skl_dpll0_disable(dev_priv);
1046 
1047 	cdclk_ctl = I915_READ(CDCLK_CTL);
1048 
1049 	if (dev_priv->cdclk.hw.vco != vco) {
1050 		/* Wa Display #1183: skl,kbl,cfl */
1051 		cdclk_ctl &= ~(CDCLK_FREQ_SEL_MASK | CDCLK_FREQ_DECIMAL_MASK);
1052 		cdclk_ctl |= freq_select | skl_cdclk_decimal(cdclk);
1053 		I915_WRITE(CDCLK_CTL, cdclk_ctl);
1054 	}
1055 
1056 	/* Wa Display #1183: skl,kbl,cfl */
1057 	cdclk_ctl |= CDCLK_DIVMUX_CD_OVERRIDE;
1058 	I915_WRITE(CDCLK_CTL, cdclk_ctl);
1059 	POSTING_READ(CDCLK_CTL);
1060 
1061 	if (dev_priv->cdclk.hw.vco != vco)
1062 		skl_dpll0_enable(dev_priv, vco);
1063 
1064 	/* Wa Display #1183: skl,kbl,cfl */
1065 	cdclk_ctl &= ~(CDCLK_FREQ_SEL_MASK | CDCLK_FREQ_DECIMAL_MASK);
1066 	I915_WRITE(CDCLK_CTL, cdclk_ctl);
1067 
1068 	cdclk_ctl |= freq_select | skl_cdclk_decimal(cdclk);
1069 	I915_WRITE(CDCLK_CTL, cdclk_ctl);
1070 
1071 	/* Wa Display #1183: skl,kbl,cfl */
1072 	cdclk_ctl &= ~CDCLK_DIVMUX_CD_OVERRIDE;
1073 	I915_WRITE(CDCLK_CTL, cdclk_ctl);
1074 	POSTING_READ(CDCLK_CTL);
1075 
1076 	/* inform PCU of the change */
1077 	sandybridge_pcode_write(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1078 				cdclk_state->voltage_level);
1079 
1080 	intel_update_cdclk(dev_priv);
1081 }
1082 
1083 static void skl_sanitize_cdclk(struct drm_i915_private *dev_priv)
1084 {
1085 	u32 cdctl, expected;
1086 
1087 	/*
	 * Check if the pre-OS initialized the display.
	 * The SWF18 scratchpad register is set by the pre-OS and can be used
	 * by OS drivers to check that status.
1091 	 */
1092 	if ((I915_READ(SWF_ILK(0x18)) & 0x00FFFFFF) == 0)
1093 		goto sanitize;
1094 
1095 	intel_update_cdclk(dev_priv);
1096 	intel_dump_cdclk_state(&dev_priv->cdclk.hw, "Current CDCLK");
1097 
1098 	/* Is PLL enabled and locked ? */
1099 	if (dev_priv->cdclk.hw.vco == 0 ||
1100 	    dev_priv->cdclk.hw.cdclk == dev_priv->cdclk.hw.bypass)
1101 		goto sanitize;
1102 
	/* DPLL okay; verify the CDCLK.
	 *
	 * In some instances the frequency selection is correct but the
	 * decimal part is programmed wrong by the BIOS when the pre-OS does
	 * not enable the display. Verify that as well.
1108 	 */
1109 	cdctl = I915_READ(CDCLK_CTL);
1110 	expected = (cdctl & CDCLK_FREQ_SEL_MASK) |
1111 		skl_cdclk_decimal(dev_priv->cdclk.hw.cdclk);
1112 	if (cdctl == expected)
1113 		/* All well; nothing to sanitize */
1114 		return;
1115 
1116 sanitize:
1117 	DRM_DEBUG_KMS("Sanitizing cdclk programmed by pre-os\n");
1118 
1119 	/* force cdclk programming */
1120 	dev_priv->cdclk.hw.cdclk = 0;
1121 	/* force full PLL disable + enable */
1122 	dev_priv->cdclk.hw.vco = -1;
1123 }
1124 
1125 static void skl_init_cdclk(struct drm_i915_private *dev_priv)
1126 {
1127 	struct intel_cdclk_state cdclk_state;
1128 
1129 	skl_sanitize_cdclk(dev_priv);
1130 
1131 	if (dev_priv->cdclk.hw.cdclk != 0 &&
1132 	    dev_priv->cdclk.hw.vco != 0) {
1133 		/*
1134 		 * Use the current vco as our initial
1135 		 * guess as to what the preferred vco is.
1136 		 */
1137 		if (dev_priv->skl_preferred_vco_freq == 0)
1138 			skl_set_preferred_cdclk_vco(dev_priv,
1139 						    dev_priv->cdclk.hw.vco);
1140 		return;
1141 	}
1142 
1143 	cdclk_state = dev_priv->cdclk.hw;
1144 
1145 	cdclk_state.vco = dev_priv->skl_preferred_vco_freq;
1146 	if (cdclk_state.vco == 0)
1147 		cdclk_state.vco = 8100000;
1148 	cdclk_state.cdclk = skl_calc_cdclk(0, cdclk_state.vco);
1149 	cdclk_state.voltage_level = skl_calc_voltage_level(cdclk_state.cdclk);
1150 
1151 	skl_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1152 }
1153 
1154 static void skl_uninit_cdclk(struct drm_i915_private *dev_priv)
1155 {
1156 	struct intel_cdclk_state cdclk_state = dev_priv->cdclk.hw;
1157 
1158 	cdclk_state.cdclk = cdclk_state.bypass;
1159 	cdclk_state.vco = 0;
1160 	cdclk_state.voltage_level = skl_calc_voltage_level(cdclk_state.cdclk);
1161 
1162 	skl_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1163 }
1164 
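/*
 * In the tables below cdclk = refclk * ratio / divider (all in kHz),
 * with the PLL VCO running at refclk * ratio; e.g. the first BXT entry
 * is 19200 * 60 / 8 = 144000 kHz off a 1152000 kHz VCO.
 */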
1165 static const struct intel_cdclk_vals bxt_cdclk_table[] = {
1166 	{ .refclk = 19200, .cdclk = 144000, .divider = 8, .ratio = 60 },
1167 	{ .refclk = 19200, .cdclk = 288000, .divider = 4, .ratio = 60 },
1168 	{ .refclk = 19200, .cdclk = 384000, .divider = 3, .ratio = 60 },
1169 	{ .refclk = 19200, .cdclk = 576000, .divider = 2, .ratio = 60 },
1170 	{ .refclk = 19200, .cdclk = 624000, .divider = 2, .ratio = 65 },
1171 	{}
1172 };
1173 
1174 static const struct intel_cdclk_vals glk_cdclk_table[] = {
1175 	{ .refclk = 19200, .cdclk =  79200, .divider = 8, .ratio = 33 },
1176 	{ .refclk = 19200, .cdclk = 158400, .divider = 4, .ratio = 33 },
1177 	{ .refclk = 19200, .cdclk = 316800, .divider = 2, .ratio = 33 },
1178 	{}
1179 };
1180 
1181 static const struct intel_cdclk_vals cnl_cdclk_table[] = {
1182 	{ .refclk = 19200, .cdclk = 168000, .divider = 4, .ratio = 35 },
1183 	{ .refclk = 19200, .cdclk = 336000, .divider = 2, .ratio = 35 },
1184 	{ .refclk = 19200, .cdclk = 528000, .divider = 2, .ratio = 55 },
1185 
1186 	{ .refclk = 24000, .cdclk = 168000, .divider = 4, .ratio = 28 },
1187 	{ .refclk = 24000, .cdclk = 336000, .divider = 2, .ratio = 28 },
1188 	{ .refclk = 24000, .cdclk = 528000, .divider = 2, .ratio = 44 },
1189 	{}
1190 };
1191 
1192 static const struct intel_cdclk_vals icl_cdclk_table[] = {
1193 	{ .refclk = 19200, .cdclk = 172800, .divider = 2, .ratio = 18 },
1194 	{ .refclk = 19200, .cdclk = 192000, .divider = 2, .ratio = 20 },
1195 	{ .refclk = 19200, .cdclk = 307200, .divider = 2, .ratio = 32 },
1196 	{ .refclk = 19200, .cdclk = 326400, .divider = 4, .ratio = 68 },
1197 	{ .refclk = 19200, .cdclk = 556800, .divider = 2, .ratio = 58 },
1198 	{ .refclk = 19200, .cdclk = 652800, .divider = 2, .ratio = 68 },
1199 
1200 	{ .refclk = 24000, .cdclk = 180000, .divider = 2, .ratio = 15 },
1201 	{ .refclk = 24000, .cdclk = 192000, .divider = 2, .ratio = 16 },
1202 	{ .refclk = 24000, .cdclk = 312000, .divider = 2, .ratio = 26 },
1203 	{ .refclk = 24000, .cdclk = 324000, .divider = 4, .ratio = 54 },
1204 	{ .refclk = 24000, .cdclk = 552000, .divider = 2, .ratio = 46 },
1205 	{ .refclk = 24000, .cdclk = 648000, .divider = 2, .ratio = 54 },
1206 
1207 	{ .refclk = 38400, .cdclk = 172800, .divider = 2, .ratio =  9 },
1208 	{ .refclk = 38400, .cdclk = 192000, .divider = 2, .ratio = 10 },
1209 	{ .refclk = 38400, .cdclk = 307200, .divider = 2, .ratio = 16 },
1210 	{ .refclk = 38400, .cdclk = 326400, .divider = 4, .ratio = 34 },
1211 	{ .refclk = 38400, .cdclk = 556800, .divider = 2, .ratio = 29 },
1212 	{ .refclk = 38400, .cdclk = 652800, .divider = 2, .ratio = 34 },
1213 	{}
1214 };
1215 
1216 static int bxt_calc_cdclk(struct drm_i915_private *dev_priv, int min_cdclk)
1217 {
1218 	const struct intel_cdclk_vals *table = dev_priv->cdclk.table;
1219 	int i;
1220 
1221 	for (i = 0; table[i].refclk; i++)
1222 		if (table[i].refclk == dev_priv->cdclk.hw.ref &&
1223 		    table[i].cdclk >= min_cdclk)
1224 			return table[i].cdclk;
1225 
1226 	WARN(1, "Cannot satisfy minimum cdclk %d with refclk %u\n",
1227 	     min_cdclk, dev_priv->cdclk.hw.ref);
1228 	return 0;
1229 }
1230 
1231 static int bxt_calc_cdclk_pll_vco(struct drm_i915_private *dev_priv, int cdclk)
1232 {
1233 	const struct intel_cdclk_vals *table = dev_priv->cdclk.table;
1234 	int i;
1235 
1236 	if (cdclk == dev_priv->cdclk.hw.bypass)
1237 		return 0;
1238 
1239 	for (i = 0; table[i].refclk; i++)
1240 		if (table[i].refclk == dev_priv->cdclk.hw.ref &&
1241 		    table[i].cdclk == cdclk)
1242 			return dev_priv->cdclk.hw.ref * table[i].ratio;
1243 
1244 	WARN(1, "cdclk %d not valid for refclk %u\n",
1245 	     cdclk, dev_priv->cdclk.hw.ref);
1246 	return 0;
1247 }
1248 
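/*
 * The voltage level is simply the cdclk in 25 MHz units, rounded up;
 * e.g. 624000 kHz -> 25.
 */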
1249 static u8 bxt_calc_voltage_level(int cdclk)
1250 {
1251 	return DIV_ROUND_UP(cdclk, 25000);
1252 }
1253 
1254 static u8 cnl_calc_voltage_level(int cdclk)
1255 {
1256 	if (cdclk > 336000)
1257 		return 2;
1258 	else if (cdclk > 168000)
1259 		return 1;
1260 	else
1261 		return 0;
1262 }
1263 
1264 static u8 icl_calc_voltage_level(int cdclk)
1265 {
1266 	if (cdclk > 556800)
1267 		return 2;
1268 	else if (cdclk > 312000)
1269 		return 1;
1270 	else
1271 		return 0;
1272 }
1273 
1274 static u8 ehl_calc_voltage_level(int cdclk)
1275 {
1276 	if (cdclk > 312000)
1277 		return 2;
1278 	else if (cdclk > 180000)
1279 		return 1;
1280 	else
1281 		return 0;
1282 }
1283 
1284 static void cnl_readout_refclk(struct drm_i915_private *dev_priv,
1285 			       struct intel_cdclk_state *cdclk_state)
1286 {
1287 	if (I915_READ(SKL_DSSM) & CNL_DSSM_CDCLK_PLL_REFCLK_24MHz)
1288 		cdclk_state->ref = 24000;
1289 	else
1290 		cdclk_state->ref = 19200;
1291 }
1292 
1293 static void icl_readout_refclk(struct drm_i915_private *dev_priv,
1294 			       struct intel_cdclk_state *cdclk_state)
1295 {
1296 	u32 dssm = I915_READ(SKL_DSSM) & ICL_DSSM_CDCLK_PLL_REFCLK_MASK;
1297 
1298 	switch (dssm) {
1299 	default:
1300 		MISSING_CASE(dssm);
1301 		/* fall through */
1302 	case ICL_DSSM_CDCLK_PLL_REFCLK_24MHz:
1303 		cdclk_state->ref = 24000;
1304 		break;
1305 	case ICL_DSSM_CDCLK_PLL_REFCLK_19_2MHz:
1306 		cdclk_state->ref = 19200;
1307 		break;
1308 	case ICL_DSSM_CDCLK_PLL_REFCLK_38_4MHz:
1309 		cdclk_state->ref = 38400;
1310 		break;
1311 	}
1312 }
1313 
1314 static void bxt_de_pll_readout(struct drm_i915_private *dev_priv,
1315 			       struct intel_cdclk_state *cdclk_state)
1316 {
1317 	u32 val, ratio;
1318 
1319 	if (INTEL_GEN(dev_priv) >= 11)
1320 		icl_readout_refclk(dev_priv, cdclk_state);
1321 	else if (IS_CANNONLAKE(dev_priv))
1322 		cnl_readout_refclk(dev_priv, cdclk_state);
1323 	else
1324 		cdclk_state->ref = 19200;
1325 
1326 	val = I915_READ(BXT_DE_PLL_ENABLE);
1327 	if ((val & BXT_DE_PLL_PLL_ENABLE) == 0 ||
1328 	    (val & BXT_DE_PLL_LOCK) == 0) {
1329 		/*
1330 		 * CDCLK PLL is disabled, the VCO/ratio doesn't matter, but
1331 		 * setting it to zero is a way to signal that.
1332 		 */
1333 		cdclk_state->vco = 0;
1334 		return;
1335 	}
1336 
1337 	/*
	 * CNL+ has the ratio directly in the PLL enable register, while
	 * gen9lp keeps it in a separate PLL control register.
1340 	 */
1341 	if (INTEL_GEN(dev_priv) >= 10)
1342 		ratio = val & CNL_CDCLK_PLL_RATIO_MASK;
1343 	else
1344 		ratio = I915_READ(BXT_DE_PLL_CTL) & BXT_DE_PLL_RATIO_MASK;
1345 
1346 	cdclk_state->vco = ratio * cdclk_state->ref;
1347 }
1348 
1349 static void bxt_get_cdclk(struct drm_i915_private *dev_priv,
1350 			  struct intel_cdclk_state *cdclk_state)
1351 {
1352 	u32 divider;
1353 	int div;
1354 
1355 	bxt_de_pll_readout(dev_priv, cdclk_state);
1356 
1357 	if (INTEL_GEN(dev_priv) >= 12)
1358 		cdclk_state->bypass = cdclk_state->ref / 2;
1359 	else if (INTEL_GEN(dev_priv) >= 11)
1360 		cdclk_state->bypass = 50000;
1361 	else
1362 		cdclk_state->bypass = cdclk_state->ref;
1363 
1364 	if (cdclk_state->vco == 0) {
1365 		cdclk_state->cdclk = cdclk_state->bypass;
1366 		goto out;
1367 	}
1368 
1369 	divider = I915_READ(CDCLK_CTL) & BXT_CDCLK_CD2X_DIV_SEL_MASK;
1370 
1371 	switch (divider) {
1372 	case BXT_CDCLK_CD2X_DIV_SEL_1:
1373 		div = 2;
1374 		break;
1375 	case BXT_CDCLK_CD2X_DIV_SEL_1_5:
1376 		WARN(IS_GEMINILAKE(dev_priv) || INTEL_GEN(dev_priv) >= 10,
1377 		     "Unsupported divider\n");
1378 		div = 3;
1379 		break;
1380 	case BXT_CDCLK_CD2X_DIV_SEL_2:
1381 		div = 4;
1382 		break;
1383 	case BXT_CDCLK_CD2X_DIV_SEL_4:
1384 		WARN(INTEL_GEN(dev_priv) >= 10, "Unsupported divider\n");
1385 		div = 8;
1386 		break;
1387 	default:
1388 		MISSING_CASE(divider);
1389 		return;
1390 	}
1391 
1392 	cdclk_state->cdclk = DIV_ROUND_CLOSEST(cdclk_state->vco, div);
1393 
1394  out:
1395 	/*
1396 	 * Can't read this out :( Let's assume it's
1397 	 * at least what the CDCLK frequency requires.
1398 	 */
1399 	cdclk_state->voltage_level =
1400 		dev_priv->display.calc_voltage_level(cdclk_state->cdclk);
1401 }
1402 
1403 static void bxt_de_pll_disable(struct drm_i915_private *dev_priv)
1404 {
1405 	I915_WRITE(BXT_DE_PLL_ENABLE, 0);
1406 
1407 	/* Timeout 200us */
1408 	if (intel_de_wait_for_clear(dev_priv,
1409 				    BXT_DE_PLL_ENABLE, BXT_DE_PLL_LOCK, 1))
1410 		DRM_ERROR("timeout waiting for DE PLL unlock\n");
1411 
1412 	dev_priv->cdclk.hw.vco = 0;
1413 }
1414 
1415 static void bxt_de_pll_enable(struct drm_i915_private *dev_priv, int vco)
1416 {
1417 	int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->cdclk.hw.ref);
1418 	u32 val;
1419 
1420 	val = I915_READ(BXT_DE_PLL_CTL);
1421 	val &= ~BXT_DE_PLL_RATIO_MASK;
1422 	val |= BXT_DE_PLL_RATIO(ratio);
1423 	I915_WRITE(BXT_DE_PLL_CTL, val);
1424 
1425 	I915_WRITE(BXT_DE_PLL_ENABLE, BXT_DE_PLL_PLL_ENABLE);
1426 
1427 	/* Timeout 200us */
1428 	if (intel_de_wait_for_set(dev_priv,
1429 				  BXT_DE_PLL_ENABLE, BXT_DE_PLL_LOCK, 1))
1430 		DRM_ERROR("timeout waiting for DE PLL lock\n");
1431 
1432 	dev_priv->cdclk.hw.vco = vco;
1433 }
1434 
1435 static void cnl_cdclk_pll_disable(struct drm_i915_private *dev_priv)
1436 {
1437 	u32 val;
1438 
1439 	val = I915_READ(BXT_DE_PLL_ENABLE);
1440 	val &= ~BXT_DE_PLL_PLL_ENABLE;
1441 	I915_WRITE(BXT_DE_PLL_ENABLE, val);
1442 
1443 	/* Timeout 200us */
1444 	if (wait_for((I915_READ(BXT_DE_PLL_ENABLE) & BXT_DE_PLL_LOCK) == 0, 1))
1445 		DRM_ERROR("timeout waiting for CDCLK PLL unlock\n");
1446 
1447 	dev_priv->cdclk.hw.vco = 0;
1448 }
1449 
1450 static void cnl_cdclk_pll_enable(struct drm_i915_private *dev_priv, int vco)
1451 {
1452 	int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->cdclk.hw.ref);
1453 	u32 val;
1454 
1455 	val = CNL_CDCLK_PLL_RATIO(ratio);
1456 	I915_WRITE(BXT_DE_PLL_ENABLE, val);
1457 
1458 	val |= BXT_DE_PLL_PLL_ENABLE;
1459 	I915_WRITE(BXT_DE_PLL_ENABLE, val);
1460 
1461 	/* Timeout 200us */
1462 	if (wait_for((I915_READ(BXT_DE_PLL_ENABLE) & BXT_DE_PLL_LOCK) != 0, 1))
1463 		DRM_ERROR("timeout waiting for CDCLK PLL lock\n");
1464 
1465 	dev_priv->cdclk.hw.vco = vco;
1466 }
1467 
1468 static u32 bxt_cdclk_cd2x_pipe(struct drm_i915_private *dev_priv, enum pipe pipe)
1469 {
1470 	if (INTEL_GEN(dev_priv) >= 12) {
1471 		if (pipe == INVALID_PIPE)
1472 			return TGL_CDCLK_CD2X_PIPE_NONE;
1473 		else
1474 			return TGL_CDCLK_CD2X_PIPE(pipe);
1475 	} else if (INTEL_GEN(dev_priv) >= 11) {
1476 		if (pipe == INVALID_PIPE)
1477 			return ICL_CDCLK_CD2X_PIPE_NONE;
1478 		else
1479 			return ICL_CDCLK_CD2X_PIPE(pipe);
1480 	} else {
1481 		if (pipe == INVALID_PIPE)
1482 			return BXT_CDCLK_CD2X_PIPE_NONE;
1483 		else
1484 			return BXT_CDCLK_CD2X_PIPE(pipe);
1485 	}
1486 }
1487 
1488 static void bxt_set_cdclk(struct drm_i915_private *dev_priv,
1489 			  const struct intel_cdclk_state *cdclk_state,
1490 			  enum pipe pipe)
1491 {
1492 	int cdclk = cdclk_state->cdclk;
1493 	int vco = cdclk_state->vco;
1494 	u32 val, divider;
1495 	int ret;
1496 
1497 	/* Inform power controller of upcoming frequency change. */
1498 	if (INTEL_GEN(dev_priv) >= 10)
1499 		ret = skl_pcode_request(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1500 					SKL_CDCLK_PREPARE_FOR_CHANGE,
1501 					SKL_CDCLK_READY_FOR_CHANGE,
1502 					SKL_CDCLK_READY_FOR_CHANGE, 3);
1503 	else
1504 		/*
1505 		 * BSpec requires us to wait up to 150usec, but that leads to
1506 		 * timeouts; the 2ms used here is based on experiment.
1507 		 */
1508 		ret = sandybridge_pcode_write_timeout(dev_priv,
1509 						      HSW_PCODE_DE_WRITE_FREQ_REQ,
1510 						      0x80000000, 150, 2);
1511 
1512 	if (ret) {
1513 		DRM_ERROR("Failed to inform PCU about cdclk change (err %d, freq %d)\n",
1514 			  ret, cdclk);
1515 		return;
1516 	}
1517 
1518 	/* cdclk = vco / 2 / div{1,1.5,2,4} */
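	/* e.g. on ICL 652800 kHz = 1305600 kHz VCO / 2 / 1 (CD2X divider 1) */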
1519 	switch (DIV_ROUND_CLOSEST(vco, cdclk)) {
1520 	default:
1521 		WARN_ON(cdclk != dev_priv->cdclk.hw.bypass);
1522 		WARN_ON(vco != 0);
1523 		/* fall through */
1524 	case 2:
1525 		divider = BXT_CDCLK_CD2X_DIV_SEL_1;
1526 		break;
1527 	case 3:
1528 		WARN(IS_GEMINILAKE(dev_priv) || INTEL_GEN(dev_priv) >= 10,
1529 		     "Unsupported divider\n");
1530 		divider = BXT_CDCLK_CD2X_DIV_SEL_1_5;
1531 		break;
1532 	case 4:
1533 		divider = BXT_CDCLK_CD2X_DIV_SEL_2;
1534 		break;
1535 	case 8:
1536 		WARN(INTEL_GEN(dev_priv) >= 10, "Unsupported divider\n");
1537 		divider = BXT_CDCLK_CD2X_DIV_SEL_4;
1538 		break;
1539 	}
1540 
1541 	if (INTEL_GEN(dev_priv) >= 10) {
1542 		if (dev_priv->cdclk.hw.vco != 0 &&
1543 		    dev_priv->cdclk.hw.vco != vco)
1544 			cnl_cdclk_pll_disable(dev_priv);
1545 
1546 		if (dev_priv->cdclk.hw.vco != vco)
1547 			cnl_cdclk_pll_enable(dev_priv, vco);
1548 
1549 	} else {
1550 		if (dev_priv->cdclk.hw.vco != 0 &&
1551 		    dev_priv->cdclk.hw.vco != vco)
1552 			bxt_de_pll_disable(dev_priv);
1553 
1554 		if (dev_priv->cdclk.hw.vco != vco)
1555 			bxt_de_pll_enable(dev_priv, vco);
1556 	}
1557 
1558 	val = divider | skl_cdclk_decimal(cdclk) |
1559 		bxt_cdclk_cd2x_pipe(dev_priv, pipe);
1560 
1561 	/*
1562 	 * Disable SSA Precharge when CD clock frequency < 500 MHz,
1563 	 * enable otherwise.
1564 	 */
1565 	if (IS_GEN9_LP(dev_priv) && cdclk >= 500000)
1566 		val |= BXT_CDCLK_SSA_PRECHARGE_ENABLE;
1567 	I915_WRITE(CDCLK_CTL, val);
1568 
1569 	if (pipe != INVALID_PIPE)
1570 		intel_wait_for_vblank(dev_priv, pipe);
1571 
1572 	if (INTEL_GEN(dev_priv) >= 10) {
1573 		ret = sandybridge_pcode_write(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1574 					      cdclk_state->voltage_level);
1575 	} else {
1576 		/*
1577 		 * The timeout isn't specified, the 2ms used here is based on
1578 		 * experiment.
1579 		 * FIXME: Waiting for the request completion could be delayed
1580 		 * until the next PCODE request based on BSpec.
1581 		 */
1582 		ret = sandybridge_pcode_write_timeout(dev_priv,
1583 						      HSW_PCODE_DE_WRITE_FREQ_REQ,
1584 						      cdclk_state->voltage_level,
1585 						      150, 2);
1586 	}
1587 
1588 	if (ret) {
1589 		DRM_ERROR("PCode CDCLK freq set failed, (err %d, freq %d)\n",
1590 			  ret, cdclk);
1591 		return;
1592 	}
1593 
1594 	intel_update_cdclk(dev_priv);
1595 
1596 	if (INTEL_GEN(dev_priv) >= 10)
1597 		/*
1598 		 * Can't read out the voltage level :(
1599 		 * Let's just assume everything is as expected.
1600 		 */
1601 		dev_priv->cdclk.hw.voltage_level = cdclk_state->voltage_level;
1602 }
1603 
1604 static void bxt_sanitize_cdclk(struct drm_i915_private *dev_priv)
1605 {
1606 	u32 cdctl, expected;
1607 	int cdclk, vco;
1608 
1609 	intel_update_cdclk(dev_priv);
1610 	intel_dump_cdclk_state(&dev_priv->cdclk.hw, "Current CDCLK");
1611 
1612 	if (dev_priv->cdclk.hw.vco == 0 ||
1613 	    dev_priv->cdclk.hw.cdclk == dev_priv->cdclk.hw.bypass)
1614 		goto sanitize;
1615 
	/* DPLL okay; verify the CDCLK.
	 *
	 * Some BIOS versions leave an incorrect decimal frequency value and
	 * set reserved MBZ bits in CDCLK_CTL, at least when exiting from S4,
	 * so sanitize this register.
1621 	 */
1622 	cdctl = I915_READ(CDCLK_CTL);
1623 	/*
	 * Let's ignore the pipe field, since the BIOS could have configured
	 * the dividers either syncing to an active pipe or asynchronously
	 * (PIPE_NONE).
1627 	 */
1628 	cdctl &= ~bxt_cdclk_cd2x_pipe(dev_priv, INVALID_PIPE);
1629 
1630 	/* Make sure this is a legal cdclk value for the platform */
1631 	cdclk = bxt_calc_cdclk(dev_priv, dev_priv->cdclk.hw.cdclk);
1632 	if (cdclk != dev_priv->cdclk.hw.cdclk)
1633 		goto sanitize;
1634 
1635 	/* Make sure the VCO is correct for the cdclk */
1636 	vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk);
1637 	if (vco != dev_priv->cdclk.hw.vco)
1638 		goto sanitize;
1639 
1640 	expected = skl_cdclk_decimal(cdclk);
1641 
1642 	/* Figure out what CD2X divider we should be using for this cdclk */
1643 	switch (DIV_ROUND_CLOSEST(dev_priv->cdclk.hw.vco,
1644 				  dev_priv->cdclk.hw.cdclk)) {
1645 	case 2:
1646 		expected |= BXT_CDCLK_CD2X_DIV_SEL_1;
1647 		break;
1648 	case 3:
1649 		expected |= BXT_CDCLK_CD2X_DIV_SEL_1_5;
1650 		break;
1651 	case 4:
1652 		expected |= BXT_CDCLK_CD2X_DIV_SEL_2;
1653 		break;
1654 	case 8:
1655 		expected |= BXT_CDCLK_CD2X_DIV_SEL_4;
1656 		break;
1657 	default:
1658 		goto sanitize;
1659 	}
1660 
1661 	/*
1662 	 * Disable SSA Precharge when CD clock frequency < 500 MHz,
1663 	 * enable otherwise.
1664 	 */
1665 	if (IS_GEN9_LP(dev_priv) && dev_priv->cdclk.hw.cdclk >= 500000)
1666 		expected |= BXT_CDCLK_SSA_PRECHARGE_ENABLE;
1667 
1668 	if (cdctl == expected)
1669 		/* All well; nothing to sanitize */
1670 		return;
1671 
1672 sanitize:
1673 	DRM_DEBUG_KMS("Sanitizing cdclk programmed by pre-os\n");
1674 
1675 	/* force cdclk programming */
1676 	dev_priv->cdclk.hw.cdclk = 0;
1677 
1678 	/* force full PLL disable + enable */
1679 	dev_priv->cdclk.hw.vco = -1;
1680 }
1681 
1682 static void bxt_init_cdclk(struct drm_i915_private *dev_priv)
1683 {
1684 	struct intel_cdclk_state cdclk_state;
1685 
1686 	bxt_sanitize_cdclk(dev_priv);
1687 
1688 	if (dev_priv->cdclk.hw.cdclk != 0 &&
1689 	    dev_priv->cdclk.hw.vco != 0)
1690 		return;
1691 
1692 	cdclk_state = dev_priv->cdclk.hw;
1693 
1694 	/*
1695 	 * FIXME:
1696 	 * - The initial CDCLK needs to be read from VBT.
1697 	 *   Need to make this change after VBT has changes for BXT.
1698 	 */
1699 	cdclk_state.cdclk = bxt_calc_cdclk(dev_priv, 0);
1700 	cdclk_state.vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk_state.cdclk);
1701 	cdclk_state.voltage_level =
1702 		dev_priv->display.calc_voltage_level(cdclk_state.cdclk);
1703 
1704 	bxt_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1705 }
1706 
1707 static void bxt_uninit_cdclk(struct drm_i915_private *dev_priv)
1708 {
1709 	struct intel_cdclk_state cdclk_state = dev_priv->cdclk.hw;
1710 
1711 	cdclk_state.cdclk = cdclk_state.bypass;
1712 	cdclk_state.vco = 0;
1713 	cdclk_state.voltage_level =
1714 		dev_priv->display.calc_voltage_level(cdclk_state.cdclk);
1715 
1716 	bxt_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1717 }
1718 
1719 /**
1720  * intel_cdclk_init - Initialize CDCLK
1721  * @i915: i915 device
1722  *
1723  * Initialize CDCLK. This consists mainly of initializing dev_priv->cdclk.hw and
1724  * sanitizing the state of the hardware if needed. This is generally done only
1725  * during the display core initialization sequence, after which the DMC will
1726  * take care of turning CDCLK off/on as needed.
1727  */
1728 void intel_cdclk_init(struct drm_i915_private *i915)
1729 {
1730 	if (IS_GEN9_LP(i915) || INTEL_GEN(i915) >= 10)
1731 		bxt_init_cdclk(i915);
1732 	else if (IS_GEN9_BC(i915))
1733 		skl_init_cdclk(i915);
1734 }
1735 
1736 /**
1737  * intel_cdclk_uninit - Uninitialize CDCLK
1738  * @i915: i915 device
1739  *
1740  * Uninitialize CDCLK. This is done only during the display core
1741  * uninitialization sequence.
1742  */
1743 void intel_cdclk_uninit(struct drm_i915_private *i915)
1744 {
1745 	if (INTEL_GEN(i915) >= 10 || IS_GEN9_LP(i915))
1746 		bxt_uninit_cdclk(i915);
1747 	else if (IS_GEN9_BC(i915))
1748 		skl_uninit_cdclk(i915);
1749 }
1750 
1751 /**
1752  * intel_cdclk_needs_modeset - Determine if two CDCLK states require a modeset on all pipes
1753  * @a: first CDCLK state
1754  * @b: second CDCLK state
1755  *
1756  * Returns:
1757  * True if the CDCLK states require pipes to be off during reprogramming, false if not.
1758  */
1759 bool intel_cdclk_needs_modeset(const struct intel_cdclk_state *a,
1760 			       const struct intel_cdclk_state *b)
1761 {
1762 	return a->cdclk != b->cdclk ||
1763 		a->vco != b->vco ||
1764 		a->ref != b->ref;
1765 }
1766 
1767 /**
1768  * intel_cdclk_needs_cd2x_update - Determine if two CDCLK states require a cd2x divider update
1769  * @dev_priv: Not a CDCLK state, it's the drm_i915_private!
1770  * @a: first CDCLK state
1771  * @b: second CDCLK state
1772  *
1773  * Returns:
1774  * True if the CDCLK states require just a cd2x divider update, false if not.
1775  */
1776 static bool intel_cdclk_needs_cd2x_update(struct drm_i915_private *dev_priv,
1777 					  const struct intel_cdclk_state *a,
1778 					  const struct intel_cdclk_state *b)
1779 {
1780 	/* Older hw doesn't have the capability */
1781 	if (INTEL_GEN(dev_priv) < 10 && !IS_GEN9_LP(dev_priv))
1782 		return false;
1783 
1784 	return a->cdclk != b->cdclk &&
1785 		a->vco == b->vco &&
1786 		a->ref == b->ref;
1787 }
1788 
1789 /**
1790  * intel_cdclk_changed - Determine if two CDCLK states are different
1791  * @a: first CDCLK state
1792  * @b: second CDCLK state
1793  *
1794  * Returns:
1795  * True if the CDCLK states don't match, false if they do.
1796  */
1797 static bool intel_cdclk_changed(const struct intel_cdclk_state *a,
1798 				const struct intel_cdclk_state *b)
1799 {
1800 	return intel_cdclk_needs_modeset(a, b) ||
1801 		a->voltage_level != b->voltage_level;
1802 }
1803 
1804 /**
1805  * intel_cdclk_swap_state - make atomic CDCLK configuration effective
1806  * @state: atomic state
1807  *
1808  * This is the CDCLK version of drm_atomic_helper_swap_state() since the
1809  * helper does not handle driver-specific global state.
1810  *
1811  * Similarly to the atomic helpers this function does a complete swap,
1812  * i.e. it also puts the old state into @state. This is used by the commit
1813  * code to determine how CDCLK has changed (for instance did it increase or
1814  * decrease).
1815  */
1816 void intel_cdclk_swap_state(struct intel_atomic_state *state)
1817 {
1818 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
1819 
1820 	swap(state->cdclk.logical, dev_priv->cdclk.logical);
1821 	swap(state->cdclk.actual, dev_priv->cdclk.actual);
1822 }
1823 
1824 void intel_dump_cdclk_state(const struct intel_cdclk_state *cdclk_state,
1825 			    const char *context)
1826 {
1827 	DRM_DEBUG_DRIVER("%s %d kHz, VCO %d kHz, ref %d kHz, bypass %d kHz, voltage level %d\n",
1828 			 context, cdclk_state->cdclk, cdclk_state->vco,
1829 			 cdclk_state->ref, cdclk_state->bypass,
1830 			 cdclk_state->voltage_level);
1831 }
1832 
1833 /**
1834  * intel_set_cdclk - Push the CDCLK state to the hardware
1835  * @dev_priv: i915 device
1836  * @cdclk_state: new CDCLK state
1837  * @pipe: pipe with which to synchronize the update
1838  *
1839  * Program the hardware based on the passed in CDCLK state,
1840  * if necessary.
1841  */
1842 static void intel_set_cdclk(struct drm_i915_private *dev_priv,
1843 			    const struct intel_cdclk_state *cdclk_state,
1844 			    enum pipe pipe)
1845 {
1846 	if (!intel_cdclk_changed(&dev_priv->cdclk.hw, cdclk_state))
1847 		return;
1848 
1849 	if (WARN_ON_ONCE(!dev_priv->display.set_cdclk))
1850 		return;
1851 
1852 	intel_dump_cdclk_state(cdclk_state, "Changing CDCLK to");
1853 
1854 	dev_priv->display.set_cdclk(dev_priv, cdclk_state, pipe);
1855 
1856 	if (WARN(intel_cdclk_changed(&dev_priv->cdclk.hw, cdclk_state),
1857 		 "cdclk state doesn't match!\n")) {
1858 		intel_dump_cdclk_state(&dev_priv->cdclk.hw, "[hw state]");
1859 		intel_dump_cdclk_state(cdclk_state, "[sw state]");
1860 	}
1861 }
1862 
1863 /**
1864  * intel_set_cdclk_pre_plane_update - Push the CDCLK state to the hardware
1865  * @dev_priv: i915 device
1866  * @old_state: old CDCLK state
1867  * @new_state: new CDCLK state
1868  * @pipe: pipe with which to synchronize the update
1869  *
1870  * Program the hardware before updating the HW plane state based on the passed
1871  * in CDCLK state, if necessary.
1872  */
1873 void
1874 intel_set_cdclk_pre_plane_update(struct drm_i915_private *dev_priv,
1875 				 const struct intel_cdclk_state *old_state,
1876 				 const struct intel_cdclk_state *new_state,
1877 				 enum pipe pipe)
1878 {
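	/*
	 * Increases (and updates not tied to a specific pipe) are done
	 * before the plane update, so the planes never run at a cdclk
	 * lower than what they require; decreases are deferred to the
	 * post plane update hook.
	 */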
1879 	if (pipe == INVALID_PIPE || old_state->cdclk <= new_state->cdclk)
1880 		intel_set_cdclk(dev_priv, new_state, pipe);
1881 }
1882 
1883 /**
1884  * intel_set_cdclk_post_plane_update - Push the CDCLK state to the hardware
1885  * @dev_priv: i915 device
1886  * @old_state: old CDCLK state
1887  * @new_state: new CDCLK state
1888  * @pipe: pipe with which to synchronize the update
1889  *
1890  * Program the hardware after updating the HW plane state based on the passed
1891  * in CDCLK state, if necessary.
1892  */
1893 void
1894 intel_set_cdclk_post_plane_update(struct drm_i915_private *dev_priv,
1895 				  const struct intel_cdclk_state *old_state,
1896 				  const struct intel_cdclk_state *new_state,
1897 				  enum pipe pipe)
1898 {
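	/*
	 * Decreases are only applied once the planes no longer need the
	 * old, higher cdclk, i.e. after the plane update, synchronized
	 * to @pipe.
	 */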
1899 	if (pipe != INVALID_PIPE && old_state->cdclk > new_state->cdclk)
1900 		intel_set_cdclk(dev_priv, new_state, pipe);
1901 }
1902 
1903 static int intel_pixel_rate_to_cdclk(const struct intel_crtc_state *crtc_state)
1904 {
1905 	struct drm_i915_private *dev_priv = to_i915(crtc_state->base.crtc->dev);
1906 	int pixel_rate = crtc_state->pixel_rate;
1907 
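	/*
	 * GLK and gen10+ pipes process two pixels per CDCLK cycle, so
	 * CDCLK only needs to be half the pixel rate. HSW/BDW/SKL need
	 * CDCLK >= pixel rate, while older platforms additionally need
	 * a 10% (5% on CHV) guardband on top of the pixel rate.
	 */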
1908 	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
1909 		return DIV_ROUND_UP(pixel_rate, 2);
1910 	else if (IS_GEN(dev_priv, 9) ||
1911 		 IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
1912 		return pixel_rate;
1913 	else if (IS_CHERRYVIEW(dev_priv))
1914 		return DIV_ROUND_UP(pixel_rate * 100, 95);
1915 	else if (crtc_state->double_wide)
1916 		return DIV_ROUND_UP(pixel_rate * 100, 90 * 2);
1917 	else
1918 		return DIV_ROUND_UP(pixel_rate * 100, 90);
1919 }
1920 
1921 static int intel_planes_min_cdclk(const struct intel_crtc_state *crtc_state)
1922 {
1923 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
1924 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1925 	struct intel_plane *plane;
1926 	int min_cdclk = 0;
1927 
1928 	for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, plane)
1929 		min_cdclk = max(crtc_state->min_cdclk[plane->id], min_cdclk);
1930 
1931 	return min_cdclk;
1932 }
1933 
1934 int intel_crtc_compute_min_cdclk(const struct intel_crtc_state *crtc_state)
1935 {
1936 	struct drm_i915_private *dev_priv =
1937 		to_i915(crtc_state->base.crtc->dev);
1938 	int min_cdclk;
1939 
1940 	if (!crtc_state->base.enable)
1941 		return 0;
1942 
1943 	min_cdclk = intel_pixel_rate_to_cdclk(crtc_state);
1944 
1945 	/* pixel rate mustn't exceed 95% of cdclk with IPS on BDW */
1946 	if (IS_BROADWELL(dev_priv) && hsw_crtc_state_ips_capable(crtc_state))
1947 		min_cdclk = DIV_ROUND_UP(min_cdclk * 100, 95);
1948 
	/*
	 * BSpec says "Do not use DisplayPort with CDCLK less than 432 MHz,
	 * audio enabled, port width x4, and link rate HBR2 (5.4 GHz), or else
	 * there may be audio corruption or screen corruption." The equivalent
	 * cdclk restriction for GLK is 316.8 MHz.
	 */
1954 	if (intel_crtc_has_dp_encoder(crtc_state) &&
1955 	    crtc_state->has_audio &&
1956 	    crtc_state->port_clock >= 540000 &&
1957 	    crtc_state->lane_count == 4) {
1958 		if (IS_CANNONLAKE(dev_priv) || IS_GEMINILAKE(dev_priv)) {
1959 			/* Display WA #1145: glk,cnl */
1960 			min_cdclk = max(316800, min_cdclk);
1961 		} else if (IS_GEN(dev_priv, 9) || IS_BROADWELL(dev_priv)) {
1962 			/* Display WA #1144: skl,bxt */
1963 			min_cdclk = max(432000, min_cdclk);
1964 		}
1965 	}
1966 
1967 	/*
1968 	 * According to BSpec, "The CD clock frequency must be at least twice
1969 	 * the frequency of the Azalia BCLK." and BCLK is 96 MHz by default.
1970 	 */
1971 	if (crtc_state->has_audio && INTEL_GEN(dev_priv) >= 9)
1972 		min_cdclk = max(2 * 96000, min_cdclk);
1973 
1974 	/*
1975 	 * "For DP audio configuration, cdclk frequency shall be set to
1976 	 *  meet the following requirements:
1977 	 *  DP Link Frequency(MHz) | Cdclk frequency(MHz)
1978 	 *  270                    | 320 or higher
1979 	 *  162                    | 200 or higher"
1980 	 */
1981 	if ((IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) &&
1982 	    intel_crtc_has_dp_encoder(crtc_state) && crtc_state->has_audio)
1983 		min_cdclk = max(crtc_state->port_clock, min_cdclk);
1984 
1985 	/*
	 * On Valleyview some DSI panels lose (v|h)sync when the clock is
	 * lower than 320000 kHz.
1988 	 */
1989 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DSI) &&
1990 	    IS_VALLEYVIEW(dev_priv))
1991 		min_cdclk = max(320000, min_cdclk);
1992 
1993 	/*
	 * On Geminilake the picture becomes unstable once CDCLK drops as
	 * low as 79200 kHz, even though the DSI PLL and DE PLL values
	 * are correct.
1997 	 */
1998 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DSI) &&
1999 	    IS_GEMINILAKE(dev_priv))
2000 		min_cdclk = max(158400, min_cdclk);
2001 
2002 	/* Account for additional needs from the planes */
2003 	min_cdclk = max(intel_planes_min_cdclk(crtc_state), min_cdclk);
2004 
2005 	if (min_cdclk > dev_priv->max_cdclk_freq) {
2006 		DRM_DEBUG_KMS("required cdclk (%d kHz) exceeds max (%d kHz)\n",
2007 			      min_cdclk, dev_priv->max_cdclk_freq);
2008 		return -EINVAL;
2009 	}
2010 
2011 	return min_cdclk;
2012 }
2013 
2014 static int intel_compute_min_cdclk(struct intel_atomic_state *state)
2015 {
2016 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2017 	struct intel_crtc *crtc;
2018 	struct intel_crtc_state *crtc_state;
2019 	int min_cdclk, i;
2020 	enum pipe pipe;
2021 
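	/*
	 * Start from the per-pipe minimums currently committed in
	 * dev_priv; only crtcs included in this state may change their
	 * entry, and any change requires taking the global state lock.
	 */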
2022 	memcpy(state->min_cdclk, dev_priv->min_cdclk,
2023 	       sizeof(state->min_cdclk));
2024 
2025 	for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
2026 		int ret;
2027 
2028 		min_cdclk = intel_crtc_compute_min_cdclk(crtc_state);
2029 		if (min_cdclk < 0)
2030 			return min_cdclk;
2031 
2032 		if (state->min_cdclk[i] == min_cdclk)
2033 			continue;
2034 
2035 		state->min_cdclk[i] = min_cdclk;
2036 
2037 		ret = intel_atomic_lock_global_state(state);
2038 		if (ret)
2039 			return ret;
2040 	}
2041 
2042 	min_cdclk = state->cdclk.force_min_cdclk;
2043 	for_each_pipe(dev_priv, pipe)
2044 		min_cdclk = max(state->min_cdclk[pipe], min_cdclk);
2045 
2046 	return min_cdclk;
2047 }
2048 
2049 /*
2050  * Account for port clock min voltage level requirements.
2051  * This only really does something on CNL+ but can be
2052  * called on earlier platforms as well.
2053  *
 * Note that this function assumes that 0 is
2055  * the lowest voltage value, and higher values
2056  * correspond to increasingly higher voltages.
2057  *
2058  * Should that relationship no longer hold on
2059  * future platforms this code will need to be
2060  * adjusted.
2061  */
2062 static int bxt_compute_min_voltage_level(struct intel_atomic_state *state)
2063 {
2064 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2065 	struct intel_crtc *crtc;
2066 	struct intel_crtc_state *crtc_state;
2067 	u8 min_voltage_level;
2068 	int i;
2069 	enum pipe pipe;
2070 
2071 	memcpy(state->min_voltage_level, dev_priv->min_voltage_level,
2072 	       sizeof(state->min_voltage_level));
2073 
2074 	for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
2075 		int ret;
2076 
2077 		if (crtc_state->base.enable)
2078 			min_voltage_level = crtc_state->min_voltage_level;
2079 		else
2080 			min_voltage_level = 0;
2081 
2082 		if (state->min_voltage_level[i] == min_voltage_level)
2083 			continue;
2084 
2085 		state->min_voltage_level[i] = min_voltage_level;
2086 
2087 		ret = intel_atomic_lock_global_state(state);
2088 		if (ret)
2089 			return ret;
2090 	}
2091 
2092 	min_voltage_level = 0;
2093 	for_each_pipe(dev_priv, pipe)
2094 		min_voltage_level = max(state->min_voltage_level[pipe],
2095 					min_voltage_level);
2096 
2097 	return min_voltage_level;
2098 }
2099 
2100 static int vlv_modeset_calc_cdclk(struct intel_atomic_state *state)
2101 {
2102 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2103 	int min_cdclk, cdclk;
2104 
2105 	min_cdclk = intel_compute_min_cdclk(state);
2106 	if (min_cdclk < 0)
2107 		return min_cdclk;
2108 
2109 	cdclk = vlv_calc_cdclk(dev_priv, min_cdclk);
2110 
2111 	state->cdclk.logical.cdclk = cdclk;
2112 	state->cdclk.logical.voltage_level =
2113 		vlv_calc_voltage_level(dev_priv, cdclk);
2114 
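	/*
	 * With no pipes active only the forced minimum cdclk needs to
	 * be honoured by the actual hardware state.
	 */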
2115 	if (!state->active_pipes) {
2116 		cdclk = vlv_calc_cdclk(dev_priv, state->cdclk.force_min_cdclk);
2117 
2118 		state->cdclk.actual.cdclk = cdclk;
2119 		state->cdclk.actual.voltage_level =
2120 			vlv_calc_voltage_level(dev_priv, cdclk);
2121 	} else {
2122 		state->cdclk.actual = state->cdclk.logical;
2123 	}
2124 
2125 	return 0;
2126 }
2127 
2128 static int bdw_modeset_calc_cdclk(struct intel_atomic_state *state)
2129 {
2130 	int min_cdclk, cdclk;
2131 
2132 	min_cdclk = intel_compute_min_cdclk(state);
2133 	if (min_cdclk < 0)
2134 		return min_cdclk;
2135 
2136 	/*
2137 	 * FIXME should also account for plane ratio
2138 	 * once 64bpp pixel formats are supported.
2139 	 */
2140 	cdclk = bdw_calc_cdclk(min_cdclk);
2141 
2142 	state->cdclk.logical.cdclk = cdclk;
2143 	state->cdclk.logical.voltage_level =
2144 		bdw_calc_voltage_level(cdclk);
2145 
2146 	if (!state->active_pipes) {
2147 		cdclk = bdw_calc_cdclk(state->cdclk.force_min_cdclk);
2148 
2149 		state->cdclk.actual.cdclk = cdclk;
2150 		state->cdclk.actual.voltage_level =
2151 			bdw_calc_voltage_level(cdclk);
2152 	} else {
2153 		state->cdclk.actual = state->cdclk.logical;
2154 	}
2155 
2156 	return 0;
2157 }
2158 
2159 static int skl_dpll0_vco(struct intel_atomic_state *state)
2160 {
2161 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2162 	struct intel_crtc *crtc;
2163 	struct intel_crtc_state *crtc_state;
2164 	int vco, i;
2165 
2166 	vco = state->cdclk.logical.vco;
2167 	if (!vco)
2168 		vco = dev_priv->skl_preferred_vco_freq;
2169 
2170 	for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
2171 		if (!crtc_state->base.enable)
2172 			continue;
2173 
2174 		if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
2175 			continue;
2176 
2177 		/*
2178 		 * DPLL0 VCO may need to be adjusted to get the correct
2179 		 * clock for eDP. This will affect cdclk as well.
2180 		 */
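		/*
		 * port_clock is in kHz, so these cases match the 2.16
		 * and 4.32 GHz eDP link rates that require the 8640 MHz
		 * VCO; all other rates use the 8100 MHz VCO.
		 */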
2181 		switch (crtc_state->port_clock / 2) {
2182 		case 108000:
2183 		case 216000:
2184 			vco = 8640000;
2185 			break;
2186 		default:
2187 			vco = 8100000;
2188 			break;
2189 		}
2190 	}
2191 
2192 	return vco;
2193 }
2194 
2195 static int skl_modeset_calc_cdclk(struct intel_atomic_state *state)
2196 {
2197 	int min_cdclk, cdclk, vco;
2198 
2199 	min_cdclk = intel_compute_min_cdclk(state);
2200 	if (min_cdclk < 0)
2201 		return min_cdclk;
2202 
2203 	vco = skl_dpll0_vco(state);
2204 
2205 	/*
2206 	 * FIXME should also account for plane ratio
2207 	 * once 64bpp pixel formats are supported.
2208 	 */
2209 	cdclk = skl_calc_cdclk(min_cdclk, vco);
2210 
2211 	state->cdclk.logical.vco = vco;
2212 	state->cdclk.logical.cdclk = cdclk;
2213 	state->cdclk.logical.voltage_level =
2214 		skl_calc_voltage_level(cdclk);
2215 
2216 	if (!state->active_pipes) {
2217 		cdclk = skl_calc_cdclk(state->cdclk.force_min_cdclk, vco);
2218 
2219 		state->cdclk.actual.vco = vco;
2220 		state->cdclk.actual.cdclk = cdclk;
2221 		state->cdclk.actual.voltage_level =
2222 			skl_calc_voltage_level(cdclk);
2223 	} else {
2224 		state->cdclk.actual = state->cdclk.logical;
2225 	}
2226 
2227 	return 0;
2228 }
2229 
2230 static int bxt_modeset_calc_cdclk(struct intel_atomic_state *state)
2231 {
2232 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2233 	int min_cdclk, min_voltage_level, cdclk, vco;
2234 
2235 	min_cdclk = intel_compute_min_cdclk(state);
2236 	if (min_cdclk < 0)
2237 		return min_cdclk;
2238 
2239 	min_voltage_level = bxt_compute_min_voltage_level(state);
2240 	if (min_voltage_level < 0)
2241 		return min_voltage_level;
2242 
2243 	cdclk = bxt_calc_cdclk(dev_priv, min_cdclk);
2244 	vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk);
2245 
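	/*
	 * The logical voltage level must satisfy both the cdclk
	 * frequency and the port clock (min_voltage_level) requirements.
	 */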
2246 	state->cdclk.logical.vco = vco;
2247 	state->cdclk.logical.cdclk = cdclk;
2248 	state->cdclk.logical.voltage_level =
2249 		max_t(int, min_voltage_level,
2250 		      dev_priv->display.calc_voltage_level(cdclk));
2251 
2252 	if (!state->active_pipes) {
2253 		cdclk = bxt_calc_cdclk(dev_priv, state->cdclk.force_min_cdclk);
2254 		vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk);
2255 
2256 		state->cdclk.actual.vco = vco;
2257 		state->cdclk.actual.cdclk = cdclk;
2258 		state->cdclk.actual.voltage_level =
2259 			dev_priv->display.calc_voltage_level(cdclk);
2260 	} else {
2261 		state->cdclk.actual = state->cdclk.logical;
2262 	}
2263 
2264 	return 0;
2265 }
2266 
2267 static int intel_modeset_all_pipes(struct intel_atomic_state *state)
2268 {
2269 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2270 	struct intel_crtc *crtc;
2271 
2272 	/*
2273 	 * Add all pipes to the state, and force
2274 	 * a modeset on all the active ones.
2275 	 */
2276 	for_each_intel_crtc(&dev_priv->drm, crtc) {
2277 		struct intel_crtc_state *crtc_state;
2278 		int ret;
2279 
2280 		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
2281 		if (IS_ERR(crtc_state))
2282 			return PTR_ERR(crtc_state);
2283 
2284 		if (!crtc_state->base.active ||
2285 		    drm_atomic_crtc_needs_modeset(&crtc_state->base))
2286 			continue;
2287 
2288 		crtc_state->base.mode_changed = true;
2289 
2290 		ret = drm_atomic_add_affected_connectors(&state->base,
2291 							 &crtc->base);
2292 		if (ret)
2293 			return ret;
2294 
2295 		ret = drm_atomic_add_affected_planes(&state->base,
2296 						     &crtc->base);
2297 		if (ret)
2298 			return ret;
2299 
2300 		crtc_state->update_planes |= crtc_state->active_planes;
2301 	}
2302 
2303 	return 0;
2304 }
2305 
2306 static int fixed_modeset_calc_cdclk(struct intel_atomic_state *state)
2307 {
2308 	int min_cdclk;
2309 
2310 	/*
2311 	 * We can't change the cdclk frequency, but we still want to
2312 	 * check that the required minimum frequency doesn't exceed
2313 	 * the actual cdclk frequency.
2314 	 */
2315 	min_cdclk = intel_compute_min_cdclk(state);
2316 	if (min_cdclk < 0)
2317 		return min_cdclk;
2318 
2319 	return 0;
2320 }
2321 
2322 int intel_modeset_calc_cdclk(struct intel_atomic_state *state)
2323 {
2324 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2325 	enum pipe pipe;
2326 	int ret;
2327 
2328 	ret = dev_priv->display.modeset_calc_cdclk(state);
2329 	if (ret)
2330 		return ret;
2331 
2332 	/*
	 * Writes to dev_priv->cdclk.{actual,logical} must be protected
	 * by holding all the crtc mutexes even if we don't end up
	 * touching the hardware.
2336 	 */
2337 	if (intel_cdclk_changed(&dev_priv->cdclk.actual,
2338 				&state->cdclk.actual)) {
2339 		/*
2340 		 * Also serialize commits across all crtcs
2341 		 * if the actual hw needs to be poked.
2342 		 */
2343 		ret = intel_atomic_serialize_global_state(state);
2344 		if (ret)
2345 			return ret;
2346 	} else if (intel_cdclk_changed(&dev_priv->cdclk.logical,
2347 				       &state->cdclk.logical)) {
2348 		ret = intel_atomic_lock_global_state(state);
2349 		if (ret)
2350 			return ret;
2351 	} else {
2352 		return 0;
2353 	}
2354 
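	/*
	 * If exactly one pipe is (and stays) active we may get away
	 * with a mere cd2x divider update synchronized to that pipe,
	 * instead of forcing a full modeset on all pipes.
	 */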
2355 	if (is_power_of_2(state->active_pipes) &&
2356 	    intel_cdclk_needs_cd2x_update(dev_priv,
2357 					  &dev_priv->cdclk.actual,
2358 					  &state->cdclk.actual)) {
2359 		struct intel_crtc *crtc;
2360 		struct intel_crtc_state *crtc_state;
2361 
2362 		pipe = ilog2(state->active_pipes);
2363 		crtc = intel_get_crtc_for_pipe(dev_priv, pipe);
2364 
2365 		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
2366 		if (IS_ERR(crtc_state))
2367 			return PTR_ERR(crtc_state);
2368 
2369 		if (drm_atomic_crtc_needs_modeset(&crtc_state->base))
2370 			pipe = INVALID_PIPE;
2371 	} else {
2372 		pipe = INVALID_PIPE;
2373 	}
2374 
2375 	if (pipe != INVALID_PIPE) {
2376 		state->cdclk.pipe = pipe;
2377 
2378 		DRM_DEBUG_KMS("Can change cdclk with pipe %c active\n",
2379 			      pipe_name(pipe));
2380 	} else if (intel_cdclk_needs_modeset(&dev_priv->cdclk.actual,
2381 					     &state->cdclk.actual)) {
2382 		/* All pipes must be switched off while we change the cdclk. */
2383 		ret = intel_modeset_all_pipes(state);
2384 		if (ret)
2385 			return ret;
2386 
2387 		state->cdclk.pipe = INVALID_PIPE;
2388 
2389 		DRM_DEBUG_KMS("Modeset required for cdclk change\n");
2390 	}
2391 
2392 	DRM_DEBUG_KMS("New cdclk calculated to be logical %u kHz, actual %u kHz\n",
2393 		      state->cdclk.logical.cdclk,
2394 		      state->cdclk.actual.cdclk);
2395 	DRM_DEBUG_KMS("New voltage level calculated to be logical %u, actual %u\n",
2396 		      state->cdclk.logical.voltage_level,
2397 		      state->cdclk.actual.voltage_level);
2398 
2399 	return 0;
2400 }
2401 
2402 static int intel_compute_max_dotclk(struct drm_i915_private *dev_priv)
2403 {
2404 	int max_cdclk_freq = dev_priv->max_cdclk_freq;
2405 
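	/*
	 * This is the inverse of intel_pixel_rate_to_cdclk(): two
	 * pixels per cdclk cycle on GLK/gen10+, one on HSW/BDW/SKL,
	 * and a 5%/10% guardband on CHV and older platforms (with
	 * double wide pipes doubling the limit on gen3 and earlier).
	 */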
2406 	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
2407 		return 2 * max_cdclk_freq;
2408 	else if (IS_GEN(dev_priv, 9) ||
2409 		 IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
2410 		return max_cdclk_freq;
2411 	else if (IS_CHERRYVIEW(dev_priv))
		return max_cdclk_freq * 95 / 100;
	else if (INTEL_GEN(dev_priv) < 4)
		return 2 * max_cdclk_freq * 90 / 100;
	else
		return max_cdclk_freq * 90 / 100;
2417 }
2418 
2419 /**
 * intel_update_max_cdclk - Determine the maximum supported CDCLK frequency
2421  * @dev_priv: i915 device
2422  *
2423  * Determine the maximum CDCLK frequency the platform supports, and also
 * derive the maximum dot clock frequency that CDCLK frequency allows.
2426  */
2427 void intel_update_max_cdclk(struct drm_i915_private *dev_priv)
2428 {
2429 	if (IS_ELKHARTLAKE(dev_priv)) {
2430 		if (dev_priv->cdclk.hw.ref == 24000)
2431 			dev_priv->max_cdclk_freq = 552000;
2432 		else
2433 			dev_priv->max_cdclk_freq = 556800;
2434 	} else if (INTEL_GEN(dev_priv) >= 11) {
2435 		if (dev_priv->cdclk.hw.ref == 24000)
2436 			dev_priv->max_cdclk_freq = 648000;
2437 		else
2438 			dev_priv->max_cdclk_freq = 652800;
2439 	} else if (IS_CANNONLAKE(dev_priv)) {
2440 		dev_priv->max_cdclk_freq = 528000;
2441 	} else if (IS_GEN9_BC(dev_priv)) {
2442 		u32 limit = I915_READ(SKL_DFSM) & SKL_DFSM_CDCLK_LIMIT_MASK;
2443 		int max_cdclk, vco;
2444 
2445 		vco = dev_priv->skl_preferred_vco_freq;
2446 		WARN_ON(vco != 8100000 && vco != 8640000);
2447 
2448 		/*
		 * Use the lower (VCO 8640 MHz) cdclk values as a
		 * first guess. skl_calc_cdclk() will correct it
		 * if the preferred VCO is 8100 MHz instead.
2452 		 */
2453 		if (limit == SKL_DFSM_CDCLK_LIMIT_675)
2454 			max_cdclk = 617143;
2455 		else if (limit == SKL_DFSM_CDCLK_LIMIT_540)
2456 			max_cdclk = 540000;
2457 		else if (limit == SKL_DFSM_CDCLK_LIMIT_450)
2458 			max_cdclk = 432000;
2459 		else
2460 			max_cdclk = 308571;
2461 
2462 		dev_priv->max_cdclk_freq = skl_calc_cdclk(max_cdclk, vco);
2463 	} else if (IS_GEMINILAKE(dev_priv)) {
2464 		dev_priv->max_cdclk_freq = 316800;
2465 	} else if (IS_BROXTON(dev_priv)) {
2466 		dev_priv->max_cdclk_freq = 624000;
2467 	} else if (IS_BROADWELL(dev_priv))  {
2468 		/*
2469 		 * FIXME with extra cooling we can allow
		 * 540 MHz for ULX and 675 MHz for ULT.
2471 		 * How can we know if extra cooling is
2472 		 * available? PCI ID, VTB, something else?
2473 		 */
2474 		if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
2475 			dev_priv->max_cdclk_freq = 450000;
2476 		else if (IS_BDW_ULX(dev_priv))
2477 			dev_priv->max_cdclk_freq = 450000;
2478 		else if (IS_BDW_ULT(dev_priv))
2479 			dev_priv->max_cdclk_freq = 540000;
2480 		else
2481 			dev_priv->max_cdclk_freq = 675000;
2482 	} else if (IS_CHERRYVIEW(dev_priv)) {
2483 		dev_priv->max_cdclk_freq = 320000;
2484 	} else if (IS_VALLEYVIEW(dev_priv)) {
2485 		dev_priv->max_cdclk_freq = 400000;
2486 	} else {
2487 		/* otherwise assume cdclk is fixed */
2488 		dev_priv->max_cdclk_freq = dev_priv->cdclk.hw.cdclk;
2489 	}
2490 
2491 	dev_priv->max_dotclk_freq = intel_compute_max_dotclk(dev_priv);
2492 
2493 	DRM_DEBUG_DRIVER("Max CD clock rate: %d kHz\n",
2494 			 dev_priv->max_cdclk_freq);
2495 
2496 	DRM_DEBUG_DRIVER("Max dotclock rate: %d kHz\n",
2497 			 dev_priv->max_dotclk_freq);
2498 }
2499 
2500 /**
2501  * intel_update_cdclk - Determine the current CDCLK frequency
2502  * @dev_priv: i915 device
2503  *
2504  * Determine the current CDCLK frequency.
2505  */
2506 void intel_update_cdclk(struct drm_i915_private *dev_priv)
2507 {
2508 	dev_priv->display.get_cdclk(dev_priv, &dev_priv->cdclk.hw);
2509 
2510 	/*
2511 	 * 9:0 CMBUS [sic] CDCLK frequency (cdfreq):
2512 	 * Programmng [sic] note: bit[9:2] should be programmed to the number
2513 	 * of cdclk that generates 4MHz reference clock freq which is used to
2514 	 * generate GMBus clock. This will vary with the cdclk freq.
2515 	 */
2516 	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
2517 		I915_WRITE(GMBUSFREQ_VLV,
2518 			   DIV_ROUND_UP(dev_priv->cdclk.hw.cdclk, 1000));
2519 }
2520 
2521 static int cnp_rawclk(struct drm_i915_private *dev_priv)
2522 {
2523 	u32 rawclk;
2524 	int divider, fraction;
2525 
2526 	if (I915_READ(SFUSE_STRAP) & SFUSE_STRAP_RAW_FREQUENCY) {
2527 		/* 24 MHz */
2528 		divider = 24000;
2529 		fraction = 0;
2530 	} else {
2531 		/* 19.2 MHz */
2532 		divider = 19000;
2533 		fraction = 200;
2534 	}
2535 
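	/*
	 * The divider field takes the integer MHz part; the fractional
	 * part is expressed as numerator/(DEN + 1), so 19.2 MHz is
	 * programmed as 19 + 1/5 (numerator 1, DEN 4).
	 */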
2536 	rawclk = CNP_RAWCLK_DIV(divider / 1000);
2537 	if (fraction) {
2538 		int numerator = 1;
2539 
2540 		rawclk |= CNP_RAWCLK_DEN(DIV_ROUND_CLOSEST(numerator * 1000,
2541 							   fraction) - 1);
2542 		if (INTEL_PCH_TYPE(dev_priv) >= PCH_ICP)
2543 			rawclk |= ICP_RAWCLK_NUM(numerator);
2544 	}
2545 
2546 	I915_WRITE(PCH_RAWCLK_FREQ, rawclk);
2547 	return divider + fraction;
2548 }
2549 
2550 static int pch_rawclk(struct drm_i915_private *dev_priv)
2551 {
2552 	return (I915_READ(PCH_RAWCLK_FREQ) & RAWCLK_FREQ_MASK) * 1000;
2553 }
2554 
2555 static int vlv_hrawclk(struct drm_i915_private *dev_priv)
2556 {
2557 	/* RAWCLK_FREQ_VLV register updated from power well code */
2558 	return vlv_get_cck_clock_hpll(dev_priv, "hrawclk",
2559 				      CCK_DISPLAY_REF_CLOCK_CONTROL);
2560 }
2561 
2562 static int g4x_hrawclk(struct drm_i915_private *dev_priv)
2563 {
2564 	u32 clkcfg;
2565 
2566 	/* hrawclock is 1/4 the FSB frequency */
2567 	clkcfg = I915_READ(CLKCFG);
2568 	switch (clkcfg & CLKCFG_FSB_MASK) {
2569 	case CLKCFG_FSB_400:
2570 		return 100000;
2571 	case CLKCFG_FSB_533:
2572 		return 133333;
2573 	case CLKCFG_FSB_667:
2574 		return 166667;
2575 	case CLKCFG_FSB_800:
2576 		return 200000;
2577 	case CLKCFG_FSB_1067:
2578 	case CLKCFG_FSB_1067_ALT:
2579 		return 266667;
2580 	case CLKCFG_FSB_1333:
2581 	case CLKCFG_FSB_1333_ALT:
2582 		return 333333;
2583 	default:
2584 		return 133333;
2585 	}
2586 }
2587 
2588 /**
2589  * intel_update_rawclk - Determine the current RAWCLK frequency
2590  * @dev_priv: i915 device
2591  *
2592  * Determine the current RAWCLK frequency. RAWCLK is a fixed
 * frequency clock, so this needs to be done only once.
2594  */
2595 void intel_update_rawclk(struct drm_i915_private *dev_priv)
2596 {
2597 	if (INTEL_PCH_TYPE(dev_priv) >= PCH_CNP)
2598 		dev_priv->rawclk_freq = cnp_rawclk(dev_priv);
2599 	else if (HAS_PCH_SPLIT(dev_priv))
2600 		dev_priv->rawclk_freq = pch_rawclk(dev_priv);
2601 	else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
2602 		dev_priv->rawclk_freq = vlv_hrawclk(dev_priv);
2603 	else if (IS_G4X(dev_priv) || IS_PINEVIEW(dev_priv))
2604 		dev_priv->rawclk_freq = g4x_hrawclk(dev_priv);
2605 	else
2606 		/* no rawclk on other platforms, or no need to know it */
2607 		return;
2608 
2609 	DRM_DEBUG_DRIVER("rawclk rate: %d kHz\n", dev_priv->rawclk_freq);
2610 }
2611 
2612 /**
2613  * intel_init_cdclk_hooks - Initialize CDCLK related modesetting hooks
2614  * @dev_priv: i915 device
2615  */
2616 void intel_init_cdclk_hooks(struct drm_i915_private *dev_priv)
2617 {
2618 	if (IS_ELKHARTLAKE(dev_priv)) {
2619 		dev_priv->display.set_cdclk = bxt_set_cdclk;
2620 		dev_priv->display.modeset_calc_cdclk = bxt_modeset_calc_cdclk;
2621 		dev_priv->display.calc_voltage_level = ehl_calc_voltage_level;
2622 		dev_priv->cdclk.table = icl_cdclk_table;
2623 	} else if (INTEL_GEN(dev_priv) >= 11) {
2624 		dev_priv->display.set_cdclk = bxt_set_cdclk;
2625 		dev_priv->display.modeset_calc_cdclk = bxt_modeset_calc_cdclk;
2626 		dev_priv->display.calc_voltage_level = icl_calc_voltage_level;
2627 		dev_priv->cdclk.table = icl_cdclk_table;
2628 	} else if (IS_CANNONLAKE(dev_priv)) {
2629 		dev_priv->display.set_cdclk = bxt_set_cdclk;
2630 		dev_priv->display.modeset_calc_cdclk = bxt_modeset_calc_cdclk;
2631 		dev_priv->display.calc_voltage_level = cnl_calc_voltage_level;
2632 		dev_priv->cdclk.table = cnl_cdclk_table;
2633 	} else if (IS_GEN9_LP(dev_priv)) {
2634 		dev_priv->display.set_cdclk = bxt_set_cdclk;
2635 		dev_priv->display.modeset_calc_cdclk = bxt_modeset_calc_cdclk;
2636 		dev_priv->display.calc_voltage_level = bxt_calc_voltage_level;
2637 		if (IS_GEMINILAKE(dev_priv))
2638 			dev_priv->cdclk.table = glk_cdclk_table;
2639 		else
2640 			dev_priv->cdclk.table = bxt_cdclk_table;
2641 	} else if (IS_GEN9_BC(dev_priv)) {
2642 		dev_priv->display.set_cdclk = skl_set_cdclk;
2643 		dev_priv->display.modeset_calc_cdclk = skl_modeset_calc_cdclk;
2644 	} else if (IS_BROADWELL(dev_priv)) {
2645 		dev_priv->display.set_cdclk = bdw_set_cdclk;
2646 		dev_priv->display.modeset_calc_cdclk = bdw_modeset_calc_cdclk;
2647 	} else if (IS_CHERRYVIEW(dev_priv)) {
2648 		dev_priv->display.set_cdclk = chv_set_cdclk;
2649 		dev_priv->display.modeset_calc_cdclk = vlv_modeset_calc_cdclk;
2650 	} else if (IS_VALLEYVIEW(dev_priv)) {
2651 		dev_priv->display.set_cdclk = vlv_set_cdclk;
2652 		dev_priv->display.modeset_calc_cdclk = vlv_modeset_calc_cdclk;
2653 	} else {
2654 		dev_priv->display.modeset_calc_cdclk = fixed_modeset_calc_cdclk;
2655 	}
2656 
2657 	if (INTEL_GEN(dev_priv) >= 10 || IS_GEN9_LP(dev_priv))
2658 		dev_priv->display.get_cdclk = bxt_get_cdclk;
2659 	else if (IS_GEN9_BC(dev_priv))
2660 		dev_priv->display.get_cdclk = skl_get_cdclk;
2661 	else if (IS_BROADWELL(dev_priv))
2662 		dev_priv->display.get_cdclk = bdw_get_cdclk;
2663 	else if (IS_HASWELL(dev_priv))
2664 		dev_priv->display.get_cdclk = hsw_get_cdclk;
2665 	else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
2666 		dev_priv->display.get_cdclk = vlv_get_cdclk;
2667 	else if (IS_GEN(dev_priv, 6) || IS_IVYBRIDGE(dev_priv))
2668 		dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
2669 	else if (IS_GEN(dev_priv, 5))
2670 		dev_priv->display.get_cdclk = fixed_450mhz_get_cdclk;
2671 	else if (IS_GM45(dev_priv))
2672 		dev_priv->display.get_cdclk = gm45_get_cdclk;
2673 	else if (IS_G45(dev_priv))
2674 		dev_priv->display.get_cdclk = g33_get_cdclk;
2675 	else if (IS_I965GM(dev_priv))
2676 		dev_priv->display.get_cdclk = i965gm_get_cdclk;
2677 	else if (IS_I965G(dev_priv))
2678 		dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
2679 	else if (IS_PINEVIEW(dev_priv))
2680 		dev_priv->display.get_cdclk = pnv_get_cdclk;
2681 	else if (IS_G33(dev_priv))
2682 		dev_priv->display.get_cdclk = g33_get_cdclk;
2683 	else if (IS_I945GM(dev_priv))
2684 		dev_priv->display.get_cdclk = i945gm_get_cdclk;
2685 	else if (IS_I945G(dev_priv))
2686 		dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
2687 	else if (IS_I915GM(dev_priv))
2688 		dev_priv->display.get_cdclk = i915gm_get_cdclk;
2689 	else if (IS_I915G(dev_priv))
2690 		dev_priv->display.get_cdclk = fixed_333mhz_get_cdclk;
2691 	else if (IS_I865G(dev_priv))
2692 		dev_priv->display.get_cdclk = fixed_266mhz_get_cdclk;
2693 	else if (IS_I85X(dev_priv))
2694 		dev_priv->display.get_cdclk = i85x_get_cdclk;
2695 	else if (IS_I845G(dev_priv))
2696 		dev_priv->display.get_cdclk = fixed_200mhz_get_cdclk;
2697 	else { /* 830 */
2698 		WARN(!IS_I830(dev_priv),
2699 		     "Unknown platform. Assuming 133 MHz CDCLK\n");
2700 		dev_priv->display.get_cdclk = fixed_133mhz_get_cdclk;
2701 	}
2702 }
2703