1 /*
2  * Copyright © 2006-2017 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23 
24 #include "intel_atomic.h"
25 #include "intel_cdclk.h"
26 #include "intel_display_types.h"
27 #include "intel_sideband.h"
28 
29 /**
30  * DOC: CDCLK / RAWCLK
31  *
32  * The display engine uses several different clocks to do its work. There
33  * are two main clocks involved that aren't directly related to the actual
34  * pixel clock or any symbol/bit clock of the actual output port. These
35  * are the core display clock (CDCLK) and RAWCLK.
36  *
37  * CDCLK clocks most of the display pipe logic, and thus its frequency
38  * must be high enough to support the rate at which pixels are flowing
39  * through the pipes. Downscaling must also be accounted for, as it
40  * increases the effective pixel rate.
41  *
42  * On several platforms the CDCLK frequency can be changed dynamically
43  * to minimize power consumption for a given display configuration.
44  * Typically changes to the CDCLK frequency require all the display pipes
45  * to be shut down while the frequency is being changed.
46  *
47  * On SKL+ the DMC will toggle the CDCLK off/on during DC5/6 entry/exit.
48  * The DMC will not change the active CDCLK frequency, however, so that
49  * part is still performed directly by the driver.
50  *
51  * RAWCLK is a fixed frequency clock, often used by various auxiliary
52  * blocks such as AUX CH or backlight PWM. Hence the only thing we
53  * really need to know about RAWCLK is its frequency so that various
54  * dividers can be programmed correctly.
55  */
56 
57 static void fixed_133mhz_get_cdclk(struct drm_i915_private *dev_priv,
58 				   struct intel_cdclk_state *cdclk_state)
59 {
60 	cdclk_state->cdclk = 133333;
61 }
62 
63 static void fixed_200mhz_get_cdclk(struct drm_i915_private *dev_priv,
64 				   struct intel_cdclk_state *cdclk_state)
65 {
66 	cdclk_state->cdclk = 200000;
67 }
68 
69 static void fixed_266mhz_get_cdclk(struct drm_i915_private *dev_priv,
70 				   struct intel_cdclk_state *cdclk_state)
71 {
72 	cdclk_state->cdclk = 266667;
73 }
74 
75 static void fixed_333mhz_get_cdclk(struct drm_i915_private *dev_priv,
76 				   struct intel_cdclk_state *cdclk_state)
77 {
78 	cdclk_state->cdclk = 333333;
79 }
80 
81 static void fixed_400mhz_get_cdclk(struct drm_i915_private *dev_priv,
82 				   struct intel_cdclk_state *cdclk_state)
83 {
84 	cdclk_state->cdclk = 400000;
85 }
86 
87 static void fixed_450mhz_get_cdclk(struct drm_i915_private *dev_priv,
88 				   struct intel_cdclk_state *cdclk_state)
89 {
90 	cdclk_state->cdclk = 450000;
91 }
92 
93 static void i85x_get_cdclk(struct drm_i915_private *dev_priv,
94 			   struct intel_cdclk_state *cdclk_state)
95 {
96 	struct pci_dev *pdev = dev_priv->drm.pdev;
97 	u16 hpllcc = 0;
98 
99 	/*
100 	 * 852GM/852GMV only support 133 MHz, and their HPLLCC
101 	 * encoding is different :(
102 	 * FIXME is this the right way to detect 852GM/852GMV?
103 	 */
104 	if (pdev->revision == 0x1) {
105 		cdclk_state->cdclk = 133333;
106 		return;
107 	}
108 
109 	pci_bus_read_config_word(pdev->bus,
110 				 PCI_DEVFN(0, 3), HPLLCC, &hpllcc);
111 
112 	/* Assume that the hardware is in the high speed state.  This
113 	 * should be the default.
114 	 */
115 	switch (hpllcc & GC_CLOCK_CONTROL_MASK) {
116 	case GC_CLOCK_133_200:
117 	case GC_CLOCK_133_200_2:
118 	case GC_CLOCK_100_200:
119 		cdclk_state->cdclk = 200000;
120 		break;
121 	case GC_CLOCK_166_250:
122 		cdclk_state->cdclk = 250000;
123 		break;
124 	case GC_CLOCK_100_133:
125 		cdclk_state->cdclk = 133333;
126 		break;
127 	case GC_CLOCK_133_266:
128 	case GC_CLOCK_133_266_2:
129 	case GC_CLOCK_166_266:
130 		cdclk_state->cdclk = 266667;
131 		break;
132 	}
133 }
134 
135 static void i915gm_get_cdclk(struct drm_i915_private *dev_priv,
136 			     struct intel_cdclk_state *cdclk_state)
137 {
138 	struct pci_dev *pdev = dev_priv->drm.pdev;
139 	u16 gcfgc = 0;
140 
141 	pci_read_config_word(pdev, GCFGC, &gcfgc);
142 
143 	if (gcfgc & GC_LOW_FREQUENCY_ENABLE) {
144 		cdclk_state->cdclk = 133333;
145 		return;
146 	}
147 
148 	switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
149 	case GC_DISPLAY_CLOCK_333_320_MHZ:
150 		cdclk_state->cdclk = 333333;
151 		break;
152 	default:
153 	case GC_DISPLAY_CLOCK_190_200_MHZ:
154 		cdclk_state->cdclk = 190000;
155 		break;
156 	}
157 }
158 
159 static void i945gm_get_cdclk(struct drm_i915_private *dev_priv,
160 			     struct intel_cdclk_state *cdclk_state)
161 {
162 	struct pci_dev *pdev = dev_priv->drm.pdev;
163 	u16 gcfgc = 0;
164 
165 	pci_read_config_word(pdev, GCFGC, &gcfgc);
166 
167 	if (gcfgc & GC_LOW_FREQUENCY_ENABLE) {
168 		cdclk_state->cdclk = 133333;
169 		return;
170 	}
171 
172 	switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
173 	case GC_DISPLAY_CLOCK_333_320_MHZ:
174 		cdclk_state->cdclk = 320000;
175 		break;
176 	default:
177 	case GC_DISPLAY_CLOCK_190_200_MHZ:
178 		cdclk_state->cdclk = 200000;
179 		break;
180 	}
181 }
182 
183 static unsigned int intel_hpll_vco(struct drm_i915_private *dev_priv)
184 {
185 	static const unsigned int blb_vco[8] = {
186 		[0] = 3200000,
187 		[1] = 4000000,
188 		[2] = 5333333,
189 		[3] = 4800000,
190 		[4] = 6400000,
191 	};
192 	static const unsigned int pnv_vco[8] = {
193 		[0] = 3200000,
194 		[1] = 4000000,
195 		[2] = 5333333,
196 		[3] = 4800000,
197 		[4] = 2666667,
198 	};
199 	static const unsigned int cl_vco[8] = {
200 		[0] = 3200000,
201 		[1] = 4000000,
202 		[2] = 5333333,
203 		[3] = 6400000,
204 		[4] = 3333333,
205 		[5] = 3566667,
206 		[6] = 4266667,
207 	};
208 	static const unsigned int elk_vco[8] = {
209 		[0] = 3200000,
210 		[1] = 4000000,
211 		[2] = 5333333,
212 		[3] = 4800000,
213 	};
214 	static const unsigned int ctg_vco[8] = {
215 		[0] = 3200000,
216 		[1] = 4000000,
217 		[2] = 5333333,
218 		[3] = 6400000,
219 		[4] = 2666667,
220 		[5] = 4266667,
221 	};
222 	const unsigned int *vco_table;
223 	unsigned int vco;
224 	u8 tmp = 0;
225 
226 	/* FIXME other chipsets? */
227 	if (IS_GM45(dev_priv))
228 		vco_table = ctg_vco;
229 	else if (IS_G45(dev_priv))
230 		vco_table = elk_vco;
231 	else if (IS_I965GM(dev_priv))
232 		vco_table = cl_vco;
233 	else if (IS_PINEVIEW(dev_priv))
234 		vco_table = pnv_vco;
235 	else if (IS_G33(dev_priv))
236 		vco_table = blb_vco;
237 	else
238 		return 0;
239 
240 	tmp = I915_READ(IS_PINEVIEW(dev_priv) || IS_MOBILE(dev_priv) ?
241 			HPLLVCO_MOBILE : HPLLVCO);
242 
243 	vco = vco_table[tmp & 0x7];
244 	if (vco == 0)
245 		DRM_ERROR("Bad HPLL VCO (HPLLVCO=0x%02x)\n", tmp);
246 	else
247 		DRM_DEBUG_KMS("HPLL VCO %u kHz\n", vco);
248 
249 	return vco;
250 }
251 
252 static void g33_get_cdclk(struct drm_i915_private *dev_priv,
253 			  struct intel_cdclk_state *cdclk_state)
254 {
255 	struct pci_dev *pdev = dev_priv->drm.pdev;
256 	static const u8 div_3200[] = { 12, 10,  8,  7, 5, 16 };
257 	static const u8 div_4000[] = { 14, 12, 10,  8, 6, 20 };
258 	static const u8 div_4800[] = { 20, 14, 12, 10, 8, 24 };
259 	static const u8 div_5333[] = { 20, 16, 12, 12, 8, 28 };
260 	const u8 *div_table;
261 	unsigned int cdclk_sel;
262 	u16 tmp = 0;
263 
264 	cdclk_state->vco = intel_hpll_vco(dev_priv);
265 
266 	pci_read_config_word(pdev, GCFGC, &tmp);
267 
268 	cdclk_sel = (tmp >> 4) & 0x7;
269 
270 	if (cdclk_sel >= ARRAY_SIZE(div_3200))
271 		goto fail;
272 
273 	switch (cdclk_state->vco) {
274 	case 3200000:
275 		div_table = div_3200;
276 		break;
277 	case 4000000:
278 		div_table = div_4000;
279 		break;
280 	case 4800000:
281 		div_table = div_4800;
282 		break;
283 	case 5333333:
284 		div_table = div_5333;
285 		break;
286 	default:
287 		goto fail;
288 	}
289 
290 	cdclk_state->cdclk = DIV_ROUND_CLOSEST(cdclk_state->vco,
291 					       div_table[cdclk_sel]);
292 	return;
293 
294 fail:
295 	DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u kHz, CFGC=0x%08x\n",
296 		  cdclk_state->vco, tmp);
297 	cdclk_state->cdclk = 190476;
298 }
299 
300 static void pnv_get_cdclk(struct drm_i915_private *dev_priv,
301 			  struct intel_cdclk_state *cdclk_state)
302 {
303 	struct pci_dev *pdev = dev_priv->drm.pdev;
304 	u16 gcfgc = 0;
305 
306 	pci_read_config_word(pdev, GCFGC, &gcfgc);
307 
308 	switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
309 	case GC_DISPLAY_CLOCK_267_MHZ_PNV:
310 		cdclk_state->cdclk = 266667;
311 		break;
312 	case GC_DISPLAY_CLOCK_333_MHZ_PNV:
313 		cdclk_state->cdclk = 333333;
314 		break;
315 	case GC_DISPLAY_CLOCK_444_MHZ_PNV:
316 		cdclk_state->cdclk = 444444;
317 		break;
318 	case GC_DISPLAY_CLOCK_200_MHZ_PNV:
319 		cdclk_state->cdclk = 200000;
320 		break;
321 	default:
322 		DRM_ERROR("Unknown pnv display core clock 0x%04x\n", gcfgc);
323 		/* fall through */
324 	case GC_DISPLAY_CLOCK_133_MHZ_PNV:
325 		cdclk_state->cdclk = 133333;
326 		break;
327 	case GC_DISPLAY_CLOCK_167_MHZ_PNV:
328 		cdclk_state->cdclk = 166667;
329 		break;
330 	}
331 }
332 
333 static void i965gm_get_cdclk(struct drm_i915_private *dev_priv,
334 			     struct intel_cdclk_state *cdclk_state)
335 {
336 	struct pci_dev *pdev = dev_priv->drm.pdev;
337 	static const u8 div_3200[] = { 16, 10,  8 };
338 	static const u8 div_4000[] = { 20, 12, 10 };
339 	static const u8 div_5333[] = { 24, 16, 14 };
340 	const u8 *div_table;
341 	unsigned int cdclk_sel;
342 	u16 tmp = 0;
343 
344 	cdclk_state->vco = intel_hpll_vco(dev_priv);
345 
346 	pci_read_config_word(pdev, GCFGC, &tmp);
347 
348 	cdclk_sel = ((tmp >> 8) & 0x1f) - 1;
349 
350 	if (cdclk_sel >= ARRAY_SIZE(div_3200))
351 		goto fail;
352 
353 	switch (cdclk_state->vco) {
354 	case 3200000:
355 		div_table = div_3200;
356 		break;
357 	case 4000000:
358 		div_table = div_4000;
359 		break;
360 	case 5333333:
361 		div_table = div_5333;
362 		break;
363 	default:
364 		goto fail;
365 	}
366 
367 	cdclk_state->cdclk = DIV_ROUND_CLOSEST(cdclk_state->vco,
368 					       div_table[cdclk_sel]);
369 	return;
370 
371 fail:
372 	DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u kHz, CFGC=0x%04x\n",
373 		  cdclk_state->vco, tmp);
374 	cdclk_state->cdclk = 200000;
375 }
376 
377 static void gm45_get_cdclk(struct drm_i915_private *dev_priv,
378 			   struct intel_cdclk_state *cdclk_state)
379 {
380 	struct pci_dev *pdev = dev_priv->drm.pdev;
381 	unsigned int cdclk_sel;
382 	u16 tmp = 0;
383 
384 	cdclk_state->vco = intel_hpll_vco(dev_priv);
385 
386 	pci_read_config_word(pdev, GCFGC, &tmp);
387 
388 	cdclk_sel = (tmp >> 12) & 0x1;
389 
390 	switch (cdclk_state->vco) {
391 	case 2666667:
392 	case 4000000:
393 	case 5333333:
394 		cdclk_state->cdclk = cdclk_sel ? 333333 : 222222;
395 		break;
396 	case 3200000:
397 		cdclk_state->cdclk = cdclk_sel ? 320000 : 228571;
398 		break;
399 	default:
400 		DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u, CFGC=0x%04x\n",
401 			  cdclk_state->vco, tmp);
402 		cdclk_state->cdclk = 222222;
403 		break;
404 	}
405 }
406 
407 static void hsw_get_cdclk(struct drm_i915_private *dev_priv,
408 			  struct intel_cdclk_state *cdclk_state)
409 {
410 	u32 lcpll = I915_READ(LCPLL_CTL);
411 	u32 freq = lcpll & LCPLL_CLK_FREQ_MASK;
412 
413 	if (lcpll & LCPLL_CD_SOURCE_FCLK)
414 		cdclk_state->cdclk = 800000;
415 	else if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
416 		cdclk_state->cdclk = 450000;
417 	else if (freq == LCPLL_CLK_FREQ_450)
418 		cdclk_state->cdclk = 450000;
419 	else if (IS_HSW_ULT(dev_priv))
420 		cdclk_state->cdclk = 337500;
421 	else
422 		cdclk_state->cdclk = 540000;
423 }
424 
425 static int vlv_calc_cdclk(struct drm_i915_private *dev_priv, int min_cdclk)
426 {
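	/*
	 * 320 MHz is only reachable when it divides 2*HPLL evenly
	 * (presumably because the CCK divider, 2*HPLL/cdclk, must be an
	 * integer); otherwise fall back to 333.33 MHz.
	 */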
427 	int freq_320 = (dev_priv->hpll_freq <<  1) % 320000 != 0 ?
428 		333333 : 320000;
429 
430 	/*
431 	 * We seem to get an unstable or solid color picture at 200MHz.
432 	 * Not sure what's wrong. For now use 200MHz only when all pipes
433 	 * are off.
434 	 */
435 	if (IS_VALLEYVIEW(dev_priv) && min_cdclk > freq_320)
436 		return 400000;
437 	else if (min_cdclk > 266667)
438 		return freq_320;
439 	else if (min_cdclk > 0)
440 		return 266667;
441 	else
442 		return 200000;
443 }
444 
445 static u8 vlv_calc_voltage_level(struct drm_i915_private *dev_priv, int cdclk)
446 {
447 	if (IS_VALLEYVIEW(dev_priv)) {
448 		if (cdclk >= 320000) /* jump to highest voltage for 400MHz too */
449 			return 2;
450 		else if (cdclk >= 266667)
451 			return 1;
452 		else
453 			return 0;
454 	} else {
455 		/*
456 		 * Specs are full of misinformation, but testing on actual
457 		 * hardware has shown that we just need to write the desired
458 		 * CCK divider into the Punit register.
459 		 */
460 		return DIV_ROUND_CLOSEST(dev_priv->hpll_freq << 1, cdclk) - 1;
461 	}
462 }
463 
464 static void vlv_get_cdclk(struct drm_i915_private *dev_priv,
465 			  struct intel_cdclk_state *cdclk_state)
466 {
467 	u32 val;
468 
469 	vlv_iosf_sb_get(dev_priv,
470 			BIT(VLV_IOSF_SB_CCK) | BIT(VLV_IOSF_SB_PUNIT));
471 
472 	cdclk_state->vco = vlv_get_hpll_vco(dev_priv);
473 	cdclk_state->cdclk = vlv_get_cck_clock(dev_priv, "cdclk",
474 					       CCK_DISPLAY_CLOCK_CONTROL,
475 					       cdclk_state->vco);
476 
477 	val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
478 
479 	vlv_iosf_sb_put(dev_priv,
480 			BIT(VLV_IOSF_SB_CCK) | BIT(VLV_IOSF_SB_PUNIT));
481 
482 	if (IS_VALLEYVIEW(dev_priv))
483 		cdclk_state->voltage_level = (val & DSPFREQGUAR_MASK) >>
484 			DSPFREQGUAR_SHIFT;
485 	else
486 		cdclk_state->voltage_level = (val & DSPFREQGUAR_MASK_CHV) >>
487 			DSPFREQGUAR_SHIFT_CHV;
488 }
489 
490 static void vlv_program_pfi_credits(struct drm_i915_private *dev_priv)
491 {
492 	unsigned int credits, default_credits;
493 
494 	if (IS_CHERRYVIEW(dev_priv))
495 		default_credits = PFI_CREDIT(12);
496 	else
497 		default_credits = PFI_CREDIT(8);
498 
499 	if (dev_priv->cdclk.hw.cdclk >= dev_priv->czclk_freq) {
500 		/* CHV suggested value is 31 or 63 */
501 		if (IS_CHERRYVIEW(dev_priv))
502 			credits = PFI_CREDIT_63;
503 		else
504 			credits = PFI_CREDIT(15);
505 	} else {
506 		credits = default_credits;
507 	}
508 
509 	/*
510 	 * WA - write default credits before re-programming
511 	 * FIXME: should we also set the resend bit here?
512 	 */
513 	I915_WRITE(GCI_CONTROL, VGA_FAST_MODE_DISABLE |
514 		   default_credits);
515 
516 	I915_WRITE(GCI_CONTROL, VGA_FAST_MODE_DISABLE |
517 		   credits | PFI_CREDIT_RESEND);
518 
519 	/*
520 	 * FIXME is this guaranteed to clear
521 	 * immediately or should we poll for it?
522 	 */
523 	WARN_ON(I915_READ(GCI_CONTROL) & PFI_CREDIT_RESEND);
524 }
525 
526 static void vlv_set_cdclk(struct drm_i915_private *dev_priv,
527 			  const struct intel_cdclk_state *cdclk_state,
528 			  enum pipe pipe)
529 {
530 	int cdclk = cdclk_state->cdclk;
531 	u32 val, cmd = cdclk_state->voltage_level;
532 	intel_wakeref_t wakeref;
533 
534 	switch (cdclk) {
535 	case 400000:
536 	case 333333:
537 	case 320000:
538 	case 266667:
539 	case 200000:
540 		break;
541 	default:
542 		MISSING_CASE(cdclk);
543 		return;
544 	}
545 
546 	/* There are cases where we can end up here with power domains
547 	 * off and a CDCLK frequency other than the minimum, like when
548 	 * issuing a modeset without actually changing any display after
549 	 * a system suspend.  So grab the display core domain, which covers
550 	 * the HW blocks needed for the following programming.
551 	 */
552 	wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_DISPLAY_CORE);
553 
554 	vlv_iosf_sb_get(dev_priv,
555 			BIT(VLV_IOSF_SB_CCK) |
556 			BIT(VLV_IOSF_SB_BUNIT) |
557 			BIT(VLV_IOSF_SB_PUNIT));
558 
559 	val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
560 	val &= ~DSPFREQGUAR_MASK;
561 	val |= (cmd << DSPFREQGUAR_SHIFT);
562 	vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);
563 	if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) &
564 		      DSPFREQSTAT_MASK) == (cmd << DSPFREQSTAT_SHIFT),
565 		     50)) {
566 		DRM_ERROR("timed out waiting for CDclk change\n");
567 	}
568 
569 	if (cdclk == 400000) {
570 		u32 divider;
571 
572 		divider = DIV_ROUND_CLOSEST(dev_priv->hpll_freq << 1,
573 					    cdclk) - 1;
574 
575 		/* adjust cdclk divider */
576 		val = vlv_cck_read(dev_priv, CCK_DISPLAY_CLOCK_CONTROL);
577 		val &= ~CCK_FREQUENCY_VALUES;
578 		val |= divider;
579 		vlv_cck_write(dev_priv, CCK_DISPLAY_CLOCK_CONTROL, val);
580 
581 		if (wait_for((vlv_cck_read(dev_priv, CCK_DISPLAY_CLOCK_CONTROL) &
582 			      CCK_FREQUENCY_STATUS) == (divider << CCK_FREQUENCY_STATUS_SHIFT),
583 			     50))
584 			DRM_ERROR("timed out waiting for CDclk change\n");
585 	}
586 
587 	/* adjust self-refresh exit latency value */
588 	val = vlv_bunit_read(dev_priv, BUNIT_REG_BISOC);
589 	val &= ~0x7f;
590 
591 	/*
592 	 * For high bandwidth configs, we set a higher latency in the bunit
593 	 * so that the core display fetch happens in time to avoid underruns.
594 	 */
595 	if (cdclk == 400000)
596 		val |= 4500 / 250; /* 4.5 usec */
597 	else
598 		val |= 3000 / 250; /* 3.0 usec */
599 	vlv_bunit_write(dev_priv, BUNIT_REG_BISOC, val);
600 
601 	vlv_iosf_sb_put(dev_priv,
602 			BIT(VLV_IOSF_SB_CCK) |
603 			BIT(VLV_IOSF_SB_BUNIT) |
604 			BIT(VLV_IOSF_SB_PUNIT));
605 
606 	intel_update_cdclk(dev_priv);
607 
608 	vlv_program_pfi_credits(dev_priv);
609 
610 	intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
611 }
612 
613 static void chv_set_cdclk(struct drm_i915_private *dev_priv,
614 			  const struct intel_cdclk_state *cdclk_state,
615 			  enum pipe pipe)
616 {
617 	int cdclk = cdclk_state->cdclk;
618 	u32 val, cmd = cdclk_state->voltage_level;
619 	intel_wakeref_t wakeref;
620 
621 	switch (cdclk) {
622 	case 333333:
623 	case 320000:
624 	case 266667:
625 	case 200000:
626 		break;
627 	default:
628 		MISSING_CASE(cdclk);
629 		return;
630 	}
631 
632 	/* There are cases where we can end up here with power domains
633 	 * off and a CDCLK frequency other than the minimum, like when
634 	 * issuing a modeset without actually changing any display after
635 	 * a system suspend.  So grab the display core domain, which covers
636 	 * the HW blocks needed for the following programming.
637 	 */
638 	wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_DISPLAY_CORE);
639 
640 	vlv_punit_get(dev_priv);
641 	val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
642 	val &= ~DSPFREQGUAR_MASK_CHV;
643 	val |= (cmd << DSPFREQGUAR_SHIFT_CHV);
644 	vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);
645 	if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) &
646 		      DSPFREQSTAT_MASK_CHV) == (cmd << DSPFREQSTAT_SHIFT_CHV),
647 		     50)) {
648 		DRM_ERROR("timed out waiting for CDclk change\n");
649 	}
650 
651 	vlv_punit_put(dev_priv);
652 
653 	intel_update_cdclk(dev_priv);
654 
655 	vlv_program_pfi_credits(dev_priv);
656 
657 	intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
658 }
659 
660 static int bdw_calc_cdclk(int min_cdclk)
661 {
662 	if (min_cdclk > 540000)
663 		return 675000;
664 	else if (min_cdclk > 450000)
665 		return 540000;
666 	else if (min_cdclk > 337500)
667 		return 450000;
668 	else
669 		return 337500;
670 }
671 
672 static u8 bdw_calc_voltage_level(int cdclk)
673 {
674 	switch (cdclk) {
675 	default:
676 	case 337500:
677 		return 2;
678 	case 450000:
679 		return 0;
680 	case 540000:
681 		return 1;
682 	case 675000:
683 		return 3;
684 	}
685 }
686 
687 static void bdw_get_cdclk(struct drm_i915_private *dev_priv,
688 			  struct intel_cdclk_state *cdclk_state)
689 {
690 	u32 lcpll = I915_READ(LCPLL_CTL);
691 	u32 freq = lcpll & LCPLL_CLK_FREQ_MASK;
692 
693 	if (lcpll & LCPLL_CD_SOURCE_FCLK)
694 		cdclk_state->cdclk = 800000;
695 	else if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
696 		cdclk_state->cdclk = 450000;
697 	else if (freq == LCPLL_CLK_FREQ_450)
698 		cdclk_state->cdclk = 450000;
699 	else if (freq == LCPLL_CLK_FREQ_54O_BDW)
700 		cdclk_state->cdclk = 540000;
701 	else if (freq == LCPLL_CLK_FREQ_337_5_BDW)
702 		cdclk_state->cdclk = 337500;
703 	else
704 		cdclk_state->cdclk = 675000;
705 
706 	/*
707 	 * Can't read this out :( Let's assume it's
708 	 * at least what the CDCLK frequency requires.
709 	 */
710 	cdclk_state->voltage_level =
711 		bdw_calc_voltage_level(cdclk_state->cdclk);
712 }
713 
714 static void bdw_set_cdclk(struct drm_i915_private *dev_priv,
715 			  const struct intel_cdclk_state *cdclk_state,
716 			  enum pipe pipe)
717 {
718 	int cdclk = cdclk_state->cdclk;
719 	u32 val;
720 	int ret;
721 
722 	if (WARN((I915_READ(LCPLL_CTL) &
723 		  (LCPLL_PLL_DISABLE | LCPLL_PLL_LOCK |
724 		   LCPLL_CD_CLOCK_DISABLE | LCPLL_ROOT_CD_CLOCK_DISABLE |
725 		   LCPLL_CD2X_CLOCK_DISABLE | LCPLL_POWER_DOWN_ALLOW |
726 		   LCPLL_CD_SOURCE_FCLK)) != LCPLL_PLL_LOCK,
727 		 "trying to change cdclk frequency with cdclk not enabled\n"))
728 		return;
729 
730 	ret = sandybridge_pcode_write(dev_priv,
731 				      BDW_PCODE_DISPLAY_FREQ_CHANGE_REQ, 0x0);
732 	if (ret) {
733 		DRM_ERROR("failed to inform pcode about cdclk change\n");
734 		return;
735 	}
736 
737 	val = I915_READ(LCPLL_CTL);
738 	val |= LCPLL_CD_SOURCE_FCLK;
739 	I915_WRITE(LCPLL_CTL, val);
740 
741 	/*
742 	 * According to the spec, it should be enough to poll for this for 1 us.
743 	 * However, extensive testing shows that this can take longer.
744 	 */
745 	if (wait_for_us(I915_READ(LCPLL_CTL) &
746 			LCPLL_CD_SOURCE_FCLK_DONE, 100))
747 		DRM_ERROR("Switching to FCLK failed\n");
748 
749 	val = I915_READ(LCPLL_CTL);
750 	val &= ~LCPLL_CLK_FREQ_MASK;
751 
752 	switch (cdclk) {
753 	default:
754 		MISSING_CASE(cdclk);
755 		/* fall through */
756 	case 337500:
757 		val |= LCPLL_CLK_FREQ_337_5_BDW;
758 		break;
759 	case 450000:
760 		val |= LCPLL_CLK_FREQ_450;
761 		break;
762 	case 540000:
763 		val |= LCPLL_CLK_FREQ_54O_BDW;
764 		break;
765 	case 675000:
766 		val |= LCPLL_CLK_FREQ_675_BDW;
767 		break;
768 	}
769 
770 	I915_WRITE(LCPLL_CTL, val);
771 
772 	val = I915_READ(LCPLL_CTL);
773 	val &= ~LCPLL_CD_SOURCE_FCLK;
774 	I915_WRITE(LCPLL_CTL, val);
775 
776 	if (wait_for_us((I915_READ(LCPLL_CTL) &
777 			LCPLL_CD_SOURCE_FCLK_DONE) == 0, 1))
778 		DRM_ERROR("Switching back to LCPLL failed\n");
779 
780 	sandybridge_pcode_write(dev_priv, HSW_PCODE_DE_WRITE_FREQ_REQ,
781 				cdclk_state->voltage_level);
782 
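	/*
	 * CDCLK_FREQ is programmed as the CD frequency in MHz minus one,
	 * e.g. 675000 kHz -> 674.
	 */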
783 	I915_WRITE(CDCLK_FREQ, DIV_ROUND_CLOSEST(cdclk, 1000) - 1);
784 
785 	intel_update_cdclk(dev_priv);
786 }
787 
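/*
 * The legal SKL CDCLK frequencies are fixed (rounded) divisions of the two
 * possible DPLL0 VCOs: 8640 MHz / {14, 16, 20, 28} and
 * 8100 MHz / {12, 15, 18, 24}, e.g. 8640000 / 16 = 540000 kHz. Pick the
 * lowest one that satisfies min_cdclk.
 */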
788 static int skl_calc_cdclk(int min_cdclk, int vco)
789 {
790 	if (vco == 8640000) {
791 		if (min_cdclk > 540000)
792 			return 617143;
793 		else if (min_cdclk > 432000)
794 			return 540000;
795 		else if (min_cdclk > 308571)
796 			return 432000;
797 		else
798 			return 308571;
799 	} else {
800 		if (min_cdclk > 540000)
801 			return 675000;
802 		else if (min_cdclk > 450000)
803 			return 540000;
804 		else if (min_cdclk > 337500)
805 			return 450000;
806 		else
807 			return 337500;
808 	}
809 }
810 
811 static u8 skl_calc_voltage_level(int cdclk)
812 {
813 	if (cdclk > 540000)
814 		return 3;
815 	else if (cdclk > 450000)
816 		return 2;
817 	else if (cdclk > 337500)
818 		return 1;
819 	else
820 		return 0;
821 }
822 
823 static void skl_dpll0_update(struct drm_i915_private *dev_priv,
824 			     struct intel_cdclk_state *cdclk_state)
825 {
826 	u32 val;
827 
828 	cdclk_state->ref = 24000;
829 	cdclk_state->vco = 0;
830 
831 	val = I915_READ(LCPLL1_CTL);
832 	if ((val & LCPLL_PLL_ENABLE) == 0)
833 		return;
834 
835 	if (WARN_ON((val & LCPLL_PLL_LOCK) == 0))
836 		return;
837 
838 	val = I915_READ(DPLL_CTRL1);
839 
840 	if (WARN_ON((val & (DPLL_CTRL1_HDMI_MODE(SKL_DPLL0) |
841 			    DPLL_CTRL1_SSC(SKL_DPLL0) |
842 			    DPLL_CTRL1_OVERRIDE(SKL_DPLL0))) !=
843 		    DPLL_CTRL1_OVERRIDE(SKL_DPLL0)))
844 		return;
845 
846 	switch (val & DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0)) {
847 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810, SKL_DPLL0):
848 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1350, SKL_DPLL0):
849 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1620, SKL_DPLL0):
850 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2700, SKL_DPLL0):
851 		cdclk_state->vco = 8100000;
852 		break;
853 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080, SKL_DPLL0):
854 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2160, SKL_DPLL0):
855 		cdclk_state->vco = 8640000;
856 		break;
857 	default:
858 		MISSING_CASE(val & DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0));
859 		break;
860 	}
861 }
862 
863 static void skl_get_cdclk(struct drm_i915_private *dev_priv,
864 			  struct intel_cdclk_state *cdclk_state)
865 {
866 	u32 cdctl;
867 
868 	skl_dpll0_update(dev_priv, cdclk_state);
869 
870 	cdclk_state->cdclk = cdclk_state->bypass = cdclk_state->ref;
871 
872 	if (cdclk_state->vco == 0)
873 		goto out;
874 
875 	cdctl = I915_READ(CDCLK_CTL);
876 
877 	if (cdclk_state->vco == 8640000) {
878 		switch (cdctl & CDCLK_FREQ_SEL_MASK) {
879 		case CDCLK_FREQ_450_432:
880 			cdclk_state->cdclk = 432000;
881 			break;
882 		case CDCLK_FREQ_337_308:
883 			cdclk_state->cdclk = 308571;
884 			break;
885 		case CDCLK_FREQ_540:
886 			cdclk_state->cdclk = 540000;
887 			break;
888 		case CDCLK_FREQ_675_617:
889 			cdclk_state->cdclk = 617143;
890 			break;
891 		default:
892 			MISSING_CASE(cdctl & CDCLK_FREQ_SEL_MASK);
893 			break;
894 		}
895 	} else {
896 		switch (cdctl & CDCLK_FREQ_SEL_MASK) {
897 		case CDCLK_FREQ_450_432:
898 			cdclk_state->cdclk = 450000;
899 			break;
900 		case CDCLK_FREQ_337_308:
901 			cdclk_state->cdclk = 337500;
902 			break;
903 		case CDCLK_FREQ_540:
904 			cdclk_state->cdclk = 540000;
905 			break;
906 		case CDCLK_FREQ_675_617:
907 			cdclk_state->cdclk = 675000;
908 			break;
909 		default:
910 			MISSING_CASE(cdctl & CDCLK_FREQ_SEL_MASK);
911 			break;
912 		}
913 	}
914 
915  out:
916 	/*
917 	 * Can't read this out :( Let's assume it's
918 	 * at least what the CDCLK frequency requires.
919 	 */
920 	cdclk_state->voltage_level =
921 		skl_calc_voltage_level(cdclk_state->cdclk);
922 }
923 
924 /* convert from kHz to .1 fixpoint MHz with -1MHz offset */
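/*
 * For example, 337500 kHz -> (337500 - 1000) / 500 = 673, i.e. 336.5 MHz
 * expressed in 0.5 MHz units, as expected by the CDCLK_CTL decimal field.
 */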
925 static int skl_cdclk_decimal(int cdclk)
926 {
927 	return DIV_ROUND_CLOSEST(cdclk - 1000, 500);
928 }
929 
930 static void skl_set_preferred_cdclk_vco(struct drm_i915_private *dev_priv,
931 					int vco)
932 {
933 	bool changed = dev_priv->skl_preferred_vco_freq != vco;
934 
935 	dev_priv->skl_preferred_vco_freq = vco;
936 
937 	if (changed)
938 		intel_update_max_cdclk(dev_priv);
939 }
940 
941 static void skl_dpll0_enable(struct drm_i915_private *dev_priv, int vco)
942 {
943 	u32 val;
944 
945 	WARN_ON(vco != 8100000 && vco != 8640000);
946 
947 	/*
948 	 * We always enable DPLL0 with the lowest link rate possible, but still
949 	 * taking into account the VCO required to operate the eDP panel at the
950 	 * desired frequency. The usual DP link rates operate with a VCO of
951 	 * 8100 while the eDP 1.4 alternate link rates need a VCO of 8640.
952 	 * The modeset code is responsible for the selection of the exact link
953 	 * rate later on, with the constraint of choosing a frequency that
954 	 * works with vco.
955 	 */
956 	val = I915_READ(DPLL_CTRL1);
957 
958 	val &= ~(DPLL_CTRL1_HDMI_MODE(SKL_DPLL0) | DPLL_CTRL1_SSC(SKL_DPLL0) |
959 		 DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0));
960 	val |= DPLL_CTRL1_OVERRIDE(SKL_DPLL0);
961 	if (vco == 8640000)
962 		val |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080,
963 					    SKL_DPLL0);
964 	else
965 		val |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810,
966 					    SKL_DPLL0);
967 
968 	I915_WRITE(DPLL_CTRL1, val);
969 	POSTING_READ(DPLL_CTRL1);
970 
971 	I915_WRITE(LCPLL1_CTL, I915_READ(LCPLL1_CTL) | LCPLL_PLL_ENABLE);
972 
973 	if (intel_de_wait_for_set(dev_priv, LCPLL1_CTL, LCPLL_PLL_LOCK, 5))
974 		DRM_ERROR("DPLL0 not locked\n");
975 
976 	dev_priv->cdclk.hw.vco = vco;
977 
978 	/* We'll want to keep using the current vco from now on. */
979 	skl_set_preferred_cdclk_vco(dev_priv, vco);
980 }
981 
982 static void skl_dpll0_disable(struct drm_i915_private *dev_priv)
983 {
984 	I915_WRITE(LCPLL1_CTL, I915_READ(LCPLL1_CTL) & ~LCPLL_PLL_ENABLE);
985 	if (intel_de_wait_for_clear(dev_priv, LCPLL1_CTL, LCPLL_PLL_LOCK, 1))
986 		DRM_ERROR("Couldn't disable DPLL0\n");
987 
988 	dev_priv->cdclk.hw.vco = 0;
989 }
990 
991 static void skl_set_cdclk(struct drm_i915_private *dev_priv,
992 			  const struct intel_cdclk_state *cdclk_state,
993 			  enum pipe pipe)
994 {
995 	int cdclk = cdclk_state->cdclk;
996 	int vco = cdclk_state->vco;
997 	u32 freq_select, cdclk_ctl;
998 	int ret;
999 
1000 	/*
1001 	 * Based on WA#1183, the 308 and 617 MHz CDCLK rates are unsupported
1002 	 * on SKL. In theory this should never happen, since only the eDP 1.4
1003 	 * 2.16 and 4.32 Gbps link rates require them, and eDP 1.4 is not
1004 	 * supported on SKL either; see the above WA. WARN whenever trying to
1005 	 * use the corresponding VCO freq, as that always leads to using the
1006 	 * minimum 308 MHz CDCLK.
1007 	 */
1008 	WARN_ON_ONCE(IS_SKYLAKE(dev_priv) && vco == 8640000);
1009 
1010 	ret = skl_pcode_request(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1011 				SKL_CDCLK_PREPARE_FOR_CHANGE,
1012 				SKL_CDCLK_READY_FOR_CHANGE,
1013 				SKL_CDCLK_READY_FOR_CHANGE, 3);
1014 	if (ret) {
1015 		DRM_ERROR("Failed to inform PCU about cdclk change (%d)\n",
1016 			  ret);
1017 		return;
1018 	}
1019 
1020 	/* Choose frequency for this cdclk */
1021 	switch (cdclk) {
1022 	default:
1023 		WARN_ON(cdclk != dev_priv->cdclk.hw.bypass);
1024 		WARN_ON(vco != 0);
1025 		/* fall through */
1026 	case 308571:
1027 	case 337500:
1028 		freq_select = CDCLK_FREQ_337_308;
1029 		break;
1030 	case 450000:
1031 	case 432000:
1032 		freq_select = CDCLK_FREQ_450_432;
1033 		break;
1034 	case 540000:
1035 		freq_select = CDCLK_FREQ_540;
1036 		break;
1037 	case 617143:
1038 	case 675000:
1039 		freq_select = CDCLK_FREQ_675_617;
1040 		break;
1041 	}
1042 
1043 	if (dev_priv->cdclk.hw.vco != 0 &&
1044 	    dev_priv->cdclk.hw.vco != vco)
1045 		skl_dpll0_disable(dev_priv);
1046 
1047 	cdclk_ctl = I915_READ(CDCLK_CTL);
1048 
1049 	if (dev_priv->cdclk.hw.vco != vco) {
1050 		/* Wa Display #1183: skl,kbl,cfl */
1051 		cdclk_ctl &= ~(CDCLK_FREQ_SEL_MASK | CDCLK_FREQ_DECIMAL_MASK);
1052 		cdclk_ctl |= freq_select | skl_cdclk_decimal(cdclk);
1053 		I915_WRITE(CDCLK_CTL, cdclk_ctl);
1054 	}
1055 
1056 	/* Wa Display #1183: skl,kbl,cfl */
1057 	cdclk_ctl |= CDCLK_DIVMUX_CD_OVERRIDE;
1058 	I915_WRITE(CDCLK_CTL, cdclk_ctl);
1059 	POSTING_READ(CDCLK_CTL);
1060 
1061 	if (dev_priv->cdclk.hw.vco != vco)
1062 		skl_dpll0_enable(dev_priv, vco);
1063 
1064 	/* Wa Display #1183: skl,kbl,cfl */
1065 	cdclk_ctl &= ~(CDCLK_FREQ_SEL_MASK | CDCLK_FREQ_DECIMAL_MASK);
1066 	I915_WRITE(CDCLK_CTL, cdclk_ctl);
1067 
1068 	cdclk_ctl |= freq_select | skl_cdclk_decimal(cdclk);
1069 	I915_WRITE(CDCLK_CTL, cdclk_ctl);
1070 
1071 	/* Wa Display #1183: skl,kbl,cfl */
1072 	cdclk_ctl &= ~CDCLK_DIVMUX_CD_OVERRIDE;
1073 	I915_WRITE(CDCLK_CTL, cdclk_ctl);
1074 	POSTING_READ(CDCLK_CTL);
1075 
1076 	/* inform PCU of the change */
1077 	sandybridge_pcode_write(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1078 				cdclk_state->voltage_level);
1079 
1080 	intel_update_cdclk(dev_priv);
1081 }
1082 
1083 static void skl_sanitize_cdclk(struct drm_i915_private *dev_priv)
1084 {
1085 	u32 cdctl, expected;
1086 
1087 	/*
1088 	 * Check if the pre-OS initialized the display. The pre-OS sets the
1089 	 * SWF18 scratchpad register, which OS drivers can read to check
1090 	 * that status.
1091 	 */
1092 	if ((I915_READ(SWF_ILK(0x18)) & 0x00FFFFFF) == 0)
1093 		goto sanitize;
1094 
1095 	intel_update_cdclk(dev_priv);
1096 	intel_dump_cdclk_state(&dev_priv->cdclk.hw, "Current CDCLK");
1097 
1098 	/* Is PLL enabled and locked ? */
1099 	if (dev_priv->cdclk.hw.vco == 0 ||
1100 	    dev_priv->cdclk.hw.cdclk == dev_priv->cdclk.hw.bypass)
1101 		goto sanitize;
1102 
1103 	/* DPLL okay; verify the cdclk
1104 	 *
1105 	 * In some instances the frequency selection is correct but the
1106 	 * decimal part is programmed wrong by the BIOS when the pre-OS does
1107 	 * not enable the display. Verify that as well.
1108 	 */
1109 	cdctl = I915_READ(CDCLK_CTL);
1110 	expected = (cdctl & CDCLK_FREQ_SEL_MASK) |
1111 		skl_cdclk_decimal(dev_priv->cdclk.hw.cdclk);
1112 	if (cdctl == expected)
1113 		/* All well; nothing to sanitize */
1114 		return;
1115 
1116 sanitize:
1117 	DRM_DEBUG_KMS("Sanitizing cdclk programmed by pre-os\n");
1118 
1119 	/* force cdclk programming */
1120 	dev_priv->cdclk.hw.cdclk = 0;
1121 	/* force full PLL disable + enable */
1122 	dev_priv->cdclk.hw.vco = -1;
1123 }
1124 
1125 static void skl_init_cdclk(struct drm_i915_private *dev_priv)
1126 {
1127 	struct intel_cdclk_state cdclk_state;
1128 
1129 	skl_sanitize_cdclk(dev_priv);
1130 
1131 	if (dev_priv->cdclk.hw.cdclk != 0 &&
1132 	    dev_priv->cdclk.hw.vco != 0) {
1133 		/*
1134 		 * Use the current vco as our initial
1135 		 * guess as to what the preferred vco is.
1136 		 */
1137 		if (dev_priv->skl_preferred_vco_freq == 0)
1138 			skl_set_preferred_cdclk_vco(dev_priv,
1139 						    dev_priv->cdclk.hw.vco);
1140 		return;
1141 	}
1142 
1143 	cdclk_state = dev_priv->cdclk.hw;
1144 
1145 	cdclk_state.vco = dev_priv->skl_preferred_vco_freq;
1146 	if (cdclk_state.vco == 0)
1147 		cdclk_state.vco = 8100000;
1148 	cdclk_state.cdclk = skl_calc_cdclk(0, cdclk_state.vco);
1149 	cdclk_state.voltage_level = skl_calc_voltage_level(cdclk_state.cdclk);
1150 
1151 	skl_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1152 }
1153 
1154 static void skl_uninit_cdclk(struct drm_i915_private *dev_priv)
1155 {
1156 	struct intel_cdclk_state cdclk_state = dev_priv->cdclk.hw;
1157 
1158 	cdclk_state.cdclk = cdclk_state.bypass;
1159 	cdclk_state.vco = 0;
1160 	cdclk_state.voltage_level = skl_calc_voltage_level(cdclk_state.cdclk);
1161 
1162 	skl_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1163 }
1164 
1165 static const struct intel_cdclk_vals bxt_cdclk_table[] = {
1166 	{ .refclk = 19200, .cdclk = 144000, .divider = 8, .ratio = 60 },
1167 	{ .refclk = 19200, .cdclk = 288000, .divider = 4, .ratio = 60 },
1168 	{ .refclk = 19200, .cdclk = 384000, .divider = 3, .ratio = 60 },
1169 	{ .refclk = 19200, .cdclk = 576000, .divider = 2, .ratio = 60 },
1170 	{ .refclk = 19200, .cdclk = 624000, .divider = 2, .ratio = 65 },
1171 	{}
1172 };
1173 
1174 static const struct intel_cdclk_vals glk_cdclk_table[] = {
1175 	{ .refclk = 19200, .cdclk =  79200, .divider = 8, .ratio = 33 },
1176 	{ .refclk = 19200, .cdclk = 158400, .divider = 4, .ratio = 33 },
1177 	{ .refclk = 19200, .cdclk = 316800, .divider = 2, .ratio = 33 },
1178 	{}
1179 };
1180 
1181 static const struct intel_cdclk_vals cnl_cdclk_table[] = {
1182 	{ .refclk = 19200, .cdclk = 168000, .divider = 4, .ratio = 35 },
1183 	{ .refclk = 19200, .cdclk = 336000, .divider = 2, .ratio = 35 },
1184 	{ .refclk = 19200, .cdclk = 528000, .divider = 2, .ratio = 55 },
1185 
1186 	{ .refclk = 24000, .cdclk = 168000, .divider = 4, .ratio = 28 },
1187 	{ .refclk = 24000, .cdclk = 336000, .divider = 2, .ratio = 28 },
1188 	{ .refclk = 24000, .cdclk = 528000, .divider = 2, .ratio = 44 },
1189 	{}
1190 };
1191 
1192 static const struct intel_cdclk_vals icl_cdclk_table[] = {
1193 	{ .refclk = 19200, .cdclk = 172800, .divider = 2, .ratio = 18 },
1194 	{ .refclk = 19200, .cdclk = 192000, .divider = 2, .ratio = 20 },
1195 	{ .refclk = 19200, .cdclk = 307200, .divider = 2, .ratio = 32 },
1196 	{ .refclk = 19200, .cdclk = 326400, .divider = 4, .ratio = 68 },
1197 	{ .refclk = 19200, .cdclk = 556800, .divider = 2, .ratio = 58 },
1198 	{ .refclk = 19200, .cdclk = 652800, .divider = 2, .ratio = 68 },
1199 
1200 	{ .refclk = 24000, .cdclk = 180000, .divider = 2, .ratio = 15 },
1201 	{ .refclk = 24000, .cdclk = 192000, .divider = 2, .ratio = 16 },
1202 	{ .refclk = 24000, .cdclk = 312000, .divider = 2, .ratio = 26 },
1203 	{ .refclk = 24000, .cdclk = 324000, .divider = 4, .ratio = 54 },
1204 	{ .refclk = 24000, .cdclk = 552000, .divider = 2, .ratio = 46 },
1205 	{ .refclk = 24000, .cdclk = 648000, .divider = 2, .ratio = 54 },
1206 
1207 	{ .refclk = 38400, .cdclk = 172800, .divider = 2, .ratio =  9 },
1208 	{ .refclk = 38400, .cdclk = 192000, .divider = 2, .ratio = 10 },
1209 	{ .refclk = 38400, .cdclk = 307200, .divider = 2, .ratio = 16 },
1210 	{ .refclk = 38400, .cdclk = 326400, .divider = 4, .ratio = 34 },
1211 	{ .refclk = 38400, .cdclk = 556800, .divider = 2, .ratio = 29 },
1212 	{ .refclk = 38400, .cdclk = 652800, .divider = 2, .ratio = 34 },
1213 	{}
1214 };
1215 
1216 static int bxt_calc_cdclk(struct drm_i915_private *dev_priv, int min_cdclk)
1217 {
1218 	const struct intel_cdclk_vals *table = dev_priv->cdclk.table;
1219 	int i;
1220 
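	/*
	 * The per-platform tables above are sorted by ascending cdclk, so
	 * the first entry matching the reference clock that also satisfies
	 * min_cdclk is the lowest usable frequency.
	 */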
1221 	for (i = 0; table[i].refclk; i++)
1222 		if (table[i].refclk == dev_priv->cdclk.hw.ref &&
1223 		    table[i].cdclk >= min_cdclk)
1224 			return table[i].cdclk;
1225 
1226 	WARN(1, "Cannot satisfy minimum cdclk %d with refclk %u\n",
1227 	     min_cdclk, dev_priv->cdclk.hw.ref);
1228 	return 0;
1229 }
1230 
1231 static int bxt_calc_cdclk_pll_vco(struct drm_i915_private *dev_priv, int cdclk)
1232 {
1233 	const struct intel_cdclk_vals *table = dev_priv->cdclk.table;
1234 	int i;
1235 
1236 	if (cdclk == dev_priv->cdclk.hw.bypass)
1237 		return 0;
1238 
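	/*
	 * The CDCLK PLL VCO is simply refclk * ratio from the table, e.g.
	 * the GLK 316800 kHz entry with a 19200 kHz refclk and ratio 33
	 * gives a 633600 kHz VCO.
	 */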
1239 	for (i = 0; table[i].refclk; i++)
1240 		if (table[i].refclk == dev_priv->cdclk.hw.ref &&
1241 		    table[i].cdclk == cdclk)
1242 			return dev_priv->cdclk.hw.ref * table[i].ratio;
1243 
1244 	WARN(1, "cdclk %d not valid for refclk %u\n",
1245 	     cdclk, dev_priv->cdclk.hw.ref);
1246 	return 0;
1247 }
1248 
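/*
 * The voltage level handed to pcode appears to simply be the cdclk
 * expressed in 25 MHz units, rounded up (e.g. 316800 kHz -> 13).
 */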
1249 static u8 bxt_calc_voltage_level(int cdclk)
1250 {
1251 	return DIV_ROUND_UP(cdclk, 25000);
1252 }
1253 
1254 static u8 cnl_calc_voltage_level(int cdclk)
1255 {
1256 	if (cdclk > 336000)
1257 		return 2;
1258 	else if (cdclk > 168000)
1259 		return 1;
1260 	else
1261 		return 0;
1262 }
1263 
1264 static u8 icl_calc_voltage_level(int cdclk)
1265 {
1266 	if (cdclk > 556800)
1267 		return 2;
1268 	else if (cdclk > 312000)
1269 		return 1;
1270 	else
1271 		return 0;
1272 }
1273 
1274 static u8 ehl_calc_voltage_level(int cdclk)
1275 {
1276 	if (cdclk > 326400)
1277 		return 3;
1278 	else if (cdclk > 312000)
1279 		return 2;
1280 	else if (cdclk > 180000)
1281 		return 1;
1282 	else
1283 		return 0;
1284 }
1285 
1286 static void cnl_readout_refclk(struct drm_i915_private *dev_priv,
1287 			       struct intel_cdclk_state *cdclk_state)
1288 {
1289 	if (I915_READ(SKL_DSSM) & CNL_DSSM_CDCLK_PLL_REFCLK_24MHz)
1290 		cdclk_state->ref = 24000;
1291 	else
1292 		cdclk_state->ref = 19200;
1293 }
1294 
1295 static void icl_readout_refclk(struct drm_i915_private *dev_priv,
1296 			       struct intel_cdclk_state *cdclk_state)
1297 {
1298 	u32 dssm = I915_READ(SKL_DSSM) & ICL_DSSM_CDCLK_PLL_REFCLK_MASK;
1299 
1300 	switch (dssm) {
1301 	default:
1302 		MISSING_CASE(dssm);
1303 		/* fall through */
1304 	case ICL_DSSM_CDCLK_PLL_REFCLK_24MHz:
1305 		cdclk_state->ref = 24000;
1306 		break;
1307 	case ICL_DSSM_CDCLK_PLL_REFCLK_19_2MHz:
1308 		cdclk_state->ref = 19200;
1309 		break;
1310 	case ICL_DSSM_CDCLK_PLL_REFCLK_38_4MHz:
1311 		cdclk_state->ref = 38400;
1312 		break;
1313 	}
1314 }
1315 
1316 static void bxt_de_pll_readout(struct drm_i915_private *dev_priv,
1317 			       struct intel_cdclk_state *cdclk_state)
1318 {
1319 	u32 val, ratio;
1320 
1321 	if (INTEL_GEN(dev_priv) >= 11)
1322 		icl_readout_refclk(dev_priv, cdclk_state);
1323 	else if (IS_CANNONLAKE(dev_priv))
1324 		cnl_readout_refclk(dev_priv, cdclk_state);
1325 	else
1326 		cdclk_state->ref = 19200;
1327 
1328 	val = I915_READ(BXT_DE_PLL_ENABLE);
1329 	if ((val & BXT_DE_PLL_PLL_ENABLE) == 0 ||
1330 	    (val & BXT_DE_PLL_LOCK) == 0) {
1331 		/*
1332 		 * CDCLK PLL is disabled; the VCO/ratio doesn't matter, and
1333 		 * setting the VCO to zero is how we signal that.
1334 		 */
1335 		cdclk_state->vco = 0;
1336 		return;
1337 	}
1338 
1339 	/*
1340 	 * CNL+ have the ratio directly in the PLL enable register, gen9lp had
1341 	 * it in a separate PLL control register.
1342 	 */
1343 	if (INTEL_GEN(dev_priv) >= 10)
1344 		ratio = val & CNL_CDCLK_PLL_RATIO_MASK;
1345 	else
1346 		ratio = I915_READ(BXT_DE_PLL_CTL) & BXT_DE_PLL_RATIO_MASK;
1347 
1348 	cdclk_state->vco = ratio * cdclk_state->ref;
1349 }
1350 
1351 static void bxt_get_cdclk(struct drm_i915_private *dev_priv,
1352 			  struct intel_cdclk_state *cdclk_state)
1353 {
1354 	u32 divider;
1355 	int div;
1356 
1357 	bxt_de_pll_readout(dev_priv, cdclk_state);
1358 
1359 	if (INTEL_GEN(dev_priv) >= 12)
1360 		cdclk_state->bypass = cdclk_state->ref / 2;
1361 	else if (INTEL_GEN(dev_priv) >= 11)
1362 		cdclk_state->bypass = 50000;
1363 	else
1364 		cdclk_state->bypass = cdclk_state->ref;
1365 
1366 	if (cdclk_state->vco == 0) {
1367 		cdclk_state->cdclk = cdclk_state->bypass;
1368 		goto out;
1369 	}
1370 
1371 	divider = I915_READ(CDCLK_CTL) & BXT_CDCLK_CD2X_DIV_SEL_MASK;
1372 
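	/* CD2X divider of 1/1.5/2/4 -> cdclk = vco / 2 / div, i.e. vco / {2,3,4,8} */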
1373 	switch (divider) {
1374 	case BXT_CDCLK_CD2X_DIV_SEL_1:
1375 		div = 2;
1376 		break;
1377 	case BXT_CDCLK_CD2X_DIV_SEL_1_5:
1378 		WARN(IS_GEMINILAKE(dev_priv) || INTEL_GEN(dev_priv) >= 10,
1379 		     "Unsupported divider\n");
1380 		div = 3;
1381 		break;
1382 	case BXT_CDCLK_CD2X_DIV_SEL_2:
1383 		div = 4;
1384 		break;
1385 	case BXT_CDCLK_CD2X_DIV_SEL_4:
1386 		WARN(INTEL_GEN(dev_priv) >= 10, "Unsupported divider\n");
1387 		div = 8;
1388 		break;
1389 	default:
1390 		MISSING_CASE(divider);
1391 		return;
1392 	}
1393 
1394 	cdclk_state->cdclk = DIV_ROUND_CLOSEST(cdclk_state->vco, div);
1395 
1396  out:
1397 	/*
1398 	 * Can't read this out :( Let's assume it's
1399 	 * at least what the CDCLK frequency requires.
1400 	 */
1401 	cdclk_state->voltage_level =
1402 		dev_priv->display.calc_voltage_level(cdclk_state->cdclk);
1403 }
1404 
1405 static void bxt_de_pll_disable(struct drm_i915_private *dev_priv)
1406 {
1407 	I915_WRITE(BXT_DE_PLL_ENABLE, 0);
1408 
1409 	/* Timeout 200us */
1410 	if (intel_de_wait_for_clear(dev_priv,
1411 				    BXT_DE_PLL_ENABLE, BXT_DE_PLL_LOCK, 1))
1412 		DRM_ERROR("timeout waiting for DE PLL unlock\n");
1413 
1414 	dev_priv->cdclk.hw.vco = 0;
1415 }
1416 
1417 static void bxt_de_pll_enable(struct drm_i915_private *dev_priv, int vco)
1418 {
1419 	int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->cdclk.hw.ref);
1420 	u32 val;
1421 
1422 	val = I915_READ(BXT_DE_PLL_CTL);
1423 	val &= ~BXT_DE_PLL_RATIO_MASK;
1424 	val |= BXT_DE_PLL_RATIO(ratio);
1425 	I915_WRITE(BXT_DE_PLL_CTL, val);
1426 
1427 	I915_WRITE(BXT_DE_PLL_ENABLE, BXT_DE_PLL_PLL_ENABLE);
1428 
1429 	/* Timeout 200us */
1430 	if (intel_de_wait_for_set(dev_priv,
1431 				  BXT_DE_PLL_ENABLE, BXT_DE_PLL_LOCK, 1))
1432 		DRM_ERROR("timeout waiting for DE PLL lock\n");
1433 
1434 	dev_priv->cdclk.hw.vco = vco;
1435 }
1436 
1437 static void cnl_cdclk_pll_disable(struct drm_i915_private *dev_priv)
1438 {
1439 	u32 val;
1440 
1441 	val = I915_READ(BXT_DE_PLL_ENABLE);
1442 	val &= ~BXT_DE_PLL_PLL_ENABLE;
1443 	I915_WRITE(BXT_DE_PLL_ENABLE, val);
1444 
1445 	/* Timeout 200us */
1446 	if (wait_for((I915_READ(BXT_DE_PLL_ENABLE) & BXT_DE_PLL_LOCK) == 0, 1))
1447 		DRM_ERROR("timeout waiting for CDCLK PLL unlock\n");
1448 
1449 	dev_priv->cdclk.hw.vco = 0;
1450 }
1451 
1452 static void cnl_cdclk_pll_enable(struct drm_i915_private *dev_priv, int vco)
1453 {
1454 	int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->cdclk.hw.ref);
1455 	u32 val;
1456 
1457 	val = CNL_CDCLK_PLL_RATIO(ratio);
1458 	I915_WRITE(BXT_DE_PLL_ENABLE, val);
1459 
1460 	val |= BXT_DE_PLL_PLL_ENABLE;
1461 	I915_WRITE(BXT_DE_PLL_ENABLE, val);
1462 
1463 	/* Timeout 200us */
1464 	if (wait_for((I915_READ(BXT_DE_PLL_ENABLE) & BXT_DE_PLL_LOCK) != 0, 1))
1465 		DRM_ERROR("timeout waiting for CDCLK PLL lock\n");
1466 
1467 	dev_priv->cdclk.hw.vco = vco;
1468 }
1469 
1470 static u32 bxt_cdclk_cd2x_pipe(struct drm_i915_private *dev_priv, enum pipe pipe)
1471 {
1472 	if (INTEL_GEN(dev_priv) >= 12) {
1473 		if (pipe == INVALID_PIPE)
1474 			return TGL_CDCLK_CD2X_PIPE_NONE;
1475 		else
1476 			return TGL_CDCLK_CD2X_PIPE(pipe);
1477 	} else if (INTEL_GEN(dev_priv) >= 11) {
1478 		if (pipe == INVALID_PIPE)
1479 			return ICL_CDCLK_CD2X_PIPE_NONE;
1480 		else
1481 			return ICL_CDCLK_CD2X_PIPE(pipe);
1482 	} else {
1483 		if (pipe == INVALID_PIPE)
1484 			return BXT_CDCLK_CD2X_PIPE_NONE;
1485 		else
1486 			return BXT_CDCLK_CD2X_PIPE(pipe);
1487 	}
1488 }
1489 
1490 static void bxt_set_cdclk(struct drm_i915_private *dev_priv,
1491 			  const struct intel_cdclk_state *cdclk_state,
1492 			  enum pipe pipe)
1493 {
1494 	int cdclk = cdclk_state->cdclk;
1495 	int vco = cdclk_state->vco;
1496 	u32 val, divider;
1497 	int ret;
1498 
1499 	/* Inform power controller of upcoming frequency change. */
1500 	if (INTEL_GEN(dev_priv) >= 10)
1501 		ret = skl_pcode_request(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1502 					SKL_CDCLK_PREPARE_FOR_CHANGE,
1503 					SKL_CDCLK_READY_FOR_CHANGE,
1504 					SKL_CDCLK_READY_FOR_CHANGE, 3);
1505 	else
1506 		/*
1507 		 * BSpec requires us to wait up to 150usec, but that leads to
1508 		 * timeouts; the 2ms used here is based on experiment.
1509 		 */
1510 		ret = sandybridge_pcode_write_timeout(dev_priv,
1511 						      HSW_PCODE_DE_WRITE_FREQ_REQ,
1512 						      0x80000000, 150, 2);
1513 
1514 	if (ret) {
1515 		DRM_ERROR("Failed to inform PCU about cdclk change (err %d, freq %d)\n",
1516 			  ret, cdclk);
1517 		return;
1518 	}
1519 
1520 	/* cdclk = vco / 2 / div{1,1.5,2,4} */
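	/*
	 * E.g. the GLK 316800 kHz entry: VCO = 19200 kHz * 33 = 633600 kHz,
	 * so vco/cdclk = 2, which selects a CD2X divider of 1.
	 */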
1521 	switch (DIV_ROUND_CLOSEST(vco, cdclk)) {
1522 	default:
1523 		WARN_ON(cdclk != dev_priv->cdclk.hw.bypass);
1524 		WARN_ON(vco != 0);
1525 		/* fall through */
1526 	case 2:
1527 		divider = BXT_CDCLK_CD2X_DIV_SEL_1;
1528 		break;
1529 	case 3:
1530 		WARN(IS_GEMINILAKE(dev_priv) || INTEL_GEN(dev_priv) >= 10,
1531 		     "Unsupported divider\n");
1532 		divider = BXT_CDCLK_CD2X_DIV_SEL_1_5;
1533 		break;
1534 	case 4:
1535 		divider = BXT_CDCLK_CD2X_DIV_SEL_2;
1536 		break;
1537 	case 8:
1538 		WARN(INTEL_GEN(dev_priv) >= 10, "Unsupported divider\n");
1539 		divider = BXT_CDCLK_CD2X_DIV_SEL_4;
1540 		break;
1541 	}
1542 
1543 	if (INTEL_GEN(dev_priv) >= 10) {
1544 		if (dev_priv->cdclk.hw.vco != 0 &&
1545 		    dev_priv->cdclk.hw.vco != vco)
1546 			cnl_cdclk_pll_disable(dev_priv);
1547 
1548 		if (dev_priv->cdclk.hw.vco != vco)
1549 			cnl_cdclk_pll_enable(dev_priv, vco);
1550 
1551 	} else {
1552 		if (dev_priv->cdclk.hw.vco != 0 &&
1553 		    dev_priv->cdclk.hw.vco != vco)
1554 			bxt_de_pll_disable(dev_priv);
1555 
1556 		if (dev_priv->cdclk.hw.vco != vco)
1557 			bxt_de_pll_enable(dev_priv, vco);
1558 	}
1559 
1560 	val = divider | skl_cdclk_decimal(cdclk) |
1561 		bxt_cdclk_cd2x_pipe(dev_priv, pipe);
1562 
1563 	/*
1564 	 * Disable SSA Precharge when CD clock frequency < 500 MHz,
1565 	 * enable otherwise.
1566 	 */
1567 	if (IS_GEN9_LP(dev_priv) && cdclk >= 500000)
1568 		val |= BXT_CDCLK_SSA_PRECHARGE_ENABLE;
1569 	I915_WRITE(CDCLK_CTL, val);
1570 
1571 	if (pipe != INVALID_PIPE)
1572 		intel_wait_for_vblank(dev_priv, pipe);
1573 
1574 	if (INTEL_GEN(dev_priv) >= 10) {
1575 		ret = sandybridge_pcode_write(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1576 					      cdclk_state->voltage_level);
1577 	} else {
1578 		/*
1579 		 * The timeout isn't specified; the 2 ms used here is based on
1580 		 * experiment.
1581 		 * FIXME: Waiting for the request completion could be delayed
1582 		 * until the next PCODE request based on BSpec.
1583 		 */
1584 		ret = sandybridge_pcode_write_timeout(dev_priv,
1585 						      HSW_PCODE_DE_WRITE_FREQ_REQ,
1586 						      cdclk_state->voltage_level,
1587 						      150, 2);
1588 	}
1589 
1590 	if (ret) {
1591 		DRM_ERROR("PCode CDCLK freq set failed, (err %d, freq %d)\n",
1592 			  ret, cdclk);
1593 		return;
1594 	}
1595 
1596 	intel_update_cdclk(dev_priv);
1597 
1598 	if (INTEL_GEN(dev_priv) >= 10)
1599 		/*
1600 		 * Can't read out the voltage level :(
1601 		 * Let's just assume everything is as expected.
1602 		 */
1603 		dev_priv->cdclk.hw.voltage_level = cdclk_state->voltage_level;
1604 }
1605 
1606 static void bxt_sanitize_cdclk(struct drm_i915_private *dev_priv)
1607 {
1608 	u32 cdctl, expected;
1609 	int cdclk, vco;
1610 
1611 	intel_update_cdclk(dev_priv);
1612 	intel_dump_cdclk_state(&dev_priv->cdclk.hw, "Current CDCLK");
1613 
1614 	if (dev_priv->cdclk.hw.vco == 0 ||
1615 	    dev_priv->cdclk.hw.cdclk == dev_priv->cdclk.hw.bypass)
1616 		goto sanitize;
1617 
1618 	/* DPLL okay; verify the cdclk
1619 	 *
1620 	 * Some BIOS versions leave an incorrect decimal frequency value and
1621 	 * set reserved MBZ bits in CDCLK_CTL, at least when exiting from S4,
1622 	 * so sanitize this register.
1623 	 */
1624 	cdctl = I915_READ(CDCLK_CTL);
1625 	/*
1626 	 * Let's ignore the pipe field, since the BIOS could have configured
1627 	 * the divider update either synced to an active pipe or asynchronously
1628 	 * (PIPE_NONE).
1629 	 */
1630 	cdctl &= ~bxt_cdclk_cd2x_pipe(dev_priv, INVALID_PIPE);
1631 
1632 	/* Make sure this is a legal cdclk value for the platform */
1633 	cdclk = bxt_calc_cdclk(dev_priv, dev_priv->cdclk.hw.cdclk);
1634 	if (cdclk != dev_priv->cdclk.hw.cdclk)
1635 		goto sanitize;
1636 
1637 	/* Make sure the VCO is correct for the cdclk */
1638 	vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk);
1639 	if (vco != dev_priv->cdclk.hw.vco)
1640 		goto sanitize;
1641 
1642 	expected = skl_cdclk_decimal(cdclk);
1643 
1644 	/* Figure out what CD2X divider we should be using for this cdclk */
1645 	switch (DIV_ROUND_CLOSEST(dev_priv->cdclk.hw.vco,
1646 				  dev_priv->cdclk.hw.cdclk)) {
1647 	case 2:
1648 		expected |= BXT_CDCLK_CD2X_DIV_SEL_1;
1649 		break;
1650 	case 3:
1651 		expected |= BXT_CDCLK_CD2X_DIV_SEL_1_5;
1652 		break;
1653 	case 4:
1654 		expected |= BXT_CDCLK_CD2X_DIV_SEL_2;
1655 		break;
1656 	case 8:
1657 		expected |= BXT_CDCLK_CD2X_DIV_SEL_4;
1658 		break;
1659 	default:
1660 		goto sanitize;
1661 	}
1662 
1663 	/*
1664 	 * Disable SSA Precharge when CD clock frequency < 500 MHz,
1665 	 * enable otherwise.
1666 	 */
1667 	if (IS_GEN9_LP(dev_priv) && dev_priv->cdclk.hw.cdclk >= 500000)
1668 		expected |= BXT_CDCLK_SSA_PRECHARGE_ENABLE;
1669 
1670 	if (cdctl == expected)
1671 		/* All well; nothing to sanitize */
1672 		return;
1673 
1674 sanitize:
1675 	DRM_DEBUG_KMS("Sanitizing cdclk programmed by pre-os\n");
1676 
1677 	/* force cdclk programming */
1678 	dev_priv->cdclk.hw.cdclk = 0;
1679 
1680 	/* force full PLL disable + enable */
1681 	dev_priv->cdclk.hw.vco = -1;
1682 }
1683 
1684 static void bxt_init_cdclk(struct drm_i915_private *dev_priv)
1685 {
1686 	struct intel_cdclk_state cdclk_state;
1687 
1688 	bxt_sanitize_cdclk(dev_priv);
1689 
1690 	if (dev_priv->cdclk.hw.cdclk != 0 &&
1691 	    dev_priv->cdclk.hw.vco != 0)
1692 		return;
1693 
1694 	cdclk_state = dev_priv->cdclk.hw;
1695 
1696 	/*
1697 	 * FIXME:
1698 	 * - The initial CDCLK needs to be read from VBT.
1699 	 *   Need to make this change after VBT has changes for BXT.
1700 	 */
1701 	cdclk_state.cdclk = bxt_calc_cdclk(dev_priv, 0);
1702 	cdclk_state.vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk_state.cdclk);
1703 	cdclk_state.voltage_level =
1704 		dev_priv->display.calc_voltage_level(cdclk_state.cdclk);
1705 
1706 	bxt_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1707 }
1708 
1709 static void bxt_uninit_cdclk(struct drm_i915_private *dev_priv)
1710 {
1711 	struct intel_cdclk_state cdclk_state = dev_priv->cdclk.hw;
1712 
1713 	cdclk_state.cdclk = cdclk_state.bypass;
1714 	cdclk_state.vco = 0;
1715 	cdclk_state.voltage_level =
1716 		dev_priv->display.calc_voltage_level(cdclk_state.cdclk);
1717 
1718 	bxt_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1719 }
1720 
1721 /**
1722  * intel_cdclk_init - Initialize CDCLK
1723  * @i915: i915 device
1724  *
1725  * Initialize CDCLK. This consists mainly of initializing dev_priv->cdclk.hw and
1726  * sanitizing the state of the hardware if needed. This is generally done only
1727  * during the display core initialization sequence, after which the DMC will
1728  * take care of turning CDCLK off/on as needed.
1729  */
1730 void intel_cdclk_init(struct drm_i915_private *i915)
1731 {
1732 	if (IS_GEN9_LP(i915) || INTEL_GEN(i915) >= 10)
1733 		bxt_init_cdclk(i915);
1734 	else if (IS_GEN9_BC(i915))
1735 		skl_init_cdclk(i915);
1736 }
1737 
1738 /**
1739  * intel_cdclk_uninit - Uninitialize CDCLK
1740  * @i915: i915 device
1741  *
1742  * Uninitialize CDCLK. This is done only during the display core
1743  * uninitialization sequence.
1744  */
1745 void intel_cdclk_uninit(struct drm_i915_private *i915)
1746 {
1747 	if (INTEL_GEN(i915) >= 10 || IS_GEN9_LP(i915))
1748 		bxt_uninit_cdclk(i915);
1749 	else if (IS_GEN9_BC(i915))
1750 		skl_uninit_cdclk(i915);
1751 }
1752 
1753 /**
1754  * intel_cdclk_needs_modeset - Determine if two CDCLK states require a modeset on all pipes
1755  * @a: first CDCLK state
1756  * @b: second CDCLK state
1757  *
1758  * Returns:
1759  * True if the CDCLK states require pipes to be off during reprogramming, false if not.
1760  */
1761 bool intel_cdclk_needs_modeset(const struct intel_cdclk_state *a,
1762 			       const struct intel_cdclk_state *b)
1763 {
1764 	return a->cdclk != b->cdclk ||
1765 		a->vco != b->vco ||
1766 		a->ref != b->ref;
1767 }
1768 
1769 /**
1770  * intel_cdclk_needs_cd2x_update - Determine if two CDCLK states require a cd2x divider update
1771  * @dev_priv: Not a CDCLK state, it's the drm_i915_private!
1772  * @a: first CDCLK state
1773  * @b: second CDCLK state
1774  *
1775  * Returns:
1776  * True if the CDCLK states require just a cd2x divider update, false if not.
1777  */
1778 static bool intel_cdclk_needs_cd2x_update(struct drm_i915_private *dev_priv,
1779 					  const struct intel_cdclk_state *a,
1780 					  const struct intel_cdclk_state *b)
1781 {
1782 	/* Older hw doesn't have the capability */
1783 	if (INTEL_GEN(dev_priv) < 10 && !IS_GEN9_LP(dev_priv))
1784 		return false;
1785 
1786 	return a->cdclk != b->cdclk &&
1787 		a->vco == b->vco &&
1788 		a->ref == b->ref;
1789 }
1790 
1791 /**
1792  * intel_cdclk_changed - Determine if two CDCLK states are different
1793  * @a: first CDCLK state
1794  * @b: second CDCLK state
1795  *
1796  * Returns:
1797  * True if the CDCLK states don't match, false if they do.
1798  */
1799 static bool intel_cdclk_changed(const struct intel_cdclk_state *a,
1800 				const struct intel_cdclk_state *b)
1801 {
1802 	return intel_cdclk_needs_modeset(a, b) ||
1803 		a->voltage_level != b->voltage_level;
1804 }
1805 
1806 /**
1807  * intel_cdclk_swap_state - make atomic CDCLK configuration effective
1808  * @state: atomic state
1809  *
1810  * This is the CDCLK version of drm_atomic_helper_swap_state() since the
1811  * helper does not handle driver-specific global state.
1812  *
1813  * Similarly to the atomic helpers this function does a complete swap,
1814  * i.e. it also puts the old state into @state. This is used by the commit
1815  * code to determine how CDCLK has changed (for instance did it increase or
1816  * decrease).
1817  */
1818 void intel_cdclk_swap_state(struct intel_atomic_state *state)
1819 {
1820 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
1821 
1822 	swap(state->cdclk.logical, dev_priv->cdclk.logical);
1823 	swap(state->cdclk.actual, dev_priv->cdclk.actual);
1824 }
1825 
1826 void intel_dump_cdclk_state(const struct intel_cdclk_state *cdclk_state,
1827 			    const char *context)
1828 {
1829 	DRM_DEBUG_DRIVER("%s %d kHz, VCO %d kHz, ref %d kHz, bypass %d kHz, voltage level %d\n",
1830 			 context, cdclk_state->cdclk, cdclk_state->vco,
1831 			 cdclk_state->ref, cdclk_state->bypass,
1832 			 cdclk_state->voltage_level);
1833 }
1834 
1835 /**
1836  * intel_set_cdclk - Push the CDCLK state to the hardware
1837  * @dev_priv: i915 device
1838  * @cdclk_state: new CDCLK state
1839  * @pipe: pipe with which to synchronize the update
1840  *
1841  * Program the hardware based on the passed in CDCLK state,
1842  * if necessary.
1843  */
1844 static void intel_set_cdclk(struct drm_i915_private *dev_priv,
1845 			    const struct intel_cdclk_state *cdclk_state,
1846 			    enum pipe pipe)
1847 {
1848 	if (!intel_cdclk_changed(&dev_priv->cdclk.hw, cdclk_state))
1849 		return;
1850 
1851 	if (WARN_ON_ONCE(!dev_priv->display.set_cdclk))
1852 		return;
1853 
1854 	intel_dump_cdclk_state(cdclk_state, "Changing CDCLK to");
1855 
1856 	dev_priv->display.set_cdclk(dev_priv, cdclk_state, pipe);
1857 
1858 	if (WARN(intel_cdclk_changed(&dev_priv->cdclk.hw, cdclk_state),
1859 		 "cdclk state doesn't match!\n")) {
1860 		intel_dump_cdclk_state(&dev_priv->cdclk.hw, "[hw state]");
1861 		intel_dump_cdclk_state(cdclk_state, "[sw state]");
1862 	}
1863 }
1864 
1865 /**
1866  * intel_set_cdclk_pre_plane_update - Push the CDCLK state to the hardware
1867  * @dev_priv: i915 device
1868  * @old_state: old CDCLK state
1869  * @new_state: new CDCLK state
1870  * @pipe: pipe with which to synchronize the update
1871  *
 * If necessary, program the hardware based on the passed in CDCLK state
 * before the HW plane state is updated.
1874  */
1875 void
1876 intel_set_cdclk_pre_plane_update(struct drm_i915_private *dev_priv,
1877 				 const struct intel_cdclk_state *old_state,
1878 				 const struct intel_cdclk_state *new_state,
1879 				 enum pipe pipe)
1880 {
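	/*
	 * Apply the new CDCLK before the planes are updated whenever the
	 * frequency isn't decreasing, or when there is no pipe to
	 * synchronize the update with, so the planes never run with a
	 * too-low CDCLK.
	 */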
1881 	if (pipe == INVALID_PIPE || old_state->cdclk <= new_state->cdclk)
1882 		intel_set_cdclk(dev_priv, new_state, pipe);
1883 }
1884 
1885 /**
1886  * intel_set_cdclk_post_plane_update - Push the CDCLK state to the hardware
1887  * @dev_priv: i915 device
1888  * @old_state: old CDCLK state
1889  * @new_state: new CDCLK state
1890  * @pipe: pipe with which to synchronize the update
1891  *
 * If necessary, program the hardware based on the passed in CDCLK state
 * after the HW plane state has been updated.
1894  */
1895 void
1896 intel_set_cdclk_post_plane_update(struct drm_i915_private *dev_priv,
1897 				  const struct intel_cdclk_state *old_state,
1898 				  const struct intel_cdclk_state *new_state,
1899 				  enum pipe pipe)
1900 {
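	/*
	 * A CDCLK decrease synchronized to a specific pipe is applied only
	 * after the planes have been updated, once the old, higher
	 * requirements no longer apply.
	 */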
1901 	if (pipe != INVALID_PIPE && old_state->cdclk > new_state->cdclk)
1902 		intel_set_cdclk(dev_priv, new_state, pipe);
1903 }
1904 
1905 static int intel_pixel_rate_to_cdclk(const struct intel_crtc_state *crtc_state)
1906 {
1907 	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
1908 	int pixel_rate = crtc_state->pixel_rate;
1909 
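	/*
	 * How many pixels a single CDCLK cycle can push differs by platform:
	 * GLK and gen10+ handle two pixels per CDCLK, HSW/BDW/SKL one pixel
	 * per CDCLK, and older platforms need a ~5-10% guardband on top of
	 * the pixel rate (with double wide mode pushing two pixels per clock).
	 */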
1910 	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
1911 		return DIV_ROUND_UP(pixel_rate, 2);
1912 	else if (IS_GEN(dev_priv, 9) ||
1913 		 IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
1914 		return pixel_rate;
1915 	else if (IS_CHERRYVIEW(dev_priv))
1916 		return DIV_ROUND_UP(pixel_rate * 100, 95);
1917 	else if (crtc_state->double_wide)
1918 		return DIV_ROUND_UP(pixel_rate * 100, 90 * 2);
1919 	else
1920 		return DIV_ROUND_UP(pixel_rate * 100, 90);
1921 }
1922 
1923 static int intel_planes_min_cdclk(const struct intel_crtc_state *crtc_state)
1924 {
1925 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
1926 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1927 	struct intel_plane *plane;
1928 	int min_cdclk = 0;
1929 
1930 	for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, plane)
1931 		min_cdclk = max(crtc_state->min_cdclk[plane->id], min_cdclk);
1932 
1933 	return min_cdclk;
1934 }
1935 
1936 int intel_crtc_compute_min_cdclk(const struct intel_crtc_state *crtc_state)
1937 {
1938 	struct drm_i915_private *dev_priv =
1939 		to_i915(crtc_state->uapi.crtc->dev);
1940 	int min_cdclk;
1941 
1942 	if (!crtc_state->hw.enable)
1943 		return 0;
1944 
1945 	min_cdclk = intel_pixel_rate_to_cdclk(crtc_state);
1946 
1947 	/* pixel rate mustn't exceed 95% of cdclk with IPS on BDW */
1948 	if (IS_BROADWELL(dev_priv) && hsw_crtc_state_ips_capable(crtc_state))
1949 		min_cdclk = DIV_ROUND_UP(min_cdclk * 100, 95);
1950 
	/*
	 * BSpec says "Do not use DisplayPort with CDCLK less than 432 MHz,
	 * audio enabled, port width x4, and link rate HBR2 (5.4 GHz), or else
	 * there may be audio corruption or screen corruption." This cdclk
	 * restriction for GLK is 316.8 MHz.
	 */
1956 	if (intel_crtc_has_dp_encoder(crtc_state) &&
1957 	    crtc_state->has_audio &&
1958 	    crtc_state->port_clock >= 540000 &&
1959 	    crtc_state->lane_count == 4) {
1960 		if (IS_CANNONLAKE(dev_priv) || IS_GEMINILAKE(dev_priv)) {
1961 			/* Display WA #1145: glk,cnl */
1962 			min_cdclk = max(316800, min_cdclk);
1963 		} else if (IS_GEN(dev_priv, 9) || IS_BROADWELL(dev_priv)) {
1964 			/* Display WA #1144: skl,bxt */
1965 			min_cdclk = max(432000, min_cdclk);
1966 		}
1967 	}
1968 
1969 	/*
1970 	 * According to BSpec, "The CD clock frequency must be at least twice
1971 	 * the frequency of the Azalia BCLK." and BCLK is 96 MHz by default.
1972 	 */
1973 	if (crtc_state->has_audio && INTEL_GEN(dev_priv) >= 9)
1974 		min_cdclk = max(2 * 96000, min_cdclk);
1975 
1976 	/*
1977 	 * "For DP audio configuration, cdclk frequency shall be set to
1978 	 *  meet the following requirements:
1979 	 *  DP Link Frequency(MHz) | Cdclk frequency(MHz)
1980 	 *  270                    | 320 or higher
1981 	 *  162                    | 200 or higher"
1982 	 */
1983 	if ((IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) &&
1984 	    intel_crtc_has_dp_encoder(crtc_state) && crtc_state->has_audio)
1985 		min_cdclk = max(crtc_state->port_clock, min_cdclk);
1986 
1987 	/*
1988 	 * On Valleyview some DSI panels lose (v|h)sync when the clock is lower
	 * than 320000 kHz.
1990 	 */
1991 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DSI) &&
1992 	    IS_VALLEYVIEW(dev_priv))
1993 		min_cdclk = max(320000, min_cdclk);
1994 
1995 	/*
	 * On Geminilake the picture becomes unstable once the CDCLK gets as
	 * low as 79200 kHz, even though the DSI PLL and DE PLL values are
	 * correct.
1999 	 */
2000 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DSI) &&
2001 	    IS_GEMINILAKE(dev_priv))
2002 		min_cdclk = max(158400, min_cdclk);
2003 
2004 	/* Account for additional needs from the planes */
2005 	min_cdclk = max(intel_planes_min_cdclk(crtc_state), min_cdclk);
2006 
	/*
	 * HACK: on TGL we initially compute min_cdclk from the pixel rate
	 * divided by 2 (also accounting for the plane requirements), but in
	 * some cases the lowest possible CDCLK still causes underruns. Bump
	 * the requirement up to the full pixel rate for now; this is a
	 * workaround rather than a final solution.
	 */
2016 	if (IS_TIGERLAKE(dev_priv))
2017 		min_cdclk = max(min_cdclk, (int)crtc_state->pixel_rate);
2018 
2019 	if (min_cdclk > dev_priv->max_cdclk_freq) {
2020 		DRM_DEBUG_KMS("required cdclk (%d kHz) exceeds max (%d kHz)\n",
2021 			      min_cdclk, dev_priv->max_cdclk_freq);
2022 		return -EINVAL;
2023 	}
2024 
2025 	return min_cdclk;
2026 }
2027 
2028 static int intel_compute_min_cdclk(struct intel_atomic_state *state)
2029 {
2030 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2031 	struct intel_crtc *crtc;
2032 	struct intel_crtc_state *crtc_state;
2033 	int min_cdclk, i;
2034 	enum pipe pipe;
2035 
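	/*
	 * Start from the per-pipe minimums of the currently committed state
	 * and overwrite only the pipes that are part of this update.
	 */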
2036 	memcpy(state->min_cdclk, dev_priv->min_cdclk,
2037 	       sizeof(state->min_cdclk));
2038 
2039 	for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
2040 		int ret;
2041 
2042 		min_cdclk = intel_crtc_compute_min_cdclk(crtc_state);
2043 		if (min_cdclk < 0)
2044 			return min_cdclk;
2045 
2046 		if (state->min_cdclk[i] == min_cdclk)
2047 			continue;
2048 
2049 		state->min_cdclk[i] = min_cdclk;
2050 
2051 		ret = intel_atomic_lock_global_state(state);
2052 		if (ret)
2053 			return ret;
2054 	}
2055 
2056 	min_cdclk = state->cdclk.force_min_cdclk;
2057 	for_each_pipe(dev_priv, pipe)
2058 		min_cdclk = max(state->min_cdclk[pipe], min_cdclk);
2059 
2060 	return min_cdclk;
2061 }
2062 
2063 /*
2064  * Account for port clock min voltage level requirements.
2065  * This only really does something on CNL+ but can be
2066  * called on earlier platforms as well.
2067  *
 * Note that this function assumes that 0 is
2069  * the lowest voltage value, and higher values
2070  * correspond to increasingly higher voltages.
2071  *
2072  * Should that relationship no longer hold on
2073  * future platforms this code will need to be
2074  * adjusted.
2075  */
2076 static int bxt_compute_min_voltage_level(struct intel_atomic_state *state)
2077 {
2078 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2079 	struct intel_crtc *crtc;
2080 	struct intel_crtc_state *crtc_state;
2081 	u8 min_voltage_level;
2082 	int i;
2083 	enum pipe pipe;
2084 
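	/*
	 * As with the min_cdclk computation, start from the committed
	 * per-pipe values and update only the pipes in this state.
	 */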
2085 	memcpy(state->min_voltage_level, dev_priv->min_voltage_level,
2086 	       sizeof(state->min_voltage_level));
2087 
2088 	for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
2089 		int ret;
2090 
2091 		if (crtc_state->hw.enable)
2092 			min_voltage_level = crtc_state->min_voltage_level;
2093 		else
2094 			min_voltage_level = 0;
2095 
2096 		if (state->min_voltage_level[i] == min_voltage_level)
2097 			continue;
2098 
2099 		state->min_voltage_level[i] = min_voltage_level;
2100 
2101 		ret = intel_atomic_lock_global_state(state);
2102 		if (ret)
2103 			return ret;
2104 	}
2105 
2106 	min_voltage_level = 0;
2107 	for_each_pipe(dev_priv, pipe)
2108 		min_voltage_level = max(state->min_voltage_level[pipe],
2109 					min_voltage_level);
2110 
2111 	return min_voltage_level;
2112 }
2113 
2114 static int vlv_modeset_calc_cdclk(struct intel_atomic_state *state)
2115 {
2116 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2117 	int min_cdclk, cdclk;
2118 
2119 	min_cdclk = intel_compute_min_cdclk(state);
2120 	if (min_cdclk < 0)
2121 		return min_cdclk;
2122 
2123 	cdclk = vlv_calc_cdclk(dev_priv, min_cdclk);
2124 
2125 	state->cdclk.logical.cdclk = cdclk;
2126 	state->cdclk.logical.voltage_level =
2127 		vlv_calc_voltage_level(dev_priv, cdclk);
2128 
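	/*
	 * The logical state reflects the full requested configuration; the
	 * actual state may drop down to the forced minimum CDCLK while no
	 * pipes are active.
	 */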
2129 	if (!state->active_pipes) {
2130 		cdclk = vlv_calc_cdclk(dev_priv, state->cdclk.force_min_cdclk);
2131 
2132 		state->cdclk.actual.cdclk = cdclk;
2133 		state->cdclk.actual.voltage_level =
2134 			vlv_calc_voltage_level(dev_priv, cdclk);
2135 	} else {
2136 		state->cdclk.actual = state->cdclk.logical;
2137 	}
2138 
2139 	return 0;
2140 }
2141 
2142 static int bdw_modeset_calc_cdclk(struct intel_atomic_state *state)
2143 {
2144 	int min_cdclk, cdclk;
2145 
2146 	min_cdclk = intel_compute_min_cdclk(state);
2147 	if (min_cdclk < 0)
2148 		return min_cdclk;
2149 
2150 	/*
2151 	 * FIXME should also account for plane ratio
2152 	 * once 64bpp pixel formats are supported.
2153 	 */
2154 	cdclk = bdw_calc_cdclk(min_cdclk);
2155 
2156 	state->cdclk.logical.cdclk = cdclk;
2157 	state->cdclk.logical.voltage_level =
2158 		bdw_calc_voltage_level(cdclk);
2159 
2160 	if (!state->active_pipes) {
2161 		cdclk = bdw_calc_cdclk(state->cdclk.force_min_cdclk);
2162 
2163 		state->cdclk.actual.cdclk = cdclk;
2164 		state->cdclk.actual.voltage_level =
2165 			bdw_calc_voltage_level(cdclk);
2166 	} else {
2167 		state->cdclk.actual = state->cdclk.logical;
2168 	}
2169 
2170 	return 0;
2171 }
2172 
2173 static int skl_dpll0_vco(struct intel_atomic_state *state)
2174 {
2175 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2176 	struct intel_crtc *crtc;
2177 	struct intel_crtc_state *crtc_state;
2178 	int vco, i;
2179 
2180 	vco = state->cdclk.logical.vco;
2181 	if (!vco)
2182 		vco = dev_priv->skl_preferred_vco_freq;
2183 
2184 	for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
2185 		if (!crtc_state->hw.enable)
2186 			continue;
2187 
2188 		if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
2189 			continue;
2190 
2191 		/*
2192 		 * DPLL0 VCO may need to be adjusted to get the correct
2193 		 * clock for eDP. This will affect cdclk as well.
2194 		 */
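		/*
		 * The eDP 1.4 intermediate link rates of 2.16 and 4.32 GHz
		 * (port_clock/2 of 108 and 216 MHz) can only be produced
		 * from the 8640 MHz VCO; everything else uses 8100 MHz.
		 */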
2195 		switch (crtc_state->port_clock / 2) {
2196 		case 108000:
2197 		case 216000:
2198 			vco = 8640000;
2199 			break;
2200 		default:
2201 			vco = 8100000;
2202 			break;
2203 		}
2204 	}
2205 
2206 	return vco;
2207 }
2208 
2209 static int skl_modeset_calc_cdclk(struct intel_atomic_state *state)
2210 {
2211 	int min_cdclk, cdclk, vco;
2212 
2213 	min_cdclk = intel_compute_min_cdclk(state);
2214 	if (min_cdclk < 0)
2215 		return min_cdclk;
2216 
2217 	vco = skl_dpll0_vco(state);
2218 
2219 	/*
2220 	 * FIXME should also account for plane ratio
2221 	 * once 64bpp pixel formats are supported.
2222 	 */
2223 	cdclk = skl_calc_cdclk(min_cdclk, vco);
2224 
2225 	state->cdclk.logical.vco = vco;
2226 	state->cdclk.logical.cdclk = cdclk;
2227 	state->cdclk.logical.voltage_level =
2228 		skl_calc_voltage_level(cdclk);
2229 
2230 	if (!state->active_pipes) {
2231 		cdclk = skl_calc_cdclk(state->cdclk.force_min_cdclk, vco);
2232 
2233 		state->cdclk.actual.vco = vco;
2234 		state->cdclk.actual.cdclk = cdclk;
2235 		state->cdclk.actual.voltage_level =
2236 			skl_calc_voltage_level(cdclk);
2237 	} else {
2238 		state->cdclk.actual = state->cdclk.logical;
2239 	}
2240 
2241 	return 0;
2242 }
2243 
2244 static int bxt_modeset_calc_cdclk(struct intel_atomic_state *state)
2245 {
2246 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2247 	int min_cdclk, min_voltage_level, cdclk, vco;
2248 
2249 	min_cdclk = intel_compute_min_cdclk(state);
2250 	if (min_cdclk < 0)
2251 		return min_cdclk;
2252 
2253 	min_voltage_level = bxt_compute_min_voltage_level(state);
2254 	if (min_voltage_level < 0)
2255 		return min_voltage_level;
2256 
2257 	cdclk = bxt_calc_cdclk(dev_priv, min_cdclk);
2258 	vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk);
2259 
2260 	state->cdclk.logical.vco = vco;
2261 	state->cdclk.logical.cdclk = cdclk;
2262 	state->cdclk.logical.voltage_level =
2263 		max_t(int, min_voltage_level,
2264 		      dev_priv->display.calc_voltage_level(cdclk));
2265 
2266 	if (!state->active_pipes) {
2267 		cdclk = bxt_calc_cdclk(dev_priv, state->cdclk.force_min_cdclk);
2268 		vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk);
2269 
2270 		state->cdclk.actual.vco = vco;
2271 		state->cdclk.actual.cdclk = cdclk;
2272 		state->cdclk.actual.voltage_level =
2273 			dev_priv->display.calc_voltage_level(cdclk);
2274 	} else {
2275 		state->cdclk.actual = state->cdclk.logical;
2276 	}
2277 
2278 	return 0;
2279 }
2280 
2281 static int intel_modeset_all_pipes(struct intel_atomic_state *state)
2282 {
2283 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2284 	struct intel_crtc *crtc;
2285 
2286 	/*
2287 	 * Add all pipes to the state, and force
2288 	 * a modeset on all the active ones.
2289 	 */
2290 	for_each_intel_crtc(&dev_priv->drm, crtc) {
2291 		struct intel_crtc_state *crtc_state;
2292 		int ret;
2293 
2294 		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
2295 		if (IS_ERR(crtc_state))
2296 			return PTR_ERR(crtc_state);
2297 
2298 		if (!crtc_state->hw.active ||
2299 		    drm_atomic_crtc_needs_modeset(&crtc_state->uapi))
2300 			continue;
2301 
2302 		crtc_state->uapi.mode_changed = true;
2303 
2304 		ret = drm_atomic_add_affected_connectors(&state->base,
2305 							 &crtc->base);
2306 		if (ret)
2307 			return ret;
2308 
2309 		ret = drm_atomic_add_affected_planes(&state->base,
2310 						     &crtc->base);
2311 		if (ret)
2312 			return ret;
2313 
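		/* Force all active planes to be reprogrammed along with the pipe */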
2314 		crtc_state->update_planes |= crtc_state->active_planes;
2315 	}
2316 
2317 	return 0;
2318 }
2319 
2320 static int fixed_modeset_calc_cdclk(struct intel_atomic_state *state)
2321 {
2322 	int min_cdclk;
2323 
2324 	/*
2325 	 * We can't change the cdclk frequency, but we still want to
2326 	 * check that the required minimum frequency doesn't exceed
2327 	 * the actual cdclk frequency.
2328 	 */
2329 	min_cdclk = intel_compute_min_cdclk(state);
2330 	if (min_cdclk < 0)
2331 		return min_cdclk;
2332 
2333 	return 0;
2334 }
2335 
2336 int intel_modeset_calc_cdclk(struct intel_atomic_state *state)
2337 {
2338 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2339 	enum pipe pipe;
2340 	int ret;
2341 
2342 	ret = dev_priv->display.modeset_calc_cdclk(state);
2343 	if (ret)
2344 		return ret;
2345 
2346 	/*
	 * Writes to dev_priv->cdclk.{actual,logical} must be protected
	 * by holding all the crtc mutexes even if we don't end up
	 * touching the hardware.
2350 	 */
2351 	if (intel_cdclk_changed(&dev_priv->cdclk.actual,
2352 				&state->cdclk.actual)) {
2353 		/*
2354 		 * Also serialize commits across all crtcs
2355 		 * if the actual hw needs to be poked.
2356 		 */
2357 		ret = intel_atomic_serialize_global_state(state);
2358 		if (ret)
2359 			return ret;
2360 	} else if (intel_cdclk_changed(&dev_priv->cdclk.logical,
2361 				       &state->cdclk.logical)) {
2362 		ret = intel_atomic_lock_global_state(state);
2363 		if (ret)
2364 			return ret;
2365 	} else {
2366 		return 0;
2367 	}
2368 
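	/*
	 * A cd2x divider-only update can be done without turning the pipes
	 * off, provided exactly one pipe is active (the active pipe mask is
	 * a power of two) and that pipe isn't itself undergoing a modeset.
	 */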
2369 	if (is_power_of_2(state->active_pipes) &&
2370 	    intel_cdclk_needs_cd2x_update(dev_priv,
2371 					  &dev_priv->cdclk.actual,
2372 					  &state->cdclk.actual)) {
2373 		struct intel_crtc *crtc;
2374 		struct intel_crtc_state *crtc_state;
2375 
2376 		pipe = ilog2(state->active_pipes);
2377 		crtc = intel_get_crtc_for_pipe(dev_priv, pipe);
2378 
2379 		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
2380 		if (IS_ERR(crtc_state))
2381 			return PTR_ERR(crtc_state);
2382 
2383 		if (drm_atomic_crtc_needs_modeset(&crtc_state->uapi))
2384 			pipe = INVALID_PIPE;
2385 	} else {
2386 		pipe = INVALID_PIPE;
2387 	}
2388 
2389 	if (pipe != INVALID_PIPE) {
2390 		state->cdclk.pipe = pipe;
2391 
2392 		DRM_DEBUG_KMS("Can change cdclk with pipe %c active\n",
2393 			      pipe_name(pipe));
2394 	} else if (intel_cdclk_needs_modeset(&dev_priv->cdclk.actual,
2395 					     &state->cdclk.actual)) {
2396 		/* All pipes must be switched off while we change the cdclk. */
2397 		ret = intel_modeset_all_pipes(state);
2398 		if (ret)
2399 			return ret;
2400 
2401 		state->cdclk.pipe = INVALID_PIPE;
2402 
2403 		DRM_DEBUG_KMS("Modeset required for cdclk change\n");
2404 	}
2405 
2406 	DRM_DEBUG_KMS("New cdclk calculated to be logical %u kHz, actual %u kHz\n",
2407 		      state->cdclk.logical.cdclk,
2408 		      state->cdclk.actual.cdclk);
2409 	DRM_DEBUG_KMS("New voltage level calculated to be logical %u, actual %u\n",
2410 		      state->cdclk.logical.voltage_level,
2411 		      state->cdclk.actual.voltage_level);
2412 
2413 	return 0;
2414 }
2415 
2416 static int intel_compute_max_dotclk(struct drm_i915_private *dev_priv)
2417 {
2418 	int max_cdclk_freq = dev_priv->max_cdclk_freq;
2419 
2420 	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
2421 		return 2 * max_cdclk_freq;
2422 	else if (IS_GEN(dev_priv, 9) ||
2423 		 IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
2424 		return max_cdclk_freq;
2425 	else if (IS_CHERRYVIEW(dev_priv))
		return max_cdclk_freq * 95 / 100;
	else if (INTEL_GEN(dev_priv) < 4)
		return 2 * max_cdclk_freq * 90 / 100;
	else
		return max_cdclk_freq * 90 / 100;
2431 }
2432 
2433 /**
 * intel_update_max_cdclk - Determine the maximum supported CDCLK frequency
2435  * @dev_priv: i915 device
2436  *
2437  * Determine the maximum CDCLK frequency the platform supports, and also
2438  * derive the maximum dot clock frequency the maximum CDCLK frequency
2439  * allows.
2440  */
2441 void intel_update_max_cdclk(struct drm_i915_private *dev_priv)
2442 {
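	/*
	 * On EHL and gen11+ the achievable maximum CDCLK depends on the
	 * reference clock, hence the two sets of limits below.
	 */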
2443 	if (IS_ELKHARTLAKE(dev_priv)) {
2444 		if (dev_priv->cdclk.hw.ref == 24000)
2445 			dev_priv->max_cdclk_freq = 552000;
2446 		else
2447 			dev_priv->max_cdclk_freq = 556800;
2448 	} else if (INTEL_GEN(dev_priv) >= 11) {
2449 		if (dev_priv->cdclk.hw.ref == 24000)
2450 			dev_priv->max_cdclk_freq = 648000;
2451 		else
2452 			dev_priv->max_cdclk_freq = 652800;
2453 	} else if (IS_CANNONLAKE(dev_priv)) {
2454 		dev_priv->max_cdclk_freq = 528000;
2455 	} else if (IS_GEN9_BC(dev_priv)) {
2456 		u32 limit = I915_READ(SKL_DFSM) & SKL_DFSM_CDCLK_LIMIT_MASK;
2457 		int max_cdclk, vco;
2458 
2459 		vco = dev_priv->skl_preferred_vco_freq;
2460 		WARN_ON(vco != 8100000 && vco != 8640000);
2461 
2462 		/*
2463 		 * Use the lower (vco 8640) cdclk values as a
2464 		 * first guess. skl_calc_cdclk() will correct it
2465 		 * if the preferred vco is 8100 instead.
2466 		 */
2467 		if (limit == SKL_DFSM_CDCLK_LIMIT_675)
2468 			max_cdclk = 617143;
2469 		else if (limit == SKL_DFSM_CDCLK_LIMIT_540)
2470 			max_cdclk = 540000;
2471 		else if (limit == SKL_DFSM_CDCLK_LIMIT_450)
2472 			max_cdclk = 432000;
2473 		else
2474 			max_cdclk = 308571;
2475 
2476 		dev_priv->max_cdclk_freq = skl_calc_cdclk(max_cdclk, vco);
2477 	} else if (IS_GEMINILAKE(dev_priv)) {
2478 		dev_priv->max_cdclk_freq = 316800;
2479 	} else if (IS_BROXTON(dev_priv)) {
2480 		dev_priv->max_cdclk_freq = 624000;
2481 	} else if (IS_BROADWELL(dev_priv))  {
2482 		/*
2483 		 * FIXME with extra cooling we can allow
		 * 540 MHz for ULX and 675 MHz for ULT.
2485 		 * How can we know if extra cooling is
2486 		 * available? PCI ID, VTB, something else?
2487 		 */
2488 		if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
2489 			dev_priv->max_cdclk_freq = 450000;
2490 		else if (IS_BDW_ULX(dev_priv))
2491 			dev_priv->max_cdclk_freq = 450000;
2492 		else if (IS_BDW_ULT(dev_priv))
2493 			dev_priv->max_cdclk_freq = 540000;
2494 		else
2495 			dev_priv->max_cdclk_freq = 675000;
2496 	} else if (IS_CHERRYVIEW(dev_priv)) {
2497 		dev_priv->max_cdclk_freq = 320000;
2498 	} else if (IS_VALLEYVIEW(dev_priv)) {
2499 		dev_priv->max_cdclk_freq = 400000;
2500 	} else {
2501 		/* otherwise assume cdclk is fixed */
2502 		dev_priv->max_cdclk_freq = dev_priv->cdclk.hw.cdclk;
2503 	}
2504 
2505 	dev_priv->max_dotclk_freq = intel_compute_max_dotclk(dev_priv);
2506 
2507 	DRM_DEBUG_DRIVER("Max CD clock rate: %d kHz\n",
2508 			 dev_priv->max_cdclk_freq);
2509 
2510 	DRM_DEBUG_DRIVER("Max dotclock rate: %d kHz\n",
2511 			 dev_priv->max_dotclk_freq);
2512 }
2513 
2514 /**
2515  * intel_update_cdclk - Determine the current CDCLK frequency
2516  * @dev_priv: i915 device
2517  *
2518  * Determine the current CDCLK frequency.
2519  */
2520 void intel_update_cdclk(struct drm_i915_private *dev_priv)
2521 {
2522 	dev_priv->display.get_cdclk(dev_priv, &dev_priv->cdclk.hw);
2523 
2524 	/*
2525 	 * 9:0 CMBUS [sic] CDCLK frequency (cdfreq):
2526 	 * Programmng [sic] note: bit[9:2] should be programmed to the number
2527 	 * of cdclk that generates 4MHz reference clock freq which is used to
2528 	 * generate GMBus clock. This will vary with the cdclk freq.
2529 	 */
2530 	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
2531 		I915_WRITE(GMBUSFREQ_VLV,
2532 			   DIV_ROUND_UP(dev_priv->cdclk.hw.cdclk, 1000));
2533 }
2534 
2535 static int cnp_rawclk(struct drm_i915_private *dev_priv)
2536 {
2537 	u32 rawclk;
2538 	int divider, fraction;
2539 
2540 	if (I915_READ(SFUSE_STRAP) & SFUSE_STRAP_RAW_FREQUENCY) {
2541 		/* 24 MHz */
2542 		divider = 24000;
2543 		fraction = 0;
2544 	} else {
2545 		/* 19.2 MHz */
2546 		divider = 19000;
2547 		fraction = 200;
2548 	}
2549 
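	/*
	 * The integer part of the rawclk frequency is programmed in MHz.
	 * The fractional part is expressed as a numerator/denominator pair:
	 * 0.2 MHz becomes 1/5, with the denominator field holding (den - 1)
	 * and the numerator field (ICP+ only) holding the numerator.
	 */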
2550 	rawclk = CNP_RAWCLK_DIV(divider / 1000);
2551 	if (fraction) {
2552 		int numerator = 1;
2553 
2554 		rawclk |= CNP_RAWCLK_DEN(DIV_ROUND_CLOSEST(numerator * 1000,
2555 							   fraction) - 1);
2556 		if (INTEL_PCH_TYPE(dev_priv) >= PCH_ICP)
2557 			rawclk |= ICP_RAWCLK_NUM(numerator);
2558 	}
2559 
2560 	I915_WRITE(PCH_RAWCLK_FREQ, rawclk);
2561 	return divider + fraction;
2562 }
2563 
2564 static int pch_rawclk(struct drm_i915_private *dev_priv)
2565 {
2566 	return (I915_READ(PCH_RAWCLK_FREQ) & RAWCLK_FREQ_MASK) * 1000;
2567 }
2568 
2569 static int vlv_hrawclk(struct drm_i915_private *dev_priv)
2570 {
2571 	/* RAWCLK_FREQ_VLV register updated from power well code */
2572 	return vlv_get_cck_clock_hpll(dev_priv, "hrawclk",
2573 				      CCK_DISPLAY_REF_CLOCK_CONTROL);
2574 }
2575 
2576 static int g4x_hrawclk(struct drm_i915_private *dev_priv)
2577 {
2578 	u32 clkcfg;
2579 
2580 	/* hrawclock is 1/4 the FSB frequency */
2581 	clkcfg = I915_READ(CLKCFG);
2582 	switch (clkcfg & CLKCFG_FSB_MASK) {
2583 	case CLKCFG_FSB_400:
2584 		return 100000;
2585 	case CLKCFG_FSB_533:
2586 		return 133333;
2587 	case CLKCFG_FSB_667:
2588 		return 166667;
2589 	case CLKCFG_FSB_800:
2590 		return 200000;
2591 	case CLKCFG_FSB_1067:
2592 	case CLKCFG_FSB_1067_ALT:
2593 		return 266667;
2594 	case CLKCFG_FSB_1333:
2595 	case CLKCFG_FSB_1333_ALT:
2596 		return 333333;
2597 	default:
2598 		return 133333;
2599 	}
2600 }
2601 
2602 /**
2603  * intel_update_rawclk - Determine the current RAWCLK frequency
2604  * @dev_priv: i915 device
2605  *
2606  * Determine the current RAWCLK frequency. RAWCLK is a fixed
 * frequency clock so this needs to be done only once.
2608  */
2609 void intel_update_rawclk(struct drm_i915_private *dev_priv)
2610 {
2611 	if (INTEL_PCH_TYPE(dev_priv) >= PCH_CNP)
2612 		dev_priv->rawclk_freq = cnp_rawclk(dev_priv);
2613 	else if (HAS_PCH_SPLIT(dev_priv))
2614 		dev_priv->rawclk_freq = pch_rawclk(dev_priv);
2615 	else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
2616 		dev_priv->rawclk_freq = vlv_hrawclk(dev_priv);
2617 	else if (IS_G4X(dev_priv) || IS_PINEVIEW(dev_priv))
2618 		dev_priv->rawclk_freq = g4x_hrawclk(dev_priv);
2619 	else
2620 		/* no rawclk on other platforms, or no need to know it */
2621 		return;
2622 
2623 	DRM_DEBUG_DRIVER("rawclk rate: %d kHz\n", dev_priv->rawclk_freq);
2624 }
2625 
2626 /**
2627  * intel_init_cdclk_hooks - Initialize CDCLK related modesetting hooks
2628  * @dev_priv: i915 device
2629  */
2630 void intel_init_cdclk_hooks(struct drm_i915_private *dev_priv)
2631 {
2632 	if (IS_ELKHARTLAKE(dev_priv)) {
2633 		dev_priv->display.set_cdclk = bxt_set_cdclk;
2634 		dev_priv->display.modeset_calc_cdclk = bxt_modeset_calc_cdclk;
2635 		dev_priv->display.calc_voltage_level = ehl_calc_voltage_level;
2636 		dev_priv->cdclk.table = icl_cdclk_table;
2637 	} else if (INTEL_GEN(dev_priv) >= 11) {
2638 		dev_priv->display.set_cdclk = bxt_set_cdclk;
2639 		dev_priv->display.modeset_calc_cdclk = bxt_modeset_calc_cdclk;
2640 		dev_priv->display.calc_voltage_level = icl_calc_voltage_level;
2641 		dev_priv->cdclk.table = icl_cdclk_table;
2642 	} else if (IS_CANNONLAKE(dev_priv)) {
2643 		dev_priv->display.set_cdclk = bxt_set_cdclk;
2644 		dev_priv->display.modeset_calc_cdclk = bxt_modeset_calc_cdclk;
2645 		dev_priv->display.calc_voltage_level = cnl_calc_voltage_level;
2646 		dev_priv->cdclk.table = cnl_cdclk_table;
2647 	} else if (IS_GEN9_LP(dev_priv)) {
2648 		dev_priv->display.set_cdclk = bxt_set_cdclk;
2649 		dev_priv->display.modeset_calc_cdclk = bxt_modeset_calc_cdclk;
2650 		dev_priv->display.calc_voltage_level = bxt_calc_voltage_level;
2651 		if (IS_GEMINILAKE(dev_priv))
2652 			dev_priv->cdclk.table = glk_cdclk_table;
2653 		else
2654 			dev_priv->cdclk.table = bxt_cdclk_table;
2655 	} else if (IS_GEN9_BC(dev_priv)) {
2656 		dev_priv->display.set_cdclk = skl_set_cdclk;
2657 		dev_priv->display.modeset_calc_cdclk = skl_modeset_calc_cdclk;
2658 	} else if (IS_BROADWELL(dev_priv)) {
2659 		dev_priv->display.set_cdclk = bdw_set_cdclk;
2660 		dev_priv->display.modeset_calc_cdclk = bdw_modeset_calc_cdclk;
2661 	} else if (IS_CHERRYVIEW(dev_priv)) {
2662 		dev_priv->display.set_cdclk = chv_set_cdclk;
2663 		dev_priv->display.modeset_calc_cdclk = vlv_modeset_calc_cdclk;
2664 	} else if (IS_VALLEYVIEW(dev_priv)) {
2665 		dev_priv->display.set_cdclk = vlv_set_cdclk;
2666 		dev_priv->display.modeset_calc_cdclk = vlv_modeset_calc_cdclk;
2667 	} else {
2668 		dev_priv->display.modeset_calc_cdclk = fixed_modeset_calc_cdclk;
2669 	}
2670 
2671 	if (INTEL_GEN(dev_priv) >= 10 || IS_GEN9_LP(dev_priv))
2672 		dev_priv->display.get_cdclk = bxt_get_cdclk;
2673 	else if (IS_GEN9_BC(dev_priv))
2674 		dev_priv->display.get_cdclk = skl_get_cdclk;
2675 	else if (IS_BROADWELL(dev_priv))
2676 		dev_priv->display.get_cdclk = bdw_get_cdclk;
2677 	else if (IS_HASWELL(dev_priv))
2678 		dev_priv->display.get_cdclk = hsw_get_cdclk;
2679 	else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
2680 		dev_priv->display.get_cdclk = vlv_get_cdclk;
2681 	else if (IS_GEN(dev_priv, 6) || IS_IVYBRIDGE(dev_priv))
2682 		dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
2683 	else if (IS_GEN(dev_priv, 5))
2684 		dev_priv->display.get_cdclk = fixed_450mhz_get_cdclk;
2685 	else if (IS_GM45(dev_priv))
2686 		dev_priv->display.get_cdclk = gm45_get_cdclk;
2687 	else if (IS_G45(dev_priv))
2688 		dev_priv->display.get_cdclk = g33_get_cdclk;
2689 	else if (IS_I965GM(dev_priv))
2690 		dev_priv->display.get_cdclk = i965gm_get_cdclk;
2691 	else if (IS_I965G(dev_priv))
2692 		dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
2693 	else if (IS_PINEVIEW(dev_priv))
2694 		dev_priv->display.get_cdclk = pnv_get_cdclk;
2695 	else if (IS_G33(dev_priv))
2696 		dev_priv->display.get_cdclk = g33_get_cdclk;
2697 	else if (IS_I945GM(dev_priv))
2698 		dev_priv->display.get_cdclk = i945gm_get_cdclk;
2699 	else if (IS_I945G(dev_priv))
2700 		dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
2701 	else if (IS_I915GM(dev_priv))
2702 		dev_priv->display.get_cdclk = i915gm_get_cdclk;
2703 	else if (IS_I915G(dev_priv))
2704 		dev_priv->display.get_cdclk = fixed_333mhz_get_cdclk;
2705 	else if (IS_I865G(dev_priv))
2706 		dev_priv->display.get_cdclk = fixed_266mhz_get_cdclk;
2707 	else if (IS_I85X(dev_priv))
2708 		dev_priv->display.get_cdclk = i85x_get_cdclk;
2709 	else if (IS_I845G(dev_priv))
2710 		dev_priv->display.get_cdclk = fixed_200mhz_get_cdclk;
2711 	else { /* 830 */
2712 		WARN(!IS_I830(dev_priv),
2713 		     "Unknown platform. Assuming 133 MHz CDCLK\n");
2714 		dev_priv->display.get_cdclk = fixed_133mhz_get_cdclk;
2715 	}
2716 }
2717