// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 */

#include <linux/string_helpers.h>

#include "i915_drv.h"
#include "i915_reg.h"
#include "intel_dram.h"
#include "intel_mchbar_regs.h"
#include "intel_pcode.h"
#include "vlv_sideband.h"

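/*
 * Geometry of a single DIMM as decoded from the memory controller
 * registers: size is the total DIMM capacity in Gb, width is the DRAM
 * device width in bits (x8/x16/x32) and ranks is the rank count.
 */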
struct dram_dimm_info {
	u16 size;
	u8 width, ranks;
};

struct dram_channel_info {
	struct dram_dimm_info dimm_l, dimm_s;
	u8 ranks;
	bool is_16gb_dimm;
};

#define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type

static const char *intel_dram_type_str(enum intel_dram_type type)
{
	static const char * const str[] = {
		DRAM_TYPE_STR(UNKNOWN),
		DRAM_TYPE_STR(DDR3),
		DRAM_TYPE_STR(DDR4),
		DRAM_TYPE_STR(LPDDR3),
		DRAM_TYPE_STR(LPDDR4),
	};

	if (type >= ARRAY_SIZE(str))
		type = INTEL_DRAM_UNKNOWN;

	return str[type];
}

#undef DRAM_TYPE_STR

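/*
 * Pineview: the FSB and memory frequencies are read from CLKCFG, and
 * CSHRDDR3CTL tells us whether the memory is DDR3.
 */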
static void pnv_detect_mem_freq(struct drm_i915_private *dev_priv)
{
	u32 tmp;

	tmp = intel_uncore_read(&dev_priv->uncore, CLKCFG);

	switch (tmp & CLKCFG_FSB_MASK) {
	case CLKCFG_FSB_533:
		dev_priv->fsb_freq = 533; /* 133*4 */
		break;
	case CLKCFG_FSB_800:
		dev_priv->fsb_freq = 800; /* 200*4 */
		break;
	case CLKCFG_FSB_667:
		dev_priv->fsb_freq = 667; /* 167*4 */
		break;
	case CLKCFG_FSB_400:
		dev_priv->fsb_freq = 400; /* 100*4 */
		break;
	}

	switch (tmp & CLKCFG_MEM_MASK) {
	case CLKCFG_MEM_533:
		dev_priv->mem_freq = 533;
		break;
	case CLKCFG_MEM_667:
		dev_priv->mem_freq = 667;
		break;
	case CLKCFG_MEM_800:
		dev_priv->mem_freq = 800;
		break;
	}

	/* detect Pineview DDR3 setting */
	tmp = intel_uncore_read(&dev_priv->uncore, CSHRDDR3CTL);
	dev_priv->is_ddr3 = (tmp & CSHRDDR3CTL_DDR3) ? 1 : 0;
}

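/* Ironlake: memory and FSB frequencies are decoded from DDRMPLL1 and CSIPLL0. */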
static void ilk_detect_mem_freq(struct drm_i915_private *dev_priv)
{
	u16 ddrpll, csipll;

	ddrpll = intel_uncore_read16(&dev_priv->uncore, DDRMPLL1);
	switch (ddrpll & 0xff) {
	case 0xc:
		dev_priv->mem_freq = 800;
		break;
	case 0x10:
		dev_priv->mem_freq = 1066;
		break;
	case 0x14:
		dev_priv->mem_freq = 1333;
		break;
	case 0x18:
		dev_priv->mem_freq = 1600;
		break;
	default:
		drm_dbg(&dev_priv->drm, "unknown memory frequency 0x%02x\n",
			ddrpll & 0xff);
		dev_priv->mem_freq = 0;
		break;
	}

	csipll = intel_uncore_read16(&dev_priv->uncore, CSIPLL0);
	switch (csipll & 0x3ff) {
	case 0x00c:
		dev_priv->fsb_freq = 3200;
		break;
	case 0x00e:
		dev_priv->fsb_freq = 3733;
		break;
	case 0x010:
		dev_priv->fsb_freq = 4266;
		break;
	case 0x012:
		dev_priv->fsb_freq = 4800;
		break;
	case 0x014:
		dev_priv->fsb_freq = 5333;
		break;
	case 0x016:
		dev_priv->fsb_freq = 5866;
		break;
	case 0x018:
		dev_priv->fsb_freq = 6400;
		break;
	default:
		drm_dbg(&dev_priv->drm, "unknown fsb frequency 0x%04x\n",
			csipll & 0x3ff);
		dev_priv->fsb_freq = 0;
		break;
	}
}

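/* Cherryview: the memory frequency is encoded in the CCK fuse register. */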
static void chv_detect_mem_freq(struct drm_i915_private *i915)
{
	u32 val;

	vlv_iosf_sb_get(i915, BIT(VLV_IOSF_SB_CCK));
	val = vlv_cck_read(i915, CCK_FUSE_REG);
	vlv_iosf_sb_put(i915, BIT(VLV_IOSF_SB_CCK));

	switch ((val >> 2) & 0x7) {
	case 3:
		i915->mem_freq = 2000;
		break;
	default:
		i915->mem_freq = 1600;
		break;
	}
}

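/* Valleyview: the memory frequency is reported by the Punit in PUNIT_REG_GPU_FREQ_STS. */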
static void vlv_detect_mem_freq(struct drm_i915_private *i915)
{
	u32 val;

	vlv_iosf_sb_get(i915, BIT(VLV_IOSF_SB_PUNIT));
	val = vlv_punit_read(i915, PUNIT_REG_GPU_FREQ_STS);
	vlv_iosf_sb_put(i915, BIT(VLV_IOSF_SB_PUNIT));

	switch ((val >> 6) & 3) {
	case 0:
	case 1:
		i915->mem_freq = 800;
		break;
	case 2:
		i915->mem_freq = 1066;
		break;
	case 3:
		i915->mem_freq = 1333;
		break;
	}
}

static void detect_mem_freq(struct drm_i915_private *i915)
{
	if (IS_PINEVIEW(i915))
		pnv_detect_mem_freq(i915);
	else if (GRAPHICS_VER(i915) == 5)
		ilk_detect_mem_freq(i915);
	else if (IS_CHERRYVIEW(i915))
		chv_detect_mem_freq(i915);
	else if (IS_VALLEYVIEW(i915))
		vlv_detect_mem_freq(i915);

	if (i915->mem_freq)
		drm_dbg(&i915->drm, "DDR speed: %d MHz\n", i915->mem_freq);
}

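/*
 * Number of DRAM devices making up the DIMM, derived from the rank count
 * and device width (assuming 64 data bits per rank).
 */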
static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
{
	return dimm->ranks * 64 / (dimm->width ?: 1);
}

/* Returns total Gb for the whole DIMM */
static int skl_get_dimm_size(u16 val)
{
	return (val & SKL_DRAM_SIZE_MASK) * 8;
}

static int skl_get_dimm_width(u16 val)
{
	if (skl_get_dimm_size(val) == 0)
		return 0;

	switch (val & SKL_DRAM_WIDTH_MASK) {
	case SKL_DRAM_WIDTH_X8:
	case SKL_DRAM_WIDTH_X16:
	case SKL_DRAM_WIDTH_X32:
		val = (val & SKL_DRAM_WIDTH_MASK) >> SKL_DRAM_WIDTH_SHIFT;
		return 8 << val;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int skl_get_dimm_ranks(u16 val)
{
	if (skl_get_dimm_size(val) == 0)
		return 0;

	val = (val & SKL_DRAM_RANK_MASK) >> SKL_DRAM_RANK_SHIFT;

	return val + 1;
}

/* Returns total Gb for the whole DIMM */
static int icl_get_dimm_size(u16 val)
{
	return (val & ICL_DRAM_SIZE_MASK) * 8 / 2;
}

static int icl_get_dimm_width(u16 val)
{
	if (icl_get_dimm_size(val) == 0)
		return 0;

	switch (val & ICL_DRAM_WIDTH_MASK) {
	case ICL_DRAM_WIDTH_X8:
	case ICL_DRAM_WIDTH_X16:
	case ICL_DRAM_WIDTH_X32:
		val = (val & ICL_DRAM_WIDTH_MASK) >> ICL_DRAM_WIDTH_SHIFT;
		return 8 << val;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int icl_get_dimm_ranks(u16 val)
{
	if (icl_get_dimm_size(val) == 0)
		return 0;

	val = (val & ICL_DRAM_RANK_MASK) >> ICL_DRAM_RANK_SHIFT;

	return val + 1;
}

static bool
skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
{
	/* Convert total Gb to Gb per DRAM device */
	return dimm->size / (intel_dimm_num_devices(dimm) ?: 1) == 16;
}

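/*
 * Decode a single 16-bit MAD DIMM field; ICL+ and SKL/derivatives use
 * different bit layouts for the size/width/rank fields.
 */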
static void
skl_dram_get_dimm_info(struct drm_i915_private *i915,
		       struct dram_dimm_info *dimm,
		       int channel, char dimm_name, u16 val)
{
	if (GRAPHICS_VER(i915) >= 11) {
		dimm->size = icl_get_dimm_size(val);
		dimm->width = icl_get_dimm_width(val);
		dimm->ranks = icl_get_dimm_ranks(val);
	} else {
		dimm->size = skl_get_dimm_size(val);
		dimm->width = skl_get_dimm_width(val);
		dimm->ranks = skl_get_dimm_ranks(val);
	}

	drm_dbg_kms(&i915->drm,
		    "CH%u DIMM %c size: %u Gb, width: X%u, ranks: %u, 16Gb DIMMs: %s\n",
		    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
		    str_yes_no(skl_is_16gb_dimm(dimm)));
}

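/*
 * Decode both DIMMs of a channel (DIMM L in the low 16 bits, DIMM S in
 * the high 16 bits) and derive the effective rank count for the channel.
 */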
static int
skl_dram_get_channel_info(struct drm_i915_private *i915,
			  struct dram_channel_info *ch,
			  int channel, u32 val)
{
	skl_dram_get_dimm_info(i915, &ch->dimm_l,
			       channel, 'L', val & 0xffff);
	skl_dram_get_dimm_info(i915, &ch->dimm_s,
			       channel, 'S', val >> 16);

	if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
		drm_dbg_kms(&i915->drm, "CH%u not populated\n", channel);
		return -EINVAL;
	}

	if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
		ch->ranks = 2;
	else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
		ch->ranks = 2;
	else
		ch->ranks = 1;

	ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
		skl_is_16gb_dimm(&ch->dimm_s);

	drm_dbg_kms(&i915->drm, "CH%u ranks: %u, 16Gb DIMMs: %s\n",
		    channel, ch->ranks, str_yes_no(ch->is_16gb_dimm));

	return 0;
}

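/*
 * The memory configuration counts as symmetric when both channels are
 * identical and, if a second DIMM is present, both DIMMs in the channel
 * match as well.
 */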
static bool
intel_is_dram_symmetric(const struct dram_channel_info *ch0,
			const struct dram_channel_info *ch1)
{
	return !memcmp(ch0, ch1, sizeof(*ch0)) &&
		(ch0->dimm_s.size == 0 ||
		 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
}

static int
skl_dram_get_channels_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	struct dram_channel_info ch0 = {}, ch1 = {};
	u32 val;
	int ret;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch0, 0, val);
	if (ret == 0)
		dram_info->num_channels++;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch1, 1, val);
	if (ret == 0)
		dram_info->num_channels++;

	if (dram_info->num_channels == 0) {
		drm_info(&i915->drm, "Number of memory channels is zero\n");
		return -EINVAL;
	}

	if (ch0.ranks == 0 && ch1.ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory rank information\n");
		return -EINVAL;
	}

	dram_info->wm_lv_0_adjust_needed = ch0.is_16gb_dimm || ch1.is_16gb_dimm;

	dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);

	drm_dbg_kms(&i915->drm, "Memory configuration is symmetric? %s\n",
		    str_yes_no(dram_info->symmetric_memory));

	return 0;
}

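/* DRAM type as reported in the MAD inter-channel MCHBAR register. */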
static enum intel_dram_type
skl_get_dram_type(struct drm_i915_private *i915)
{
	u32 val;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);

	switch (val & SKL_DRAM_DDR_TYPE_MASK) {
	case SKL_DRAM_DDR_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case SKL_DRAM_DDR_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case SKL_DRAM_DDR_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case SKL_DRAM_DDR_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}

static int
skl_get_dram_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	int ret;

	dram_info->type = skl_get_dram_type(i915);
	drm_dbg_kms(&i915->drm, "DRAM type: %s\n",
		    intel_dram_type_str(dram_info->type));

	ret = skl_dram_get_channels_info(i915);
	if (ret)
		return ret;

	return 0;
}

/* Returns Gb per DRAM device */
static int bxt_get_dimm_size(u32 val)
{
	switch (val & BXT_DRAM_SIZE_MASK) {
	case BXT_DRAM_SIZE_4GBIT:
		return 4;
	case BXT_DRAM_SIZE_6GBIT:
		return 6;
	case BXT_DRAM_SIZE_8GBIT:
		return 8;
	case BXT_DRAM_SIZE_12GBIT:
		return 12;
	case BXT_DRAM_SIZE_16GBIT:
		return 16;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int bxt_get_dimm_width(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	val = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;

	return 8 << val;
}

static int bxt_get_dimm_ranks(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	switch (val & BXT_DRAM_RANK_MASK) {
	case BXT_DRAM_RANK_SINGLE:
		return 1;
	case BXT_DRAM_RANK_DUAL:
		return 2;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static enum intel_dram_type bxt_get_dimm_type(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return INTEL_DRAM_UNKNOWN;

	switch (val & BXT_DRAM_TYPE_MASK) {
	case BXT_DRAM_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case BXT_DRAM_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case BXT_DRAM_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case BXT_DRAM_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}

static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
	dimm->width = bxt_get_dimm_width(val);
	dimm->ranks = bxt_get_dimm_ranks(val);

	/*
	 * Size in register is Gb per DRAM device. Convert to total
	 * Gb to match the way we report this for non-LP platforms.
	 */
	dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm);
}

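/*
 * Each DUNIT register describes one memory channel; an all-ones readback
 * is taken to mean the channel is not populated and is skipped.
 */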
static int bxt_get_dram_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	u32 val;
	u8 valid_ranks = 0;
	int i;

	/*
	 * Now read each DUNIT8/9/10/11 to check the ranks of each DIMM.
	 */
	for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
		struct dram_dimm_info dimm;
		enum intel_dram_type type;

		val = intel_uncore_read(&i915->uncore, BXT_D_CR_DRP0_DUNIT(i));
		if (val == 0xFFFFFFFF)
			continue;

		dram_info->num_channels++;

		bxt_get_dimm_info(&dimm, val);
		type = bxt_get_dimm_type(val);

		drm_WARN_ON(&i915->drm, type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != type);

		drm_dbg_kms(&i915->drm,
			    "CH%u DIMM size: %u Gb, width: X%u, ranks: %u, type: %s\n",
			    i - BXT_D_CR_DRP0_DUNIT_START,
			    dimm.size, dimm.width, dimm.ranks,
			    intel_dram_type_str(type));

		if (valid_ranks == 0)
			valid_ranks = dimm.ranks;

		if (type != INTEL_DRAM_UNKNOWN)
			dram_info->type = type;
	}

	if (dram_info->type == INTEL_DRAM_UNKNOWN || valid_ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory information\n");
		return -EINVAL;
	}

	return 0;
}

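/*
 * Query the pcode memory subsystem global info: DRAM type is in bits 3:0
 * (with different encodings for gen11 and gen12), the channel count in
 * bits 7:4, the number of QGV points in bits 11:8 and the number of PSF
 * GV points in bits 13:12.
 */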
static int icl_pcode_read_mem_global_info(struct drm_i915_private *dev_priv)
{
	struct dram_info *dram_info = &dev_priv->dram_info;
	u32 val = 0;
	int ret;

	ret = snb_pcode_read(&dev_priv->uncore, ICL_PCODE_MEM_SUBSYSYSTEM_INFO |
			     ICL_PCODE_MEM_SS_READ_GLOBAL_INFO, &val, NULL);
	if (ret)
		return ret;

	if (GRAPHICS_VER(dev_priv) == 12) {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR5;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR5;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		case 4:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 5:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	} else {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	}

	dram_info->num_channels = (val & 0xf0) >> 4;
	dram_info->num_qgv_points = (val & 0xf00) >> 8;
	dram_info->num_psf_gv_points = (val & 0x3000) >> 12;

	return 0;
}

static int gen11_get_dram_info(struct drm_i915_private *i915)
{
	int ret = skl_get_dram_info(i915);

	if (ret)
		return ret;

	return icl_pcode_read_mem_global_info(i915);
}

static int gen12_get_dram_info(struct drm_i915_private *i915)
{
	i915->dram_info.wm_lv_0_adjust_needed = false;

	return icl_pcode_read_mem_global_info(i915);
}

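/*
 * Xe_LPD+ (MTL) exposes the same information directly in the
 * MTL_MEM_SS_INFO_GLOBAL register instead of via a pcode request; the
 * DDR type encoding matches the gen12 pcode one.
 */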
static int xelpdp_get_dram_info(struct drm_i915_private *i915)
{
	u32 val = intel_uncore_read(&i915->uncore, MTL_MEM_SS_INFO_GLOBAL);
	struct dram_info *dram_info = &i915->dram_info;

	switch (REG_FIELD_GET(MTL_DDR_TYPE_MASK, val)) {
	case 0:
		dram_info->type = INTEL_DRAM_DDR4;
		break;
	case 1:
		dram_info->type = INTEL_DRAM_DDR5;
		break;
	case 2:
		dram_info->type = INTEL_DRAM_LPDDR5;
		break;
	case 3:
		dram_info->type = INTEL_DRAM_LPDDR4;
		break;
	case 4:
		dram_info->type = INTEL_DRAM_DDR3;
		break;
	case 5:
		dram_info->type = INTEL_DRAM_LPDDR3;
		break;
	default:
		MISSING_CASE(val);
		return -EINVAL;
	}

	dram_info->num_channels = REG_FIELD_GET(MTL_N_OF_POPULATED_CH_MASK, val);
	dram_info->num_qgv_points = REG_FIELD_GET(MTL_N_OF_ENABLED_QGV_POINTS_MASK, val);
	/* PSF GV points not supported in D14+ */

	return 0;
}

void intel_dram_detect(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	int ret;

	detect_mem_freq(i915);

	if (GRAPHICS_VER(i915) < 9 || IS_DG2(i915) || !HAS_DISPLAY(i915))
		return;

	/*
	 * Assume level 0 watermark latency adjustment is needed until proven
	 * otherwise; this w/a is not needed on bxt/glk.
	 */
	dram_info->wm_lv_0_adjust_needed = !IS_GEN9_LP(i915);

	if (DISPLAY_VER(i915) >= 14)
		ret = xelpdp_get_dram_info(i915);
	else if (GRAPHICS_VER(i915) >= 12)
		ret = gen12_get_dram_info(i915);
	else if (GRAPHICS_VER(i915) >= 11)
		ret = gen11_get_dram_info(i915);
	else if (IS_GEN9_LP(i915))
		ret = bxt_get_dram_info(i915);
	else
		ret = skl_get_dram_info(i915);
	if (ret)
		return;

	drm_dbg_kms(&i915->drm, "DRAM channels: %u\n", dram_info->num_channels);

	drm_dbg_kms(&i915->drm, "Watermark level 0 adjustment needed: %s\n",
		    str_yes_no(dram_info->wm_lv_0_adjust_needed));
}

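/*
 * eDRAM size in MB, computed from the bank/way/set fields of the EDRAM
 * capability register.
 */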
static u32 gen9_edram_size_mb(struct drm_i915_private *i915, u32 cap)
{
	static const u8 ways[8] = { 4, 8, 12, 16, 16, 16, 16, 16 };
	static const u8 sets[4] = { 1, 1, 2, 2 };

	return EDRAM_NUM_BANKS(cap) *
		ways[EDRAM_WAYS_IDX(cap)] *
		sets[EDRAM_SETS_IDX(cap)];
}

void intel_dram_edram_detect(struct drm_i915_private *i915)
{
	u32 edram_cap = 0;

	if (!(IS_HASWELL(i915) || IS_BROADWELL(i915) || GRAPHICS_VER(i915) >= 9))
		return;

	edram_cap = intel_uncore_read_fw(&i915->uncore, HSW_EDRAM_CAP);

	/* NB: We can't write IDICR yet because we don't have gt funcs set up */

	if (!(edram_cap & EDRAM_ENABLED))
		return;

	/*
	 * The capability bits needed for the size calculation are not there
	 * on pre-gen9 hardware, so always return 128MB there.
	 */
	if (GRAPHICS_VER(i915) < 9)
		i915->edram_size_mb = 128;
	else
		i915->edram_size_mb = gen9_edram_size_mb(i915, edram_cap);

	drm_info(&i915->drm, "Found %uMB of eDRAM\n", i915->edram_size_mb);
}