/*
 * Copyright 2013 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 * 	    Roy Spliet <rspliet@eclipso.eu>
 */

#include "ramfuc.h"
#include "nv50.h"

#include <core/option.h>
#include <subdev/bios.h>
#include <subdev/bios/M0205.h>
#include <subdev/bios/rammap.h>
#include <subdev/bios/timing.h>
#include <subdev/clk/gt215.h>
#include <subdev/gpio.h>

/* XXX: Remove when memx gains GPIO support */
extern int nv50_gpio_location(int line, u32 *reg, u32 *shift);

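/*
 * Shadow handles for the registers touched by the reclocking scripts;
 * ramfuc queues writes against these and replays them via ram_exec().
 */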
struct gt215_ramfuc {
	struct ramfuc base;
	struct ramfuc_reg r_0x001610;
	struct ramfuc_reg r_0x001700;
	struct ramfuc_reg r_0x002504;
	struct ramfuc_reg r_0x004000;
	struct ramfuc_reg r_0x004004;
	struct ramfuc_reg r_0x004018;
	struct ramfuc_reg r_0x004128;
	struct ramfuc_reg r_0x004168;
	struct ramfuc_reg r_0x100080;
	struct ramfuc_reg r_0x100200;
	struct ramfuc_reg r_0x100210;
	struct ramfuc_reg r_0x100220[9];
	struct ramfuc_reg r_0x100264;
	struct ramfuc_reg r_0x1002d0;
	struct ramfuc_reg r_0x1002d4;
	struct ramfuc_reg r_0x1002dc;
	struct ramfuc_reg r_0x10053c;
	struct ramfuc_reg r_0x1005a0;
	struct ramfuc_reg r_0x1005a4;
	struct ramfuc_reg r_0x100700;
	struct ramfuc_reg r_0x100714;
	struct ramfuc_reg r_0x100718;
	struct ramfuc_reg r_0x10071c;
	struct ramfuc_reg r_0x100720;
	struct ramfuc_reg r_0x100760;
	struct ramfuc_reg r_0x1007a0;
	struct ramfuc_reg r_0x1007e0;
	struct ramfuc_reg r_0x100da0;
	struct ramfuc_reg r_0x10f804;
	struct ramfuc_reg r_0x1110e0;
	struct ramfuc_reg r_0x111100;
	struct ramfuc_reg r_0x111104;
	struct ramfuc_reg r_0x1111e0;
	struct ramfuc_reg r_0x111400;
	struct ramfuc_reg r_0x611200;
	struct ramfuc_reg r_mr[4];
	struct ramfuc_reg r_gpioFBVREF;
};

struct gt215_ltrain {
	enum {
		NVA3_TRAIN_UNKNOWN,
		NVA3_TRAIN_UNSUPPORTED,
		NVA3_TRAIN_ONCE,
		NVA3_TRAIN_EXEC,
		NVA3_TRAIN_DONE
	} state;
	u32 r_100720;
	u32 r_1111e0;
	u32 r_111400;
	struct nvkm_mem *mem;
};

struct gt215_ram {
	struct nvkm_ram base;
	struct gt215_ramfuc fuc;
	struct gt215_ltrain ltrain;
};

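/*
 * Reduce the 0x40 training result words into register values.
 *
 * Bit 31 of each word appears to flag a valid sample, and bits
 * (0x101 << i) a pass for lane i at that tap.  For each lane the passing
 * window is located, its midpoint taken, and the per-lane midpoints are
 * packed into 0x100720 with the dominant bin picking the value for
 * 0x1111e0.
 */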
void
gt215_link_train_calc(u32 *vals, struct gt215_ltrain *train)
{
	int i, lo, hi;
	u8 median[8], bins[4] = {0, 0, 0, 0}, bin = 0, qty = 0;

	for (i = 0; i < 8; i++) {
		for (lo = 0; lo < 0x40; lo++) {
			if (!(vals[lo] & 0x80000000))
				continue;
			if (vals[lo] & (0x101 << i))
				break;
		}

		if (lo == 0x40)
			return;

		for (hi = lo + 1; hi < 0x40; hi++) {
			/* Skip invalid samples, as in the loop above */
			if (!(vals[hi] & 0x80000000))
				continue;
			if (!(vals[hi] & (0x101 << i))) {
				hi--;
				break;
			}
		}

		median[i] = ((hi - lo) >> 1) + lo;
		bins[(median[i] & 0xf0) >> 4]++;
		median[i] += 0x30;
	}

	/* Find the best value for 0x1111e0 */
	for (i = 0; i < 4; i++) {
		if (bins[i] > qty) {
			bin = i + 3;
			qty = bins[i];
		}
	}

	train->r_100720 = 0;
	for (i = 0; i < 8; i++) {
		median[i] = max(median[i], (u8) (bin << 4));
		median[i] = min(median[i], (u8) ((bin << 4) | 0xf));

		train->r_100720 |= ((median[i] & 0x0f) << (i << 2));
	}

	train->r_1111e0 = 0x02000000 | (bin * 0x101);
	train->r_111400 = 0x0;
}

/*
 * Link training for (at least) DDR3
 */
int
gt215_link_train(struct nvkm_fb *fb)
{
	struct nvkm_bios *bios = nvkm_bios(fb);
	struct gt215_ram *ram = (void *)fb->ram;
	struct nvkm_clk *clk = nvkm_clk(fb);
	struct gt215_ltrain *train = &ram->ltrain;
	struct nvkm_device *device = nv_device(fb);
	struct gt215_ramfuc *fuc = &ram->fuc;
	u32 *result, r1700;
	int ret, i;
	struct nvbios_M0205T M0205T = { 0 };
	u8 ver, hdr, cnt, len, snr, ssz;
	unsigned int clk_current;
	unsigned long flags;
	unsigned long *f = &flags;

	if (nvkm_boolopt(device->cfgopt, "NvMemExec", true) != true)
		return -ENOSYS;

	/* XXX: Multiple partitions? */
	result = kmalloc(64 * sizeof(u32), GFP_KERNEL);
	if (!result)
		return -ENOMEM;

	train->state = NVA3_TRAIN_EXEC;

	/* Clock speeds for training and back */
	nvbios_M0205Tp(bios, &ver, &hdr, &cnt, &len, &snr, &ssz, &M0205T);
	if (M0205T.freq == 0) {
		kfree(result);
		return -ENOENT;
	}

	clk_current = clk->read(clk, nv_clk_src_mem);

	ret = gt215_clk_pre(clk, f);
	if (ret)
		goto out;

	/* First: clock up/down */
	ret = ram->base.calc(fb, (u32) M0205T.freq * 1000);
	if (ret)
		goto out;

	/* Do this *after* calc, eliminates write in script */
	nv_wr32(fb, 0x111400, 0x00000000);
	/* XXX: Magic writes that improve train reliability? */
	nv_mask(fb, 0x100674, 0x0000ffff, 0x00000000);
	nv_mask(fb, 0x1005e4, 0x0000ffff, 0x00000000);
	nv_mask(fb, 0x100b0c, 0x000000ff, 0x00000000);
	nv_wr32(fb, 0x100c04, 0x00000400);

	/* Now the training script */
	r1700 = ram_rd32(fuc, 0x001700);

	ram_mask(fuc, 0x100200, 0x00000800, 0x00000000);
	ram_wr32(fuc, 0x611200, 0x3300);
	ram_wait_vblank(fuc);
	ram_wait(fuc, 0x611200, 0x00000003, 0x00000000, 500000);
	ram_mask(fuc, 0x001610, 0x00000083, 0x00000003);
	ram_mask(fuc, 0x100080, 0x00000020, 0x00000000);
	ram_mask(fuc, 0x10f804, 0x80000000, 0x00000000);
	ram_wr32(fuc, 0x001700, 0x00000000);

	ram_train(fuc);

	/* Reset */
	ram_mask(fuc, 0x10f804, 0x80000000, 0x80000000);
	ram_wr32(fuc, 0x10053c, 0x0);
	ram_wr32(fuc, 0x100720, train->r_100720);
	ram_wr32(fuc, 0x1111e0, train->r_1111e0);
	ram_wr32(fuc, 0x111400, train->r_111400);
	ram_nuke(fuc, 0x100080);
	ram_mask(fuc, 0x100080, 0x00000020, 0x00000020);
	ram_nsec(fuc, 1000);

	ram_wr32(fuc, 0x001700, r1700);
	ram_mask(fuc, 0x001610, 0x00000083, 0x00000080);
	ram_wr32(fuc, 0x611200, 0x3330);
	ram_mask(fuc, 0x100200, 0x00000800, 0x00000800);

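	/*
	 * Run the queued script at the training clock, then build and run a
	 * second script to return to the original memory clock.
	 */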
	ram_exec(fuc, true);

	ram->base.calc(fb, clk_current);
	ram_exec(fuc, true);

	/* Post-processing, avoids flicker */
	nv_mask(fb, 0x616308, 0x10, 0x10);
	nv_mask(fb, 0x616b08, 0x10, 0x10);

	gt215_clk_post(clk, f);

	ram_train_result(fb, result, 64);
	for (i = 0; i < 64; i++)
		nv_debug(fb, "Train: %08x\n", result[i]);
	gt215_link_train_calc(result, train);

	nv_debug(fb, "Train: %08x %08x %08x\n", train->r_100720,
			train->r_1111e0, train->r_111400);

	kfree(result);

	train->state = NVA3_TRAIN_DONE;

	return ret;

out:
	if (ret == -EBUSY)
		f = NULL;

	train->state = NVA3_TRAIN_UNSUPPORTED;

	gt215_clk_post(clk, f);
	kfree(result);
	return ret;
}

int
gt215_link_train_init(struct nvkm_fb *fb)
{
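	/*
	 * Training pattern, written both to the pattern registers below and
	 * to the scratch buffer in VRAM.
	 */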
	static const u32 pattern[16] = {
		0xaaaaaaaa, 0xcccccccc, 0xdddddddd, 0xeeeeeeee,
		0x00000000, 0x11111111, 0x44444444, 0xdddddddd,
		0x33333333, 0x55555555, 0x77777777, 0x66666666,
		0x99999999, 0x88888888, 0xeeeeeeee, 0xbbbbbbbb,
	};
	struct nvkm_bios *bios = nvkm_bios(fb);
	struct gt215_ram *ram = (void *)fb->ram;
	struct gt215_ltrain *train = &ram->ltrain;
	struct nvkm_mem *mem;
	struct nvbios_M0205E M0205E;
	u8 ver, hdr, cnt, len;
	u32 r001700;
	int ret, i = 0;

	train->state = NVA3_TRAIN_UNSUPPORTED;

	/* We support type "5"
	 * XXX: training pattern table appears to be unused for this routine */
	if (!nvbios_M0205Ep(bios, i, &ver, &hdr, &cnt, &len, &M0205E))
		return -ENOENT;

	if (M0205E.type != 5)
		return 0;

	train->state = NVA3_TRAIN_ONCE;

	ret = fb->ram->get(fb, 0x8000, 0x10000, 0, 0x800, &ram->ltrain.mem);
	if (ret)
		return ret;

	mem = ram->ltrain.mem;

	nv_wr32(fb, 0x100538, 0x10000000 | (mem->offset >> 16));
	nv_wr32(fb, 0x1005a8, 0x0000ffff);
	nv_mask(fb, 0x10f800, 0x00000001, 0x00000001);

	for (i = 0; i < 0x30; i++) {
		nv_wr32(fb, 0x10f8c0, (i << 8) | i);
		nv_wr32(fb, 0x10f900, pattern[i % 16]);
	}

	for (i = 0; i < 0x30; i++) {
		nv_wr32(fb, 0x10f8e0, (i << 8) | i);
		nv_wr32(fb, 0x10f920, pattern[i % 16]);
	}

	/* And upload the pattern */
	r001700 = nv_rd32(fb, 0x1700);
	nv_wr32(fb, 0x1700, mem->offset >> 16);
	for (i = 0; i < 16; i++)
		nv_wr32(fb, 0x700000 + (i << 2), pattern[i]);
	for (i = 0; i < 16; i++)
		nv_wr32(fb, 0x700100 + (i << 2), pattern[i]);
	nv_wr32(fb, 0x1700, r001700);

	train->r_100720 = nv_rd32(fb, 0x100720);
	train->r_1111e0 = nv_rd32(fb, 0x1111e0);
	train->r_111400 = nv_rd32(fb, 0x111400);
	return 0;
}

void
gt215_link_train_fini(struct nvkm_fb *fb)
{
	struct gt215_ram *ram = (void *)fb->ram;

	if (ram->ltrain.mem)
		fb->ram->put(fb, &ram->ltrain.mem);
}

/*
 * RAM reclocking
 */
#define T(t) cfg->timing_10_##t
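/* T(x) expands to the timing_10_<x> field of the target BIOS ramcfg */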
static int
gt215_ram_timing_calc(struct nvkm_fb *fb, u32 *timing)
{
	struct gt215_ram *ram = (void *)fb->ram;
	struct nvbios_ramcfg *cfg = &ram->base.target.bios;
	int tUNK_base, tUNK_40_0, prevCL;
	u32 cur2, cur3, cur7, cur8;

	cur2 = nv_rd32(fb, 0x100228);
	cur3 = nv_rd32(fb, 0x10022c);
	cur7 = nv_rd32(fb, 0x10023c);
	cur8 = nv_rd32(fb, 0x100240);

	switch ((!T(CWL)) * ram->base.type) {
	case NV_MEM_TYPE_DDR2:
		T(CWL) = T(CL) - 1;
		break;
	case NV_MEM_TYPE_GDDR3:
		T(CWL) = ((cur2 & 0xff000000) >> 24) + 1;
		break;
	}

	prevCL = (cur3 & 0x000000ff) + 1;
	tUNK_base = ((cur7 & 0x00ff0000) >> 16) - prevCL;

	timing[0] = (T(RP) << 24 | T(RAS) << 16 | T(RFC) << 8 | T(RC));
	timing[1] = (T(WR) + 1 + T(CWL)) << 24 |
		    max_t(u8, T(18), 1) << 16 |
		    (T(WTR) + 1 + T(CWL)) << 8 |
		    (5 + T(CL) - T(CWL));
	timing[2] = (T(CWL) - 1) << 24 |
		    (T(RRD) << 16) |
		    (T(RCDWR) << 8) |
		    T(RCDRD);
	timing[3] = (cur3 & 0x00ff0000) |
		    (0x30 + T(CL)) << 24 |
		    (0xb + T(CL)) << 8 |
		    (T(CL) - 1);
	timing[4] = T(20) << 24 |
		    T(21) << 16 |
		    T(13) << 8 |
		    T(13);
	timing[5] = T(RFC) << 24 |
		    max_t(u8, T(RCDRD), T(RCDWR)) << 16 |
		    max_t(u8, (T(CWL) + 6), (T(CL) + 2)) << 8 |
		    T(RP);
	timing[6] = (0x5a + T(CL)) << 16 |
		    max_t(u8, 1, (6 - T(CL) + T(CWL))) << 8 |
		    (0x50 + T(CL) - T(CWL));
	timing[7] = (cur7 & 0xff000000) |
		    ((tUNK_base + T(CL)) << 16) |
		    0x202;
	timing[8] = cur8 & 0xffffff00;

	switch (ram->base.type) {
	case NV_MEM_TYPE_DDR2:
	case NV_MEM_TYPE_GDDR3:
		tUNK_40_0 = prevCL - (cur8 & 0xff);
		if (tUNK_40_0 > 0)
			timing[8] |= T(CL);
		break;
	default:
		break;
	}

	nv_debug(fb, "Entry: 220: %08x %08x %08x %08x\n",
			timing[0], timing[1], timing[2], timing[3]);
	nv_debug(fb, "  230: %08x %08x %08x %08x\n",
			timing[4], timing[5], timing[6], timing[7]);
	nv_debug(fb, "  240: %08x\n", timing[8]);
	return 0;
}
#undef T

static void
nvkm_sddr2_dll_reset(struct gt215_ramfuc *fuc)
{
	ram_mask(fuc, mr[0], 0x100, 0x100);
	ram_nsec(fuc, 1000);
	ram_mask(fuc, mr[0], 0x100, 0x000);
	ram_nsec(fuc, 1000);
}

static void
nvkm_sddr3_dll_disable(struct gt215_ramfuc *fuc, u32 *mr)
{
	u32 mr1_old = ram_rd32(fuc, mr[1]);

	if (!(mr1_old & 0x1)) {
		ram_wr32(fuc, 0x1002d4, 0x00000001);
		ram_wr32(fuc, mr[1], mr[1]);
		ram_nsec(fuc, 1000);
	}
}

static void
nvkm_gddr3_dll_disable(struct gt215_ramfuc *fuc, u32 *mr)
{
	u32 mr1_old = ram_rd32(fuc, mr[1]);

	if (!(mr1_old & 0x40)) {
		ram_wr32(fuc, mr[1], mr[1]);
		ram_nsec(fuc, 1000);
	}
}

static void
gt215_ram_lock_pll(struct gt215_ramfuc *fuc, struct gt215_clk_info *mclk)
{
	ram_wr32(fuc, 0x004004, mclk->pll);
	ram_mask(fuc, 0x004000, 0x00000001, 0x00000001);
	ram_mask(fuc, 0x004000, 0x00000010, 0x00000000);
	ram_wait(fuc, 0x004000, 0x00020000, 0x00020000, 64000);
	ram_mask(fuc, 0x004000, 0x00000010, 0x00000010);
}

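/*
 * Drive the FBVREF GPIO (DCB function 0x2e) to the requested level if it
 * does not already read back that value; the (8 << sh) test presumably
 * compensates for the line's polarity.
 */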
static void
gt215_ram_fbvref(struct gt215_ramfuc *fuc, u32 val)
{
	struct nvkm_gpio *gpio = nvkm_gpio(fuc->base.fb);
	struct dcb_gpio_func func;
	u32 reg, sh, gpio_val;
	int ret;

	if (gpio->get(gpio, 0, 0x2e, DCB_GPIO_UNUSED) != val) {
		ret = gpio->find(gpio, 0, 0x2e, DCB_GPIO_UNUSED, &func);
		if (ret)
			return;

		nv50_gpio_location(func.line, &reg, &sh);
		gpio_val = ram_rd32(fuc, gpioFBVREF);
		if (gpio_val & (8 << sh))
			val = !val;

		ram_mask(fuc, gpioFBVREF, (0x3 << sh), ((val | 0x2) << sh));
		ram_nsec(fuc, 20000);
	}
}

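/*
 * Build the reclocking script for the target frequency: look up BIOS
 * rammap/ramcfg/timing data, calculate MR values and timings, then queue
 * the register sequence through ramfuc for gt215_ram_prog() to execute.
 */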
static int
gt215_ram_calc(struct nvkm_fb *fb, u32 freq)
{
	struct nvkm_bios *bios = nvkm_bios(fb);
	struct gt215_ram *ram = (void *)fb->ram;
	struct gt215_ramfuc *fuc = &ram->fuc;
	struct gt215_ltrain *train = &ram->ltrain;
	struct gt215_clk_info mclk;
	struct nvkm_ram_data *next;
	u8  ver, hdr, cnt, len, strap;
	u32 data;
	u32 r004018, r100760, r100da0, r111100, ctrl;
	u32 unk714, unk718, unk71c;
	int ret, i;
	u32 timing[9];
	bool pll2pll;

	next = &ram->base.target;
	next->freq = freq;
	ram->base.next = next;

	if (ram->ltrain.state == NVA3_TRAIN_ONCE)
		gt215_link_train(fb);

	/* lookup memory config data relevant to the target frequency */
	data = nvbios_rammapEm(bios, freq / 1000, &ver, &hdr, &cnt, &len,
			       &next->bios);
	if (!data || ver != 0x10 || hdr < 0x05) {
		nv_error(fb, "invalid/missing rammap entry\n");
		return -EINVAL;
	}

	/* locate specific data set for the attached memory */
	strap = nvbios_ramcfg_index(nv_subdev(fb));
	if (strap >= cnt) {
		nv_error(fb, "invalid ramcfg strap\n");
		return -EINVAL;
	}

	data = nvbios_rammapSp(bios, data, ver, hdr, cnt, len, strap,
			       &ver, &hdr, &next->bios);
	if (!data || ver != 0x10 || hdr < 0x09) {
		nv_error(fb, "invalid/missing ramcfg entry\n");
		return -EINVAL;
	}

	/* lookup memory timings, if bios says they're present */
	if (next->bios.ramcfg_timing != 0xff) {
		data = nvbios_timingEp(bios, next->bios.ramcfg_timing,
				       &ver, &hdr, &cnt, &len,
				       &next->bios);
		if (!data || ver != 0x10 || hdr < 0x17) {
			nv_error(fb, "invalid/missing timing entry\n");
			return -EINVAL;
		}
	}

	ret = gt215_pll_info(nvkm_clk(fb), 0x12, 0x4000, freq, &mclk);
	if (ret < 0) {
		nv_error(fb, "failed mclk calculation\n");
		return ret;
	}

	gt215_ram_timing_calc(fb, timing);

	ret = ram_init(fuc, fb);
	if (ret)
		return ret;

	/* Determine ram-specific MR values */
	ram->base.mr[0] = ram_rd32(fuc, mr[0]);
	ram->base.mr[1] = ram_rd32(fuc, mr[1]);
	ram->base.mr[2] = ram_rd32(fuc, mr[2]);

	switch (ram->base.type) {
	case NV_MEM_TYPE_DDR2:
		ret = nvkm_sddr2_calc(&ram->base);
		break;
	case NV_MEM_TYPE_DDR3:
		ret = nvkm_sddr3_calc(&ram->base);
		break;
	case NV_MEM_TYPE_GDDR3:
		ret = nvkm_gddr3_calc(&ram->base);
		break;
	default:
		ret = -ENOSYS;
		break;
	}

	if (ret)
		return ret;

	/* XXX: 750MHz seems rather arbitrary */
	if (freq <= 750000) {
		r004018 = 0x10000000;
		r100760 = 0x22222222;
		r100da0 = 0x00000010;
	} else {
		r004018 = 0x00000000;
		r100760 = 0x00000000;
		r100da0 = 0x00000000;
	}

	if (!next->bios.ramcfg_DLLoff)
		r004018 |= 0x00004000;

	/* pll2pll requires switching to a safe clock first */
	ctrl = ram_rd32(fuc, 0x004000);
	pll2pll = (!(ctrl & 0x00000008)) && mclk.pll;

	/* Pre, NVIDIA does this outside the script */
	if (next->bios.ramcfg_10_02_10) {
		ram_mask(fuc, 0x111104, 0x00000600, 0x00000000);
	} else {
		ram_mask(fuc, 0x111100, 0x40000000, 0x40000000);
		ram_mask(fuc, 0x111104, 0x00000180, 0x00000000);
	}
	/* Always disable this bit during reclock */
	ram_mask(fuc, 0x100200, 0x00000800, 0x00000000);

	/* If switching from non-pll to pll, lock before disabling FB */
	if (mclk.pll && !pll2pll) {
		ram_mask(fuc, 0x004128, 0x003f3141, mclk.clk | 0x00000101);
		gt215_ram_lock_pll(fuc, &mclk);
	}

	/* Start with disabling some CRTCs and PFIFO? */
	ram_wait_vblank(fuc);
	ram_wr32(fuc, 0x611200, 0x3300);
	ram_mask(fuc, 0x002504, 0x1, 0x1);
	ram_nsec(fuc, 10000);
	ram_wait(fuc, 0x002504, 0x10, 0x10, 20000); /* XXX: or longer? */
	ram_block(fuc);
	ram_nsec(fuc, 2000);

	if (!next->bios.ramcfg_10_02_10) {
		if (ram->base.type == NV_MEM_TYPE_GDDR3)
			ram_mask(fuc, 0x111100, 0x04020000, 0x00020000);
		else
			ram_mask(fuc, 0x111100, 0x04020000, 0x04020000);
	}

	/* If we're disabling the DLL, do it now */
	switch (next->bios.ramcfg_DLLoff * ram->base.type) {
	case NV_MEM_TYPE_DDR3:
		nvkm_sddr3_dll_disable(fuc, ram->base.mr);
		break;
	case NV_MEM_TYPE_GDDR3:
		nvkm_gddr3_dll_disable(fuc, ram->base.mr);
		break;
	}

	if (fuc->r_gpioFBVREF.addr && next->bios.timing_10_ODT)
		gt215_ram_fbvref(fuc, 0);

	/* Brace RAM for impact */
	ram_wr32(fuc, 0x1002d4, 0x00000001);
	ram_wr32(fuc, 0x1002d0, 0x00000001);
	ram_wr32(fuc, 0x1002d0, 0x00000001);
	ram_wr32(fuc, 0x100210, 0x00000000);
	ram_wr32(fuc, 0x1002dc, 0x00000001);
	ram_nsec(fuc, 2000);

	if (nv_device(fb)->chipset == 0xa3 && freq <= 500000)
		ram_mask(fuc, 0x100700, 0x00000006, 0x00000006);

	/* Fiddle with clocks */
	/* There are four scenarios:
	 * pll->pll: first switch to a 324MHz clock, set up new PLL, switch
	 * clk->pll: set up new PLL, switch
	 * pll->clk: set up clock, switch
	 * clk->clk: overwrite ctrl and other bits, switch */

	/* Switch to regular clock - 324MHz */
	if (pll2pll) {
		ram_mask(fuc, 0x004000, 0x00000004, 0x00000004);
		ram_mask(fuc, 0x004168, 0x003f3141, 0x00083101);
		ram_mask(fuc, 0x004000, 0x00000008, 0x00000008);
		ram_mask(fuc, 0x1110e0, 0x00088000, 0x00088000);
		ram_wr32(fuc, 0x004018, 0x00001000);
		gt215_ram_lock_pll(fuc, &mclk);
	}

	if (mclk.pll) {
		ram_mask(fuc, 0x004000, 0x00000105, 0x00000105);
		ram_wr32(fuc, 0x004018, 0x00001000 | r004018);
		ram_wr32(fuc, 0x100da0, r100da0);
	} else {
		ram_mask(fuc, 0x004168, 0x003f3141, mclk.clk | 0x00000101);
		ram_mask(fuc, 0x004000, 0x00000108, 0x00000008);
		ram_mask(fuc, 0x1110e0, 0x00088000, 0x00088000);
		ram_wr32(fuc, 0x004018, 0x00009000 | r004018);
		ram_wr32(fuc, 0x100da0, r100da0);
	}
	ram_nsec(fuc, 20000);

	if (next->bios.rammap_10_04_08) {
		ram_wr32(fuc, 0x1005a0, next->bios.ramcfg_10_06 << 16 |
					next->bios.ramcfg_10_05 << 8 |
					next->bios.ramcfg_10_05);
		ram_wr32(fuc, 0x1005a4, next->bios.ramcfg_10_08 << 8 |
					next->bios.ramcfg_10_07);
		ram_wr32(fuc, 0x10f804, next->bios.ramcfg_10_09_f0 << 20 |
					next->bios.ramcfg_10_03_0f << 16 |
					next->bios.ramcfg_10_09_0f |
					0x80000000);
		ram_mask(fuc, 0x10053c, 0x00001000, 0x00000000);
	} else {
		if (train->state == NVA3_TRAIN_DONE) {
			ram_wr32(fuc, 0x100080, 0x1020);
			ram_mask(fuc, 0x111400, 0xffffffff, train->r_111400);
			ram_mask(fuc, 0x1111e0, 0xffffffff, train->r_1111e0);
			ram_mask(fuc, 0x100720, 0xffffffff, train->r_100720);
		}
		ram_mask(fuc, 0x10053c, 0x00001000, 0x00001000);
		ram_mask(fuc, 0x10f804, 0x80000000, 0x00000000);
		ram_mask(fuc, 0x100760, 0x22222222, r100760);
		ram_mask(fuc, 0x1007a0, 0x22222222, r100760);
		ram_mask(fuc, 0x1007e0, 0x22222222, r100760);
	}

	if (nv_device(fb)->chipset == 0xa3 && freq > 500000) {
		ram_mask(fuc, 0x100700, 0x00000006, 0x00000000);
	}

	/* Final switch */
	if (mclk.pll) {
		ram_mask(fuc, 0x1110e0, 0x00088000, 0x00011000);
		ram_mask(fuc, 0x004000, 0x00000008, 0x00000000);
	}

	ram_wr32(fuc, 0x1002dc, 0x00000000);
	ram_wr32(fuc, 0x1002d4, 0x00000001);
	ram_wr32(fuc, 0x100210, 0x80000000);
	ram_nsec(fuc, 2000);

	/* Set RAM MR parameters and timings */
	for (i = 2; i >= 0; i--) {
		if (ram_rd32(fuc, mr[i]) != ram->base.mr[i]) {
			ram_wr32(fuc, mr[i], ram->base.mr[i]);
			ram_nsec(fuc, 1000);
		}
	}

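	/*
	 * Note: ram_wr32() pastes "r_" onto its register argument, so
	 * 0x100220[i] selects entry i of the r_0x100220 shadow array.
	 */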
	ram_wr32(fuc, 0x100220[3], timing[3]);
	ram_wr32(fuc, 0x100220[1], timing[1]);
	ram_wr32(fuc, 0x100220[6], timing[6]);
	ram_wr32(fuc, 0x100220[7], timing[7]);
	ram_wr32(fuc, 0x100220[2], timing[2]);
	ram_wr32(fuc, 0x100220[4], timing[4]);
	ram_wr32(fuc, 0x100220[5], timing[5]);
	ram_wr32(fuc, 0x100220[0], timing[0]);
	ram_wr32(fuc, 0x100220[8], timing[8]);

	/* Misc */
	ram_mask(fuc, 0x100200, 0x00001000, !next->bios.ramcfg_10_02_08 << 12);

	/* XXX: A lot of "chipset"/"ram type" specific stuff...? */
	unk714  = ram_rd32(fuc, 0x100714) & ~0xf0000130;
	unk718  = ram_rd32(fuc, 0x100718) & ~0x00000100;
	unk71c  = ram_rd32(fuc, 0x10071c) & ~0x00000100;
	r111100 = ram_rd32(fuc, 0x111100) & ~0x3a800000;

	if (next->bios.ramcfg_10_02_04) {
		switch (ram->base.type) {
		case NV_MEM_TYPE_DDR3:
			if (nv_device(fb)->chipset != 0xa8)
				r111100 |= 0x00000004;
			/* no break */
		case NV_MEM_TYPE_DDR2:
			r111100 |= 0x08000000;
			break;
		default:
			break;
		}
	} else {
		switch (ram->base.type) {
		case NV_MEM_TYPE_DDR2:
			r111100 |= 0x1a800000;
			unk714  |= 0x00000010;
			break;
		case NV_MEM_TYPE_DDR3:
			if (nv_device(fb)->chipset == 0xa8) {
				r111100 |=  0x08000000;
			} else {
				r111100 &= ~0x00000004;
				r111100 |=  0x12800000;
			}
			unk714  |= 0x00000010;
			break;
		case NV_MEM_TYPE_GDDR3:
			r111100 |= 0x30000000;
			unk714  |= 0x00000020;
			break;
		default:
			break;
		}
	}

	unk714 |= (next->bios.ramcfg_10_04_01) << 8;

	if (next->bios.ramcfg_10_02_20)
		unk714 |= 0xf0000000;
	if (next->bios.ramcfg_10_02_02)
		unk718 |= 0x00000100;
	if (next->bios.ramcfg_10_02_01)
		unk71c |= 0x00000100;
	if (next->bios.timing_10_24 != 0xff) {
		unk718 &= ~0xf0000000;
		unk718 |= next->bios.timing_10_24 << 28;
	}
	if (next->bios.ramcfg_10_02_10)
		r111100 &= ~0x04020000;

	ram_mask(fuc, 0x100714, 0xffffffff, unk714);
	ram_mask(fuc, 0x10071c, 0xffffffff, unk71c);
	ram_mask(fuc, 0x100718, 0xffffffff, unk718);
	ram_mask(fuc, 0x111100, 0xffffffff, r111100);

	if (fuc->r_gpioFBVREF.addr && !next->bios.timing_10_ODT)
		gt215_ram_fbvref(fuc, 1);

	/* Reset DLL */
	if (!next->bios.ramcfg_DLLoff)
		nvkm_sddr2_dll_reset(fuc);

	if (ram->base.type == NV_MEM_TYPE_GDDR3) {
		ram_nsec(fuc, 31000);
	} else {
		ram_nsec(fuc, 14000);
	}

	if (ram->base.type == NV_MEM_TYPE_DDR3) {
		ram_wr32(fuc, 0x100264, 0x1);
		ram_nsec(fuc, 2000);
	}

	ram_nuke(fuc, 0x100700);
	ram_mask(fuc, 0x100700, 0x01000000, 0x01000000);
	ram_mask(fuc, 0x100700, 0x01000000, 0x00000000);

	/* Re-enable FB */
	ram_unblock(fuc);
	ram_wr32(fuc, 0x611200, 0x3330);

	/* Post fiddlings */
	if (next->bios.rammap_10_04_02)
		ram_mask(fuc, 0x100200, 0x00000800, 0x00000800);
	if (next->bios.ramcfg_10_02_10) {
		ram_mask(fuc, 0x111104, 0x00000180, 0x00000180);
		ram_mask(fuc, 0x111100, 0x40000000, 0x00000000);
	} else {
		ram_mask(fuc, 0x111104, 0x00000600, 0x00000600);
	}

	if (mclk.pll) {
		ram_mask(fuc, 0x004168, 0x00000001, 0x00000000);
		ram_mask(fuc, 0x004168, 0x00000100, 0x00000000);
	} else {
		ram_mask(fuc, 0x004000, 0x00000001, 0x00000000);
		ram_mask(fuc, 0x004128, 0x00000001, 0x00000000);
		ram_mask(fuc, 0x004128, 0x00000100, 0x00000000);
	}

	return 0;
}

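/*
 * Execute the script queued by gt215_ram_calc(), or discard it when the
 * NvMemExec option disables actual reclocking.
 */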
static int
gt215_ram_prog(struct nvkm_fb *fb)
{
	struct nvkm_device *device = nv_device(fb);
	struct gt215_ram *ram = (void *)fb->ram;
	struct gt215_ramfuc *fuc = &ram->fuc;
	bool exec = nvkm_boolopt(device->cfgopt, "NvMemExec", true);

	if (exec) {
		nv_mask(fb, 0x001534, 0x2, 0x2);

		ram_exec(fuc, true);

		/* Post-processing, avoids flicker */
		nv_mask(fb, 0x002504, 0x1, 0x0);
		nv_mask(fb, 0x001534, 0x2, 0x0);

		nv_mask(fb, 0x616308, 0x10, 0x10);
		nv_mask(fb, 0x616b08, 0x10, 0x10);
	} else {
		ram_exec(fuc, false);
	}
	return 0;
}

static void
gt215_ram_tidy(struct nvkm_fb *fb)
{
	struct gt215_ram *ram = (void *)fb->ram;
	struct gt215_ramfuc *fuc = &ram->fuc;
	ram_exec(fuc, false);
}

static int
gt215_ram_init(struct nvkm_object *object)
{
	struct nvkm_fb *fb = (void *)object->parent;
	struct gt215_ram   *ram = (void *)object;
	int ret;

	ret = nvkm_ram_init(&ram->base);
	if (ret)
		return ret;

	gt215_link_train_init(fb);
	return 0;
}

static int
gt215_ram_fini(struct nvkm_object *object, bool suspend)
{
	struct nvkm_fb *fb = (void *)object->parent;

	if (!suspend)
		gt215_link_train_fini(fb);

	return 0;
}

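/*
 * Constructor: hook up calc/prog/tidy for RAM types we can reclock and
 * pre-register every register the scripts touch (the _stride entries
 * cover one copy per memory partition).
 */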
static int
gt215_ram_ctor(struct nvkm_object *parent, struct nvkm_object *engine,
	       struct nvkm_oclass *oclass, void *data, u32 datasize,
	       struct nvkm_object **pobject)
{
	struct nvkm_fb *fb = nvkm_fb(parent);
	struct nvkm_gpio *gpio = nvkm_gpio(fb);
	struct dcb_gpio_func func;
	struct gt215_ram *ram;
	int ret, i;
	u32 reg, shift;

	ret = nv50_ram_create(parent, engine, oclass, &ram);
	*pobject = nv_object(ram);
	if (ret)
		return ret;

	switch (ram->base.type) {
	case NV_MEM_TYPE_DDR2:
	case NV_MEM_TYPE_DDR3:
	case NV_MEM_TYPE_GDDR3:
		ram->base.calc = gt215_ram_calc;
		ram->base.prog = gt215_ram_prog;
		ram->base.tidy = gt215_ram_tidy;
		break;
	default:
		nv_warn(ram, "reclocking of this ram type unsupported\n");
		return 0;
	}

	ram->fuc.r_0x001610 = ramfuc_reg(0x001610);
	ram->fuc.r_0x001700 = ramfuc_reg(0x001700);
	ram->fuc.r_0x002504 = ramfuc_reg(0x002504);
	ram->fuc.r_0x004000 = ramfuc_reg(0x004000);
	ram->fuc.r_0x004004 = ramfuc_reg(0x004004);
	ram->fuc.r_0x004018 = ramfuc_reg(0x004018);
	ram->fuc.r_0x004128 = ramfuc_reg(0x004128);
	ram->fuc.r_0x004168 = ramfuc_reg(0x004168);
	ram->fuc.r_0x100080 = ramfuc_reg(0x100080);
	ram->fuc.r_0x100200 = ramfuc_reg(0x100200);
	ram->fuc.r_0x100210 = ramfuc_reg(0x100210);
	for (i = 0; i < 9; i++)
		ram->fuc.r_0x100220[i] = ramfuc_reg(0x100220 + (i * 4));
	ram->fuc.r_0x100264 = ramfuc_reg(0x100264);
	ram->fuc.r_0x1002d0 = ramfuc_reg(0x1002d0);
	ram->fuc.r_0x1002d4 = ramfuc_reg(0x1002d4);
	ram->fuc.r_0x1002dc = ramfuc_reg(0x1002dc);
	ram->fuc.r_0x10053c = ramfuc_reg(0x10053c);
	ram->fuc.r_0x1005a0 = ramfuc_reg(0x1005a0);
	ram->fuc.r_0x1005a4 = ramfuc_reg(0x1005a4);
	ram->fuc.r_0x100700 = ramfuc_reg(0x100700);
	ram->fuc.r_0x100714 = ramfuc_reg(0x100714);
	ram->fuc.r_0x100718 = ramfuc_reg(0x100718);
	ram->fuc.r_0x10071c = ramfuc_reg(0x10071c);
	ram->fuc.r_0x100720 = ramfuc_reg(0x100720);
	ram->fuc.r_0x100760 = ramfuc_stride(0x100760, 4, ram->base.part_mask);
	ram->fuc.r_0x1007a0 = ramfuc_stride(0x1007a0, 4, ram->base.part_mask);
	ram->fuc.r_0x1007e0 = ramfuc_stride(0x1007e0, 4, ram->base.part_mask);
	ram->fuc.r_0x100da0 = ramfuc_stride(0x100da0, 4, ram->base.part_mask);
	ram->fuc.r_0x10f804 = ramfuc_reg(0x10f804);
	ram->fuc.r_0x1110e0 = ramfuc_stride(0x1110e0, 4, ram->base.part_mask);
	ram->fuc.r_0x111100 = ramfuc_reg(0x111100);
	ram->fuc.r_0x111104 = ramfuc_reg(0x111104);
	ram->fuc.r_0x1111e0 = ramfuc_reg(0x1111e0);
	ram->fuc.r_0x111400 = ramfuc_reg(0x111400);
	ram->fuc.r_0x611200 = ramfuc_reg(0x611200);

	if (ram->base.ranks > 1) {
		ram->fuc.r_mr[0] = ramfuc_reg2(0x1002c0, 0x1002c8);
		ram->fuc.r_mr[1] = ramfuc_reg2(0x1002c4, 0x1002cc);
		ram->fuc.r_mr[2] = ramfuc_reg2(0x1002e0, 0x1002e8);
		ram->fuc.r_mr[3] = ramfuc_reg2(0x1002e4, 0x1002ec);
	} else {
		ram->fuc.r_mr[0] = ramfuc_reg(0x1002c0);
		ram->fuc.r_mr[1] = ramfuc_reg(0x1002c4);
		ram->fuc.r_mr[2] = ramfuc_reg(0x1002e0);
		ram->fuc.r_mr[3] = ramfuc_reg(0x1002e4);
	}

	ret = gpio->find(gpio, 0, 0x2e, DCB_GPIO_UNUSED, &func);
	if (ret == 0) {
		nv50_gpio_location(func.line, &reg, &shift);
		ram->fuc.r_gpioFBVREF = ramfuc_reg(reg);
	}

	return 0;
}

struct nvkm_oclass
gt215_ram_oclass = {
	.ofuncs = &(struct nvkm_ofuncs) {
		.ctor = gt215_ram_ctor,
		.dtor = _nvkm_ram_dtor,
		.init = gt215_ram_init,
		.fini = gt215_ram_fini,
	},
};