/*
 * Copyright 2013 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */
#include "gf100.h"
#include "ramfuc.h"

#include <core/option.h>
#include <subdev/bios.h>
#include <subdev/bios/pll.h>
#include <subdev/bios/rammap.h>
#include <subdev/bios/timing.h>
#include <subdev/clk.h>
#include <subdev/clk/pll.h>
#include <subdev/ltc.h>

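/*
 * Registers touched by the memory reclocking sequence.  ram_init() takes a
 * snapshot of their current values, and the ram_wr32()/ram_mask()/ram_wait()
 * helpers record accesses into a script that ram_exec() later runs (see
 * ramfuc.h).  The fields are named after their raw MMIO offsets; most of
 * these PFB/clock registers aren't otherwise documented.
 */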
struct gf100_ramfuc {
	struct ramfuc base;

	struct ramfuc_reg r_0x10fe20;
	struct ramfuc_reg r_0x10fe24;
	struct ramfuc_reg r_0x137320;
	struct ramfuc_reg r_0x137330;

	struct ramfuc_reg r_0x132000;
	struct ramfuc_reg r_0x132004;
	struct ramfuc_reg r_0x132100;

	struct ramfuc_reg r_0x137390;

	struct ramfuc_reg r_0x10f290;
	struct ramfuc_reg r_0x10f294;
	struct ramfuc_reg r_0x10f298;
	struct ramfuc_reg r_0x10f29c;
	struct ramfuc_reg r_0x10f2a0;

	struct ramfuc_reg r_0x10f300;
	struct ramfuc_reg r_0x10f338;
	struct ramfuc_reg r_0x10f340;
	struct ramfuc_reg r_0x10f344;
	struct ramfuc_reg r_0x10f348;

	struct ramfuc_reg r_0x10f910;
	struct ramfuc_reg r_0x10f914;

	struct ramfuc_reg r_0x100b0c;
	struct ramfuc_reg r_0x10f050;
	struct ramfuc_reg r_0x10f090;
	struct ramfuc_reg r_0x10f200;
	struct ramfuc_reg r_0x10f210;
	struct ramfuc_reg r_0x10f310;
	struct ramfuc_reg r_0x10f314;
	struct ramfuc_reg r_0x10f610;
	struct ramfuc_reg r_0x10f614;
	struct ramfuc_reg r_0x10f800;
	struct ramfuc_reg r_0x10f808;
	struct ramfuc_reg r_0x10f824;
	struct ramfuc_reg r_0x10f830;
	struct ramfuc_reg r_0x10f988;
	struct ramfuc_reg r_0x10f98c;
	struct ramfuc_reg r_0x10f990;
	struct ramfuc_reg r_0x10f998;
	struct ramfuc_reg r_0x10f9b0;
	struct ramfuc_reg r_0x10f9b4;
	struct ramfuc_reg r_0x10fb04;
	struct ramfuc_reg r_0x10fb08;
	struct ramfuc_reg r_0x137300;
	struct ramfuc_reg r_0x137310;
	struct ramfuc_reg r_0x137360;
	struct ramfuc_reg r_0x1373ec;
	struct ramfuc_reg r_0x1373f0;
	struct ramfuc_reg r_0x1373f8;

	struct ramfuc_reg r_0x61c140;
	struct ramfuc_reg r_0x611200;

	struct ramfuc_reg r_0x13d8f4;
};

struct gf100_ram {
	struct nvkm_ram base;
	struct gf100_ramfuc fuc;
	struct nvbios_pll refpll;
	struct nvbios_pll mempll;
};

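/*
 * Issue a link-training command by writing it to 0x10f910/0x10f914.  When
 * bit 31 of the command is set, wait (up to 500us per partition) for each
 * memory partition that isn't masked off to report completion in
 * 0x110974 + partition * 0x1000.  The command values themselves are magic
 * numbers; their exact meaning isn't documented here.
 */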
static void
gf100_ram_train(struct gf100_ramfuc *fuc, u32 magic)
{
	struct gf100_ram *ram = container_of(fuc, typeof(*ram), fuc);
	struct nvkm_fb *fb = nvkm_fb(ram);
	u32 part = nv_rd32(fb, 0x022438), i;
	u32 mask = nv_rd32(fb, 0x022554);
	u32 addr = 0x110974;

	ram_wr32(fuc, 0x10f910, magic);
	ram_wr32(fuc, 0x10f914, magic);

	for (i = 0; (magic & 0x80000000) && i < part; addr += 0x1000, i++) {
		if (mask & (1 << i))
			continue;
		ram_wait(fuc, addr, 0x0000000f, 0x00000000, 500000);
	}
}

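/*
 * Build the reclocking script for the requested memory frequency: look up
 * the rammap/ramcfg/timing data in the VBIOS, decide whether the target can
 * be reached from the reference clock with the divider alone or whether the
 * memory PLL has to be (re)programmed, and record the corresponding register
 * sequence into the ramfuc script.  The script is only executed later, by
 * gf100_ram_prog().  Much of the sequence consists of hardcoded register
 * writes whose individual meaning isn't documented here.
 */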
static int
gf100_ram_calc(struct nvkm_fb *fb, u32 freq)
{
	struct nvkm_clk *clk = nvkm_clk(fb);
	struct nvkm_bios *bios = nvkm_bios(fb);
	struct gf100_ram *ram = (void *)fb->ram;
	struct gf100_ramfuc *fuc = &ram->fuc;
	struct nvbios_ramcfg cfg;
	u8  ver, cnt, len, strap;
	struct {
		u32 data;
		u8  size;
	} rammap, ramcfg, timing;
	int ref, div, out;
	int from, mode;
	int N1, M1, P;
	int ret;

	/* lookup memory config data relevant to the target frequency */
	rammap.data = nvbios_rammapEm(bios, freq / 1000, &ver, &rammap.size,
				      &cnt, &ramcfg.size, &cfg);
	if (!rammap.data || ver != 0x10 || rammap.size < 0x0e) {
		nv_error(fb, "invalid/missing rammap entry\n");
		return -EINVAL;
	}

	/* locate specific data set for the attached memory */
	strap = nvbios_ramcfg_index(nv_subdev(fb));
	if (strap >= cnt) {
		nv_error(fb, "invalid ramcfg strap\n");
		return -EINVAL;
	}

	ramcfg.data = rammap.data + rammap.size + (strap * ramcfg.size);
	if (!ramcfg.data || ver != 0x10 || ramcfg.size < 0x0e) {
		nv_error(fb, "invalid/missing ramcfg entry\n");
		return -EINVAL;
	}

	/* lookup memory timings, if bios says they're present */
	strap = nv_ro08(bios, ramcfg.data + 0x01);
	if (strap != 0xff) {
		timing.data = nvbios_timingEe(bios, strap, &ver, &timing.size,
					      &cnt, &len);
		if (!timing.data || ver != 0x10 || timing.size < 0x19) {
			nv_error(fb, "invalid/missing timing entry\n");
			return -EINVAL;
		}
	} else {
		timing.data = 0;
	}

	ret = ram_init(fuc, fb);
	if (ret)
		return ret;

	/* determine current mclk configuration */
	from = !!(ram_rd32(fuc, 0x1373f0) & 0x00000002); /*XXX: ok? */

	/* determine target mclk configuration */
	if (!(ram_rd32(fuc, 0x137300) & 0x00000100))
		ref = clk->read(clk, nv_clk_src_sppll0);
	else
		ref = clk->read(clk, nv_clk_src_sppll1);
	div = max(min((ref * 2) / freq, (u32)65), (u32)2) - 2;
	out = (ref * 2) / (div + 2);
	mode = freq != out;
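	/*
	 * mode == 0: the divided reference clock hits the target exactly,
	 *            so the divider path is used as-is below.
	 * mode == 1: it doesn't, so the branches below program the memory
	 *            PLL and switch mclk onto it instead.
	 */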

	ram_mask(fuc, 0x137360, 0x00000002, 0x00000000);

	if ((ram_rd32(fuc, 0x132000) & 0x00000002) || 0 /*XXX*/) {
		ram_nuke(fuc, 0x132000);
		ram_mask(fuc, 0x132000, 0x00000002, 0x00000002);
		ram_mask(fuc, 0x132000, 0x00000002, 0x00000000);
	}

	if (mode == 1) {
		ram_nuke(fuc, 0x10fe20);
		ram_mask(fuc, 0x10fe20, 0x00000002, 0x00000002);
		ram_mask(fuc, 0x10fe20, 0x00000002, 0x00000000);
	}

// 0x00020034 // 0x0000000a
	ram_wr32(fuc, 0x132100, 0x00000001);

	if (mode == 1 && from == 0) {
		/* calculate refpll */
		ret = gt215_pll_calc(nv_subdev(fb), &ram->refpll,
				     ram->mempll.refclk, &N1, NULL, &M1, &P);
		if (ret <= 0) {
			nv_error(fb, "unable to calc refpll\n");
			return ret ? ret : -ERANGE;
		}

		ram_wr32(fuc, 0x10fe20, 0x20010000);
		ram_wr32(fuc, 0x137320, 0x00000003);
		ram_wr32(fuc, 0x137330, 0x81200006);
		ram_wr32(fuc, 0x10fe24, (P << 16) | (N1 << 8) | M1);
		ram_wr32(fuc, 0x10fe20, 0x20010001);
		ram_wait(fuc, 0x137390, 0x00020000, 0x00020000, 64000);

		/* calculate mempll */
		ret = gt215_pll_calc(nv_subdev(fb), &ram->mempll, freq,
				     &N1, NULL, &M1, &P);
		if (ret <= 0) {
			nv_error(fb, "unable to calc mempll\n");
			return ret ? ret : -ERANGE;
		}

		ram_wr32(fuc, 0x10fe20, 0x20010005);
		ram_wr32(fuc, 0x132004, (P << 16) | (N1 << 8) | M1);
		ram_wr32(fuc, 0x132000, 0x18010101);
		ram_wait(fuc, 0x137390, 0x00000002, 0x00000002, 64000);
	} else
	if (mode == 0) {
		ram_wr32(fuc, 0x137300, 0x00000003);
	}

	if (from == 0) {
		ram_nuke(fuc, 0x10fb04);
		ram_mask(fuc, 0x10fb04, 0x0000ffff, 0x00000000);
		ram_nuke(fuc, 0x10fb08);
		ram_mask(fuc, 0x10fb08, 0x0000ffff, 0x00000000);
		ram_wr32(fuc, 0x10f988, 0x2004ff00);
		ram_wr32(fuc, 0x10f98c, 0x003fc040);
		ram_wr32(fuc, 0x10f990, 0x20012001);
		ram_wr32(fuc, 0x10f998, 0x00011a00);
		ram_wr32(fuc, 0x13d8f4, 0x00000000);
	} else {
		ram_wr32(fuc, 0x10f988, 0x20010000);
		ram_wr32(fuc, 0x10f98c, 0x00000000);
		ram_wr32(fuc, 0x10f990, 0x20012001);
		ram_wr32(fuc, 0x10f998, 0x00010a00);
	}

	if (from == 0) {
// 0x00020039 // 0x000000ba
	}

// 0x0002003a // 0x00000002
	ram_wr32(fuc, 0x100b0c, 0x00080012);
// 0x00030014 // 0x00000000 // 0x02b5f070
// 0x00030014 // 0x00010000 // 0x02b5f070
	ram_wr32(fuc, 0x611200, 0x00003300);
// 0x00020034 // 0x0000000a
// 0x00030020 // 0x00000001 // 0x00000000

	ram_mask(fuc, 0x10f200, 0x00000800, 0x00000000);
	ram_wr32(fuc, 0x10f210, 0x00000000);
	ram_nsec(fuc, 1000);
	if (mode == 0)
		gf100_ram_train(fuc, 0x000c1001);
	ram_wr32(fuc, 0x10f310, 0x00000001);
	ram_nsec(fuc, 1000);
	ram_wr32(fuc, 0x10f090, 0x00000061);
	ram_wr32(fuc, 0x10f090, 0xc000007f);
	ram_nsec(fuc, 1000);

	if (from == 0) {
		ram_wr32(fuc, 0x10f824, 0x00007fd4);
	} else {
		ram_wr32(fuc, 0x1373ec, 0x00020404);
	}

	if (mode == 0) {
		ram_mask(fuc, 0x10f808, 0x00080000, 0x00000000);
		ram_mask(fuc, 0x10f200, 0x00008000, 0x00008000);
		ram_wr32(fuc, 0x10f830, 0x41500010);
		ram_mask(fuc, 0x10f830, 0x01000000, 0x00000000);
		ram_mask(fuc, 0x132100, 0x00000100, 0x00000100);
		ram_wr32(fuc, 0x10f050, 0xff000090);
		ram_wr32(fuc, 0x1373ec, 0x00020f0f);
		ram_wr32(fuc, 0x1373f0, 0x00000003);
		ram_wr32(fuc, 0x137310, 0x81201616);
		ram_wr32(fuc, 0x132100, 0x00000001);
// 0x00020039 // 0x000000ba
		ram_wr32(fuc, 0x10f830, 0x00300017);
		ram_wr32(fuc, 0x1373f0, 0x00000001);
		ram_wr32(fuc, 0x10f824, 0x00007e77);
		ram_wr32(fuc, 0x132000, 0x18030001);
		ram_wr32(fuc, 0x10f090, 0x4000007e);
		ram_nsec(fuc, 2000);
		ram_wr32(fuc, 0x10f314, 0x00000001);
		ram_wr32(fuc, 0x10f210, 0x80000000);
		ram_wr32(fuc, 0x10f338, 0x00300220);
		ram_wr32(fuc, 0x10f300, 0x0000011d);
		ram_nsec(fuc, 1000);
		ram_wr32(fuc, 0x10f290, 0x02060505);
		ram_wr32(fuc, 0x10f294, 0x34208288);
		ram_wr32(fuc, 0x10f298, 0x44050411);
		ram_wr32(fuc, 0x10f29c, 0x0000114c);
		ram_wr32(fuc, 0x10f2a0, 0x42e10069);
		ram_wr32(fuc, 0x10f614, 0x40044f77);
		ram_wr32(fuc, 0x10f610, 0x40044f77);
		ram_wr32(fuc, 0x10f344, 0x00600009);
		ram_nsec(fuc, 1000);
		ram_wr32(fuc, 0x10f348, 0x00700008);
		ram_wr32(fuc, 0x61c140, 0x19240000);
		ram_wr32(fuc, 0x10f830, 0x00300017);
		gf100_ram_train(fuc, 0x80021001);
		gf100_ram_train(fuc, 0x80081001);
		ram_wr32(fuc, 0x10f340, 0x00500004);
		ram_nsec(fuc, 1000);
		ram_wr32(fuc, 0x10f830, 0x01300017);
		ram_wr32(fuc, 0x10f830, 0x00300017);
// 0x00030020 // 0x00000000 // 0x00000000
// 0x00020034 // 0x0000000b
		ram_wr32(fuc, 0x100b0c, 0x00080028);
		ram_wr32(fuc, 0x611200, 0x00003330);
	} else {
		ram_wr32(fuc, 0x10f800, 0x00001800);
		ram_wr32(fuc, 0x13d8f4, 0x00000000);
		ram_wr32(fuc, 0x1373ec, 0x00020404);
		ram_wr32(fuc, 0x1373f0, 0x00000003);
		ram_wr32(fuc, 0x10f830, 0x40700010);
		ram_wr32(fuc, 0x10f830, 0x40500010);
		ram_wr32(fuc, 0x13d8f4, 0x00000000);
		ram_wr32(fuc, 0x1373f8, 0x00000000);
		ram_wr32(fuc, 0x132100, 0x00000101);
		ram_wr32(fuc, 0x137310, 0x89201616);
		ram_wr32(fuc, 0x10f050, 0xff000090);
		ram_wr32(fuc, 0x1373ec, 0x00030404);
		ram_wr32(fuc, 0x1373f0, 0x00000002);
	// 0x00020039 // 0x00000011
		ram_wr32(fuc, 0x132100, 0x00000001);
		ram_wr32(fuc, 0x1373f8, 0x00002000);
		ram_nsec(fuc, 2000);
		ram_wr32(fuc, 0x10f808, 0x7aaa0050);
		ram_wr32(fuc, 0x10f830, 0x00500010);
		ram_wr32(fuc, 0x10f200, 0x00ce1000);
		ram_wr32(fuc, 0x10f090, 0x4000007e);
		ram_nsec(fuc, 2000);
		ram_wr32(fuc, 0x10f314, 0x00000001);
		ram_wr32(fuc, 0x10f210, 0x80000000);
		ram_wr32(fuc, 0x10f338, 0x00300200);
		ram_wr32(fuc, 0x10f300, 0x0000084d);
		ram_nsec(fuc, 1000);
		ram_wr32(fuc, 0x10f290, 0x0b343825);
		ram_wr32(fuc, 0x10f294, 0x3483028e);
		ram_wr32(fuc, 0x10f298, 0x440c0600);
		ram_wr32(fuc, 0x10f29c, 0x0000214c);
		ram_wr32(fuc, 0x10f2a0, 0x42e20069);
		ram_wr32(fuc, 0x10f200, 0x00ce0000);
		ram_wr32(fuc, 0x10f614, 0x60044e77);
		ram_wr32(fuc, 0x10f610, 0x60044e77);
		ram_wr32(fuc, 0x10f340, 0x00500000);
		ram_nsec(fuc, 1000);
		ram_wr32(fuc, 0x10f344, 0x00600228);
		ram_nsec(fuc, 1000);
		ram_wr32(fuc, 0x10f348, 0x00700000);
		ram_wr32(fuc, 0x13d8f4, 0x00000000);
		ram_wr32(fuc, 0x61c140, 0x09a40000);

		gf100_ram_train(fuc, 0x800e1008);

		ram_nsec(fuc, 1000);
		ram_wr32(fuc, 0x10f800, 0x00001804);
	// 0x00030020 // 0x00000000 // 0x00000000
	// 0x00020034 // 0x0000000b
		ram_wr32(fuc, 0x13d8f4, 0x00000000);
		ram_wr32(fuc, 0x100b0c, 0x00080028);
		ram_wr32(fuc, 0x611200, 0x00003330);
		ram_nsec(fuc, 100000);
		ram_wr32(fuc, 0x10f9b0, 0x05313f41);
		ram_wr32(fuc, 0x10f9b4, 0x00002f50);

		gf100_ram_train(fuc, 0x010c1001);
	}

	ram_mask(fuc, 0x10f200, 0x00000800, 0x00000800);
// 0x00020016 // 0x00000000

	if (mode == 0)
		ram_mask(fuc, 0x132000, 0x00000001, 0x00000000);

	return 0;
}

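/*
 * Execute the script assembled by gf100_ram_calc().  Setting the
 * "NvMemExec" config option to false builds and tears down the script
 * without actually running it, so the sequence can be inspected safely.
 */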
static int
gf100_ram_prog(struct nvkm_fb *fb)
{
	struct nvkm_device *device = nv_device(fb);
	struct gf100_ram *ram = (void *)fb->ram;
	struct gf100_ramfuc *fuc = &ram->fuc;
	ram_exec(fuc, nvkm_boolopt(device->cfgopt, "NvMemExec", true));
	return 0;
}

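/* Discard a previously built script without executing it. */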
static void
gf100_ram_tidy(struct nvkm_fb *fb)
{
	struct gf100_ram *ram = (void *)fb->ram;
	struct gf100_ramfuc *fuc = &ram->fuc;
	ram_exec(fuc, false);
}

extern const u8 gf100_pte_storage_type_map[256];

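/*
 * Free a VRAM allocation made by gf100_ram_get(): release any compression
 * tags it holds back to LTC, return its regions to the VRAM allocator via
 * the shared nv50 helper, and free the nvkm_mem itself.
 */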
void
gf100_ram_put(struct nvkm_fb *fb, struct nvkm_mem **pmem)
{
	struct nvkm_ltc *ltc = nvkm_ltc(fb);
	struct nvkm_mem *mem = *pmem;

	*pmem = NULL;
	if (unlikely(mem == NULL))
		return;

	mutex_lock(&fb->subdev.mutex);
	if (mem->tag)
		ltc->tags_free(ltc, &mem->tag);
	__nv50_ram_put(fb, mem);
	mutex_unlock(&fb->subdev.mutex);

	kfree(mem);
}

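/*
 * Allocate VRAM.  size/align/ncmin are given in bytes and converted to
 * 4KiB pages.  If the requested storage type supports compression (its
 * entry in gf100_pte_storage_type_map differs from itself) and the
 * requested alignment is a 128KiB large page, compression tags are
 * allocated from LTC; otherwise the type silently falls back to its
 * uncompressed equivalent.  The allocation may be split across several
 * regions, taken from either the head or the tail of the VRAM mm
 * depending on bit 11 of memtype.
 */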
int
gf100_ram_get(struct nvkm_fb *fb, u64 size, u32 align, u32 ncmin,
	      u32 memtype, struct nvkm_mem **pmem)
{
	struct nvkm_mm *mm = &fb->vram;
	struct nvkm_mm_node *r;
	struct nvkm_mem *mem;
	int type = (memtype & 0x0ff);
	int back = (memtype & 0x800);
	const bool comp = gf100_pte_storage_type_map[type] != type;
	int ret;

	size  >>= 12;
	align >>= 12;
	ncmin >>= 12;
	if (!ncmin)
		ncmin = size;

	mem = kzalloc(sizeof(*mem), GFP_KERNEL);
	if (!mem)
		return -ENOMEM;

	INIT_LIST_HEAD(&mem->regions);
	mem->size = size;

	mutex_lock(&fb->subdev.mutex);
	if (comp) {
		struct nvkm_ltc *ltc = nvkm_ltc(fb);

		/* compression only works with lpages */
		if (align == (1 << (17 - 12))) {
			int n = size >> 5;
			ltc->tags_alloc(ltc, n, &mem->tag);
		}

		if (unlikely(!mem->tag))
			type = gf100_pte_storage_type_map[type];
	}
	mem->memtype = type;

	do {
		if (back)
			ret = nvkm_mm_tail(mm, 0, 1, size, ncmin, align, &r);
		else
			ret = nvkm_mm_head(mm, 0, 1, size, ncmin, align, &r);
		if (ret) {
			mutex_unlock(&fb->subdev.mutex);
			fb->ram->put(fb, &mem);
			return ret;
		}

		list_add_tail(&r->rl_entry, &mem->regions);
		size -= r->length;
	} while (size);
	mutex_unlock(&fb->subdev.mutex);

	r = list_first_entry(&mem->regions, struct nvkm_mm_node, rl_entry);
	mem->offset = (u64)r->offset << 12;
	*pmem = mem;
	return 0;
}

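/*
 * Common constructor for GF100-style VRAM: probe the number of memory
 * partitions, which of them are disabled (read from maskaddr, which varies
 * between chipsets), and how much memory is attached to each.  If every
 * enabled partition has the same amount, a single allocator region covers
 * all of it; otherwise the lowest common amount is addressed from 0 and
 * the remainder from the 8GiB mark upwards, as described by the comments
 * in the code below.
 */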
int
gf100_ram_create_(struct nvkm_object *parent, struct nvkm_object *engine,
		  struct nvkm_oclass *oclass, u32 maskaddr, int size,
		  void **pobject)
{
	struct nvkm_fb *fb = nvkm_fb(parent);
	struct nvkm_bios *bios = nvkm_bios(fb);
	struct nvkm_ram *ram;
	const u32 rsvd_head = ( 256 * 1024) >> 12; /* vga memory */
	const u32 rsvd_tail = (1024 * 1024) >> 12; /* vbios etc */
	u32 parts = nv_rd32(fb, 0x022438);
	u32 pmask = nv_rd32(fb, maskaddr);
	u32 bsize = nv_rd32(fb, 0x10f20c);
	u32 offset, length;
	bool uniform = true;
	int ret, part;

	ret = nvkm_ram_create_(parent, engine, oclass, size, pobject);
	ram = *pobject;
	if (ret)
		return ret;

	nv_debug(fb, "0x100800: 0x%08x\n", nv_rd32(fb, 0x100800));
	nv_debug(fb, "parts 0x%08x mask 0x%08x\n", parts, pmask);

	ram->type = nvkm_fb_bios_memtype(bios);
	ram->ranks = (nv_rd32(fb, 0x10f200) & 0x00000004) ? 2 : 1;

	/* read amount of vram attached to each memory controller */
	for (part = 0; part < parts; part++) {
		if (!(pmask & (1 << part))) {
			u32 psize = nv_rd32(fb, 0x11020c + (part * 0x1000));
			if (psize != bsize) {
				if (psize < bsize)
					bsize = psize;
				uniform = false;
			}

			nv_debug(fb, "%d: mem_amount 0x%08x\n", part, psize);
			ram->size += (u64)psize << 20;
		}
	}

	/* if all controllers have the same amount attached, there are no holes */
	if (uniform) {
		offset = rsvd_head;
		length = (ram->size >> 12) - rsvd_head - rsvd_tail;
		ret = nvkm_mm_init(&fb->vram, offset, length, 1);
	} else {
		/* otherwise, address lowest common amount from 0GiB */
		ret = nvkm_mm_init(&fb->vram, rsvd_head,
				   (bsize << 8) * parts - rsvd_head, 1);
		if (ret)
			return ret;

		/* and the rest starting from (8GiB + common_size) */
		offset = (0x0200000000ULL >> 12) + (bsize << 8);
		length = (ram->size >> 12) - ((bsize * parts) << 8) - rsvd_tail;

		ret = nvkm_mm_init(&fb->vram, offset, length, 1);
		if (ret)
			nvkm_mm_fini(&fb->vram);
	}

	if (ret)
		return ret;

	ram->get = gf100_ram_get;
	ram->put = gf100_ram_put;
	return 0;
}

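/*
 * One-time hardware init: for GDDR5, load the link-training patterns
 * through the index registers 0x10f968/0x10f96c and the data registers
 * below.  Each index is written twice, first with bit 8 of 0x10f920/
 * 0x10f924 set and then cleared; the register-level meaning of this
 * sequence isn't otherwise documented.
 */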
static int
gf100_ram_init(struct nvkm_object *object)
{
	struct nvkm_fb *fb = (void *)object->parent;
	struct gf100_ram *ram = (void *)object;
	int ret, i;

	ret = nvkm_ram_init(&ram->base);
	if (ret)
		return ret;

	/* prepare for ddr link training, and load training patterns */
	switch (ram->base.type) {
	case NV_MEM_TYPE_GDDR5: {
		static const u8  train0[] = {
			0x00, 0xff, 0x55, 0xaa, 0x33, 0xcc,
			0x00, 0xff, 0xff, 0x00, 0xff, 0x00,
		};
		static const u32 train1[] = {
			0x00000000, 0xffffffff,
			0x55555555, 0xaaaaaaaa,
			0x33333333, 0xcccccccc,
			0xf0f0f0f0, 0x0f0f0f0f,
			0x00ff00ff, 0xff00ff00,
			0x0000ffff, 0xffff0000,
		};

		for (i = 0; i < 0x30; i++) {
			nv_wr32(fb, 0x10f968, 0x00000000 | (i << 8));
			nv_wr32(fb, 0x10f96c, 0x00000000 | (i << 8));
			nv_wr32(fb, 0x10f920, 0x00000100 | train0[i % 12]);
			nv_wr32(fb, 0x10f924, 0x00000100 | train0[i % 12]);
			nv_wr32(fb, 0x10f918,              train1[i % 12]);
			nv_wr32(fb, 0x10f91c,              train1[i % 12]);
			nv_wr32(fb, 0x10f920, 0x00000000 | train0[i % 12]);
			nv_wr32(fb, 0x10f924, 0x00000000 | train0[i % 12]);
			nv_wr32(fb, 0x10f918,              train1[i % 12]);
			nv_wr32(fb, 0x10f91c,              train1[i % 12]);
		}
	}	break;
	default:
		break;
	}

	return 0;
}

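/*
 * Constructor: detect the attached VRAM, parse the reference and memory
 * PLL descriptions from the VBIOS PLL table (types 0x0c and 0x04), and,
 * for GDDR5 only, hook up the reclocking methods and the list of registers
 * shadowed by the reclock script.  Other memory types keep the detected
 * layout, but reclocking is left unsupported.
 */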
static int
gf100_ram_ctor(struct nvkm_object *parent, struct nvkm_object *engine,
	       struct nvkm_oclass *oclass, void *data, u32 size,
	       struct nvkm_object **pobject)
{
	struct nvkm_bios *bios = nvkm_bios(parent);
	struct gf100_ram *ram;
	int ret;

	ret = gf100_ram_create(parent, engine, oclass, 0x022554, &ram);
	*pobject = nv_object(ram);
	if (ret)
		return ret;

	ret = nvbios_pll_parse(bios, 0x0c, &ram->refpll);
	if (ret) {
		nv_error(ram, "mclk refpll data not found\n");
		return ret;
	}

	ret = nvbios_pll_parse(bios, 0x04, &ram->mempll);
	if (ret) {
		nv_error(ram, "mclk pll data not found\n");
		return ret;
	}

	switch (ram->base.type) {
	case NV_MEM_TYPE_GDDR5:
		ram->base.calc = gf100_ram_calc;
		ram->base.prog = gf100_ram_prog;
		ram->base.tidy = gf100_ram_tidy;
		break;
	default:
		nv_warn(ram, "reclocking of this ram type unsupported\n");
		return 0;
	}

	ram->fuc.r_0x10fe20 = ramfuc_reg(0x10fe20);
	ram->fuc.r_0x10fe24 = ramfuc_reg(0x10fe24);
	ram->fuc.r_0x137320 = ramfuc_reg(0x137320);
	ram->fuc.r_0x137330 = ramfuc_reg(0x137330);

	ram->fuc.r_0x132000 = ramfuc_reg(0x132000);
	ram->fuc.r_0x132004 = ramfuc_reg(0x132004);
	ram->fuc.r_0x132100 = ramfuc_reg(0x132100);

	ram->fuc.r_0x137390 = ramfuc_reg(0x137390);

	ram->fuc.r_0x10f290 = ramfuc_reg(0x10f290);
	ram->fuc.r_0x10f294 = ramfuc_reg(0x10f294);
	ram->fuc.r_0x10f298 = ramfuc_reg(0x10f298);
	ram->fuc.r_0x10f29c = ramfuc_reg(0x10f29c);
	ram->fuc.r_0x10f2a0 = ramfuc_reg(0x10f2a0);

	ram->fuc.r_0x10f300 = ramfuc_reg(0x10f300);
	ram->fuc.r_0x10f338 = ramfuc_reg(0x10f338);
	ram->fuc.r_0x10f340 = ramfuc_reg(0x10f340);
	ram->fuc.r_0x10f344 = ramfuc_reg(0x10f344);
	ram->fuc.r_0x10f348 = ramfuc_reg(0x10f348);

	ram->fuc.r_0x10f910 = ramfuc_reg(0x10f910);
	ram->fuc.r_0x10f914 = ramfuc_reg(0x10f914);

	ram->fuc.r_0x100b0c = ramfuc_reg(0x100b0c);
	ram->fuc.r_0x10f050 = ramfuc_reg(0x10f050);
	ram->fuc.r_0x10f090 = ramfuc_reg(0x10f090);
	ram->fuc.r_0x10f200 = ramfuc_reg(0x10f200);
	ram->fuc.r_0x10f210 = ramfuc_reg(0x10f210);
	ram->fuc.r_0x10f310 = ramfuc_reg(0x10f310);
	ram->fuc.r_0x10f314 = ramfuc_reg(0x10f314);
	ram->fuc.r_0x10f610 = ramfuc_reg(0x10f610);
	ram->fuc.r_0x10f614 = ramfuc_reg(0x10f614);
	ram->fuc.r_0x10f800 = ramfuc_reg(0x10f800);
	ram->fuc.r_0x10f808 = ramfuc_reg(0x10f808);
	ram->fuc.r_0x10f824 = ramfuc_reg(0x10f824);
	ram->fuc.r_0x10f830 = ramfuc_reg(0x10f830);
	ram->fuc.r_0x10f988 = ramfuc_reg(0x10f988);
	ram->fuc.r_0x10f98c = ramfuc_reg(0x10f98c);
	ram->fuc.r_0x10f990 = ramfuc_reg(0x10f990);
	ram->fuc.r_0x10f998 = ramfuc_reg(0x10f998);
	ram->fuc.r_0x10f9b0 = ramfuc_reg(0x10f9b0);
	ram->fuc.r_0x10f9b4 = ramfuc_reg(0x10f9b4);
	ram->fuc.r_0x10fb04 = ramfuc_reg(0x10fb04);
	ram->fuc.r_0x10fb08 = ramfuc_reg(0x10fb08);
	ram->fuc.r_0x137300 = ramfuc_reg(0x137300);
	ram->fuc.r_0x137310 = ramfuc_reg(0x137310);
	ram->fuc.r_0x137360 = ramfuc_reg(0x137360);
	ram->fuc.r_0x1373ec = ramfuc_reg(0x1373ec);
	ram->fuc.r_0x1373f0 = ramfuc_reg(0x1373f0);
	ram->fuc.r_0x1373f8 = ramfuc_reg(0x1373f8);

	ram->fuc.r_0x61c140 = ramfuc_reg(0x61c140);
	ram->fuc.r_0x611200 = ramfuc_reg(0x611200);

	ram->fuc.r_0x13d8f4 = ramfuc_reg(0x13d8f4);
	return 0;
}

struct nvkm_oclass
gf100_ram_oclass = {
	.handle = 0,
	.ofuncs = &(struct nvkm_ofuncs) {
		.ctor = gf100_ram_ctor,
		.dtor = _nvkm_ram_dtor,
		.init = gf100_ram_init,
		.fini = _nvkm_ram_fini,
	}
};