/*
 * Copyright 2013 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 *          Roy Spliet <rspliet@eclipso.eu>
 */

#include "ramfuc.h"
#include "nv50.h"

#include <core/option.h>
#include <subdev/bios.h>
#include <subdev/bios/M0205.h>
#include <subdev/bios/rammap.h>
#include <subdev/bios/timing.h>
#include <subdev/clk/gt215.h>
#include <subdev/gpio.h>

/* XXX: Remove when memx gains GPIO support */
extern int nv50_gpio_location(int line, u32 *reg, u32 *shift);

struct gt215_ramfuc {
	struct ramfuc base;
	struct ramfuc_reg r_0x001610;
	struct ramfuc_reg r_0x001700;
	struct ramfuc_reg r_0x002504;
	struct ramfuc_reg r_0x004000;
	struct ramfuc_reg r_0x004004;
	struct ramfuc_reg r_0x004018;
	struct ramfuc_reg r_0x004128;
	struct ramfuc_reg r_0x004168;
	struct ramfuc_reg r_0x100080;
	struct ramfuc_reg r_0x100200;
	struct ramfuc_reg r_0x100210;
	struct ramfuc_reg r_0x100220[9];
	struct ramfuc_reg r_0x100264;
	struct ramfuc_reg r_0x1002d0;
	struct ramfuc_reg r_0x1002d4;
	struct ramfuc_reg r_0x1002dc;
	struct ramfuc_reg r_0x10053c;
	struct ramfuc_reg r_0x1005a0;
	struct ramfuc_reg r_0x1005a4;
	struct ramfuc_reg r_0x100700;
	struct ramfuc_reg r_0x100714;
	struct ramfuc_reg r_0x100718;
	struct ramfuc_reg r_0x10071c;
	struct ramfuc_reg r_0x100720;
	struct ramfuc_reg r_0x100760;
	struct ramfuc_reg r_0x1007a0;
	struct ramfuc_reg r_0x1007e0;
	struct ramfuc_reg r_0x100da0;
	struct ramfuc_reg r_0x10f804;
	struct ramfuc_reg r_0x1110e0;
	struct ramfuc_reg r_0x111100;
	struct ramfuc_reg r_0x111104;
	struct ramfuc_reg r_0x1111e0;
	struct ramfuc_reg r_0x111400;
	struct ramfuc_reg r_0x611200;
	struct ramfuc_reg r_mr[4];
	struct ramfuc_reg r_gpioFBVREF;
};

struct gt215_ltrain {
	enum {
		NVA3_TRAIN_UNKNOWN,
		NVA3_TRAIN_UNSUPPORTED,
		NVA3_TRAIN_ONCE,
		NVA3_TRAIN_EXEC,
		NVA3_TRAIN_DONE
	} state;
	u32 r_100720;
	u32 r_1111e0;
	u32 r_111400;
	struct nvkm_mem *mem;
};

struct gt215_ram {
	struct nvkm_ram base;
	struct gt215_ramfuc fuc;
	struct gt215_ltrain ltrain;
};

void
gt215_link_train_calc(u32 *vals, struct gt215_ltrain *train)
{
	int i, lo, hi;
	u8 median[8], bins[4] = {0, 0, 0, 0}, bin = 0, qty = 0;

	for (i = 0; i < 8; i++) {
		for (lo = 0; lo < 0x40; lo++) {
			if (!(vals[lo] & 0x80000000))
				continue;
			if (vals[lo] & (0x101 << i))
				break;
		}

		if (lo == 0x40)
			return;

		for (hi = lo + 1; hi < 0x40; hi++) {
			if (!(vals[hi] & 0x80000000))
				continue;
			if (!(vals[hi] & (0x101 << i))) {
				hi--;
				break;
			}
		}

		median[i] = ((hi - lo) >> 1) + lo;
		bins[(median[i] & 0xf0) >> 4]++;
		median[i] += 0x30;
	}

	/* Find the best value for 0x1111e0 */
	for (i = 0; i < 4; i++) {
		if (bins[i] > qty) {
			bin = i + 3;
			qty = bins[i];
		}
	}

	train->r_100720 = 0;
	for (i = 0; i < 8; i++) {
		median[i] = max(median[i], (u8) (bin << 4));
		median[i] = min(median[i], (u8) ((bin << 4) | 0xf));

		train->r_100720 |= ((median[i] & 0x0f) << (i << 2));
	}

	train->r_1111e0 = 0x02000000 | (bin * 0x101);
	train->r_111400 = 0x0;
}

/*
 * Link training for (at least) DDR3
 */
int
gt215_link_train(struct nvkm_fb *fb)
{
	struct nvkm_bios *bios = nvkm_bios(fb);
	struct gt215_ram *ram = (void *)fb->ram;
	struct nvkm_clk *clk = nvkm_clk(fb);
	struct gt215_ltrain *train = &ram->ltrain;
	struct nvkm_device *device = nv_device(fb);
	struct gt215_ramfuc *fuc = &ram->fuc;
	u32 *result, r1700;
	int ret, i;
	struct nvbios_M0205T M0205T = { 0 };
	u8 ver, hdr, cnt, len, snr, ssz;
	unsigned int clk_current;
	unsigned long flags;
	unsigned long *f = &flags;

	if (nvkm_boolopt(device->cfgopt, "NvMemExec", true) != true)
		return -ENOSYS;

	/* XXX: Multiple partitions? */
	result = kmalloc(64 * sizeof(u32), GFP_KERNEL);
	if (!result)
		return -ENOMEM;

	train->state = NVA3_TRAIN_EXEC;

	/* Clock speeds for training and back */
	nvbios_M0205Tp(bios, &ver, &hdr, &cnt, &len, &snr, &ssz, &M0205T);
	if (M0205T.freq == 0) {
		kfree(result);
		return -ENOENT;
	}

	clk_current = clk->read(clk, nv_clk_src_mem);

	ret = gt215_clk_pre(clk, f);
	if (ret)
		goto out;

	/* First: clock up/down */
	ret = ram->base.calc(fb, (u32) M0205T.freq * 1000);
	if (ret)
		goto out;

	/* Do this *after* calc, eliminates write in script */
	nvkm_wr32(device, 0x111400, 0x00000000);
	/* XXX: Magic writes that improve train reliability? */
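	/* Note: these nvkm_* accesses take effect immediately over MMIO; they
	 * are not queued into the memx script that the ram_* calls below
	 * build up. */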
	nvkm_mask(device, 0x100674, 0x0000ffff, 0x00000000);
	nvkm_mask(device, 0x1005e4, 0x0000ffff, 0x00000000);
	nvkm_mask(device, 0x100b0c, 0x000000ff, 0x00000000);
	nvkm_wr32(device, 0x100c04, 0x00000400);

	/* Now the training script */
	r1700 = ram_rd32(fuc, 0x001700);

	ram_mask(fuc, 0x100200, 0x00000800, 0x00000000);
	ram_wr32(fuc, 0x611200, 0x3300);
	ram_wait_vblank(fuc);
	ram_wait(fuc, 0x611200, 0x00000003, 0x00000000, 500000);
	ram_mask(fuc, 0x001610, 0x00000083, 0x00000003);
	ram_mask(fuc, 0x100080, 0x00000020, 0x00000000);
	ram_mask(fuc, 0x10f804, 0x80000000, 0x00000000);
	ram_wr32(fuc, 0x001700, 0x00000000);

	ram_train(fuc);

	/* Reset */
	ram_mask(fuc, 0x10f804, 0x80000000, 0x80000000);
	ram_wr32(fuc, 0x10053c, 0x0);
	ram_wr32(fuc, 0x100720, train->r_100720);
	ram_wr32(fuc, 0x1111e0, train->r_1111e0);
	ram_wr32(fuc, 0x111400, train->r_111400);
	ram_nuke(fuc, 0x100080);
	ram_mask(fuc, 0x100080, 0x00000020, 0x00000020);
	ram_nsec(fuc, 1000);

	ram_wr32(fuc, 0x001700, r1700);
	ram_mask(fuc, 0x001610, 0x00000083, 0x00000080);
	ram_wr32(fuc, 0x611200, 0x3330);
	ram_mask(fuc, 0x100200, 0x00000800, 0x00000800);

	ram_exec(fuc, true);

	ram->base.calc(fb, clk_current);
	ram_exec(fuc, true);

	/* Post-processing, avoids flicker */
	nvkm_mask(device, 0x616308, 0x10, 0x10);
	nvkm_mask(device, 0x616b08, 0x10, 0x10);

	gt215_clk_post(clk, f);

	ram_train_result(fb, result, 64);
	for (i = 0; i < 64; i++)
		nv_debug(fb, "Train: %08x", result[i]);
	gt215_link_train_calc(result, train);

	nv_debug(fb, "Train: %08x %08x %08x", train->r_100720,
		 train->r_1111e0, train->r_111400);

	kfree(result);

	train->state = NVA3_TRAIN_DONE;

	return ret;

out:
	if (ret == -EBUSY)
		f = NULL;

	train->state = NVA3_TRAIN_UNSUPPORTED;

	gt215_clk_post(clk, f);
	kfree(result);
	return ret;
}

int
gt215_link_train_init(struct nvkm_fb *fb)
{
	static const u32 pattern[16] = {
		0xaaaaaaaa, 0xcccccccc, 0xdddddddd, 0xeeeeeeee,
		0x00000000, 0x11111111, 0x44444444, 0xdddddddd,
		0x33333333, 0x55555555, 0x77777777, 0x66666666,
		0x99999999, 0x88888888, 0xeeeeeeee, 0xbbbbbbbb,
	};
	struct nvkm_device *device = fb->subdev.device;
	struct nvkm_bios *bios = device->bios;
	struct gt215_ram *ram = (void *)fb->ram;
	struct gt215_ltrain *train = &ram->ltrain;
	struct nvkm_mem *mem;
	struct nvbios_M0205E M0205E;
	u8 ver, hdr, cnt, len;
	u32 r001700;
	int ret, i = 0;

	train->state = NVA3_TRAIN_UNSUPPORTED;

	/* We support type "5"
	 * XXX: training pattern table appears to be unused for this routine */
	if (!nvbios_M0205Ep(bios, i, &ver, &hdr, &cnt, &len, &M0205E))
		return -ENOENT;

	if (M0205E.type != 5)
		return 0;

	train->state = NVA3_TRAIN_ONCE;

	ret = fb->ram->get(fb, 0x8000, 0x10000, 0, 0x800, &ram->ltrain.mem);
	if (ret)
		return ret;

	mem = ram->ltrain.mem;

	nvkm_wr32(device, 0x100538, 0x10000000 | (mem->offset >> 16));
	nvkm_wr32(device, 0x1005a8, 0x0000ffff);
	nvkm_mask(device, 0x10f800, 0x00000001, 0x00000001);

	for (i = 0; i < 0x30; i++) {
		nvkm_wr32(device, 0x10f8c0, (i << 8) | i);
		nvkm_wr32(device, 0x10f900, pattern[i % 16]);
	}

	for (i = 0; i < 0x30; i++) {
		nvkm_wr32(device, 0x10f8e0, (i << 8) | i);
		nvkm_wr32(device, 0x10f920, pattern[i % 16]);
	}

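	/* The upload below goes through the 0x700000 aperture, with register
	 * 0x1700 selecting which VRAM page it maps (assumption, based on how
	 * the window is saved and restored around the writes). */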
	/* And upload the pattern */
	r001700 = nvkm_rd32(device, 0x1700);
	nvkm_wr32(device, 0x1700, mem->offset >> 16);
	for (i = 0; i < 16; i++)
		nvkm_wr32(device, 0x700000 + (i << 2), pattern[i]);
	for (i = 0; i < 16; i++)
		nvkm_wr32(device, 0x700100 + (i << 2), pattern[i]);
	nvkm_wr32(device, 0x1700, r001700);

	train->r_100720 = nvkm_rd32(device, 0x100720);
	train->r_1111e0 = nvkm_rd32(device, 0x1111e0);
	train->r_111400 = nvkm_rd32(device, 0x111400);
	return 0;
}

void
gt215_link_train_fini(struct nvkm_fb *fb)
{
	struct gt215_ram *ram = (void *)fb->ram;

	if (ram->ltrain.mem)
		fb->ram->put(fb, &ram->ltrain.mem);
}

/*
 * RAM reclocking
 */
#define T(t) cfg->timing_10_##t
static int
gt215_ram_timing_calc(struct nvkm_fb *fb, u32 *timing)
{
	struct nvkm_device *device = fb->subdev.device;
	struct gt215_ram *ram = (void *)fb->ram;
	struct nvbios_ramcfg *cfg = &ram->base.target.bios;
	int tUNK_base, tUNK_40_0, prevCL;
	u32 cur2, cur3, cur7, cur8;

	cur2 = nvkm_rd32(device, 0x100228);
	cur3 = nvkm_rd32(device, 0x10022c);
	cur7 = nvkm_rd32(device, 0x10023c);
	cur8 = nvkm_rd32(device, 0x100240);

	switch ((!T(CWL)) * ram->base.type) {
	case NV_MEM_TYPE_DDR2:
		T(CWL) = T(CL) - 1;
		break;
	case NV_MEM_TYPE_GDDR3:
		T(CWL) = ((cur2 & 0xff000000) >> 24) + 1;
		break;
	}

	prevCL = (cur3 & 0x000000ff) + 1;
	tUNK_base = ((cur7 & 0x00ff0000) >> 16) - prevCL;

	timing[0] = (T(RP) << 24 | T(RAS) << 16 | T(RFC) << 8 | T(RC));
	timing[1] = (T(WR) + 1 + T(CWL)) << 24 |
		    max_t(u8, T(18), 1) << 16 |
		    (T(WTR) + 1 + T(CWL)) << 8 |
		    (5 + T(CL) - T(CWL));
	timing[2] = (T(CWL) - 1) << 24 |
		    (T(RRD) << 16) |
		    (T(RCDWR) << 8) |
		    T(RCDRD);
	timing[3] = (cur3 & 0x00ff0000) |
		    (0x30 + T(CL)) << 24 |
		    (0xb + T(CL)) << 8 |
		    (T(CL) - 1);
	timing[4] = T(20) << 24 |
		    T(21) << 16 |
		    T(13) << 8 |
		    T(13);
	timing[5] = T(RFC) << 24 |
		    max_t(u8, T(RCDRD), T(RCDWR)) << 16 |
		    max_t(u8, (T(CWL) + 6), (T(CL) + 2)) << 8 |
		    T(RP);
	timing[6] = (0x5a + T(CL)) << 16 |
		    max_t(u8, 1, (6 - T(CL) + T(CWL))) << 8 |
		    (0x50 + T(CL) - T(CWL));
	timing[7] = (cur7 & 0xff000000) |
		    ((tUNK_base + T(CL)) << 16) |
		    0x202;
	timing[8] = cur8 & 0xffffff00;

	switch (ram->base.type) {
	case NV_MEM_TYPE_DDR2:
	case NV_MEM_TYPE_GDDR3:
		tUNK_40_0 = prevCL - (cur8 & 0xff);
		if (tUNK_40_0 > 0)
			timing[8] |= T(CL);
		break;
	default:
		break;
	}

	nv_debug(fb, "Entry: 220: %08x %08x %08x %08x\n",
		 timing[0], timing[1], timing[2], timing[3]);
	nv_debug(fb, "  230: %08x %08x %08x %08x\n",
		 timing[4], timing[5], timing[6], timing[7]);
	nv_debug(fb, "  240: %08x\n", timing[8]);
	return 0;
}
#undef T

static void
nvkm_sddr2_dll_reset(struct gt215_ramfuc *fuc)
{
	ram_mask(fuc, mr[0], 0x100, 0x100);
	ram_nsec(fuc, 1000);
	ram_mask(fuc, mr[0], 0x100, 0x000);
	ram_nsec(fuc, 1000);
}

static void
nvkm_sddr3_dll_disable(struct gt215_ramfuc *fuc, u32 *mr)
{
	u32 mr1_old = ram_rd32(fuc, mr[1]);

	if (!(mr1_old & 0x1)) {
		ram_wr32(fuc, 0x1002d4, 0x00000001);
		ram_wr32(fuc, mr[1], mr[1]);
		ram_nsec(fuc, 1000);
	}
}

static void
nvkm_gddr3_dll_disable(struct gt215_ramfuc *fuc, u32 *mr)
{
	u32 mr1_old = ram_rd32(fuc, mr[1]);

	if (!(mr1_old & 0x40)) {
		ram_wr32(fuc, mr[1], mr[1]);
		ram_nsec(fuc, 1000);
	}
}

static void
gt215_ram_lock_pll(struct gt215_ramfuc *fuc, struct gt215_clk_info *mclk)
{
	ram_wr32(fuc, 0x004004, mclk->pll);
	ram_mask(fuc, 0x004000, 0x00000001, 0x00000001);
	ram_mask(fuc, 0x004000, 0x00000010, 0x00000000);
	ram_wait(fuc, 0x004000, 0x00020000, 0x00020000, 64000);
	ram_mask(fuc, 0x004000, 0x00000010, 0x00000010);
}

static void
gt215_ram_fbvref(struct gt215_ramfuc *fuc, u32 val)
{
	struct nvkm_gpio *gpio = nvkm_gpio(fuc->base.fb);
	struct dcb_gpio_func func;
	u32 reg, sh, gpio_val;
	int ret;

	if (gpio->get(gpio, 0, 0x2e, DCB_GPIO_UNUSED) != val) {
		ret = gpio->find(gpio, 0, 0x2e, DCB_GPIO_UNUSED, &func);
		if (ret)
			return;

		nv50_gpio_location(func.line, &reg, &sh);
		gpio_val = ram_rd32(fuc, gpioFBVREF);
		if (gpio_val & (8 << sh))
			val = !val;

		ram_mask(fuc, gpioFBVREF, (0x3 << sh), ((val | 0x2) << sh));
		ram_nsec(fuc, 20000);
	}
}

static int
gt215_ram_calc(struct nvkm_fb *fb, u32 freq)
{
	struct nvkm_bios *bios = nvkm_bios(fb);
	struct gt215_ram *ram = (void *)fb->ram;
	struct gt215_ramfuc *fuc = &ram->fuc;
	struct gt215_ltrain *train = &ram->ltrain;
	struct gt215_clk_info mclk;
	struct nvkm_ram_data *next;
	u8 ver, hdr, cnt, len, strap;
	u32 data;
	u32 r004018, r100760, r100da0, r111100, ctrl;
	u32 unk714, unk718, unk71c;
	int ret, i;
	u32 timing[9];
	bool pll2pll;

	next = &ram->base.target;
	next->freq = freq;
	ram->base.next = next;

	if (ram->ltrain.state == NVA3_TRAIN_ONCE)
		gt215_link_train(fb);

	/* lookup memory config data relevant to the target frequency */
	data = nvbios_rammapEm(bios, freq / 1000, &ver, &hdr, &cnt, &len,
			       &next->bios);
	if (!data || ver != 0x10 || hdr < 0x05) {
		nv_error(fb, "invalid/missing rammap entry\n");
		return -EINVAL;
	}

	/* locate specific data set for the attached memory */
	strap = nvbios_ramcfg_index(nv_subdev(fb));
	if (strap >= cnt) {
		nv_error(fb, "invalid ramcfg strap\n");
		return -EINVAL;
	}

	data = nvbios_rammapSp(bios, data, ver, hdr, cnt, len, strap,
			       &ver, &hdr, &next->bios);
	if (!data || ver != 0x10 || hdr < 0x09) {
		nv_error(fb, "invalid/missing ramcfg entry\n");
		return -EINVAL;
	}

	/* lookup memory timings, if bios says they're present */
	if (next->bios.ramcfg_timing != 0xff) {
		data = nvbios_timingEp(bios, next->bios.ramcfg_timing,
				       &ver, &hdr, &cnt, &len,
				       &next->bios);
		if (!data || ver != 0x10 || hdr < 0x17) {
			nv_error(fb, "invalid/missing timing entry\n");
			return -EINVAL;
		}
	}

	ret = gt215_pll_info(nvkm_clk(fb), 0x12, 0x4000, freq, &mclk);
	if (ret < 0) {
		nv_error(fb, "failed mclk calculation\n");
		return ret;
	}

	gt215_ram_timing_calc(fb, timing);

	ret = ram_init(fuc, fb);
	if (ret)
		return ret;

	/* Determine ram-specific MR values */
	ram->base.mr[0] = ram_rd32(fuc, mr[0]);
	ram->base.mr[1] = ram_rd32(fuc, mr[1]);
	ram->base.mr[2] = ram_rd32(fuc, mr[2]);

	switch (ram->base.type) {
	case NV_MEM_TYPE_DDR2:
		ret = nvkm_sddr2_calc(&ram->base);
		break;
	case NV_MEM_TYPE_DDR3:
		ret = nvkm_sddr3_calc(&ram->base);
		break;
	case NV_MEM_TYPE_GDDR3:
		ret = nvkm_gddr3_calc(&ram->base);
		break;
	default:
		ret = -ENOSYS;
		break;
	}

	if (ret)
		return ret;

	/* XXX: 750MHz seems rather arbitrary */
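	/* Select values for 0x004018/0x100760/0x100da0 by frequency range;
	 * targets at or below 750MHz get the non-zero settings. */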
	if (freq <= 750000) {
		r004018 = 0x10000000;
		r100760 = 0x22222222;
		r100da0 = 0x00000010;
	} else {
		r004018 = 0x00000000;
		r100760 = 0x00000000;
		r100da0 = 0x00000000;
	}

	if (!next->bios.ramcfg_DLLoff)
		r004018 |= 0x00004000;

	/* pll2pll requires to switch to a safe clock first */
	ctrl = ram_rd32(fuc, 0x004000);
	pll2pll = (!(ctrl & 0x00000008)) && mclk.pll;

	/* Pre, NVIDIA does this outside the script */
	if (next->bios.ramcfg_10_02_10) {
		ram_mask(fuc, 0x111104, 0x00000600, 0x00000000);
	} else {
		ram_mask(fuc, 0x111100, 0x40000000, 0x40000000);
		ram_mask(fuc, 0x111104, 0x00000180, 0x00000000);
	}
	/* Always disable this bit during reclock */
	ram_mask(fuc, 0x100200, 0x00000800, 0x00000000);

	/* If switching from non-pll to pll, lock before disabling FB */
	if (mclk.pll && !pll2pll) {
		ram_mask(fuc, 0x004128, 0x003f3141, mclk.clk | 0x00000101);
		gt215_ram_lock_pll(fuc, &mclk);
	}

	/* Start with disabling some CRTCs and PFIFO? */
	ram_wait_vblank(fuc);
	ram_wr32(fuc, 0x611200, 0x3300);
	ram_mask(fuc, 0x002504, 0x1, 0x1);
	ram_nsec(fuc, 10000);
	ram_wait(fuc, 0x002504, 0x10, 0x10, 20000); /* XXX: or longer? */
	ram_block(fuc);
	ram_nsec(fuc, 2000);

	if (!next->bios.ramcfg_10_02_10) {
		if (ram->base.type == NV_MEM_TYPE_GDDR3)
			ram_mask(fuc, 0x111100, 0x04020000, 0x00020000);
		else
			ram_mask(fuc, 0x111100, 0x04020000, 0x04020000);
	}

	/* If we're disabling the DLL, do it now */
	switch (next->bios.ramcfg_DLLoff * ram->base.type) {
	case NV_MEM_TYPE_DDR3:
		nvkm_sddr3_dll_disable(fuc, ram->base.mr);
		break;
	case NV_MEM_TYPE_GDDR3:
		nvkm_gddr3_dll_disable(fuc, ram->base.mr);
		break;
	}

	if (fuc->r_gpioFBVREF.addr && next->bios.timing_10_ODT)
		gt215_ram_fbvref(fuc, 0);

	/* Brace RAM for impact */
	ram_wr32(fuc, 0x1002d4, 0x00000001);
	ram_wr32(fuc, 0x1002d0, 0x00000001);
	ram_wr32(fuc, 0x1002d0, 0x00000001);
	ram_wr32(fuc, 0x100210, 0x00000000);
	ram_wr32(fuc, 0x1002dc, 0x00000001);
	ram_nsec(fuc, 2000);

	if (nv_device(fb)->chipset == 0xa3 && freq <= 500000)
		ram_mask(fuc, 0x100700, 0x00000006, 0x00000006);

	/* Fiddle with clocks */
	/* There are 4 scenarios:
	 * pll->pll: first switch to a 324MHz clock, set up new PLL, switch
	 * clk->pll: Set up new PLL, switch
	 * pll->clk: Set up clock, switch
	 * clk->clk: Overwrite ctrl and other bits, switch */

	/* Switch to regular clock - 324MHz */
	if (pll2pll) {
		ram_mask(fuc, 0x004000, 0x00000004, 0x00000004);
		ram_mask(fuc, 0x004168, 0x003f3141, 0x00083101);
		ram_mask(fuc, 0x004000, 0x00000008, 0x00000008);
		ram_mask(fuc, 0x1110e0, 0x00088000, 0x00088000);
		ram_wr32(fuc, 0x004018, 0x00001000);
		gt215_ram_lock_pll(fuc, &mclk);
	}

	if (mclk.pll) {
		ram_mask(fuc, 0x004000, 0x00000105, 0x00000105);
		ram_wr32(fuc, 0x004018, 0x00001000 | r004018);
		ram_wr32(fuc, 0x100da0, r100da0);
	} else {
		ram_mask(fuc, 0x004168, 0x003f3141, mclk.clk | 0x00000101);
		ram_mask(fuc, 0x004000, 0x00000108, 0x00000008);
		ram_mask(fuc, 0x1110e0, 0x00088000, 0x00088000);
		ram_wr32(fuc, 0x004018, 0x00009000 | r004018);
		ram_wr32(fuc, 0x100da0, r100da0);
	}
	ram_nsec(fuc, 20000);

	if (next->bios.rammap_10_04_08) {
		ram_wr32(fuc, 0x1005a0, next->bios.ramcfg_10_06 << 16 |
					next->bios.ramcfg_10_05 << 8 |
					next->bios.ramcfg_10_05);
		ram_wr32(fuc, 0x1005a4, next->bios.ramcfg_10_08 << 8 |
					next->bios.ramcfg_10_07);
		ram_wr32(fuc, 0x10f804, next->bios.ramcfg_10_09_f0 << 20 |
					next->bios.ramcfg_10_03_0f << 16 |
					next->bios.ramcfg_10_09_0f |
					0x80000000);
		ram_mask(fuc, 0x10053c, 0x00001000, 0x00000000);
	} else {
		if (train->state == NVA3_TRAIN_DONE) {
			ram_wr32(fuc, 0x100080, 0x1020);
			ram_mask(fuc, 0x111400, 0xffffffff, train->r_111400);
			ram_mask(fuc, 0x1111e0, 0xffffffff, train->r_1111e0);
			ram_mask(fuc, 0x100720, 0xffffffff, train->r_100720);
		}
		ram_mask(fuc, 0x10053c, 0x00001000, 0x00001000);
		ram_mask(fuc, 0x10f804, 0x80000000, 0x00000000);
		ram_mask(fuc, 0x100760, 0x22222222, r100760);
		ram_mask(fuc, 0x1007a0, 0x22222222, r100760);
		ram_mask(fuc, 0x1007e0, 0x22222222, r100760);
	}

	if (nv_device(fb)->chipset == 0xa3 && freq > 500000) {
		ram_mask(fuc, 0x100700, 0x00000006, 0x00000000);
	}

	/* Final switch */
	if (mclk.pll) {
		ram_mask(fuc, 0x1110e0, 0x00088000, 0x00011000);
		ram_mask(fuc, 0x004000, 0x00000008, 0x00000000);
	}

	ram_wr32(fuc, 0x1002dc, 0x00000000);
	ram_wr32(fuc, 0x1002d4, 0x00000001);
	ram_wr32(fuc, 0x100210, 0x80000000);
	ram_nsec(fuc, 2000);

	/* Set RAM MR parameters and timings */
	for (i = 2; i >= 0; i--) {
		if (ram_rd32(fuc, mr[i]) != ram->base.mr[i]) {
			ram_wr32(fuc, mr[i], ram->base.mr[i]);
			ram_nsec(fuc, 1000);
		}
	}

	ram_wr32(fuc, 0x100220[3], timing[3]);
	ram_wr32(fuc, 0x100220[1], timing[1]);
	ram_wr32(fuc, 0x100220[6], timing[6]);
	ram_wr32(fuc, 0x100220[7], timing[7]);
	ram_wr32(fuc, 0x100220[2], timing[2]);
	ram_wr32(fuc, 0x100220[4], timing[4]);
	ram_wr32(fuc, 0x100220[5], timing[5]);
	ram_wr32(fuc, 0x100220[0], timing[0]);
	ram_wr32(fuc, 0x100220[8], timing[8]);

	/* Misc */
	ram_mask(fuc, 0x100200, 0x00001000, !next->bios.ramcfg_10_02_08 << 12);

	/* XXX: A lot of "chipset"/"ram type" specific stuff...? */
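	/* Read-modify-write of registers whose purpose is not understood
	 * (hence the unk714/unk718/unk71c names); the bits cleared here are
	 * refilled below from ramcfg/timing flags. */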
	unk714 = ram_rd32(fuc, 0x100714) & ~0xf0000130;
	unk718 = ram_rd32(fuc, 0x100718) & ~0x00000100;
	unk71c = ram_rd32(fuc, 0x10071c) & ~0x00000100;
	r111100 = ram_rd32(fuc, 0x111100) & ~0x3a800000;

	if (next->bios.ramcfg_10_02_04) {
		switch (ram->base.type) {
		case NV_MEM_TYPE_DDR3:
			if (nv_device(fb)->chipset != 0xa8)
				r111100 |= 0x00000004;
			/* no break */
		case NV_MEM_TYPE_DDR2:
			r111100 |= 0x08000000;
			break;
		default:
			break;
		}
	} else {
		switch (ram->base.type) {
		case NV_MEM_TYPE_DDR2:
			r111100 |= 0x1a800000;
			unk714 |= 0x00000010;
			break;
		case NV_MEM_TYPE_DDR3:
			if (nv_device(fb)->chipset == 0xa8) {
				r111100 |= 0x08000000;
			} else {
				r111100 &= ~0x00000004;
				r111100 |= 0x12800000;
			}
			unk714 |= 0x00000010;
			break;
		case NV_MEM_TYPE_GDDR3:
			r111100 |= 0x30000000;
			unk714 |= 0x00000020;
			break;
		default:
			break;
		}
	}

	unk714 |= (next->bios.ramcfg_10_04_01) << 8;

	if (next->bios.ramcfg_10_02_20)
		unk714 |= 0xf0000000;
	if (next->bios.ramcfg_10_02_02)
		unk718 |= 0x00000100;
	if (next->bios.ramcfg_10_02_01)
		unk71c |= 0x00000100;
	if (next->bios.timing_10_24 != 0xff) {
		unk718 &= ~0xf0000000;
		unk718 |= next->bios.timing_10_24 << 28;
	}
	if (next->bios.ramcfg_10_02_10)
		r111100 &= ~0x04020000;

	ram_mask(fuc, 0x100714, 0xffffffff, unk714);
	ram_mask(fuc, 0x10071c, 0xffffffff, unk71c);
	ram_mask(fuc, 0x100718, 0xffffffff, unk718);
	ram_mask(fuc, 0x111100, 0xffffffff, r111100);

	if (fuc->r_gpioFBVREF.addr && !next->bios.timing_10_ODT)
		gt215_ram_fbvref(fuc, 1);

	/* Reset DLL */
	if (!next->bios.ramcfg_DLLoff)
		nvkm_sddr2_dll_reset(fuc);

	if (ram->base.type == NV_MEM_TYPE_GDDR3) {
		ram_nsec(fuc, 31000);
	} else {
		ram_nsec(fuc, 14000);
	}

	if (ram->base.type == NV_MEM_TYPE_DDR3) {
		ram_wr32(fuc, 0x100264, 0x1);
		ram_nsec(fuc, 2000);
	}

	ram_nuke(fuc, 0x100700);
	ram_mask(fuc, 0x100700, 0x01000000, 0x01000000);
	ram_mask(fuc, 0x100700, 0x01000000, 0x00000000);

	/* Re-enable FB */
	ram_unblock(fuc);
	ram_wr32(fuc, 0x611200, 0x3330);

	/* Post fiddlings */
	if (next->bios.rammap_10_04_02)
		ram_mask(fuc, 0x100200, 0x00000800, 0x00000800);
	if (next->bios.ramcfg_10_02_10) {
		ram_mask(fuc, 0x111104, 0x00000180, 0x00000180);
		ram_mask(fuc, 0x111100, 0x40000000, 0x00000000);
	} else {
		ram_mask(fuc, 0x111104, 0x00000600, 0x00000600);
	}

	if (mclk.pll) {
		ram_mask(fuc, 0x004168, 0x00000001, 0x00000000);
		ram_mask(fuc, 0x004168, 0x00000100, 0x00000000);
	} else {
		ram_mask(fuc, 0x004000, 0x00000001, 0x00000000);
		ram_mask(fuc, 0x004128, 0x00000001, 0x00000000);
		ram_mask(fuc, 0x004128, 0x00000100, 0x00000000);
	}

	return 0;
}

static int
gt215_ram_prog(struct nvkm_fb *fb)
{
	struct nvkm_device *device = nv_device(fb);
	struct gt215_ram *ram = (void *)fb->ram;
	struct gt215_ramfuc *fuc = &ram->fuc;
	bool exec = nvkm_boolopt(device->cfgopt, "NvMemExec", true);

	if (exec) {
		nvkm_mask(device, 0x001534, 0x2, 0x2);

		ram_exec(fuc, true);

		/* Post-processing, avoids flicker */
		nvkm_mask(device, 0x002504, 0x1, 0x0);
		nvkm_mask(device, 0x001534, 0x2, 0x0);

		nvkm_mask(device, 0x616308, 0x10, 0x10);
		nvkm_mask(device, 0x616b08, 0x10, 0x10);
	} else {
		ram_exec(fuc, false);
	}
	return 0;
}

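/* Drop a previously built reclocking script without running it: ram_exec()
 * with exec == false finishes the memx stream unexecuted. */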
static void
gt215_ram_tidy(struct nvkm_fb *fb)
{
	struct gt215_ram *ram = (void *)fb->ram;
	struct gt215_ramfuc *fuc = &ram->fuc;
	ram_exec(fuc, false);
}

static int
gt215_ram_init(struct nvkm_object *object)
{
	struct nvkm_fb *fb = (void *)object->parent;
	struct gt215_ram *ram = (void *)object;
	int ret;

	ret = nvkm_ram_init(&ram->base);
	if (ret)
		return ret;

	gt215_link_train_init(fb);
	return 0;
}

static int
gt215_ram_fini(struct nvkm_object *object, bool suspend)
{
	struct nvkm_fb *fb = (void *)object->parent;

	if (!suspend)
		gt215_link_train_fini(fb);

	return 0;
}

static int
gt215_ram_ctor(struct nvkm_object *parent, struct nvkm_object *engine,
	       struct nvkm_oclass *oclass, void *data, u32 datasize,
	       struct nvkm_object **pobject)
{
	struct nvkm_fb *fb = nvkm_fb(parent);
	struct nvkm_gpio *gpio = nvkm_gpio(fb);
	struct dcb_gpio_func func;
	struct gt215_ram *ram;
	int ret, i;
	u32 reg, shift;

	ret = nv50_ram_create(parent, engine, oclass, &ram);
	*pobject = nv_object(ram);
	if (ret)
		return ret;

	switch (ram->base.type) {
	case NV_MEM_TYPE_DDR2:
	case NV_MEM_TYPE_DDR3:
	case NV_MEM_TYPE_GDDR3:
		ram->base.calc = gt215_ram_calc;
		ram->base.prog = gt215_ram_prog;
		ram->base.tidy = gt215_ram_tidy;
		break;
	default:
		nv_warn(ram, "reclocking of this ram type unsupported\n");
		return 0;
	}

	ram->fuc.r_0x001610 = ramfuc_reg(0x001610);
	ram->fuc.r_0x001700 = ramfuc_reg(0x001700);
	ram->fuc.r_0x002504 = ramfuc_reg(0x002504);
	ram->fuc.r_0x004000 = ramfuc_reg(0x004000);
	ram->fuc.r_0x004004 = ramfuc_reg(0x004004);
	ram->fuc.r_0x004018 = ramfuc_reg(0x004018);
	ram->fuc.r_0x004128 = ramfuc_reg(0x004128);
	ram->fuc.r_0x004168 = ramfuc_reg(0x004168);
	ram->fuc.r_0x100080 = ramfuc_reg(0x100080);
	ram->fuc.r_0x100200 = ramfuc_reg(0x100200);
	ram->fuc.r_0x100210 = ramfuc_reg(0x100210);
	for (i = 0; i < 9; i++)
		ram->fuc.r_0x100220[i] = ramfuc_reg(0x100220 + (i * 4));
	ram->fuc.r_0x100264 = ramfuc_reg(0x100264);
	ram->fuc.r_0x1002d0 = ramfuc_reg(0x1002d0);
	ram->fuc.r_0x1002d4 = ramfuc_reg(0x1002d4);
	ram->fuc.r_0x1002dc = ramfuc_reg(0x1002dc);
	ram->fuc.r_0x10053c = ramfuc_reg(0x10053c);
	ram->fuc.r_0x1005a0 = ramfuc_reg(0x1005a0);
	ram->fuc.r_0x1005a4 = ramfuc_reg(0x1005a4);
	ram->fuc.r_0x100700 = ramfuc_reg(0x100700);
	ram->fuc.r_0x100714 = ramfuc_reg(0x100714);
	ram->fuc.r_0x100718 = ramfuc_reg(0x100718);
	ram->fuc.r_0x10071c = ramfuc_reg(0x10071c);
	ram->fuc.r_0x100720 = ramfuc_reg(0x100720);
	ram->fuc.r_0x100760 = ramfuc_stride(0x100760, 4, ram->base.part_mask);
	ram->fuc.r_0x1007a0 = ramfuc_stride(0x1007a0, 4, ram->base.part_mask);
	ram->fuc.r_0x1007e0 = ramfuc_stride(0x1007e0, 4, ram->base.part_mask);
	ram->fuc.r_0x100da0 = ramfuc_stride(0x100da0, 4, ram->base.part_mask);
	ram->fuc.r_0x10f804 = ramfuc_reg(0x10f804);
	ram->fuc.r_0x1110e0 = ramfuc_stride(0x1110e0, 4, ram->base.part_mask);
	ram->fuc.r_0x111100 = ramfuc_reg(0x111100);
	ram->fuc.r_0x111104 = ramfuc_reg(0x111104);
	ram->fuc.r_0x1111e0 = ramfuc_reg(0x1111e0);
	ram->fuc.r_0x111400 = ramfuc_reg(0x111400);
	ram->fuc.r_0x611200 = ramfuc_reg(0x611200);

	if (ram->base.ranks > 1) {
		ram->fuc.r_mr[0] = ramfuc_reg2(0x1002c0, 0x1002c8);
		ram->fuc.r_mr[1] = ramfuc_reg2(0x1002c4, 0x1002cc);
		ram->fuc.r_mr[2] = ramfuc_reg2(0x1002e0, 0x1002e8);
		ram->fuc.r_mr[3] = ramfuc_reg2(0x1002e4, 0x1002ec);
	} else {
		ram->fuc.r_mr[0] = ramfuc_reg(0x1002c0);
		ram->fuc.r_mr[1] = ramfuc_reg(0x1002c4);
		ram->fuc.r_mr[2] = ramfuc_reg(0x1002e0);
		ram->fuc.r_mr[3] = ramfuc_reg(0x1002e4);
	}

	ret = gpio->find(gpio, 0, 0x2e, DCB_GPIO_UNUSED, &func);
	if (ret == 0) {
		nv50_gpio_location(func.line, &reg, &shift);
		ram->fuc.r_gpioFBVREF = ramfuc_reg(reg);
	}

	return 0;
}

struct nvkm_oclass
gt215_ram_oclass = {
	.ofuncs = &(struct nvkm_ofuncs) {
		.ctor = gt215_ram_ctor,
		.dtor = _nvkm_ram_dtor,
		.init = gt215_ram_init,
		.fini = gt215_ram_fini,
	},
};