/*
 * Copyright 2013 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 *          Roy Spliet <rspliet@eclipso.eu>
 */

#include "ramfuc.h"
#include "nv50.h"

#include <core/device.h>
#include <core/option.h>
#include <subdev/bios.h>
#include <subdev/bios/M0205.h>
#include <subdev/bios/rammap.h>
#include <subdev/bios/timing.h>
#include <subdev/clk/gt215.h>
#include <subdev/gpio.h>

/* XXX: Remove when memx gains GPIO support */
extern int nv50_gpio_location(int line, u32 *reg, u32 *shift);

struct gt215_ramfuc {
	struct ramfuc base;
	struct ramfuc_reg r_0x001610;
	struct ramfuc_reg r_0x001700;
	struct ramfuc_reg r_0x002504;
	struct ramfuc_reg r_0x004000;
	struct ramfuc_reg r_0x004004;
	struct ramfuc_reg r_0x004018;
	struct ramfuc_reg r_0x004128;
	struct ramfuc_reg r_0x004168;
	struct ramfuc_reg r_0x100080;
	struct ramfuc_reg r_0x100200;
	struct ramfuc_reg r_0x100210;
	struct ramfuc_reg r_0x100220[9];
	struct ramfuc_reg r_0x100264;
	struct ramfuc_reg r_0x1002d0;
	struct ramfuc_reg r_0x1002d4;
	struct ramfuc_reg r_0x1002dc;
	struct ramfuc_reg r_0x10053c;
	struct ramfuc_reg r_0x1005a0;
	struct ramfuc_reg r_0x1005a4;
	struct ramfuc_reg r_0x100700;
	struct ramfuc_reg r_0x100714;
	struct ramfuc_reg r_0x100718;
	struct ramfuc_reg r_0x10071c;
	struct ramfuc_reg r_0x100720;
	struct ramfuc_reg r_0x100760;
	struct ramfuc_reg r_0x1007a0;
	struct ramfuc_reg r_0x1007e0;
	struct ramfuc_reg r_0x100da0;
	struct ramfuc_reg r_0x10f804;
	struct ramfuc_reg r_0x1110e0;
	struct ramfuc_reg r_0x111100;
	struct ramfuc_reg r_0x111104;
	struct ramfuc_reg r_0x1111e0;
	struct ramfuc_reg r_0x111400;
	struct ramfuc_reg r_0x611200;
	struct ramfuc_reg r_mr[4];
	struct ramfuc_reg r_gpioFBVREF;
};

struct gt215_ltrain {
	enum {
		NVA3_TRAIN_UNKNOWN,
		NVA3_TRAIN_UNSUPPORTED,
		NVA3_TRAIN_ONCE,
		NVA3_TRAIN_EXEC,
		NVA3_TRAIN_DONE
	} state;
	u32 r_100720;
	u32 r_1111e0;
	u32 r_111400;
	struct nvkm_mem *mem;
};

struct gt215_ram {
	struct nvkm_ram base;
	struct gt215_ramfuc fuc;
	struct gt215_ltrain ltrain;
};

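/*
 * gt215_link_train_calc() post-processes the 64 result words read back by
 * ram_train_result().  For each of the eight bit positions it scans delay
 * entries 0x00..0x3f for the window in which (0x101 << i) is set, takes the
 * midpoint of that window, and votes the midpoint's high nibble into one of
 * four bins.  The most popular bin (offset by 3) selects the coarse value
 * written to 0x1111e0, while each midpoint, offset by 0x30 and clamped into
 * the winning bin, contributes its low nibble to the per-position fields of
 * 0x100720.  The hardware meaning of these registers is inferred from the
 * code below, not from documentation.
 */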
void
gt215_link_train_calc(u32 *vals, struct gt215_ltrain *train)
{
	int i, lo, hi;
	u8 median[8], bins[4] = {0, 0, 0, 0}, bin = 0, qty = 0;

	for (i = 0; i < 8; i++) {
		for (lo = 0; lo < 0x40; lo++) {
			if (!(vals[lo] & 0x80000000))
				continue;
			if (vals[lo] & (0x101 << i))
				break;
		}

		if (lo == 0x40)
			return;

		for (hi = lo + 1; hi < 0x40; hi++) {
			if (!(vals[hi] & 0x80000000))
				continue;
			if (!(vals[hi] & (0x101 << i))) {
				hi--;
				break;
			}
		}

		median[i] = ((hi - lo) >> 1) + lo;
		bins[(median[i] & 0xf0) >> 4]++;
		median[i] += 0x30;
	}

	/* Find the best value for 0x1111e0 */
	for (i = 0; i < 4; i++) {
		if (bins[i] > qty) {
			bin = i + 3;
			qty = bins[i];
		}
	}

	train->r_100720 = 0;
	for (i = 0; i < 8; i++) {
		median[i] = max(median[i], (u8) (bin << 4));
		median[i] = min(median[i], (u8) ((bin << 4) | 0xf));

		train->r_100720 |= ((median[i] & 0x0f) << (i << 2));
	}

	train->r_1111e0 = 0x02000000 | (bin * 0x101);
	train->r_111400 = 0x0;
}

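/*
 * Worked example for gt215_link_train_calc() above, with purely hypothetical
 * numbers: if for bit i the first passing delay is lo = 0x12 and the last is
 * hi = 0x2e, the midpoint is 0x20, whose high nibble votes for bin 2; after
 * the +0x30 offset it becomes 0x50.  If bin 2 collects the most votes, bin
 * ends up as 5, the midpoint is clamped into 0x50..0x5f (no change here),
 * bit i contributes (0x50 & 0x0f) = 0 to r_100720, and r_1111e0 becomes
 * 0x02000000 | (5 * 0x101) = 0x02000505.
 */
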
/*
 * Link training for (at least) DDR3
 */
int
gt215_link_train(struct nvkm_fb *pfb)
{
	struct nvkm_bios *bios = nvkm_bios(pfb);
	struct gt215_ram *ram = (void *)pfb->ram;
	struct nvkm_clk *clk = nvkm_clk(pfb);
	struct gt215_ltrain *train = &ram->ltrain;
	struct nvkm_device *device = nv_device(pfb);
	struct gt215_ramfuc *fuc = &ram->fuc;
	u32 *result, r1700;
	int ret, i;
	struct nvbios_M0205T M0205T = { 0 };
	u8 ver, hdr, cnt, len, snr, ssz;
	unsigned int clk_current;
	unsigned long flags;
	unsigned long *f = &flags;

	if (nvkm_boolopt(device->cfgopt, "NvMemExec", true) != true)
		return -ENOSYS;

	/* XXX: Multiple partitions? */
	result = kmalloc(64 * sizeof(u32), GFP_KERNEL);
	if (!result)
		return -ENOMEM;

	train->state = NVA3_TRAIN_EXEC;

	/* Clock speeds for training and back */
	nvbios_M0205Tp(bios, &ver, &hdr, &cnt, &len, &snr, &ssz, &M0205T);
	if (M0205T.freq == 0) {
		kfree(result);
		return -ENOENT;
	}

	clk_current = clk->read(clk, nv_clk_src_mem);

	ret = gt215_clk_pre(clk, f);
	if (ret)
		goto out;

	/* First: clock up/down */
	ret = ram->base.calc(pfb, (u32) M0205T.freq * 1000);
	if (ret)
		goto out;

	/* Do this *after* calc, eliminates write in script */
	nv_wr32(pfb, 0x111400, 0x00000000);
	/* XXX: Magic writes that improve train reliability? */
	nv_mask(pfb, 0x100674, 0x0000ffff, 0x00000000);
	nv_mask(pfb, 0x1005e4, 0x0000ffff, 0x00000000);
	nv_mask(pfb, 0x100b0c, 0x000000ff, 0x00000000);
	nv_wr32(pfb, 0x100c04, 0x00000400);

	/* Now the training script */
	r1700 = ram_rd32(fuc, 0x001700);

	ram_mask(fuc, 0x100200, 0x00000800, 0x00000000);
	ram_wr32(fuc, 0x611200, 0x3300);
	ram_wait_vblank(fuc);
	ram_wait(fuc, 0x611200, 0x00000003, 0x00000000, 500000);
	ram_mask(fuc, 0x001610, 0x00000083, 0x00000003);
	ram_mask(fuc, 0x100080, 0x00000020, 0x00000000);
	ram_mask(fuc, 0x10f804, 0x80000000, 0x00000000);
	ram_wr32(fuc, 0x001700, 0x00000000);

	ram_train(fuc);

	/* Reset */
	ram_mask(fuc, 0x10f804, 0x80000000, 0x80000000);
	ram_wr32(fuc, 0x10053c, 0x0);
	ram_wr32(fuc, 0x100720, train->r_100720);
	ram_wr32(fuc, 0x1111e0, train->r_1111e0);
	ram_wr32(fuc, 0x111400, train->r_111400);
	ram_nuke(fuc, 0x100080);
	ram_mask(fuc, 0x100080, 0x00000020, 0x00000020);
	ram_nsec(fuc, 1000);

	ram_wr32(fuc, 0x001700, r1700);
	ram_mask(fuc, 0x001610, 0x00000083, 0x00000080);
	ram_wr32(fuc, 0x611200, 0x3330);
	ram_mask(fuc, 0x100200, 0x00000800, 0x00000800);

	ram_exec(fuc, true);

	ram->base.calc(pfb, clk_current);
	ram_exec(fuc, true);

	/* Post-processing, avoids flicker */
	nv_mask(pfb, 0x616308, 0x10, 0x10);
	nv_mask(pfb, 0x616b08, 0x10, 0x10);

	gt215_clk_post(clk, f);

	ram_train_result(pfb, result, 64);
	for (i = 0; i < 64; i++)
		nv_debug(pfb, "Train: %08x\n", result[i]);
	gt215_link_train_calc(result, train);

	nv_debug(pfb, "Train: %08x %08x %08x\n", train->r_100720,
		 train->r_1111e0, train->r_111400);

	kfree(result);

	train->state = NVA3_TRAIN_DONE;

	return ret;

out:
	kfree(result);

	if (ret == -EBUSY)
		f = NULL;

	train->state = NVA3_TRAIN_UNSUPPORTED;

	gt215_clk_post(clk, f);
	return ret;
}

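/*
 * gt215_link_train_init() prepares for a later training pass: it checks the
 * M0205E table for memory type 5, reserves a 0x8000-byte block of VRAM for
 * the training buffer, programs a data pattern into the 0x10f8c0/0x10f8e0
 * pattern registers, mirrors the same pattern into the buffer through the
 * 0x1700 host window, and saves the current 0x100720 / 0x1111e0 / 0x111400
 * values as a fallback.  The register-level meaning here is inferred from
 * the code rather than from documentation.
 */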
int
gt215_link_train_init(struct nvkm_fb *pfb)
{
	static const u32 pattern[16] = {
		0xaaaaaaaa, 0xcccccccc, 0xdddddddd, 0xeeeeeeee,
		0x00000000, 0x11111111, 0x44444444, 0xdddddddd,
		0x33333333, 0x55555555, 0x77777777, 0x66666666,
		0x99999999, 0x88888888, 0xeeeeeeee, 0xbbbbbbbb,
	};
	struct nvkm_bios *bios = nvkm_bios(pfb);
	struct gt215_ram *ram = (void *)pfb->ram;
	struct gt215_ltrain *train = &ram->ltrain;
	struct nvkm_mem *mem;
	struct nvbios_M0205E M0205E;
	u8 ver, hdr, cnt, len;
	u32 r001700;
	int ret, i = 0;

	train->state = NVA3_TRAIN_UNSUPPORTED;

	/* We support type "5"
	 * XXX: training pattern table appears to be unused for this routine */
	if (!nvbios_M0205Ep(bios, i, &ver, &hdr, &cnt, &len, &M0205E))
		return -ENOENT;

	if (M0205E.type != 5)
		return 0;

	train->state = NVA3_TRAIN_ONCE;

	ret = pfb->ram->get(pfb, 0x8000, 0x10000, 0, 0x800, &ram->ltrain.mem);
	if (ret)
		return ret;

	mem = ram->ltrain.mem;

	nv_wr32(pfb, 0x100538, 0x10000000 | (mem->offset >> 16));
	nv_wr32(pfb, 0x1005a8, 0x0000ffff);
	nv_mask(pfb, 0x10f800, 0x00000001, 0x00000001);

	for (i = 0; i < 0x30; i++) {
		nv_wr32(pfb, 0x10f8c0, (i << 8) | i);
		nv_wr32(pfb, 0x10f900, pattern[i % 16]);
	}

	for (i = 0; i < 0x30; i++) {
		nv_wr32(pfb, 0x10f8e0, (i << 8) | i);
		nv_wr32(pfb, 0x10f920, pattern[i % 16]);
	}

	/* And upload the pattern */
	r001700 = nv_rd32(pfb, 0x1700);
	nv_wr32(pfb, 0x1700, mem->offset >> 16);
	for (i = 0; i < 16; i++)
		nv_wr32(pfb, 0x700000 + (i << 2), pattern[i]);
	for (i = 0; i < 16; i++)
		nv_wr32(pfb, 0x700100 + (i << 2), pattern[i]);
	nv_wr32(pfb, 0x1700, r001700);

	train->r_100720 = nv_rd32(pfb, 0x100720);
	train->r_1111e0 = nv_rd32(pfb, 0x1111e0);
	train->r_111400 = nv_rd32(pfb, 0x111400);
	return 0;
}

void
gt215_link_train_fini(struct nvkm_fb *pfb)
{
	struct gt215_ram *ram = (void *)pfb->ram;

	if (ram->ltrain.mem)
		pfb->ram->put(pfb, &ram->ltrain.mem);
}

/*
 * RAM reclocking
 */
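/*
 * T(x) below expands to the timing-table field cfg->timing_10_x of the
 * target configuration.  gt215_ram_timing_calc() packs those fields into the
 * nine words later written to 0x100220..0x100240 by the reclock script; the
 * exact field placement, and the tUNK_* values and constant offsets (0x30,
 * 0xb, 0x5a, ...), are reverse-engineered guesses rather than a documented
 * layout.
 */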
#define T(t) cfg->timing_10_##t
static int
gt215_ram_timing_calc(struct nvkm_fb *pfb, u32 *timing)
{
	struct gt215_ram *ram = (void *)pfb->ram;
	struct nvbios_ramcfg *cfg = &ram->base.target.bios;
	int tUNK_base, tUNK_40_0, prevCL;
	u32 cur2, cur3, cur7, cur8;

	cur2 = nv_rd32(pfb, 0x100228);
	cur3 = nv_rd32(pfb, 0x10022c);
	cur7 = nv_rd32(pfb, 0x10023c);
	cur8 = nv_rd32(pfb, 0x100240);

	switch ((!T(CWL)) * ram->base.type) {
	case NV_MEM_TYPE_DDR2:
		T(CWL) = T(CL) - 1;
		break;
	case NV_MEM_TYPE_GDDR3:
		T(CWL) = ((cur2 & 0xff000000) >> 24) + 1;
		break;
	}

	prevCL = (cur3 & 0x000000ff) + 1;
	tUNK_base = ((cur7 & 0x00ff0000) >> 16) - prevCL;

	timing[0] = (T(RP) << 24 | T(RAS) << 16 | T(RFC) << 8 | T(RC));
	timing[1] = (T(WR) + 1 + T(CWL)) << 24 |
		    max_t(u8, T(18), 1) << 16 |
		    (T(WTR) + 1 + T(CWL)) << 8 |
		    (5 + T(CL) - T(CWL));
	timing[2] = (T(CWL) - 1) << 24 |
		    (T(RRD) << 16) |
		    (T(RCDWR) << 8) |
		    T(RCDRD);
	timing[3] = (cur3 & 0x00ff0000) |
		    (0x30 + T(CL)) << 24 |
		    (0xb + T(CL)) << 8 |
		    (T(CL) - 1);
	timing[4] = T(20) << 24 |
		    T(21) << 16 |
		    T(13) << 8 |
		    T(13);
	timing[5] = T(RFC) << 24 |
		    max_t(u8, T(RCDRD), T(RCDWR)) << 16 |
		    max_t(u8, (T(CWL) + 6), (T(CL) + 2)) << 8 |
		    T(RP);
	timing[6] = (0x5a + T(CL)) << 16 |
		    max_t(u8, 1, (6 - T(CL) + T(CWL))) << 8 |
		    (0x50 + T(CL) - T(CWL));
	timing[7] = (cur7 & 0xff000000) |
		    ((tUNK_base + T(CL)) << 16) |
		    0x202;
	timing[8] = cur8 & 0xffffff00;

	switch (ram->base.type) {
	case NV_MEM_TYPE_DDR2:
	case NV_MEM_TYPE_GDDR3:
		tUNK_40_0 = prevCL - (cur8 & 0xff);
		if (tUNK_40_0 > 0)
			timing[8] |= T(CL);
		break;
	default:
		break;
	}

	nv_debug(pfb, "Entry: 220: %08x %08x %08x %08x\n",
		 timing[0], timing[1], timing[2], timing[3]);
	nv_debug(pfb, "  230: %08x %08x %08x %08x\n",
		 timing[4], timing[5], timing[6], timing[7]);
	nv_debug(pfb, "  240: %08x\n", timing[8]);
	return 0;
}
#undef T

static void
nvkm_sddr2_dll_reset(struct gt215_ramfuc *fuc)
{
	ram_mask(fuc, mr[0], 0x100, 0x100);
	ram_nsec(fuc, 1000);
	ram_mask(fuc, mr[0], 0x100, 0x000);
	ram_nsec(fuc, 1000);
}

static void
nvkm_sddr3_dll_disable(struct gt215_ramfuc *fuc, u32 *mr)
{
	u32 mr1_old = ram_rd32(fuc, mr[1]);

	if (!(mr1_old & 0x1)) {
		ram_wr32(fuc, 0x1002d4, 0x00000001);
		ram_wr32(fuc, mr[1], mr[1]);
		ram_nsec(fuc, 1000);
	}
}

static void
nvkm_gddr3_dll_disable(struct gt215_ramfuc *fuc, u32 *mr)
{
	u32 mr1_old = ram_rd32(fuc, mr[1]);

	if (!(mr1_old & 0x40)) {
		ram_wr32(fuc, mr[1], mr[1]);
		ram_nsec(fuc, 1000);
	}
}

static void
gt215_ram_lock_pll(struct gt215_ramfuc *fuc, struct gt215_clk_info *mclk)
{
	ram_wr32(fuc, 0x004004, mclk->pll);
	ram_mask(fuc, 0x004000, 0x00000001, 0x00000001);
	ram_mask(fuc, 0x004000, 0x00000010, 0x00000000);
	ram_wait(fuc, 0x004000, 0x00020000, 0x00020000, 64000);
	ram_mask(fuc, 0x004000, 0x00000010, 0x00000010);
}

static void
gt215_ram_fbvref(struct gt215_ramfuc *fuc, u32 val)
{
	struct nvkm_gpio *gpio = nvkm_gpio(fuc->base.pfb);
	struct dcb_gpio_func func;
	u32 reg, sh, gpio_val;
	int ret;

	if (gpio->get(gpio, 0, 0x2e, DCB_GPIO_UNUSED) != val) {
		ret = gpio->find(gpio, 0, 0x2e, DCB_GPIO_UNUSED, &func);
		if (ret)
			return;

		nv50_gpio_location(func.line, &reg, &sh);
		gpio_val = ram_rd32(fuc, gpioFBVREF);
		if (gpio_val & (8 << sh))
			val = !val;

		ram_mask(fuc, gpioFBVREF, (0x3 << sh), ((val | 0x2) << sh));
		ram_nsec(fuc, 20000);
	}
}

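/*
 * gt215_ram_calc() builds the reclock sequence for the target frequency: it
 * looks up the rammap/ramcfg/timing entries in the VBIOS, computes new PLL
 * coefficients and mode-register values, and then records the register
 * sequence (display blank, PFIFO hold, DRAM quiesce, clock switch, MR and
 * timing updates, DLL reset, unblock) through the ram_wr32()/ram_mask()
 * helpers.  Those helpers appear to queue the writes into the ramfuc script
 * rather than issuing them immediately; gt215_ram_prog() later decides
 * whether the recorded script actually runs.
 */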
static int
gt215_ram_calc(struct nvkm_fb *pfb, u32 freq)
{
	struct nvkm_bios *bios = nvkm_bios(pfb);
	struct gt215_ram *ram = (void *)pfb->ram;
	struct gt215_ramfuc *fuc = &ram->fuc;
	struct gt215_ltrain *train = &ram->ltrain;
	struct gt215_clk_info mclk;
	struct nvkm_ram_data *next;
	u8 ver, hdr, cnt, len, strap;
	u32 data;
	u32 r004018, r100760, r100da0, r111100, ctrl;
	u32 unk714, unk718, unk71c;
	int ret, i;
	u32 timing[9];
	bool pll2pll;

	next = &ram->base.target;
	next->freq = freq;
	ram->base.next = next;

	if (ram->ltrain.state == NVA3_TRAIN_ONCE)
		gt215_link_train(pfb);

	/* lookup memory config data relevant to the target frequency */
	i = 0;
	data = nvbios_rammapEm(bios, freq / 1000, &ver, &hdr, &cnt, &len,
			       &next->bios);
	if (!data || ver != 0x10 || hdr < 0x05) {
		nv_error(pfb, "invalid/missing rammap entry\n");
		return -EINVAL;
	}

	/* locate specific data set for the attached memory */
	strap = nvbios_ramcfg_index(nv_subdev(pfb));
	if (strap >= cnt) {
		nv_error(pfb, "invalid ramcfg strap\n");
		return -EINVAL;
	}

	data = nvbios_rammapSp(bios, data, ver, hdr, cnt, len, strap,
			       &ver, &hdr, &next->bios);
	if (!data || ver != 0x10 || hdr < 0x09) {
		nv_error(pfb, "invalid/missing ramcfg entry\n");
		return -EINVAL;
	}

	/* lookup memory timings, if bios says they're present */
	if (next->bios.ramcfg_timing != 0xff) {
		data = nvbios_timingEp(bios, next->bios.ramcfg_timing,
				       &ver, &hdr, &cnt, &len,
				       &next->bios);
		if (!data || ver != 0x10 || hdr < 0x17) {
			nv_error(pfb, "invalid/missing timing entry\n");
			return -EINVAL;
		}
	}

	ret = gt215_pll_info(nvkm_clk(pfb), 0x12, 0x4000, freq, &mclk);
	if (ret < 0) {
		nv_error(pfb, "failed mclk calculation\n");
		return ret;
	}

	gt215_ram_timing_calc(pfb, timing);

	ret = ram_init(fuc, pfb);
	if (ret)
		return ret;

	/* Determine ram-specific MR values */
	ram->base.mr[0] = ram_rd32(fuc, mr[0]);
	ram->base.mr[1] = ram_rd32(fuc, mr[1]);
	ram->base.mr[2] = ram_rd32(fuc, mr[2]);

	switch (ram->base.type) {
	case NV_MEM_TYPE_DDR2:
		ret = nvkm_sddr2_calc(&ram->base);
		break;
	case NV_MEM_TYPE_DDR3:
		ret = nvkm_sddr3_calc(&ram->base);
		break;
	case NV_MEM_TYPE_GDDR3:
		ret = nvkm_gddr3_calc(&ram->base);
		break;
	default:
		ret = -ENOSYS;
		break;
	}

	if (ret)
		return ret;

	/* XXX: where the fuck does 750MHz come from? */
	if (freq <= 750000) {
		r004018 = 0x10000000;
		r100760 = 0x22222222;
		r100da0 = 0x00000010;
	} else {
		r004018 = 0x00000000;
		r100760 = 0x00000000;
		r100da0 = 0x00000000;
	}

	if (!next->bios.ramcfg_10_DLLoff)
		r004018 |= 0x00004000;

	/* pll2pll requires switching to a safe clock first */
	ctrl = ram_rd32(fuc, 0x004000);
	pll2pll = (!(ctrl & 0x00000008)) && mclk.pll;

	/* Pre, NVIDIA does this outside the script */
	if (next->bios.ramcfg_10_02_10) {
		ram_mask(fuc, 0x111104, 0x00000600, 0x00000000);
	} else {
		ram_mask(fuc, 0x111100, 0x40000000, 0x40000000);
		ram_mask(fuc, 0x111104, 0x00000180, 0x00000000);
	}
	/* Always disable this bit during reclock */
	ram_mask(fuc, 0x100200, 0x00000800, 0x00000000);

	/* If switching from non-pll to pll, lock before disabling FB */
	if (mclk.pll && !pll2pll) {
		ram_mask(fuc, 0x004128, 0x003f3141, mclk.clk | 0x00000101);
		gt215_ram_lock_pll(fuc, &mclk);
	}

	/* Start with disabling some CRTCs and PFIFO? */
	ram_wait_vblank(fuc);
	ram_wr32(fuc, 0x611200, 0x3300);
	ram_mask(fuc, 0x002504, 0x1, 0x1);
	ram_nsec(fuc, 10000);
	ram_wait(fuc, 0x002504, 0x10, 0x10, 20000); /* XXX: or longer? */
	ram_block(fuc);
	ram_nsec(fuc, 2000);

	if (!next->bios.ramcfg_10_02_10) {
		if (ram->base.type == NV_MEM_TYPE_GDDR3)
			ram_mask(fuc, 0x111100, 0x04020000, 0x00020000);
		else
			ram_mask(fuc, 0x111100, 0x04020000, 0x04020000);
	}

	/* If we're disabling the DLL, do it now */
	switch (next->bios.ramcfg_10_DLLoff * ram->base.type) {
	case NV_MEM_TYPE_DDR3:
		nvkm_sddr3_dll_disable(fuc, ram->base.mr);
		break;
	case NV_MEM_TYPE_GDDR3:
		nvkm_gddr3_dll_disable(fuc, ram->base.mr);
		break;
	}

	if (fuc->r_gpioFBVREF.addr && next->bios.timing_10_ODT)
		gt215_ram_fbvref(fuc, 0);

	/* Brace RAM for impact */
	ram_wr32(fuc, 0x1002d4, 0x00000001);
	ram_wr32(fuc, 0x1002d0, 0x00000001);
	ram_wr32(fuc, 0x1002d0, 0x00000001);
	ram_wr32(fuc, 0x100210, 0x00000000);
	ram_wr32(fuc, 0x1002dc, 0x00000001);
	ram_nsec(fuc, 2000);

	if (nv_device(pfb)->chipset == 0xa3 && freq <= 500000)
		ram_mask(fuc, 0x100700, 0x00000006, 0x00000006);

	/* Fiddle with clocks */
	/* There are 4 scenarios
	 * pll->pll: first switch to a 324MHz clock, set up new PLL, switch
	 * clk->pll: Set up new PLL, switch
	 * pll->clk: Set up clock, switch
	 * clk->clk: Overwrite ctrl and other bits, switch */

	/* Switch to regular clock - 324MHz */
	if (pll2pll) {
		ram_mask(fuc, 0x004000, 0x00000004, 0x00000004);
		ram_mask(fuc, 0x004168, 0x003f3141, 0x00083101);
		ram_mask(fuc, 0x004000, 0x00000008, 0x00000008);
		ram_mask(fuc, 0x1110e0, 0x00088000, 0x00088000);
		ram_wr32(fuc, 0x004018, 0x00001000);
		gt215_ram_lock_pll(fuc, &mclk);
	}

	if (mclk.pll) {
		ram_mask(fuc, 0x004000, 0x00000105, 0x00000105);
		ram_wr32(fuc, 0x004018, 0x00001000 | r004018);
		ram_wr32(fuc, 0x100da0, r100da0);
	} else {
		ram_mask(fuc, 0x004168, 0x003f3141, mclk.clk | 0x00000101);
		ram_mask(fuc, 0x004000, 0x00000108, 0x00000008);
		ram_mask(fuc, 0x1110e0, 0x00088000, 0x00088000);
		ram_wr32(fuc, 0x004018, 0x00009000 | r004018);
		ram_wr32(fuc, 0x100da0, r100da0);
	}
	ram_nsec(fuc, 20000);

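	/*
	 * Two variants follow: when rammap_10_04_08 is set, 0x1005a0/0x1005a4/
	 * 0x10f804 are programmed straight from ramcfg fields; otherwise the
	 * values measured by the link-training pass (if it completed) are
	 * restored and the per-partition 0x100760/0x1007a0/0x1007e0 masks are
	 * applied.  Which path a board takes is purely a VBIOS decision.
	 */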
	if (next->bios.rammap_10_04_08) {
		ram_wr32(fuc, 0x1005a0, next->bios.ramcfg_10_06 << 16 |
					next->bios.ramcfg_10_05 << 8 |
					next->bios.ramcfg_10_05);
		ram_wr32(fuc, 0x1005a4, next->bios.ramcfg_10_08 << 8 |
					next->bios.ramcfg_10_07);
		ram_wr32(fuc, 0x10f804, next->bios.ramcfg_10_09_f0 << 20 |
					next->bios.ramcfg_10_03_0f << 16 |
					next->bios.ramcfg_10_09_0f |
					0x80000000);
		ram_mask(fuc, 0x10053c, 0x00001000, 0x00000000);
	} else {
		if (train->state == NVA3_TRAIN_DONE) {
			ram_wr32(fuc, 0x100080, 0x1020);
			ram_mask(fuc, 0x111400, 0xffffffff, train->r_111400);
			ram_mask(fuc, 0x1111e0, 0xffffffff, train->r_1111e0);
			ram_mask(fuc, 0x100720, 0xffffffff, train->r_100720);
		}
		ram_mask(fuc, 0x10053c, 0x00001000, 0x00001000);
		ram_mask(fuc, 0x10f804, 0x80000000, 0x00000000);
		ram_mask(fuc, 0x100760, 0x22222222, r100760);
		ram_mask(fuc, 0x1007a0, 0x22222222, r100760);
		ram_mask(fuc, 0x1007e0, 0x22222222, r100760);
	}

	if (nv_device(pfb)->chipset == 0xa3 && freq > 500000) {
		ram_mask(fuc, 0x100700, 0x00000006, 0x00000000);
	}

	/* Final switch */
	if (mclk.pll) {
		ram_mask(fuc, 0x1110e0, 0x00088000, 0x00011000);
		ram_mask(fuc, 0x004000, 0x00000008, 0x00000000);
	}

	ram_wr32(fuc, 0x1002dc, 0x00000000);
	ram_wr32(fuc, 0x1002d4, 0x00000001);
	ram_wr32(fuc, 0x100210, 0x80000000);
	ram_nsec(fuc, 2000);

	/* Set RAM MR parameters and timings */
	for (i = 2; i >= 0; i--) {
		if (ram_rd32(fuc, mr[i]) != ram->base.mr[i]) {
			ram_wr32(fuc, mr[i], ram->base.mr[i]);
			ram_nsec(fuc, 1000);
		}
	}

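	/*
	 * The timing registers are written in a deliberately scattered order
	 * (3, 1, 6, 7, 2, 4, 5, 0, 8), presumably mirroring the order seen in
	 * traces of the binary driver; keep it unless retesting on hardware.
	 */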
	ram_wr32(fuc, 0x100220[3], timing[3]);
	ram_wr32(fuc, 0x100220[1], timing[1]);
	ram_wr32(fuc, 0x100220[6], timing[6]);
	ram_wr32(fuc, 0x100220[7], timing[7]);
	ram_wr32(fuc, 0x100220[2], timing[2]);
	ram_wr32(fuc, 0x100220[4], timing[4]);
	ram_wr32(fuc, 0x100220[5], timing[5]);
	ram_wr32(fuc, 0x100220[0], timing[0]);
	ram_wr32(fuc, 0x100220[8], timing[8]);

	/* Misc */
	ram_mask(fuc, 0x100200, 0x00001000, !next->bios.ramcfg_10_02_08 << 12);

	/* XXX: A lot of "chipset"/"ram type" specific stuff...? */
	unk714 = ram_rd32(fuc, 0x100714) & ~0xf0000130;
	unk718 = ram_rd32(fuc, 0x100718) & ~0x00000100;
	unk71c = ram_rd32(fuc, 0x10071c) & ~0x00000100;
	r111100 = ram_rd32(fuc, 0x111100) & ~0x3a800000;

	if (next->bios.ramcfg_10_02_04) {
		switch (ram->base.type) {
		case NV_MEM_TYPE_DDR3:
			if (nv_device(pfb)->chipset != 0xa8)
				r111100 |= 0x00000004;
			/* no break */
		case NV_MEM_TYPE_DDR2:
			r111100 |= 0x08000000;
			break;
		default:
			break;
		}
	} else {
		switch (ram->base.type) {
		case NV_MEM_TYPE_DDR2:
			r111100 |= 0x1a800000;
			unk714 |= 0x00000010;
			break;
		case NV_MEM_TYPE_DDR3:
			if (nv_device(pfb)->chipset == 0xa8) {
				r111100 |= 0x08000000;
			} else {
				r111100 &= ~0x00000004;
				r111100 |= 0x12800000;
			}
			unk714 |= 0x00000010;
			break;
		case NV_MEM_TYPE_GDDR3:
			r111100 |= 0x30000000;
			unk714 |= 0x00000020;
			break;
		default:
			break;
		}
	}

	unk714 |= (next->bios.ramcfg_10_04_01) << 8;

	if (next->bios.ramcfg_10_02_20)
		unk714 |= 0xf0000000;
	if (next->bios.ramcfg_10_02_02)
		unk718 |= 0x00000100;
	if (next->bios.ramcfg_10_02_01)
		unk71c |= 0x00000100;
	if (next->bios.timing_10_24 != 0xff) {
		unk718 &= ~0xf0000000;
		unk718 |= next->bios.timing_10_24 << 28;
	}
	if (next->bios.ramcfg_10_02_10)
		r111100 &= ~0x04020000;

	ram_mask(fuc, 0x100714, 0xffffffff, unk714);
	ram_mask(fuc, 0x10071c, 0xffffffff, unk71c);
	ram_mask(fuc, 0x100718, 0xffffffff, unk718);
	ram_mask(fuc, 0x111100, 0xffffffff, r111100);

	if (fuc->r_gpioFBVREF.addr && !next->bios.timing_10_ODT)
		gt215_ram_fbvref(fuc, 1);

	/* Reset DLL */
	if (!next->bios.ramcfg_10_DLLoff)
		nvkm_sddr2_dll_reset(fuc);

	if (ram->base.type == NV_MEM_TYPE_GDDR3) {
		ram_nsec(fuc, 31000);
	} else {
		ram_nsec(fuc, 14000);
	}

	if (ram->base.type == NV_MEM_TYPE_DDR3) {
		ram_wr32(fuc, 0x100264, 0x1);
		ram_nsec(fuc, 2000);
	}

	ram_nuke(fuc, 0x100700);
	ram_mask(fuc, 0x100700, 0x01000000, 0x01000000);
	ram_mask(fuc, 0x100700, 0x01000000, 0x00000000);

	/* Re-enable FB */
	ram_unblock(fuc);
	ram_wr32(fuc, 0x611200, 0x3330);

	/* Post fiddlings */
	if (next->bios.rammap_10_04_02)
		ram_mask(fuc, 0x100200, 0x00000800, 0x00000800);
	if (next->bios.ramcfg_10_02_10) {
		ram_mask(fuc, 0x111104, 0x00000180, 0x00000180);
		ram_mask(fuc, 0x111100, 0x40000000, 0x00000000);
	} else {
		ram_mask(fuc, 0x111104, 0x00000600, 0x00000600);
	}

	if (mclk.pll) {
		ram_mask(fuc, 0x004168, 0x00000001, 0x00000000);
		ram_mask(fuc, 0x004168, 0x00000100, 0x00000000);
	} else {
		ram_mask(fuc, 0x004000, 0x00000001, 0x00000000);
		ram_mask(fuc, 0x004128, 0x00000001, 0x00000000);
		ram_mask(fuc, 0x004128, 0x00000100, 0x00000000);
	}

	return 0;
}

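/*
 * gt215_ram_prog() either runs the script recorded by gt215_ram_calc() or
 * throws it away: with the NvMemExec config option disabled, ram_exec(fuc,
 * false) appears to simply discard the queued writes, allowing a dry run of
 * the reclock path without touching the memory controller.
 */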
static int
gt215_ram_prog(struct nvkm_fb *pfb)
{
	struct nvkm_device *device = nv_device(pfb);
	struct gt215_ram *ram = (void *)pfb->ram;
	struct gt215_ramfuc *fuc = &ram->fuc;
	bool exec = nvkm_boolopt(device->cfgopt, "NvMemExec", true);

	if (exec) {
		nv_mask(pfb, 0x001534, 0x2, 0x2);

		ram_exec(fuc, true);

		/* Post-processing, avoids flicker */
		nv_mask(pfb, 0x002504, 0x1, 0x0);
		nv_mask(pfb, 0x001534, 0x2, 0x0);

		nv_mask(pfb, 0x616308, 0x10, 0x10);
		nv_mask(pfb, 0x616b08, 0x10, 0x10);
	} else {
		ram_exec(fuc, false);
	}
	return 0;
}

static void
gt215_ram_tidy(struct nvkm_fb *pfb)
{
	struct gt215_ram *ram = (void *)pfb->ram;
	struct gt215_ramfuc *fuc = &ram->fuc;
	ram_exec(fuc, false);
}

static int
gt215_ram_init(struct nvkm_object *object)
{
	struct nvkm_fb *pfb = (void *)object->parent;
	struct gt215_ram *ram = (void *)object;
	int ret;

	ret = nvkm_ram_init(&ram->base);
	if (ret)
		return ret;

	gt215_link_train_init(pfb);
	return 0;
}

static int
gt215_ram_fini(struct nvkm_object *object, bool suspend)
{
	struct nvkm_fb *pfb = (void *)object->parent;

	if (!suspend)
		gt215_link_train_fini(pfb);

	return 0;
}

static int
gt215_ram_ctor(struct nvkm_object *parent, struct nvkm_object *engine,
	       struct nvkm_oclass *oclass, void *data, u32 datasize,
	       struct nvkm_object **pobject)
{
	struct nvkm_fb *pfb = nvkm_fb(parent);
	struct nvkm_gpio *gpio = nvkm_gpio(pfb);
	struct dcb_gpio_func func;
	struct gt215_ram *ram;
	int ret, i;
	u32 reg, shift;

	ret = nv50_ram_create(parent, engine, oclass, &ram);
	*pobject = nv_object(ram);
	if (ret)
		return ret;

	switch (ram->base.type) {
	case NV_MEM_TYPE_DDR2:
	case NV_MEM_TYPE_DDR3:
	case NV_MEM_TYPE_GDDR3:
		ram->base.calc = gt215_ram_calc;
		ram->base.prog = gt215_ram_prog;
		ram->base.tidy = gt215_ram_tidy;
		break;
	default:
		nv_warn(ram, "reclocking of this ram type unsupported\n");
		return 0;
	}

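	/*
	 * Shadow-register table used by the reclock script.  Plain
	 * ramfuc_reg() entries track a single MMIO register, while the
	 * ramfuc_stride() entries (0x100760, 0x1007a0, 0x1007e0, 0x100da0,
	 * 0x1110e0) cover one copy per memory partition, 4 bytes apart,
	 * selected through part_mask.
	 */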
	ram->fuc.r_0x001610 = ramfuc_reg(0x001610);
	ram->fuc.r_0x001700 = ramfuc_reg(0x001700);
	ram->fuc.r_0x002504 = ramfuc_reg(0x002504);
	ram->fuc.r_0x004000 = ramfuc_reg(0x004000);
	ram->fuc.r_0x004004 = ramfuc_reg(0x004004);
	ram->fuc.r_0x004018 = ramfuc_reg(0x004018);
	ram->fuc.r_0x004128 = ramfuc_reg(0x004128);
	ram->fuc.r_0x004168 = ramfuc_reg(0x004168);
	ram->fuc.r_0x100080 = ramfuc_reg(0x100080);
	ram->fuc.r_0x100200 = ramfuc_reg(0x100200);
	ram->fuc.r_0x100210 = ramfuc_reg(0x100210);
	for (i = 0; i < 9; i++)
		ram->fuc.r_0x100220[i] = ramfuc_reg(0x100220 + (i * 4));
	ram->fuc.r_0x100264 = ramfuc_reg(0x100264);
	ram->fuc.r_0x1002d0 = ramfuc_reg(0x1002d0);
	ram->fuc.r_0x1002d4 = ramfuc_reg(0x1002d4);
	ram->fuc.r_0x1002dc = ramfuc_reg(0x1002dc);
	ram->fuc.r_0x10053c = ramfuc_reg(0x10053c);
	ram->fuc.r_0x1005a0 = ramfuc_reg(0x1005a0);
	ram->fuc.r_0x1005a4 = ramfuc_reg(0x1005a4);
	ram->fuc.r_0x100700 = ramfuc_reg(0x100700);
	ram->fuc.r_0x100714 = ramfuc_reg(0x100714);
	ram->fuc.r_0x100718 = ramfuc_reg(0x100718);
	ram->fuc.r_0x10071c = ramfuc_reg(0x10071c);
	ram->fuc.r_0x100720 = ramfuc_reg(0x100720);
	ram->fuc.r_0x100760 = ramfuc_stride(0x100760, 4, ram->base.part_mask);
	ram->fuc.r_0x1007a0 = ramfuc_stride(0x1007a0, 4, ram->base.part_mask);
	ram->fuc.r_0x1007e0 = ramfuc_stride(0x1007e0, 4, ram->base.part_mask);
	ram->fuc.r_0x100da0 = ramfuc_stride(0x100da0, 4, ram->base.part_mask);
	ram->fuc.r_0x10f804 = ramfuc_reg(0x10f804);
	ram->fuc.r_0x1110e0 = ramfuc_stride(0x1110e0, 4, ram->base.part_mask);
	ram->fuc.r_0x111100 = ramfuc_reg(0x111100);
	ram->fuc.r_0x111104 = ramfuc_reg(0x111104);
	ram->fuc.r_0x1111e0 = ramfuc_reg(0x1111e0);
	ram->fuc.r_0x111400 = ramfuc_reg(0x111400);
	ram->fuc.r_0x611200 = ramfuc_reg(0x611200);

	if (ram->base.ranks > 1) {
		ram->fuc.r_mr[0] = ramfuc_reg2(0x1002c0, 0x1002c8);
		ram->fuc.r_mr[1] = ramfuc_reg2(0x1002c4, 0x1002cc);
		ram->fuc.r_mr[2] = ramfuc_reg2(0x1002e0, 0x1002e8);
		ram->fuc.r_mr[3] = ramfuc_reg2(0x1002e4, 0x1002ec);
	} else {
		ram->fuc.r_mr[0] = ramfuc_reg(0x1002c0);
		ram->fuc.r_mr[1] = ramfuc_reg(0x1002c4);
		ram->fuc.r_mr[2] = ramfuc_reg(0x1002e0);
		ram->fuc.r_mr[3] = ramfuc_reg(0x1002e4);
	}

	ret = gpio->find(gpio, 0, 0x2e, DCB_GPIO_UNUSED, &func);
	if (ret == 0) {
		nv50_gpio_location(func.line, &reg, &shift);
		ram->fuc.r_gpioFBVREF = ramfuc_reg(reg);
	}

	return 0;
}

struct nvkm_oclass
gt215_ram_oclass = {
	.ofuncs = &(struct nvkm_ofuncs) {
		.ctor = gt215_ram_ctor,
		.dtor = _nvkm_ram_dtor,
		.init = gt215_ram_init,
		.fini = gt215_ram_fini,
	},
};