/*
 * (C) Copyright 2006-2007 Freescale Semiconductor, Inc.
 *
 * (C) Copyright 2006
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 *
 * Copyright (C) 2004-2006 Freescale Semiconductor, Inc.
 * (C) Copyright 2003 Motorola Inc.
 * Xianghua Xiao (X.Xiao@motorola.com)
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <common.h>
#include <asm/processor.h>
#include <asm/io.h>
#include <i2c.h>
#include <spd.h>
#include <asm/mmu.h>
#include <spd_sdram.h>

DECLARE_GLOBAL_DATA_PTR;

void board_add_ram_info(int use_default)
{
	volatile immap_t *immap = (immap_t *)CONFIG_SYS_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	char buf[32];

	printf(" (DDR%d", ((ddr->sdram_cfg & SDRAM_CFG_SDRAM_TYPE_MASK)
			   >> SDRAM_CFG_SDRAM_TYPE_SHIFT) - 1);

#if defined(CONFIG_MPC8308) || defined(CONFIG_MPC831x)
	if ((ddr->sdram_cfg & SDRAM_CFG_DBW_MASK) == SDRAM_CFG_DBW_16)
		puts(", 16-bit");
	else if ((ddr->sdram_cfg & SDRAM_CFG_DBW_MASK) == SDRAM_CFG_DBW_32)
		puts(", 32-bit");
	else
		puts(", unknown width");
#else
	if (ddr->sdram_cfg & SDRAM_CFG_32_BE)
		puts(", 32-bit");
	else
		puts(", 64-bit");
#endif

	if (ddr->sdram_cfg & SDRAM_CFG_ECC_EN)
		puts(", ECC on");
	else
		puts(", ECC off");

	printf(", %s MHz)", strmhz(buf, gd->mem_clk));

#if defined(CONFIG_SYS_LB_SDRAM) && defined(CONFIG_SYS_LBC_SDRAM_SIZE)
	puts("\nSDRAM: ");
	print_size(CONFIG_SYS_LBC_SDRAM_SIZE * 1024 * 1024, " (local bus)");
#endif
}

#ifdef CONFIG_SPD_EEPROM
#ifndef CONFIG_SYS_READ_SPD
#define CONFIG_SYS_READ_SPD	i2c_read
#endif
#ifndef SPD_EEPROM_OFFSET
#define SPD_EEPROM_OFFSET	0
#endif
#ifndef SPD_EEPROM_ADDR_LEN
#define SPD_EEPROM_ADDR_LEN	1
#endif

/*
 * Convert picoseconds into clock cycles (rounding up if needed).
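 *
 * Illustrative example (hypothetical values): with gd->mem_clk = 266 MHz
 * the memory bus runs at 133 MHz, so one clock is roughly 7518 ps and a
 * 15000 ps (15 ns) parameter rounds up to 2 clocks.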
 */
int
picos_to_clk(int picos)
{
	unsigned int mem_bus_clk;
	int clks;

	mem_bus_clk = gd->mem_clk >> 1;
	clks = picos / (1000000000 / (mem_bus_clk / 1000));
	if (picos % (1000000000 / (mem_bus_clk / 1000)) != 0)
		clks++;

	return clks;
}

unsigned int banksize(unsigned char row_dens)
{
	return ((row_dens >> 2) | ((row_dens & 3) << 6)) << 24;
}

int read_spd(uint addr)
{
	return ((int)addr);
}

#undef SPD_DEBUG
#ifdef SPD_DEBUG
static void spd_debug(spd_eeprom_t *spd)
{
	printf("\nDIMM type: %-18.18s\n", spd->mpart);
	printf("SPD size: %d\n", spd->info_size);
	printf("EEPROM size: %d\n", 1 << spd->chip_size);
	printf("Memory type: %d\n", spd->mem_type);
	printf("Row addr: %d\n", spd->nrow_addr);
	printf("Column addr: %d\n", spd->ncol_addr);
	printf("# of rows: %d\n", spd->nrows);
	printf("Row density: %d\n", spd->row_dens);
	printf("# of banks: %d\n", spd->nbanks);
	printf("Data width: %d\n",
	       256 * spd->dataw_msb + spd->dataw_lsb);
	printf("Chip width: %d\n", spd->primw);
	printf("Refresh rate: %02X\n", spd->refresh);
	printf("CAS latencies: %02X\n", spd->cas_lat);
	printf("Write latencies: %02X\n", spd->write_lat);
	printf("tRP: %d\n", spd->trp);
	printf("tRCD: %d\n", spd->trcd);
	printf("\n");
}
#endif /* SPD_DEBUG */

long int spd_sdram()
{
	volatile immap_t *immap = (immap_t *)CONFIG_SYS_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	volatile law83xx_t *ecm = &immap->sysconf.ddrlaw[0];
	spd_eeprom_t spd;
	unsigned int n_ranks;
	unsigned int odt_rd_cfg, odt_wr_cfg;
	unsigned char twr_clk, twtr_clk;
	unsigned int sdram_type;
	unsigned int memsize;
	unsigned int law_size;
	unsigned char caslat, caslat_ctrl;
	unsigned int trfc, trfc_clk, trfc_low;
	unsigned int trcd_clk, trtp_clk;
	unsigned char cke_min_clk;
	unsigned char add_lat, wr_lat;
	unsigned char wr_data_delay;
	unsigned char four_act;
	unsigned char cpo;
	unsigned char burstlen;
	unsigned char odt_cfg, mode_odt_enable;
	unsigned int max_bus_clk;
	unsigned int max_data_rate, effective_data_rate;
	unsigned int ddrc_clk;
	unsigned int refresh_clk;
	unsigned int sdram_cfg;
	unsigned int ddrc_ecc_enable;
	unsigned int pvr = get_pvr();

	/*
	 * First disable the memory controller (could be enabled
	 * by the debugger)
	 */
	clrsetbits_be32(&ddr->sdram_cfg, SDRAM_CFG_MEM_EN, 0);
	sync();
	isync();

	/* Read SPD parameters with I2C */
	CONFIG_SYS_READ_SPD(SPD_EEPROM_ADDRESS, SPD_EEPROM_OFFSET,
			    SPD_EEPROM_ADDR_LEN, (uchar *)&spd, sizeof(spd));
#ifdef SPD_DEBUG
	spd_debug(&spd);
#endif
	/* Check the memory type */
	if (spd.mem_type != SPD_MEMTYPE_DDR && spd.mem_type != SPD_MEMTYPE_DDR2) {
		debug("DDR: Module mem type is %02X\n", spd.mem_type);
		return 0;
	}

	/* Check the number of physical banks */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		n_ranks = spd.nrows;
	} else {
		n_ranks = (spd.nrows & 0x7) + 1;
	}

	if (n_ranks > 2) {
		printf("DDR: The number of physical bank is %02X\n", n_ranks);
		return 0;
	}

	/* Check if the number of rows of the module is in the DDRC range */
	if (spd.nrow_addr < 12 || spd.nrow_addr > 15) {
		printf("DDR: Row number is out of range of DDRC, row=%02X\n",
		       spd.nrow_addr);
		return 0;
	}

	/* Check if the number of columns of the module is in the DDRC range */
	if (spd.ncol_addr < 8 || spd.ncol_addr > 11) {
		printf("DDR: Col number is out of range of DDRC, col=%02X\n",
		       spd.ncol_addr);
		return 0;
	}
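
	/*
	 * Note: the 12..15 row and 8..11 column limits above correspond to
	 * the (nrow_addr - 12) and (ncol_addr - 8) values programmed into
	 * the CSn_CONFIG registers below, so only field values 0..3 are
	 * ever used there.
	 */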

#ifdef CONFIG_SYS_DDRCDR_VALUE
	/*
	 * Adjust DDR II IO voltage biasing.  It just makes it work.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		immap->sysconf.ddrcdr = CONFIG_SYS_DDRCDR_VALUE;
	}
	udelay(50000);
#endif

	/*
	 * ODT configuration recommendation from DDR Controller Chapter.
	 */
	odt_rd_cfg = 0;			/* Never assert ODT */
	odt_wr_cfg = 0;			/* Never assert ODT */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		odt_wr_cfg = 1;		/* Assert ODT on writes to CSn */
	}

	/* Setup DDR chip select register */
#ifdef CONFIG_SYS_83XX_DDR_USES_CS0
	ddr->csbnds[0].csbnds = (banksize(spd.row_dens) >> 24) - 1;
	ddr->cs_config[0] = (1 << 31
			    | (odt_rd_cfg << 20)
			    | (odt_wr_cfg << 16)
			    | ((spd.nbanks == 8 ? 1 : 0) << 14)
			    | ((spd.nrow_addr - 12) << 8)
			    | (spd.ncol_addr - 8));
	debug("\n");
	debug("cs0_bnds = 0x%08x\n", ddr->csbnds[0].csbnds);
	debug("cs0_config = 0x%08x\n", ddr->cs_config[0]);

	if (n_ranks == 2) {
		ddr->csbnds[1].csbnds = ((banksize(spd.row_dens) >> 8)
				| ((banksize(spd.row_dens) >> 23) - 1));
		ddr->cs_config[1] = (1 << 31
				    | (odt_rd_cfg << 20)
				    | (odt_wr_cfg << 16)
				    | ((spd.nbanks == 8 ? 1 : 0) << 14)
				    | ((spd.nrow_addr - 12) << 8)
				    | (spd.ncol_addr - 8));
		debug("cs1_bnds = 0x%08x\n", ddr->csbnds[1].csbnds);
		debug("cs1_config = 0x%08x\n", ddr->cs_config[1]);
	}

#else
	ddr->csbnds[2].csbnds = (banksize(spd.row_dens) >> 24) - 1;
	ddr->cs_config[2] = (1 << 31
			    | (odt_rd_cfg << 20)
			    | (odt_wr_cfg << 16)
			    | ((spd.nbanks == 8 ? 1 : 0) << 14)
			    | ((spd.nrow_addr - 12) << 8)
			    | (spd.ncol_addr - 8));
	debug("\n");
	debug("cs2_bnds = 0x%08x\n", ddr->csbnds[2].csbnds);
	debug("cs2_config = 0x%08x\n", ddr->cs_config[2]);

	if (n_ranks == 2) {
		ddr->csbnds[3].csbnds = ((banksize(spd.row_dens) >> 8)
				| ((banksize(spd.row_dens) >> 23) - 1));
		ddr->cs_config[3] = (1 << 31
				    | (odt_rd_cfg << 20)
				    | (odt_wr_cfg << 16)
				    | ((spd.nbanks == 8 ? 1 : 0) << 14)
				    | ((spd.nrow_addr - 12) << 8)
				    | (spd.ncol_addr - 8));
		debug("cs3_bnds = 0x%08x\n", ddr->csbnds[3].csbnds);
		debug("cs3_config = 0x%08x\n", ddr->cs_config[3]);
	}
#endif

	/*
	 * Figure out memory size in Megabytes.
	 */
	memsize = n_ranks * banksize(spd.row_dens) / 0x100000;
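
	/*
	 * Illustrative example (hypothetical SPD value): row_dens = 0x40
	 * makes banksize() return 0x10000000 (256 MB per rank), so a
	 * two-rank module gives memsize = 512.
	 */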

	/*
	 * First supported LAW size is 16M, at LAWAR_SIZE_16M == 23.
	 */
	law_size = 19 + __ilog2(memsize);

	/*
	 * Set up LAWBAR for all of DDR.
	 */
	ecm->bar = CONFIG_SYS_DDR_SDRAM_BASE & 0xfffff000;
	ecm->ar = (LAWAR_EN | LAWAR_TRGT_IF_DDR | (LAWAR_SIZE & law_size));
	debug("DDR:bar=0x%08x\n", ecm->bar);
	debug("DDR:ar=0x%08x\n", ecm->ar);

	/*
	 * Find the largest CAS by locating the highest 1 bit
	 * in the spd.cas_lat field.  Translate it to a DDR
	 * controller field value:
	 *
	 * CAS Lat DDR I   DDR II  Ctrl
	 * Clocks  SPD Bit SPD Bit Value
	 * ------- ------- ------- -----
	 * 1.0     0               0001
	 * 1.5     1               0010
	 * 2.0     2       2       0011
	 * 2.5     3               0100
	 * 3.0     4       3       0101
	 * 3.5     5               0110
	 * 4.0     6       4       0111
	 * 4.5                     1000
	 * 5.0             5       1001
	 */
	caslat = __ilog2(spd.cas_lat);
	if ((spd.mem_type == SPD_MEMTYPE_DDR)
	    && (caslat > 6)) {
		printf("DDR I: Invalid SPD CAS Latency: 0x%x.\n", spd.cas_lat);
		return 0;
	} else if (spd.mem_type == SPD_MEMTYPE_DDR2
		   && (caslat < 2 || caslat > 5)) {
		printf("DDR II: Invalid SPD CAS Latency: 0x%x.\n",
		       spd.cas_lat);
		return 0;
	}
	debug("DDR: caslat SPD bit is %d\n", caslat);

	max_bus_clk = 1000 * 10 / (((spd.clk_cycle & 0xF0) >> 4) * 10
				   + (spd.clk_cycle & 0x0f));
	max_data_rate = max_bus_clk * 2;

	debug("DDR:Module maximum data rate is: %d MHz\n", max_data_rate);

	ddrc_clk = gd->mem_clk / 1000000;
	effective_data_rate = 0;

	if (max_data_rate >= 460) {		/* it is DDR2-800, 667, 533 */
		if (spd.cas_lat & 0x08)
			caslat = 3;
		else
			caslat = 4;
		if (ddrc_clk <= 460 && ddrc_clk > 350)
			effective_data_rate = 400;
		else if (ddrc_clk <= 350 && ddrc_clk > 280)
			effective_data_rate = 333;
		else if (ddrc_clk <= 280 && ddrc_clk > 230)
			effective_data_rate = 266;
		else
			effective_data_rate = 200;
	} else if (max_data_rate >= 390 && max_data_rate < 460) { /* it is DDR 400 */
		if (ddrc_clk <= 460 && ddrc_clk > 350) {
			/* DDR controller clk at 350~460 */
			effective_data_rate = 400;	/* 5ns */
			caslat = caslat;
		} else if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			effective_data_rate = 333;	/* 6ns */
			if (spd.clk_cycle2 == 0x60)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266;	/* 7.5ns */
			if (spd.clk_cycle3 == 0x75)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0x75)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200;	/* 10ns */
			if (spd.clk_cycle3 == 0xa0)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
			else
				caslat = caslat;
		}
	} else if (max_data_rate >= 323) {	/* it is DDR 333 */
		if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			effective_data_rate = 333;	/* 6ns */
			caslat = caslat;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266;	/* 7.5ns */
			if (spd.clk_cycle2 == 0x75)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200;	/* 10ns */
			if (spd.clk_cycle3 == 0xa0)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
			else
				caslat = caslat;
		}
	} else if (max_data_rate >= 256) {	/* it is DDR 266 */
		if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			printf("DDR: DDR controller freq is more than "
				"max data rate of the module\n");
			return 0;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266;	/* 7.5ns */
			caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200;	/* 10ns */
			if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
		}
	} else if (max_data_rate >= 190) {	/* it is DDR 200 */
		if (ddrc_clk <= 350 && ddrc_clk > 230) {
			/* DDR controller clk at 230~350 */
			printf("DDR: DDR controller freq is more than "
				"max data rate of the module\n");
			return 0;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200;	/* 10ns */
			caslat = caslat;
		}
	}
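
	/*
	 * Note: the clk_cycle2/clk_cycle3 comparisons above derate the CAS
	 * latency by one or two steps when the SPD advertises a short enough
	 * cycle time at the reduced latency, so the module is run at the
	 * lowest CAS latency it supports at the effective clock.
	 */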

	debug("DDR:Effective data rate is: %dMHz\n", effective_data_rate);
	debug("DDR:The MSB 1 of CAS Latency is: %d\n", caslat);

	/*
	 * Errata DDR6 workaround: input enable 2 cycles earlier.
	 * Applies to MPC834x Rev1.0/1.1 and MPC8360 Rev1.1/1.2.
	 */
	if (PVR_MAJ(pvr) <= 1 && spd.mem_type == SPD_MEMTYPE_DDR) {
		if (caslat == 2)
			ddr->debug_reg = 0x201c0000; /* CL=2 */
		else if (caslat == 3)
			ddr->debug_reg = 0x202c0000; /* CL=2.5 */
		else if (caslat == 4)
			ddr->debug_reg = 0x202c0000; /* CL=3.0 */

		__asm__ __volatile__ ("sync");

		debug("Errata DDR6 (debug_reg=0x%08x)\n", ddr->debug_reg);
	}

	/*
	 * Convert caslat clocks to DDR controller value.
	 * Force caslat_ctrl to be DDR Controller field-sized.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		caslat_ctrl = (caslat + 1) & 0x07;
	} else {
		caslat_ctrl = (2 * caslat - 1) & 0x0f;
	}

	debug("DDR: effective data rate is %d MHz\n", effective_data_rate);
	debug("DDR: caslat SPD bit is %d, controller field is 0x%x\n",
	      caslat, caslat_ctrl);

	/*
	 * Timing Config 0.
	 * Avoid writing for DDR I.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		unsigned char taxpd_clk = 8;		/* By the book. */
		unsigned char tmrd_clk = 2;		/* By the book. */
		unsigned char act_pd_exit = 2;		/* Empirical? */
		unsigned char pre_pd_exit = 6;		/* Empirical? */

		ddr->timing_cfg_0 = (0
			| ((act_pd_exit & 0x7) << 20)	/* ACT_PD_EXIT */
			| ((pre_pd_exit & 0x7) << 16)	/* PRE_PD_EXIT */
			| ((taxpd_clk & 0xf) << 8)	/* ODT_PD_EXIT */
			| ((tmrd_clk & 0xf) << 0)	/* MRS_CYC */
			);
		debug("DDR: timing_cfg_0 = 0x%08x\n", ddr->timing_cfg_0);
	}

	/*
	 * For DDR I, WRREC(Twr) and WRTORD(Twtr) are not in SPD,
	 * use conservative values.
	 * For DDR II, they are bytes 36 and 37, in quarter nanos.
	 */

	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		twr_clk = 3;	/* Clocks */
		twtr_clk = 1;	/* Clocks */
	} else {
		twr_clk = picos_to_clk(spd.twr * 250);
		twtr_clk = picos_to_clk(spd.twtr * 250);
		if (twtr_clk < 2)
			twtr_clk = 2;
	}

	/*
	 * Calculate Trfc, in picos.
	 * DDR I: Byte 42 straight up in ns.
	 * DDR II: Byte 40 and 42 swizzled some, in ns.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		trfc = spd.trfc * 1000;		/* up to ps */
	} else {
		unsigned int byte40_table_ps[8] = {
			0,
			250,
			330,
			500,
			660,
			750,
			0,
			0
		};

		trfc = (((spd.trctrfc_ext & 0x1) * 256) + spd.trfc) * 1000
		       + byte40_table_ps[(spd.trctrfc_ext >> 1) & 0x7];
	}
	trfc_clk = picos_to_clk(trfc);

	/*
	 * Trcd, Byte 29, from quarter nanos to ps and clocks.
	 */
	trcd_clk = picos_to_clk(spd.trcd * 250) & 0x7;
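
	/*
	 * Illustrative example (hypothetical SPD value): spd.trcd = 0x3c is
	 * 60 * 250 ps = 15 ns, i.e. 2 clocks on a 133 MHz memory bus.
	 */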

	/*
	 * Convert trfc_clk to DDR controller fields.  DDR I should
	 * fit in the REFREC field (16-19) of TIMING_CFG_1, but the
	 * 83xx controller has an extended REFREC field of three bits.
	 * The controller automatically adds 8 clocks to this value,
	 * so preadjust it down 8 first before splitting it up.
	 */
	trfc_low = (trfc_clk - 8) & 0xf;

	ddr->timing_cfg_1 =
		(((picos_to_clk(spd.trp * 250) & 0x07) << 28) |	/* PRETOACT */
		((picos_to_clk(spd.tras * 1000) & 0x0f) << 24) |	/* ACTTOPRE */
		(trcd_clk << 20) |				/* ACTTORW */
		(caslat_ctrl << 16) |				/* CASLAT */
		(trfc_low << 12) |				/* REFREC */
		((twr_clk & 0x07) << 8) |			/* WRREC */
		((picos_to_clk(spd.trrd * 250) & 0x07) << 4) |	/* ACTTOACT */
		((twtr_clk & 0x07) << 0)			/* WRTORD */
		);

	/*
	 * Additive Latency
	 * For DDR I, 0.
	 * For DDR II, with ODT enabled, use "a value" less than ACTTORW,
	 * which comes from Trcd, and also note that:
	 *	add_lat + caslat must be >= 4
	 */
	add_lat = 0;
	if (spd.mem_type == SPD_MEMTYPE_DDR2
	    && (odt_wr_cfg || odt_rd_cfg)
	    && (caslat < 4)) {
		add_lat = 4 - caslat;
		if ((add_lat + caslat) < 4) {
			add_lat = 0;
		}
	}

	/*
	 * Write Data Delay
	 * Historically 0x2 == 4/8 clock delay.
	 * Empirically, 0x3 == 6/8 clock delay is suggested for DDR I 266.
	 */
	wr_data_delay = 2;
#ifdef CONFIG_SYS_DDR_WRITE_DATA_DELAY
	wr_data_delay = CONFIG_SYS_DDR_WRITE_DATA_DELAY;
#endif

	/*
	 * Write Latency
	 * Read to Precharge
	 * Minimum CKE Pulse Width.
	 * Four Activate Window
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		/*
		 * This is a lie.  It should really be 1, but if it is
		 * set to 1, bits overlap into the old controller's
		 * otherwise unused ACSM field.  If we leave it 0, then
		 * the HW will magically treat it as 1 for DDR 1.  Oh Yea.
		 */
		wr_lat = 0;

		trtp_clk = 2;		/* By the book. */
		cke_min_clk = 1;	/* By the book. */
		four_act = 1;		/* By the book. */

	} else {
		wr_lat = caslat - 1;

		/* Convert SPD value from quarter nanos to picos. */
		trtp_clk = picos_to_clk(spd.trtp * 250);
		if (trtp_clk < 2)
			trtp_clk = 2;
		trtp_clk += add_lat;

		cke_min_clk = 3;	/* By the book. */
		four_act = picos_to_clk(37500);	/* By the book. 1k pages? */
	}
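
	/*
	 * Illustrative example: on a 133 MHz memory bus, picos_to_clk(37500)
	 * above gives 5 clocks, i.e. a 37.5 ns four-activate window.
	 */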

	/*
	 * Empirically set ~MCAS-to-preamble override for DDR 2.
	 * Your mileage will vary.
	 */
	cpo = 0;
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
#ifdef CONFIG_SYS_DDR_CPO
		cpo = CONFIG_SYS_DDR_CPO;
#else
		if (effective_data_rate == 266) {
			cpo = 0x4;	/* READ_LAT + 1/2 */
		} else if (effective_data_rate == 333) {
			cpo = 0x6;	/* READ_LAT + 1 */
		} else if (effective_data_rate == 400) {
			cpo = 0x7;	/* READ_LAT + 5/4 */
		} else {
			/* Automatic calibration */
			cpo = 0x1f;
		}
#endif
	}

	ddr->timing_cfg_2 = (0
		| ((add_lat & 0x7) << 28)	/* ADD_LAT */
		| ((cpo & 0x1f) << 23)		/* CPO */
		| ((wr_lat & 0x7) << 19)	/* WR_LAT */
		| ((trtp_clk & 0x7) << 13)	/* RD_TO_PRE */
		| ((wr_data_delay & 0x7) << 10)	/* WR_DATA_DELAY */
		| ((cke_min_clk & 0x7) << 6)	/* CKE_PLS */
		| ((four_act & 0x1f) << 0)	/* FOUR_ACT */
		);

	debug("DDR:timing_cfg_1=0x%08x\n", ddr->timing_cfg_1);
	debug("DDR:timing_cfg_2=0x%08x\n", ddr->timing_cfg_2);

	/* Check DIMM data bus width */
	if (spd.dataw_lsb < 64) {
		if (spd.mem_type == SPD_MEMTYPE_DDR)
			burstlen = 0x03; /* 32 bit data bus, burst len is 8 */
		else
			burstlen = 0x02; /* 32 bit data bus, burst len is 4 */
		debug("\n DDR DIMM: data bus width is 32 bit");
	} else {
		burstlen = 0x02; /* Others act as 64 bit bus, burst len is 4 */
		debug("\n DDR DIMM: data bus width is 64 bit");
	}

	/* Is this an ECC DDR chip? */
	if (spd.config == 0x02)
		debug(" with ECC\n");
	else
		debug(" without ECC\n");

	/*
	 * Burst length is always 4 for 64 bit data bus, 8 for 32 bit data bus.
	 * Burst type is sequential.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		switch (caslat) {
		case 1:
			ddr->sdram_mode = 0x50 | burstlen;	/* CL=1.5 */
			break;
		case 2:
			ddr->sdram_mode = 0x20 | burstlen;	/* CL=2.0 */
			break;
		case 3:
			ddr->sdram_mode = 0x60 | burstlen;	/* CL=2.5 */
			break;
		case 4:
			ddr->sdram_mode = 0x30 | burstlen;	/* CL=3.0 */
			break;
		default:
			printf("DDR:only CL 1.5, 2.0, 2.5, 3.0 is supported\n");
			return 0;
		}
	} else {
		mode_odt_enable = 0x0;	/* Default disabled */
		if (odt_wr_cfg || odt_rd_cfg) {
			/*
			 * Bits 6 and 2 in Extended MRS(1)
			 * Bit 2 == 0x04 == 75 Ohm, with 2 DIMM modules.
			 * Bit 6 == 0x40 == 150 Ohm, with 1 DIMM module.
			 */
			mode_odt_enable = 0x40;	/* 150 Ohm */
		}

		ddr->sdram_mode =
			(0
			 | (1 << (16 + 10))		/* DQS Differential disable */
#ifdef CONFIG_SYS_DDR_MODE_WEAK
			 | (1 << (16 + 1))		/* weak driver (~60%) */
#endif
			 | (add_lat << (16 + 3))	/* Additive Latency in EMRS1 */
			 | (mode_odt_enable << 16)	/* ODT Enable in EMRS1 */
			 | ((twr_clk - 1) << 9)		/* Write Recovery Autopre */
			 | (caslat << 4)		/* caslat */
			 | (burstlen << 0)		/* Burst length */
			);
	}
	debug("DDR:sdram_mode=0x%08x\n", ddr->sdram_mode);
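
	/*
	 * Illustrative example (hypothetical values): with caslat = 4,
	 * burstlen = 0x2, twr_clk = 3, add_lat = 0 and mode_odt_enable = 0x40,
	 * the DDR2 expression above works out to sdram_mode = 0x04400442
	 * (CONFIG_SYS_DDR_MODE_WEAK not set).
	 */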

	/*
	 * Clear EMRS2 and EMRS3.
	 */
	ddr->sdram_mode2 = 0;
	debug("DDR: sdram_mode2 = 0x%08x\n", ddr->sdram_mode2);

	switch (spd.refresh) {
	case 0x00:
	case 0x80:
		refresh_clk = picos_to_clk(15625000);
		break;
	case 0x01:
	case 0x81:
		refresh_clk = picos_to_clk(3900000);
		break;
	case 0x02:
	case 0x82:
		refresh_clk = picos_to_clk(7800000);
		break;
	case 0x03:
	case 0x83:
		refresh_clk = picos_to_clk(31300000);
		break;
	case 0x04:
	case 0x84:
		refresh_clk = picos_to_clk(62500000);
		break;
	case 0x05:
	case 0x85:
		refresh_clk = picos_to_clk(125000000);
		break;
	default:
		refresh_clk = 0x512;
		break;
	}

	/*
	 * Set BSTOPRE to 0x100 for page mode.
	 * If auto-precharge is used, set BSTOPRE = 0.
	 */
	ddr->sdram_interval = ((refresh_clk & 0x3fff) << 16) | 0x100;
	debug("DDR:sdram_interval=0x%08x\n", ddr->sdram_interval);

	/*
	 * SDRAM Cfg 2
	 */
	odt_cfg = 0;
#ifndef CONFIG_NEVER_ASSERT_ODT_TO_CPU
	if (odt_rd_cfg | odt_wr_cfg) {
		odt_cfg = 0x2;		/* ODT to IOs during reads */
	}
#endif
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		ddr->sdram_cfg2 = (0
			| (0 << 26)		/* True DQS */
			| (odt_cfg << 21)	/* ODT only read */
			| (1 << 12)		/* 1 refresh at a time */
			);

		debug("DDR: sdram_cfg2 = 0x%08x\n", ddr->sdram_cfg2);
	}

#ifdef CONFIG_SYS_DDR_SDRAM_CLK_CNTL	/* Optional platform specific value */
	ddr->sdram_clk_cntl = CONFIG_SYS_DDR_SDRAM_CLK_CNTL;
#endif
	debug("DDR:sdram_clk_cntl=0x%08x\n", ddr->sdram_clk_cntl);

	asm("sync;isync");

	udelay(600);

	/*
	 * Figure out the settings for the sdram_cfg register.  Build up
	 * the value in 'sdram_cfg' before writing since the write into
	 * the register will actually enable the memory controller, and all
	 * settings must be done before enabling.
	 *
	 * sdram_cfg[0]   = 1 (ddr sdram logic enable)
	 * sdram_cfg[1]   = 1 (self-refresh-enable)
	 * sdram_cfg[5:7] = (SDRAM type = DDR SDRAM)
	 *			010 DDR 1 SDRAM
	 *			011 DDR 2 SDRAM
	 * sdram_cfg[12]  = 0 (32_BE = 0, 64 bit bus mode)
	 * sdram_cfg[13]  = 0 (8_BE = 0, 4-beat bursts)
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR)
		sdram_type = SDRAM_CFG_SDRAM_TYPE_DDR1;
	else
		sdram_type = SDRAM_CFG_SDRAM_TYPE_DDR2;

	sdram_cfg = (0
		     | SDRAM_CFG_MEM_EN		/* DDR enable */
		     | SDRAM_CFG_SREN		/* Self refresh */
		     | sdram_type		/* SDRAM type */
		     );

	/* sdram_cfg[3] = RD_EN - registered DIMM enable */
	if (spd.mod_attr & 0x02)
		sdram_cfg |= SDRAM_CFG_RD_EN;

	/* The DIMM is 32bit width */
	if (spd.dataw_lsb < 64) {
		if (spd.mem_type == SPD_MEMTYPE_DDR)
			sdram_cfg |= SDRAM_CFG_32_BE | SDRAM_CFG_8_BE;
		if (spd.mem_type == SPD_MEMTYPE_DDR2)
			sdram_cfg |= SDRAM_CFG_32_BE;
	}

	ddrc_ecc_enable = 0;

#if defined(CONFIG_DDR_ECC)
	/* Enable ECC with sdram_cfg[2] */
	if (spd.config == 0x02) {
		sdram_cfg |= 0x20000000;
		ddrc_ecc_enable = 1;
		/* disable error detection */
		ddr->err_disable = ~ECC_ERROR_ENABLE;
		/* set single bit error threshold to maximum value,
		 * reset counter to zero */
		ddr->err_sbe = (255 << ECC_ERROR_MAN_SBET_SHIFT) |
			       (0 << ECC_ERROR_MAN_SBEC_SHIFT);
	}

	debug("DDR:err_disable=0x%08x\n", ddr->err_disable);
	debug("DDR:err_sbe=0x%08x\n", ddr->err_sbe);
#endif
	debug(" DDRC ECC mode: %s\n", ddrc_ecc_enable ? "ON" : "OFF");

#if defined(CONFIG_DDR_2T_TIMING)
	/*
	 * Enable 2T timing by setting sdram_cfg[16].
	 */
	sdram_cfg |= SDRAM_CFG_2T_EN;
#endif
	/* Enable controller, and GO! */
	ddr->sdram_cfg = sdram_cfg;
	asm("sync;isync");
	udelay(500);

	debug("DDR:sdram_cfg=0x%08x\n", ddr->sdram_cfg);
	return memsize;		/* in MBytes */
}
#endif /* CONFIG_SPD_EEPROM */

#if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRCONTROLLER)
/*
 * Use the timebase counter; get_timer() is not available
 * at this point of initialization yet.
 */
static __inline__ unsigned long get_tbms(void)
{
	unsigned long tbl;
	unsigned long tbu1, tbu2;
	unsigned long ms;
	unsigned long long tmp;

	ulong tbclk = get_tbclk();

	/* get the timebase ticks */
	do {
		asm volatile ("mftbu %0":"=r" (tbu1):);
		asm volatile ("mftb %0":"=r" (tbl):);
		asm volatile ("mftbu %0":"=r" (tbu2):);
	} while (tbu1 != tbu2);

	/* convert ticks to ms */
	tmp = (unsigned long long)(tbu1);
	tmp = (tmp << 32);
	tmp += (unsigned long long)(tbl);
	ms = tmp / (tbclk / 1000);

	return ms;
}

/*
 * Initialize all of memory for ECC, then enable errors.
 */
void ddr_enable_ecc(unsigned int dram_size)
{
	volatile immap_t *immap = (immap_t *)CONFIG_SYS_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	unsigned long t_start, t_end;
	register u64 *p;
	register uint size;
	unsigned int pattern[2];

	icache_enable();
	t_start = get_tbms();
	pattern[0] = 0xdeadbeef;
	pattern[1] = 0xdeadbeef;

#if defined(CONFIG_DDR_ECC_INIT_VIA_DMA)
	dma_meminit(pattern[0], dram_size);
#else
	debug("ddr init: CPU FP write method\n");
	size = dram_size;
	for (p = 0; p < (u64 *)(size); p++) {
		ppcDWstore((u32 *)p, pattern);
	}
	__asm__ __volatile__ ("sync");
#endif

	t_end = get_tbms();
	icache_disable();

	debug("\nREADY!!\n");
	debug("ddr init duration: %ld ms\n", t_end - t_start);

	/* Clear All ECC Errors */
	if ((ddr->err_detect & ECC_ERROR_DETECT_MME) == ECC_ERROR_DETECT_MME)
		ddr->err_detect |= ECC_ERROR_DETECT_MME;
	if ((ddr->err_detect & ECC_ERROR_DETECT_MBE) == ECC_ERROR_DETECT_MBE)
		ddr->err_detect |= ECC_ERROR_DETECT_MBE;
	if ((ddr->err_detect & ECC_ERROR_DETECT_SBE) == ECC_ERROR_DETECT_SBE)
		ddr->err_detect |= ECC_ERROR_DETECT_SBE;
	if ((ddr->err_detect & ECC_ERROR_DETECT_MSE) == ECC_ERROR_DETECT_MSE)
		ddr->err_detect |= ECC_ERROR_DETECT_MSE;

	/* Disable ECC-Interrupts */
	ddr->err_int_en &= ECC_ERR_INT_DISABLE;

	/* Enable errors for ECC */
	ddr->err_disable &= ECC_ERROR_ENABLE;

	__asm__ __volatile__ ("sync");
	__asm__ __volatile__ ("isync");
}
#endif /* CONFIG_DDR_ECC */