/*
 * (C) Copyright 2006-2007 Freescale Semiconductor, Inc.
 *
 * (C) Copyright 2006
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 *
 * Copyright (C) 2004-2006 Freescale Semiconductor, Inc.
 * (C) Copyright 2003 Motorola Inc.
 * Xianghua Xiao (X.Xiao@motorola.com)
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */

#include <common.h>
#include <asm/processor.h>
#include <asm/io.h>
#include <i2c.h>
#include <spd.h>
#include <asm/mmu.h>
#include <spd_sdram.h>

DECLARE_GLOBAL_DATA_PTR;

void board_add_ram_info(int use_default)
{
	volatile immap_t *immap = (immap_t *)CONFIG_SYS_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	char buf[32];

	printf(" (DDR%d", ((ddr->sdram_cfg & SDRAM_CFG_SDRAM_TYPE_MASK)
			   >> SDRAM_CFG_SDRAM_TYPE_SHIFT) - 1);

#if defined(CONFIG_MPC8308) || defined(CONFIG_MPC831x)
	if ((ddr->sdram_cfg & SDRAM_CFG_DBW_MASK) == SDRAM_CFG_DBW_16)
		puts(", 16-bit");
	else if ((ddr->sdram_cfg & SDRAM_CFG_DBW_MASK) == SDRAM_CFG_DBW_32)
		puts(", 32-bit");
	else
		puts(", unknown width");
#else
	if (ddr->sdram_cfg & SDRAM_CFG_32_BE)
		puts(", 32-bit");
	else
		puts(", 64-bit");
#endif

	if (ddr->sdram_cfg & SDRAM_CFG_ECC_EN)
		puts(", ECC on");
	else
		puts(", ECC off");

	printf(", %s MHz)", strmhz(buf, gd->mem_clk));

#if defined(CONFIG_SYS_LB_SDRAM) && defined(CONFIG_SYS_LBC_SDRAM_SIZE)
	puts("\nSDRAM: ");
	print_size(CONFIG_SYS_LBC_SDRAM_SIZE * 1024 * 1024, " (local bus)");
#endif
}

#ifdef CONFIG_SPD_EEPROM
#ifndef CONFIG_SYS_READ_SPD
#define CONFIG_SYS_READ_SPD	i2c_read
#endif
#ifndef SPD_EEPROM_OFFSET
#define SPD_EEPROM_OFFSET	0
#endif
#ifndef SPD_EEPROM_ADDR_LEN
#define SPD_EEPROM_ADDR_LEN	1
#endif

/*
 * Convert picoseconds into clock cycles (rounding up if needed).
 */
int picos_to_clk(int picos)
{
	unsigned int mem_bus_clk;
	int clks;

	mem_bus_clk = gd->mem_clk >> 1;
	clks = picos / (1000000000 / (mem_bus_clk / 1000));
	if (picos % (1000000000 / (mem_bus_clk / 1000)) != 0)
		clks++;

	return clks;
}
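
/*
 * Example (informative): for a DDR-400 module gd->mem_clk is the 400 MHz
 * data rate, so mem_bus_clk above is 200 MHz and one clock is 5000 ps.
 * A 15000 ps parameter (e.g. a typical tRP) then converts to 3 clocks,
 * and 12500 ps rounds up to 3 clocks as well.
 */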

unsigned int banksize(unsigned char row_dens)
{
	return ((row_dens >> 2) | ((row_dens & 3) << 6)) << 24;
}
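
/*
 * Note (informative): spd.row_dens is SPD byte 31 (rank density), a bit
 * mask in which bit 2 means 16 MiB, bit 3 means 32 MiB, ... bit 7 means
 * 512 MiB, and the low two bits wrap around to 1 GiB and 2 GiB.  Rotating
 * the byte right by two and scaling by 16 MiB (the << 24 above) therefore
 * yields the rank size in bytes, e.g. 0x40 (256 MiB) -> 0x10000000.
 */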

int read_spd(uint addr)
{
	return ((int)addr);
}

#undef SPD_DEBUG
#ifdef SPD_DEBUG
static void spd_debug(spd_eeprom_t *spd)
{
	printf("\nDIMM type:       %-18.18s\n", spd->mpart);
	printf("SPD size:        %d\n", spd->info_size);
	printf("EEPROM size:     %d\n", 1 << spd->chip_size);
	printf("Memory type:     %d\n", spd->mem_type);
	printf("Row addr:        %d\n", spd->nrow_addr);
	printf("Column addr:     %d\n", spd->ncol_addr);
	printf("# of rows:       %d\n", spd->nrows);
	printf("Row density:     %d\n", spd->row_dens);
	printf("# of banks:      %d\n", spd->nbanks);
	printf("Data width:      %d\n",
	       256 * spd->dataw_msb + spd->dataw_lsb);
	printf("Chip width:      %d\n", spd->primw);
	printf("Refresh rate:    %02X\n", spd->refresh);
	printf("CAS latencies:   %02X\n", spd->cas_lat);
	printf("Write latencies: %02X\n", spd->write_lat);
	printf("tRP:             %d\n", spd->trp);
	printf("tRCD:            %d\n", spd->trcd);
	printf("\n");
}
#endif /* SPD_DEBUG */

long int spd_sdram()
{
	volatile immap_t *immap = (immap_t *)CONFIG_SYS_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	volatile law83xx_t *ecm = &immap->sysconf.ddrlaw[0];
	spd_eeprom_t spd;
	unsigned int n_ranks;
	unsigned int odt_rd_cfg, odt_wr_cfg;
	unsigned char twr_clk, twtr_clk;
	unsigned int sdram_type;
	unsigned int memsize;
	unsigned int law_size;
	unsigned char caslat, caslat_ctrl;
	unsigned int trfc, trfc_clk, trfc_low, trfc_high;
	unsigned int trcd_clk, trtp_clk;
	unsigned char cke_min_clk;
	unsigned char add_lat, wr_lat;
	unsigned char wr_data_delay;
	unsigned char four_act;
	unsigned char cpo;
	unsigned char burstlen;
	unsigned char odt_cfg, mode_odt_enable;
	unsigned int max_bus_clk;
	unsigned int max_data_rate, effective_data_rate;
	unsigned int ddrc_clk;
	unsigned int refresh_clk;
	unsigned int sdram_cfg;
	unsigned int ddrc_ecc_enable;
	unsigned int pvr = get_pvr();

	/*
	 * First disable the memory controller (it could have been enabled
	 * by the debugger).
	 */
	clrsetbits_be32(&ddr->sdram_cfg, SDRAM_CFG_MEM_EN, 0);
	sync();
	isync();

	/* Read the SPD parameters over I2C */
	CONFIG_SYS_READ_SPD(SPD_EEPROM_ADDRESS, SPD_EEPROM_OFFSET,
			    SPD_EEPROM_ADDR_LEN, (uchar *)&spd, sizeof(spd));
#ifdef SPD_DEBUG
	spd_debug(&spd);
#endif
	/* Check the memory type */
	if (spd.mem_type != SPD_MEMTYPE_DDR && spd.mem_type != SPD_MEMTYPE_DDR2) {
		debug("DDR: Module mem type is %02X\n", spd.mem_type);
		return 0;
	}

	/* Check the number of physical banks (ranks) */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		n_ranks = spd.nrows;
	} else {
		n_ranks = (spd.nrows & 0x7) + 1;
	}

	if (n_ranks > 2) {
		printf("DDR: The number of physical banks is %02X\n", n_ranks);
		return 0;
	}

	/* Check that the module's row address count is in the DDRC's range */
	if (spd.nrow_addr < 12 || spd.nrow_addr > 15) {
		printf("DDR: Row number is out of range of DDRC, row=%02X\n",
		       spd.nrow_addr);
		return 0;
	}

	/* Check that the module's column address count is in the DDRC's range */
	if (spd.ncol_addr < 8 || spd.ncol_addr > 11) {
		printf("DDR: Col number is out of range of DDRC, col=%02X\n",
		       spd.ncol_addr);
		return 0;
	}

#ifdef CONFIG_SYS_DDRCDR_VALUE
	/*
	 * Adjust the DDR2 I/O voltage biasing.  It just makes it work.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		immap->sysconf.ddrcdr = CONFIG_SYS_DDRCDR_VALUE;
	}
	udelay(50000);
#endif

	/*
	 * ODT configuration recommendation from the DDR Controller chapter.
	 */
	odt_rd_cfg = 0;			/* Never assert ODT */
	odt_wr_cfg = 0;			/* Never assert ODT */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		odt_wr_cfg = 1;		/* Assert ODT on writes to CSn */
	}

	/* Set up the DDR chip select registers */
#ifdef CONFIG_SYS_83XX_DDR_USES_CS0
	ddr->csbnds[0].csbnds = (banksize(spd.row_dens) >> 24) - 1;
	ddr->cs_config[0] = (1 << 31
			    | (odt_rd_cfg << 20)
			    | (odt_wr_cfg << 16)
			    | ((spd.nbanks == 8 ? 1 : 0) << 14)
			    | ((spd.nrow_addr - 12) << 8)
			    | (spd.ncol_addr - 8));
	debug("\n");
	debug("cs0_bnds = 0x%08x\n", ddr->csbnds[0].csbnds);
	debug("cs0_config = 0x%08x\n", ddr->cs_config[0]);

	if (n_ranks == 2) {
		ddr->csbnds[1].csbnds = ((banksize(spd.row_dens) >> 8)
					| ((banksize(spd.row_dens) >> 23) - 1));
		ddr->cs_config[1] = (1 << 31
				    | (odt_rd_cfg << 20)
				    | (odt_wr_cfg << 16)
				    | ((spd.nbanks == 8 ? 1 : 0) << 14)
				    | ((spd.nrow_addr - 12) << 8)
				    | (spd.ncol_addr - 8));
		debug("cs1_bnds = 0x%08x\n", ddr->csbnds[1].csbnds);
		debug("cs1_config = 0x%08x\n", ddr->cs_config[1]);
	}
#else
	ddr->csbnds[2].csbnds = (banksize(spd.row_dens) >> 24) - 1;
	ddr->cs_config[2] = (1 << 31
			    | (odt_rd_cfg << 20)
			    | (odt_wr_cfg << 16)
			    | ((spd.nbanks == 8 ? 1 : 0) << 14)
			    | ((spd.nrow_addr - 12) << 8)
			    | (spd.ncol_addr - 8));
	debug("\n");
	debug("cs2_bnds = 0x%08x\n", ddr->csbnds[2].csbnds);
	debug("cs2_config = 0x%08x\n", ddr->cs_config[2]);

	if (n_ranks == 2) {
		ddr->csbnds[3].csbnds = ((banksize(spd.row_dens) >> 8)
					| ((banksize(spd.row_dens) >> 23) - 1));
		ddr->cs_config[3] = (1 << 31
				    | (odt_rd_cfg << 20)
				    | (odt_wr_cfg << 16)
				    | ((spd.nbanks == 8 ? 1 : 0) << 14)
				    | ((spd.nrow_addr - 12) << 8)
				    | (spd.ncol_addr - 8));
		debug("cs3_bnds = 0x%08x\n", ddr->csbnds[3].csbnds);
		debug("cs3_config = 0x%08x\n", ddr->cs_config[3]);
	}
#endif

	/*
	 * Figure out the memory size in Megabytes.
	 */
	memsize = n_ranks * banksize(spd.row_dens) / 0x100000;

	/*
	 * First supported LAW size is 16M, at LAWAR_SIZE_16M == 23.
	 */
	law_size = 19 + __ilog2(memsize);

	/*
	 * Set up the LAWBAR for all of DDR.
	 */
	ecm->bar = CONFIG_SYS_DDR_SDRAM_BASE & 0xfffff000;
	ecm->ar = (LAWAR_EN | LAWAR_TRGT_IF_DDR | (LAWAR_SIZE & law_size));
	debug("DDR:bar=0x%08x\n", ecm->bar);
	debug("DDR:ar=0x%08x\n", ecm->ar);
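
	/*
	 * Worked example (informative): a single-rank 256 MiB module gives
	 * memsize = 256 and __ilog2(256) = 8, so law_size = 27; the LAWAR
	 * SIZE field encodes a window of 2^(SIZE + 1) bytes, i.e.
	 * 2^28 = 256 MiB, consistent with LAWAR_SIZE_16M == 23 noted above.
	 */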

	/*
	 * Find the largest CAS latency by locating the highest 1 bit
	 * in the spd.cas_lat field.  Translate it to a DDR
	 * controller field value:
	 *
	 *	CAS Lat   DDR I     DDR II    Ctrl
	 *	Clocks    SPD Bit   SPD Bit   Value
	 *	-------   -------   -------   -----
	 *	 1.0        0                 0001
	 *	 1.5        1                 0010
	 *	 2.0        2         2       0011
	 *	 2.5        3                 0100
	 *	 3.0        4         3       0101
	 *	 3.5        5                 0110
	 *	 4.0        6         4       0111
	 *	 4.5                          1000
	 *	 5.0                  5       1001
	 */
	caslat = __ilog2(spd.cas_lat);
	if ((spd.mem_type == SPD_MEMTYPE_DDR) && (caslat > 6)) {
		printf("DDR I: Invalid SPD CAS Latency: 0x%x.\n", spd.cas_lat);
		return 0;
	} else if (spd.mem_type == SPD_MEMTYPE_DDR2
		   && (caslat < 2 || caslat > 5)) {
		printf("DDR II: Invalid SPD CAS Latency: 0x%x.\n",
		       spd.cas_lat);
		return 0;
	}
	debug("DDR: caslat SPD bit is %d\n", caslat);

	max_bus_clk = 1000 * 10 / (((spd.clk_cycle & 0xF0) >> 4) * 10
				   + (spd.clk_cycle & 0x0f));
	max_data_rate = max_bus_clk * 2;

	debug("DDR:Module maximum data rate is: %d MHz\n", max_data_rate);

	ddrc_clk = gd->mem_clk / 1000000;
	effective_data_rate = 0;

	if (max_data_rate >= 460) {		/* it is DDR2-800, 667 or 533 */
		if (spd.cas_lat & 0x08)
			caslat = 3;
		else
			caslat = 4;
		if (ddrc_clk <= 460 && ddrc_clk > 350)
			effective_data_rate = 400;
		else if (ddrc_clk <= 350 && ddrc_clk > 280)
			effective_data_rate = 333;
		else if (ddrc_clk <= 280 && ddrc_clk > 230)
			effective_data_rate = 266;
		else
			effective_data_rate = 200;
	} else if (max_data_rate >= 390 && max_data_rate < 460) { /* it is DDR 400 */
		if (ddrc_clk <= 460 && ddrc_clk > 350) {
			/* DDR controller clk at 350~460 */
			effective_data_rate = 400;	/* 5ns */
			caslat = caslat;
		} else if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			effective_data_rate = 333;	/* 6ns */
			if (spd.clk_cycle2 == 0x60)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266;	/* 7.5ns */
			if (spd.clk_cycle3 == 0x75)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0x75)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200;	/* 10ns */
			if (spd.clk_cycle3 == 0xa0)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
			else
				caslat = caslat;
		}
	} else if (max_data_rate >= 323) {	/* it is DDR 333 */
		if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			effective_data_rate = 333;	/* 6ns */
			caslat = caslat;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266;	/* 7.5ns */
			if (spd.clk_cycle2 == 0x75)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200;	/* 10ns */
			if (spd.clk_cycle3 == 0xa0)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
			else
				caslat = caslat;
		}
	} else if (max_data_rate >= 256) {	/* it is DDR 266 */
		if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			printf("DDR: DDR controller freq is more than "
			       "max data rate of the module\n");
			return 0;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266;	/* 7.5ns */
			caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200;	/* 10ns */
			if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
		}
	} else if (max_data_rate >= 190) {	/* it is DDR 200 */
		if (ddrc_clk <= 350 && ddrc_clk > 230) {
			/* DDR controller clk at 230~350 */
			printf("DDR: DDR controller freq is more than "
			       "max data rate of the module\n");
			return 0;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200;	/* 10ns */
			caslat = caslat;
		}
	}

	debug("DDR:Effective data rate is: %dMHz\n", effective_data_rate);
	debug("DDR:The MSB 1 of CAS Latency is: %d\n", caslat);

	/*
	 * Errata DDR6 workaround: input enable 2 cycles earlier.
	 * Applies to MPC834x Rev 1.0/1.1 and MPC8360 Rev 1.1/1.2.
	 */
	if (PVR_MAJ(pvr) <= 1 && spd.mem_type == SPD_MEMTYPE_DDR) {
		if (caslat == 2)
			ddr->debug_reg = 0x201c0000;	/* CL=2 */
		else if (caslat == 3)
			ddr->debug_reg = 0x202c0000;	/* CL=2.5 */
		else if (caslat == 4)
			ddr->debug_reg = 0x202c0000;	/* CL=3.0 */

		__asm__ __volatile__ ("sync");

		debug("Errata DDR6 (debug_reg=0x%08x)\n", ddr->debug_reg);
	}

	/*
	 * Convert caslat clocks to the DDR controller value.
	 * Force caslat_ctrl to be DDR controller field-sized.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		caslat_ctrl = (caslat + 1) & 0x07;
	} else {
		caslat_ctrl = (2 * caslat - 1) & 0x0f;
	}

	debug("DDR: effective data rate is %d MHz\n", effective_data_rate);
	debug("DDR: caslat SPD bit is %d, controller field is 0x%x\n",
	      caslat, caslat_ctrl);
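
	/*
	 * Informative: for DDR II the controller field is 2 * CL - 1, so
	 * CL = 3 encodes as 0x5 and CL = 4 as 0x7; for DDR I the SPD bit
	 * position found above (1 = CL 1.5, 2 = CL 2.0, ...) maps to the
	 * field as position + 1, matching the table earlier in this file.
	 */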

	/*
	 * Timing Config 0.
	 * Avoid writing it for DDR I.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		unsigned char taxpd_clk = 8;		/* By the book. */
		unsigned char tmrd_clk = 2;		/* By the book. */
		unsigned char act_pd_exit = 2;		/* Empirical? */
		unsigned char pre_pd_exit = 6;		/* Empirical? */

		ddr->timing_cfg_0 = (0
			| ((act_pd_exit & 0x7) << 20)	/* ACT_PD_EXIT */
			| ((pre_pd_exit & 0x7) << 16)	/* PRE_PD_EXIT */
			| ((taxpd_clk & 0xf) << 8)	/* ODT_PD_EXIT */
			| ((tmrd_clk & 0xf) << 0)	/* MRS_CYC */
			);
		debug("DDR: timing_cfg_0 = 0x%08x\n", ddr->timing_cfg_0);
	}

	/*
	 * For DDR I, WRREC (Twr) and WRTORD (Twtr) are not in the SPD,
	 * so use conservative values.
	 * For DDR II, they are bytes 36 and 37, in quarter nanoseconds.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		twr_clk = 3;	/* Clocks */
		twtr_clk = 1;	/* Clocks */
	} else {
		twr_clk = picos_to_clk(spd.twr * 250);
		twtr_clk = picos_to_clk(spd.twtr * 250);
		if (twtr_clk < 2)
			twtr_clk = 2;
	}

	/*
	 * Calculate Trfc, in picoseconds.
	 * DDR I: Byte 42 straight up in ns.
	 * DDR II: Bytes 40 and 42 swizzled some, in ns.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		trfc = spd.trfc * 1000;		/* up to ps */
	} else {
		unsigned int byte40_table_ps[8] = {
			0,
			250,
			330,
			500,
			660,
			750,
			0,
			0
		};

		trfc = (((spd.trctrfc_ext & 0x1) * 256) + spd.trfc) * 1000
		       + byte40_table_ps[(spd.trctrfc_ext >> 1) & 0x7];
	}
	trfc_clk = picos_to_clk(trfc);

	/*
	 * Trcd, Byte 29, from quarter nanoseconds to ps and clocks.
	 */
	trcd_clk = picos_to_clk(spd.trcd * 250) & 0x7;

	/*
	 * Convert trfc_clk to DDR controller fields.  DDR I should
	 * fit in the REFREC field (16-19) of TIMING_CFG_1, but the
	 * 83xx controller has an extended REFREC field of three bits.
	 * The controller automatically adds 8 clocks to this value,
	 * so preadjust it down by 8 before splitting it up.
	 */
	trfc_low = (trfc_clk - 8) & 0xf;
	trfc_high = ((trfc_clk - 8) >> 4) & 0x3;

	ddr->timing_cfg_1 =
	    (((picos_to_clk(spd.trp * 250) & 0x07) << 28) |	/* PRETOACT */
	     ((picos_to_clk(spd.tras * 1000) & 0x0f) << 24) |	/* ACTTOPRE */
	     (trcd_clk << 20) |					/* ACTTORW */
	     (caslat_ctrl << 16) |				/* CASLAT */
	     (trfc_low << 12) |					/* REFREC */
	     ((twr_clk & 0x07) << 8) |				/* WRREC */
	     ((picos_to_clk(spd.trrd * 250) & 0x07) << 4) |	/* ACTTOACT */
	     ((twtr_clk & 0x07) << 0)				/* WRTORD */
	    );

	/*
	 * Additive Latency
	 * For DDR I, 0.
	 * For DDR II, with ODT enabled, use "a value" less than ACTTORW,
	 * which comes from Trcd, and also note that:
	 *	add_lat + caslat must be >= 4
	 */
	add_lat = 0;
	if (spd.mem_type == SPD_MEMTYPE_DDR2
	    && (odt_wr_cfg || odt_rd_cfg)
	    && (caslat < 4)) {
		add_lat = 4 - caslat;
		if ((add_lat + caslat) < 4) {
			add_lat = 0;
		}
	}
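
	/*
	 * Example (informative): with ODT enabled and CL = 3, add_lat
	 * becomes 1, giving a read latency of AL + CL = 4 as required by
	 * the rule above; for CL >= 4 no additive latency is needed and
	 * add_lat stays 0.
	 */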

	/*
	 * Write Data Delay
	 * Historically 0x2 == 4/8 clock delay.
	 * Empirically, 0x3 == 6/8 clock delay is suggested for DDR I 266.
	 */
	wr_data_delay = 2;
#ifdef CONFIG_SYS_DDR_WRITE_DATA_DELAY
	wr_data_delay = CONFIG_SYS_DDR_WRITE_DATA_DELAY;
#endif

	/*
	 * Write Latency
	 * Read to Precharge
	 * Minimum CKE Pulse Width
	 * Four Activate Window
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		/*
		 * This is a lie.  It should really be 1, but if it is
		 * set to 1, bits overlap into the old controller's
		 * otherwise unused ACSM field.  If we leave it 0, then
		 * the HW will magically treat it as 1 for DDR 1.  Oh Yea.
		 */
		wr_lat = 0;

		trtp_clk = 2;		/* By the book. */
		cke_min_clk = 1;	/* By the book. */
		four_act = 1;		/* By the book. */
	} else {
		wr_lat = caslat - 1;

		/* Convert the SPD value from quarter nanoseconds to picos. */
		trtp_clk = picos_to_clk(spd.trtp * 250);
		if (trtp_clk < 2)
			trtp_clk = 2;
		trtp_clk += add_lat;

		cke_min_clk = 3;		/* By the book. */
		four_act = picos_to_clk(37500);	/* By the book.  1k pages? */
	}

	/*
	 * Empirically set ~MCAS-to-preamble override for DDR 2.
	 * Your mileage will vary.
	 */
	cpo = 0;
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
#ifdef CONFIG_SYS_DDR_CPO
		cpo = CONFIG_SYS_DDR_CPO;
#else
		if (effective_data_rate == 266) {
			cpo = 0x4;	/* READ_LAT + 1/2 */
		} else if (effective_data_rate == 333) {
			cpo = 0x6;	/* READ_LAT + 1 */
		} else if (effective_data_rate == 400) {
			cpo = 0x7;	/* READ_LAT + 5/4 */
		} else {
			/* Automatic calibration */
			cpo = 0x1f;
		}
#endif
	}

	ddr->timing_cfg_2 = (0
		| ((add_lat & 0x7) << 28)	/* ADD_LAT */
		| ((cpo & 0x1f) << 23)		/* CPO */
		| ((wr_lat & 0x7) << 19)	/* WR_LAT */
		| ((trtp_clk & 0x7) << 13)	/* RD_TO_PRE */
		| ((wr_data_delay & 0x7) << 10)	/* WR_DATA_DELAY */
		| ((cke_min_clk & 0x7) << 6)	/* CKE_PLS */
		| ((four_act & 0x1f) << 0)	/* FOUR_ACT */
		);

	debug("DDR:timing_cfg_1=0x%08x\n", ddr->timing_cfg_1);
	debug("DDR:timing_cfg_2=0x%08x\n", ddr->timing_cfg_2);

	/* Check the DIMM data bus width */
	if (spd.dataw_lsb < 64) {
		if (spd.mem_type == SPD_MEMTYPE_DDR)
			burstlen = 0x03; /* 32-bit data bus, burst length is 8 */
		else
			burstlen = 0x02; /* 32-bit data bus, burst length is 4 */
		debug("\n DDR DIMM: data bus width is 32 bit");
	} else {
		burstlen = 0x02; /* Others act as a 64-bit bus, burst length is 4 */
		debug("\n DDR DIMM: data bus width is 64 bit");
	}

	/* Is this an ECC DDR chip? */
	if (spd.config == 0x02)
		debug(" with ECC\n");
	else
		debug(" without ECC\n");

	/*
	 * Burst length is always 4 for a 64-bit data bus and 8 for a
	 * 32-bit data bus; burst type is sequential.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		switch (caslat) {
		case 1:
			ddr->sdram_mode = 0x50 | burstlen;	/* CL=1.5 */
			break;
		case 2:
			ddr->sdram_mode = 0x20 | burstlen;	/* CL=2.0 */
			break;
		case 3:
			ddr->sdram_mode = 0x60 | burstlen;	/* CL=2.5 */
			break;
		case 4:
			ddr->sdram_mode = 0x30 | burstlen;	/* CL=3.0 */
			break;
		default:
			printf("DDR: only CL 1.5, 2.0, 2.5 and 3.0 are supported\n");
			return 0;
		}
	} else {
		mode_odt_enable = 0x0;	/* Default disabled */
		if (odt_wr_cfg || odt_rd_cfg) {
			/*
			 * Bits 6 and 2 in Extended MRS(1)
			 * Bit 2 == 0x04 == 75 Ohm, with 2 DIMM modules.
			 * Bit 6 == 0x40 == 150 Ohm, with 1 DIMM module.
			 */
			mode_odt_enable = 0x40;	/* 150 Ohm */
		}

		ddr->sdram_mode = (0
			| (1 << (16 + 10))		/* DQS Differential disable */
#ifdef CONFIG_SYS_DDR_MODE_WEAK
			| (1 << (16 + 1))		/* weak driver (~60%) */
#endif
			| (add_lat << (16 + 3))		/* Additive Latency in EMRS1 */
			| (mode_odt_enable << 16)	/* ODT Enable in EMRS1 */
			| ((twr_clk - 1) << 9)		/* Write Recovery Autopre */
			| (caslat << 4)			/* caslat */
			| (burstlen << 0)		/* Burst length */
			);
	}
	debug("DDR:sdram_mode=0x%08x\n", ddr->sdram_mode);

	/*
	 * Clear EMRS2 and EMRS3.
	 */
	ddr->sdram_mode2 = 0;
	debug("DDR: sdram_mode2 = 0x%08x\n", ddr->sdram_mode2);

	switch (spd.refresh) {
	case 0x00:
	case 0x80:
		refresh_clk = picos_to_clk(15625000);
		break;
	case 0x01:
	case 0x81:
		refresh_clk = picos_to_clk(3900000);
		break;
	case 0x02:
	case 0x82:
		refresh_clk = picos_to_clk(7800000);
		break;
	case 0x03:
	case 0x83:
		refresh_clk = picos_to_clk(31300000);
		break;
	case 0x04:
	case 0x84:
		refresh_clk = picos_to_clk(62500000);
		break;
	case 0x05:
	case 0x85:
		refresh_clk = picos_to_clk(125000000);
		break;
	default:
		refresh_clk = 0x512;
		break;
	}

	/*
	 * Set BSTOPRE to 0x100 for page mode.
	 * If auto-precharge is used, set BSTOPRE = 0.
	 */
	ddr->sdram_interval = ((refresh_clk & 0x3fff) << 16) | 0x100;
	debug("DDR:sdram_interval=0x%08x\n", ddr->sdram_interval);
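
	/*
	 * Worked example (informative): a standard 7.8 us refresh interval
	 * (SPD refresh byte 0x02/0x82 above) with a 200 MHz DDR bus clock
	 * (5000 ps per cycle) gives refresh_clk = picos_to_clk(7800000)
	 * = 1560 clocks between REFRESH commands.
	 */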

	/*
	 * SDRAM Cfg 2
	 */
	odt_cfg = 0;
#ifndef CONFIG_NEVER_ASSERT_ODT_TO_CPU
	if (odt_rd_cfg | odt_wr_cfg) {
		odt_cfg = 0x2;		/* ODT to IOs during reads */
	}
#endif
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		ddr->sdram_cfg2 = (0
			| (0 << 26)		/* True DQS */
			| (odt_cfg << 21)	/* ODT only read */
			| (1 << 12)		/* 1 refresh at a time */
			);

		debug("DDR: sdram_cfg2 = 0x%08x\n", ddr->sdram_cfg2);
	}

#ifdef CONFIG_SYS_DDR_SDRAM_CLK_CNTL	/* Optional platform specific value */
	ddr->sdram_clk_cntl = CONFIG_SYS_DDR_SDRAM_CLK_CNTL;
#endif
	debug("DDR:sdram_clk_cntl=0x%08x\n", ddr->sdram_clk_cntl);

	asm("sync;isync");

	udelay(600);
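
	/*
	 * Note (informative): the bit numbers in the comment below use the
	 * PowerPC big-endian convention where bit 0 is the most significant
	 * bit, so sdram_cfg[0] corresponds to the 0x80000000 mask
	 * (SDRAM_CFG_MEM_EN).
	 */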

	/*
	 * Figure out the settings for the sdram_cfg register.  Build up
	 * the value in 'sdram_cfg' before writing, since the write into
	 * the register will actually enable the memory controller; all
	 * settings must be in place before enabling it.
	 *
	 * sdram_cfg[0]   = 1 (DDR SDRAM logic enable)
	 * sdram_cfg[1]   = 1 (self-refresh enable)
	 * sdram_cfg[5:7] = (SDRAM type = DDR SDRAM)
	 *			010 DDR 1 SDRAM
	 *			011 DDR 2 SDRAM
	 * sdram_cfg[12]  = 0 (32_BE = 0, 64-bit bus mode)
	 * sdram_cfg[13]  = 0 (8_BE = 0, 4-beat bursts)
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR)
		sdram_type = SDRAM_CFG_SDRAM_TYPE_DDR1;
	else
		sdram_type = SDRAM_CFG_SDRAM_TYPE_DDR2;

	sdram_cfg = (0
		| SDRAM_CFG_MEM_EN	/* DDR enable */
		| SDRAM_CFG_SREN	/* Self refresh */
		| sdram_type		/* SDRAM type */
		);

	/* sdram_cfg[3] = RD_EN - registered DIMM enable */
	if (spd.mod_attr & 0x02)
		sdram_cfg |= SDRAM_CFG_RD_EN;

	/* The DIMM is 32 bits wide */
	if (spd.dataw_lsb < 64) {
		if (spd.mem_type == SPD_MEMTYPE_DDR)
			sdram_cfg |= SDRAM_CFG_32_BE | SDRAM_CFG_8_BE;
		if (spd.mem_type == SPD_MEMTYPE_DDR2)
			sdram_cfg |= SDRAM_CFG_32_BE;
	}

	ddrc_ecc_enable = 0;

#if defined(CONFIG_DDR_ECC)
	/* Enable ECC with sdram_cfg[2] */
	if (spd.config == 0x02) {
		sdram_cfg |= 0x20000000;
		ddrc_ecc_enable = 1;
		/* disable error detection */
		ddr->err_disable = ~ECC_ERROR_ENABLE;
		/*
		 * Set the single-bit error threshold to the maximum value
		 * and reset the counter to zero.
		 */
		ddr->err_sbe = (255 << ECC_ERROR_MAN_SBET_SHIFT) |
			       (0 << ECC_ERROR_MAN_SBEC_SHIFT);
	}

	debug("DDR:err_disable=0x%08x\n", ddr->err_disable);
	debug("DDR:err_sbe=0x%08x\n", ddr->err_sbe);
#endif
	debug(" DDRC ECC mode: %s\n", ddrc_ecc_enable ? "ON" : "OFF");

#if defined(CONFIG_DDR_2T_TIMING)
	/*
	 * Enable 2T timing by setting sdram_cfg[16].
	 */
	sdram_cfg |= SDRAM_CFG_2T_EN;
#endif
	/* Enable the controller, and GO! */
	ddr->sdram_cfg = sdram_cfg;
	asm("sync;isync");
	udelay(500);

	debug("DDR:sdram_cfg=0x%08x\n", ddr->sdram_cfg);
	return memsize;	/* in MBytes */
}
#endif /* CONFIG_SPD_EEPROM */

#if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRCONTROLLER)
/*
 * Use the timebase counter; get_timer() is not available
 * at this point of the initialization yet.
 */
static __inline__ unsigned long get_tbms(void)
{
	unsigned long tbl;
	unsigned long tbu1, tbu2;
	unsigned long ms;
	unsigned long long tmp;

	ulong tbclk = get_tbclk();

	/*
	 * Sample the timebase ticks; re-read TBU to guard against a
	 * carry from TBL into TBU between the two reads.
	 */
	do {
		asm volatile ("mftbu %0" : "=r" (tbu1) :);
		asm volatile ("mftb %0" : "=r" (tbl) :);
		asm volatile ("mftbu %0" : "=r" (tbu2) :);
	} while (tbu1 != tbu2);

	/* convert ticks to ms */
	tmp = (unsigned long long)(tbu1);
	tmp = (tmp << 32);
	tmp += (unsigned long long)(tbl);
	ms = tmp / (tbclk / 1000);

	return ms;
}
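
/*
 * Note on the initialization below (informative): the controller generates
 * ECC check bits per 64-bit data beat, so every DDR location is written
 * once with full 64-bit stores (ppcDWstore goes through a floating-point
 * register) before error reporting is enabled; narrower writes would cause
 * read-modify-write cycles on memory that does not yet hold valid ECC.
 */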
"ON":"OFF"); 844 845 #if defined(CONFIG_DDR_2T_TIMING) 846 /* 847 * Enable 2T timing by setting sdram_cfg[16]. 848 */ 849 sdram_cfg |= SDRAM_CFG_2T_EN; 850 #endif 851 /* Enable controller, and GO! */ 852 ddr->sdram_cfg = sdram_cfg; 853 asm("sync;isync"); 854 udelay(500); 855 856 debug("DDR:sdram_cfg=0x%08x\n", ddr->sdram_cfg); 857 return memsize; /*in MBytes*/ 858 } 859 #endif /* CONFIG_SPD_EEPROM */ 860 861 #if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRCONTROLLER) 862 /* 863 * Use timebase counter, get_timer() is not availabe 864 * at this point of initialization yet. 865 */ 866 static __inline__ unsigned long get_tbms (void) 867 { 868 unsigned long tbl; 869 unsigned long tbu1, tbu2; 870 unsigned long ms; 871 unsigned long long tmp; 872 873 ulong tbclk = get_tbclk(); 874 875 /* get the timebase ticks */ 876 do { 877 asm volatile ("mftbu %0":"=r" (tbu1):); 878 asm volatile ("mftb %0":"=r" (tbl):); 879 asm volatile ("mftbu %0":"=r" (tbu2):); 880 } while (tbu1 != tbu2); 881 882 /* convert ticks to ms */ 883 tmp = (unsigned long long)(tbu1); 884 tmp = (tmp << 32); 885 tmp += (unsigned long long)(tbl); 886 ms = tmp/(tbclk/1000); 887 888 return ms; 889 } 890 891 /* 892 * Initialize all of memory for ECC, then enable errors. 893 */ 894 void ddr_enable_ecc(unsigned int dram_size) 895 { 896 volatile immap_t *immap = (immap_t *)CONFIG_SYS_IMMR; 897 volatile ddr83xx_t *ddr= &immap->ddr; 898 unsigned long t_start, t_end; 899 register u64 *p; 900 register uint size; 901 unsigned int pattern[2]; 902 903 icache_enable(); 904 t_start = get_tbms(); 905 pattern[0] = 0xdeadbeef; 906 pattern[1] = 0xdeadbeef; 907 908 #if defined(CONFIG_DDR_ECC_INIT_VIA_DMA) 909 dma_meminit(pattern[0], dram_size); 910 #else 911 debug("ddr init: CPU FP write method\n"); 912 size = dram_size; 913 for (p = 0; p < (u64*)(size); p++) { 914 ppcDWstore((u32*)p, pattern); 915 } 916 __asm__ __volatile__ ("sync"); 917 #endif 918 919 t_end = get_tbms(); 920 icache_disable(); 921 922 debug("\nREADY!!\n"); 923 debug("ddr init duration: %ld ms\n", t_end - t_start); 924 925 /* Clear All ECC Errors */ 926 if ((ddr->err_detect & ECC_ERROR_DETECT_MME) == ECC_ERROR_DETECT_MME) 927 ddr->err_detect |= ECC_ERROR_DETECT_MME; 928 if ((ddr->err_detect & ECC_ERROR_DETECT_MBE) == ECC_ERROR_DETECT_MBE) 929 ddr->err_detect |= ECC_ERROR_DETECT_MBE; 930 if ((ddr->err_detect & ECC_ERROR_DETECT_SBE) == ECC_ERROR_DETECT_SBE) 931 ddr->err_detect |= ECC_ERROR_DETECT_SBE; 932 if ((ddr->err_detect & ECC_ERROR_DETECT_MSE) == ECC_ERROR_DETECT_MSE) 933 ddr->err_detect |= ECC_ERROR_DETECT_MSE; 934 935 /* Disable ECC-Interrupts */ 936 ddr->err_int_en &= ECC_ERR_INT_DISABLE; 937 938 /* Enable errors for ECC */ 939 ddr->err_disable &= ECC_ERROR_ENABLE; 940 941 __asm__ __volatile__ ("sync"); 942 __asm__ __volatile__ ("isync"); 943 } 944 #endif /* CONFIG_DDR_ECC */ 945