/*
 * (C) Copyright 2006-2007 Freescale Semiconductor, Inc.
 *
 * (C) Copyright 2006
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 *
 * Copyright (C) 2004-2006 Freescale Semiconductor, Inc.
 * (C) Copyright 2003 Motorola Inc.
 * Xianghua Xiao (X.Xiao@motorola.com)
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */

#include <common.h>
#include <asm/processor.h>
#include <asm/io.h>
#include <i2c.h>
#include <spd.h>
#include <asm/mmu.h>
#include <spd_sdram.h>

DECLARE_GLOBAL_DATA_PTR;

void board_add_ram_info(int use_default)
{
	volatile immap_t *immap = (immap_t *) CONFIG_SYS_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	char buf[32];

	printf(" (DDR%d", ((ddr->sdram_cfg & SDRAM_CFG_SDRAM_TYPE_MASK)
			   >> SDRAM_CFG_SDRAM_TYPE_SHIFT) - 1);

	if (ddr->sdram_cfg & SDRAM_CFG_32_BE)
		puts(", 32-bit");
	else
		puts(", 64-bit");

	if (ddr->sdram_cfg & SDRAM_CFG_ECC_EN)
		puts(", ECC on");
	else
		puts(", ECC off");

	printf(", %s MHz)", strmhz(buf, gd->mem_clk));

#if defined(CONFIG_SYS_LB_SDRAM) && defined(CONFIG_SYS_LBC_SDRAM_SIZE)
	puts("\nSDRAM: ");
	print_size(CONFIG_SYS_LBC_SDRAM_SIZE * 1024 * 1024, " (local bus)");
#endif
}

#ifdef CONFIG_SPD_EEPROM
#ifndef	CONFIG_SYS_READ_SPD
#define CONFIG_SYS_READ_SPD	i2c_read
#endif

/*
 * Convert picoseconds into clock cycles (rounding up if needed).
 */
int
picos_to_clk(int picos)
{
	unsigned int mem_bus_clk;
	int clks;

	mem_bus_clk = gd->mem_clk >> 1;
	clks = picos / (1000000000 / (mem_bus_clk / 1000));
	if (picos % (1000000000 / (mem_bus_clk / 1000)) != 0)
		clks++;

	return clks;
}
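
/*
 * Worked examples (a sketch; this assumes gd->mem_clk carries the DDR data
 * rate in Hz, which is how the rate checks in spd_sdram() below treat it):
 * on a 333 MT/s module the memory bus clock is ~166 MHz, i.e. a ~6000 ps
 * period, so picos_to_clk(15000) for a 15 ns tRP returns 3 (two whole
 * clocks plus the round-up).
 *
 * banksize() below decodes SPD byte 31 (bank/rank density), a rotated bit
 * field in the usual JEDEC encoding: 0x04 = 16 MiB, 0x08 = 32 MiB, ...,
 * 0x80 = 512 MiB, wrapping around to 0x01 = 1 GiB and 0x02 = 2 GiB.
 * E.g. banksize(0x40) = 0x10000000 (256 MiB).
 */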

unsigned int banksize(unsigned char row_dens)
{
	return ((row_dens >> 2) | ((row_dens & 3) << 6)) << 24;
}

int read_spd(uint addr)
{
	return ((int) addr);
}

#undef SPD_DEBUG
#ifdef SPD_DEBUG
static void spd_debug(spd_eeprom_t *spd)
{
	printf ("\nDIMM type:       %-18.18s\n", spd->mpart);
	printf ("SPD size:        %d\n", spd->info_size);
	printf ("EEPROM size:     %d\n", 1 << spd->chip_size);
	printf ("Memory type:     %d\n", spd->mem_type);
	printf ("Row addr:        %d\n", spd->nrow_addr);
	printf ("Column addr:     %d\n", spd->ncol_addr);
	printf ("# of rows:       %d\n", spd->nrows);
	printf ("Row density:     %d\n", spd->row_dens);
	printf ("# of banks:      %d\n", spd->nbanks);
	printf ("Data width:      %d\n",
			256 * spd->dataw_msb + spd->dataw_lsb);
	printf ("Chip width:      %d\n", spd->primw);
	printf ("Refresh rate:    %02X\n", spd->refresh);
	printf ("CAS latencies:   %02X\n", spd->cas_lat);
	printf ("Write latencies: %02X\n", spd->write_lat);
	printf ("tRP:             %d\n", spd->trp);
	printf ("tRCD:            %d\n", spd->trcd);
	printf ("\n");
}
#endif /* SPD_DEBUG */

long int spd_sdram()
{
	volatile immap_t *immap = (immap_t *)CONFIG_SYS_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	volatile law83xx_t *ecm = &immap->sysconf.ddrlaw[0];
	spd_eeprom_t spd;
	unsigned int n_ranks;
	unsigned int odt_rd_cfg, odt_wr_cfg;
	unsigned char twr_clk, twtr_clk;
	unsigned int sdram_type;
	unsigned int memsize;
	unsigned int law_size;
	unsigned char caslat, caslat_ctrl;
	unsigned int trfc, trfc_clk, trfc_low, trfc_high;
	unsigned int trcd_clk, trtp_clk;
	unsigned char cke_min_clk;
	unsigned char add_lat, wr_lat;
	unsigned char wr_data_delay;
	unsigned char four_act;
	unsigned char cpo;
	unsigned char burstlen;
	unsigned char odt_cfg, mode_odt_enable;
	unsigned int max_bus_clk;
	unsigned int max_data_rate, effective_data_rate;
	unsigned int ddrc_clk;
	unsigned int refresh_clk;
	unsigned int sdram_cfg;
	unsigned int ddrc_ecc_enable;
	unsigned int pvr = get_pvr();

	/*
	 * First disable the memory controller (could be enabled
	 * by the debugger)
	 */
	clrsetbits_be32(&ddr->sdram_cfg, SDRAM_CFG_MEM_EN, 0);
	sync();
	isync();

	/* Read SPD parameters with I2C */
	CONFIG_SYS_READ_SPD(SPD_EEPROM_ADDRESS, 0, 1, (uchar *)&spd, sizeof(spd));
#ifdef SPD_DEBUG
	spd_debug(&spd);
#endif
	/* Check the memory type */
	if (spd.mem_type != SPD_MEMTYPE_DDR && spd.mem_type != SPD_MEMTYPE_DDR2) {
		debug("DDR: Module mem type is %02X\n", spd.mem_type);
		return 0;
	}

	/* Check the number of physical banks (ranks) */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		n_ranks = spd.nrows;
	} else {
		n_ranks = (spd.nrows & 0x7) + 1;
	}

	if (n_ranks > 2) {
		printf("DDR: The number of physical banks is %02X\n", n_ranks);
		return 0;
	}

	/* Check that the module's row address count is in the DDRC's range */
	if (spd.nrow_addr < 12 || spd.nrow_addr > 15) {
		printf("DDR: Row number is out of range of DDRC, row=%02X\n",
							 spd.nrow_addr);
		return 0;
	}

	/* Check that the module's column address count is in the DDRC's range */
	if (spd.ncol_addr < 8 || spd.ncol_addr > 11) {
		printf("DDR: Col number is out of range of DDRC, col=%02X\n",
							 spd.ncol_addr);
		return 0;
	}

#ifdef CONFIG_SYS_DDRCDR_VALUE
	/*
	 * Adjust DDR II IO voltage biasing.  It just makes it work.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		immap->sysconf.ddrcdr = CONFIG_SYS_DDRCDR_VALUE;
	}
	udelay(50000);
#endif

	/*
	 * ODT configuration recommendation from the DDR Controller chapter.
	 */
	odt_rd_cfg = 0;			/* Never assert ODT */
	odt_wr_cfg = 0;			/* Never assert ODT */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		odt_wr_cfg = 1;		/* Assert ODT on writes to CSn */
	}

	/* Setup the DDR chip select registers */
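	/*
	 * A note on the encoding (a sketch based on the arithmetic below):
	 * CSn_BNDS packs the rank's start and end addresses as address bits
	 * [0:7] (i.e. addr >> 24), so one 256 MiB rank starting at 0 yields
	 * csbnds = 0x0000000f, and a second identical rank stacked above it
	 * yields 0x0010001f.  CS_CONFIG enables the chip select and carries
	 * the ODT fields, the 8-bank flag, and the row/column address widths
	 * encoded relative to 12 row / 8 column bits.
	 */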
#ifdef CONFIG_SYS_83XX_DDR_USES_CS0
	ddr->csbnds[0].csbnds = (banksize(spd.row_dens) >> 24) - 1;
	ddr->cs_config[0] = ( 1 << 31
			    | (odt_rd_cfg << 20)
			    | (odt_wr_cfg << 16)
			    | ((spd.nbanks == 8 ? 1 : 0) << 14)
			    | ((spd.nrow_addr - 12) << 8)
			    | (spd.ncol_addr - 8) );
	debug("\n");
	debug("cs0_bnds = 0x%08x\n", ddr->csbnds[0].csbnds);
	debug("cs0_config = 0x%08x\n", ddr->cs_config[0]);

	if (n_ranks == 2) {
		ddr->csbnds[1].csbnds = ( (banksize(spd.row_dens) >> 8)
				  | ((banksize(spd.row_dens) >> 23) - 1) );
		ddr->cs_config[1] = ( 1 << 31
				    | (odt_rd_cfg << 20)
				    | (odt_wr_cfg << 16)
				    | ((spd.nbanks == 8 ? 1 : 0) << 14)
				    | ((spd.nrow_addr - 12) << 8)
				    | (spd.ncol_addr - 8) );
		debug("cs1_bnds = 0x%08x\n", ddr->csbnds[1].csbnds);
		debug("cs1_config = 0x%08x\n", ddr->cs_config[1]);
	}

#else
	ddr->csbnds[2].csbnds = (banksize(spd.row_dens) >> 24) - 1;
	ddr->cs_config[2] = ( 1 << 31
			    | (odt_rd_cfg << 20)
			    | (odt_wr_cfg << 16)
			    | ((spd.nbanks == 8 ? 1 : 0) << 14)
			    | ((spd.nrow_addr - 12) << 8)
			    | (spd.ncol_addr - 8) );
	debug("\n");
	debug("cs2_bnds = 0x%08x\n", ddr->csbnds[2].csbnds);
	debug("cs2_config = 0x%08x\n", ddr->cs_config[2]);

	if (n_ranks == 2) {
		ddr->csbnds[3].csbnds = ( (banksize(spd.row_dens) >> 8)
				  | ((banksize(spd.row_dens) >> 23) - 1) );
		ddr->cs_config[3] = ( 1 << 31
				    | (odt_rd_cfg << 20)
				    | (odt_wr_cfg << 16)
				    | ((spd.nbanks == 8 ? 1 : 0) << 14)
				    | ((spd.nrow_addr - 12) << 8)
				    | (spd.ncol_addr - 8) );
		debug("cs3_bnds = 0x%08x\n", ddr->csbnds[3].csbnds);
		debug("cs3_config = 0x%08x\n", ddr->cs_config[3]);
	}
#endif

	/*
	 * Figure out memory size in megabytes.
	 */
	memsize = n_ranks * banksize(spd.row_dens) / 0x100000;

	/*
	 * First supported LAW size is 16M, at LAWAR_SIZE_16M == 23.
	 */
	law_size = 19 + __ilog2(memsize);
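	/*
	 * e.g. (sketch) a single 256 MiB rank gives memsize = 256,
	 * __ilog2(256) = 8, so law_size = 27, which LAWAR_SIZE decodes as a
	 * 2^(27+1) byte = 256 MiB window.
	 */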

	/*
	 * Set up LAWBAR for all of DDR.
	 */
	ecm->bar = CONFIG_SYS_DDR_SDRAM_BASE & 0xfffff000;
	ecm->ar  = (LAWAR_EN | LAWAR_TRGT_IF_DDR | (LAWAR_SIZE & law_size));
	debug("DDR:bar=0x%08x\n", ecm->bar);
	debug("DDR:ar=0x%08x\n", ecm->ar);

	/*
	 * Find the largest CAS by locating the highest 1 bit
	 * in the spd.cas_lat field.  Translate it to a DDR
	 * controller field value:
	 *
	 *	CAS Lat	DDR I	DDR II	Ctrl
	 *	Clocks	SPD Bit	SPD Bit	Value
	 *	-------	-------	-------	-----
	 *	1.0	0		0001
	 *	1.5	1		0010
	 *	2.0	2	2	0011
	 *	2.5	3		0100
	 *	3.0	4	3	0101
	 *	3.5	5		0110
	 *	4.0	6	4	0111
	 *	4.5			1000
	 *	5.0		5	1001
	 */
	caslat = __ilog2(spd.cas_lat);
	if ((spd.mem_type == SPD_MEMTYPE_DDR)
	    && (caslat > 6)) {
		printf("DDR I: Invalid SPD CAS Latency: 0x%x.\n", spd.cas_lat);
		return 0;
	} else if (spd.mem_type == SPD_MEMTYPE_DDR2
		   && (caslat < 2 || caslat > 5)) {
		printf("DDR II: Invalid SPD CAS Latency: 0x%x.\n",
		       spd.cas_lat);
		return 0;
	}
	debug("DDR: caslat SPD bit is %d\n", caslat);

	max_bus_clk = 1000 * 10 / (((spd.clk_cycle & 0xF0) >> 4) * 10
			+ (spd.clk_cycle & 0x0f));
	max_data_rate = max_bus_clk * 2;
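	/*
	 * spd.clk_cycle is read as BCD-style nanoseconds (high nibble = ns,
	 * low nibble = tenths), so e.g. 0x50 = 5.0 ns gives a 200 MHz bus
	 * clock and a 400 MT/s maximum data rate, and 0x75 = 7.5 ns gives
	 * 133 MHz / 266 MT/s.
	 */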

	debug("DDR:Module maximum data rate is: %d MHz\n", max_data_rate);

	ddrc_clk = gd->mem_clk / 1000000;
	effective_data_rate = 0;

	if (max_data_rate >= 460) { /* it is DDR2-800, 667, 533 */
		if (spd.cas_lat & 0x08)
			caslat = 3;
		else
			caslat = 4;
		if (ddrc_clk <= 460 && ddrc_clk > 350)
			effective_data_rate = 400;
		else if (ddrc_clk <= 350 && ddrc_clk > 280)
			effective_data_rate = 333;
		else if (ddrc_clk <= 280 && ddrc_clk > 230)
			effective_data_rate = 266;
		else
			effective_data_rate = 200;
	} else if (max_data_rate >= 390 && max_data_rate < 460) { /* it is DDR 400 */
		if (ddrc_clk <= 460 && ddrc_clk > 350) {
			/* DDR controller clk at 350~460 */
			effective_data_rate = 400; /* 5ns */
			/* caslat unchanged */
		} else if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			effective_data_rate = 333; /* 6ns */
			if (spd.clk_cycle2 == 0x60)
				caslat = caslat - 1;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266; /* 7.5ns */
			if (spd.clk_cycle3 == 0x75)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0x75)
				caslat = caslat - 1;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			if (spd.clk_cycle3 == 0xa0)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
		}
	} else if (max_data_rate >= 323) { /* it is DDR 333 */
		if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			effective_data_rate = 333; /* 6ns */
			/* caslat unchanged */
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266; /* 7.5ns */
			if (spd.clk_cycle2 == 0x75)
				caslat = caslat - 1;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			if (spd.clk_cycle3 == 0xa0)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
		}
	} else if (max_data_rate >= 256) { /* it is DDR 266 */
		if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			printf("DDR: DDR controller freq is more than "
				"max data rate of the module\n");
			return 0;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266; /* 7.5ns */
			/* caslat unchanged */
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
		}
	} else if (max_data_rate >= 190) { /* it is DDR 200 */
		if (ddrc_clk <= 350 && ddrc_clk > 230) {
			/* DDR controller clk at 230~350 */
			printf("DDR: DDR controller freq is more than "
				"max data rate of the module\n");
			return 0;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			/* caslat unchanged */
		}
	}

	debug("DDR:Effective data rate is: %d MHz\n", effective_data_rate);
	debug("DDR:The MSB 1 of CAS Latency is: %d\n", caslat);

	/*
	 * Errata DDR6 workaround: enable the input two cycles earlier.
	 * Applies to MPC834x Rev 1.0/1.1 and MPC8360 Rev 1.1/1.2.
	 */
	if (PVR_MAJ(pvr) <= 1 && spd.mem_type == SPD_MEMTYPE_DDR) {
		if (caslat == 2)
			ddr->debug_reg = 0x201c0000; /* CL=2 */
		else if (caslat == 3)
			ddr->debug_reg = 0x202c0000; /* CL=2.5 */
		else if (caslat == 4)
			ddr->debug_reg = 0x202c0000; /* CL=3.0 */

		__asm__ __volatile__ ("sync");

		debug("Errata DDR6 (debug_reg=0x%08x)\n", ddr->debug_reg);
	}

	/*
	 * Convert caslat clocks to the DDR controller value.
	 * Force caslat_ctrl to be DDR controller field-sized.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		caslat_ctrl = (caslat + 1) & 0x07;
	} else {
		caslat_ctrl = (2 * caslat - 1) & 0x0f;
	}
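	/*
	 * e.g. (matching the table above) DDR I SPD bit 3 (CL 2.5) maps to
	 * caslat_ctrl = 4 (0b0100), and DDR II SPD bit 4 (CL 4.0) maps to
	 * caslat_ctrl = 2 * 4 - 1 = 7 (0b0111).
	 */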

	debug("DDR: effective data rate is %d MHz\n", effective_data_rate);
	debug("DDR: caslat SPD bit is %d, controller field is 0x%x\n",
	      caslat, caslat_ctrl);

	/*
	 * Timing Config 0.
	 * Avoid writing for DDR I.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		unsigned char taxpd_clk = 8;		/* By the book. */
		unsigned char tmrd_clk = 2;		/* By the book. */
		unsigned char act_pd_exit = 2;		/* Empirical? */
		unsigned char pre_pd_exit = 6;		/* Empirical? */

		ddr->timing_cfg_0 = (0
			| ((act_pd_exit & 0x7) << 20)	/* ACT_PD_EXIT */
			| ((pre_pd_exit & 0x7) << 16)	/* PRE_PD_EXIT */
			| ((taxpd_clk & 0xf) << 8)	/* ODT_PD_EXIT */
			| ((tmrd_clk & 0xf) << 0)	/* MRS_CYC */
			);
		debug("DDR: timing_cfg_0 = 0x%08x\n", ddr->timing_cfg_0);
	}

	/*
	 * For DDR I, WRREC (tWR) and WRTORD (tWTR) are not in the SPD;
	 * use conservative values.
	 * For DDR II, they are bytes 36 and 37, in quarter ns.
	 */

	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		twr_clk = 3;	/* Clocks */
		twtr_clk = 1;	/* Clocks */
	} else {
		twr_clk = picos_to_clk(spd.twr * 250);
		twtr_clk = picos_to_clk(spd.twtr * 250);
		if (twtr_clk < 2)
			twtr_clk = 2;
	}
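	/*
	 * e.g. (sketch) a DDR2 part reporting tWR = 15 ns (spd.twr = 60
	 * quarter-ns) on a 200 MHz bus (5000 ps clock) gives twr_clk = 3;
	 * a 7.5 ns tWTR gives twtr_clk = 2.
	 */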

	/*
	 * Calculate tRFC, in picos.
	 * DDR I:  Byte 42 straight up in ns.
	 * DDR II: Bytes 40 and 42 swizzled some, in ns.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		trfc = spd.trfc * 1000;		/* up to ps */
	} else {
		unsigned int byte40_table_ps[8] = {
			0,
			250,
			330,
			500,
			660,
			750,
			0,
			0
		};

		trfc = (((spd.trctrfc_ext & 0x1) * 256) + spd.trfc) * 1000
			+ byte40_table_ps[(spd.trctrfc_ext >> 1) & 0x7];
	}
	trfc_clk = picos_to_clk(trfc);

	/*
	 * tRCD, Byte 29, from quarter nanos to ps and clocks.
	 */
	trcd_clk = picos_to_clk(spd.trcd * 250) & 0x7;

	/*
	 * Convert trfc_clk to DDR controller fields.  DDR I should
	 * fit in the REFREC field (16-19) of TIMING_CFG_1, but the
	 * 83xx controller has an extended REFREC field of three bits.
	 * The controller automatically adds 8 clocks to this value,
	 * so pre-adjust it down by 8 before splitting it up.
	 */
	trfc_low = (trfc_clk - 8) & 0xf;
	trfc_high = ((trfc_clk - 8) >> 4) & 0x3;
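	/*
	 * e.g. (sketch) tRFC = 105 ns on a 166 MHz bus is 17.5 -> 18 clocks;
	 * after the implicit +8 the controller adds back, this splits into
	 * trfc_low = 10 and trfc_high = 0.
	 */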

	ddr->timing_cfg_1 =
	    (((picos_to_clk(spd.trp * 250) & 0x07) << 28 ) |	/* PRETOACT */
	     ((picos_to_clk(spd.tras * 1000) & 0x0f ) << 24 ) | /* ACTTOPRE */
	     (trcd_clk << 20 ) |				/* ACTTORW */
	     (caslat_ctrl << 16 ) |				/* CASLAT */
	     (trfc_low << 12 ) |				/* REFREC */
	     ((twr_clk & 0x07) << 8) |				/* WRREC */
	     ((picos_to_clk(spd.trrd * 250) & 0x07) << 4) |	/* ACTTOACT */
	     ((twtr_clk & 0x07) << 0)				/* WRTORD */
	    );

	/*
	 * Additive Latency
	 * For DDR I, 0.
	 * For DDR II, with ODT enabled, use "a value" less than ACTTORW,
	 * which comes from Trcd, and also note that:
	 *	add_lat + caslat must be >= 4
	 */
	add_lat = 0;
	if (spd.mem_type == SPD_MEMTYPE_DDR2
	    && (odt_wr_cfg || odt_rd_cfg)
	    && (caslat < 4)) {
		add_lat = 4 - caslat;
		if ((add_lat + caslat) < 4) {
			add_lat = 0;
		}
	}

	/*
	 * Write Data Delay
	 * Historically 0x2 == 4/8 clock delay.
	 * Empirically, 0x3 == 6/8 clock delay is suggested for DDR I 266.
	 */
	wr_data_delay = 2;

	/*
	 * Write Latency
	 * Read to Precharge
	 * Minimum CKE Pulse Width.
	 * Four Activate Window
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		/*
		 * This is a lie.  It should really be 1, but if it is
		 * set to 1, bits overlap into the old controller's
		 * otherwise unused ACSM field.  If we leave it 0, then
		 * the HW will magically treat it as 1 for DDR 1.  Oh Yea.
		 */
		wr_lat = 0;

		trtp_clk = 2;		/* By the book. */
		cke_min_clk = 1;	/* By the book. */
		four_act = 1;		/* By the book. */

	} else {
		wr_lat = caslat - 1;

		/* Convert SPD value from quarter nanos to picos. */
		trtp_clk = picos_to_clk(spd.trtp * 250);
		if (trtp_clk < 2)
			trtp_clk = 2;
		trtp_clk += add_lat;

		cke_min_clk = 3;	/* By the book. */
		four_act = picos_to_clk(37500);	/* By the book. 1k pages? */
	}

	/*
	 * Empirically set ~MCAS-to-preamble override for DDR 2.
	 * Your mileage may vary.
	 */
	cpo = 0;
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		if (effective_data_rate == 266) {
			cpo = 0x4;		/* READ_LAT + 1/2 */
		} else if (effective_data_rate == 333) {
			cpo = 0x6;		/* READ_LAT + 1 */
		} else if (effective_data_rate == 400) {
			cpo = 0x7;		/* READ_LAT + 5/4 */
		} else {
			/* Automatic calibration */
			cpo = 0x1f;
		}
	}

	ddr->timing_cfg_2 = (0
		| ((add_lat & 0x7) << 28)		/* ADD_LAT */
		| ((cpo & 0x1f) << 23)			/* CPO */
		| ((wr_lat & 0x7) << 19)		/* WR_LAT */
		| ((trtp_clk & 0x7) << 13)		/* RD_TO_PRE */
		| ((wr_data_delay & 0x7) << 10)		/* WR_DATA_DELAY */
		| ((cke_min_clk & 0x7) << 6)		/* CKE_PLS */
		| ((four_act & 0x1f) << 0)		/* FOUR_ACT */
		);

	debug("DDR:timing_cfg_1=0x%08x\n", ddr->timing_cfg_1);
	debug("DDR:timing_cfg_2=0x%08x\n", ddr->timing_cfg_2);

	/* Check the DIMM data bus width */
	if (spd.dataw_lsb < 64) {
		if (spd.mem_type == SPD_MEMTYPE_DDR)
			burstlen = 0x03; /* 32-bit data bus, burst length 8 */
		else
			burstlen = 0x02; /* 32-bit data bus, burst length 4 */
		debug("\n   DDR DIMM: data bus width is 32 bit");
	} else {
		burstlen = 0x02; /* Others act as a 64-bit bus, burst length 4 */
		debug("\n   DDR DIMM: data bus width is 64 bit");
	}

	/* Is this an ECC DDR chip? */
	if (spd.config == 0x02)
		debug(" with ECC\n");
	else
		debug(" without ECC\n");

	/*
	 * Burst length is always 4 for a 64-bit data bus and 8 for a
	 * 32-bit data bus; burst type is sequential.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		switch (caslat) {
		case 1:
			ddr->sdram_mode = 0x50 | burstlen; /* CL=1.5 */
			break;
		case 2:
			ddr->sdram_mode = 0x20 | burstlen; /* CL=2.0 */
			break;
		case 3:
			ddr->sdram_mode = 0x60 | burstlen; /* CL=2.5 */
			break;
		case 4:
			ddr->sdram_mode = 0x30 | burstlen; /* CL=3.0 */
			break;
		default:
			printf("DDR: only CL 1.5, 2.0, 2.5 and 3.0 are supported\n");
			return 0;
		}
	} else {
		mode_odt_enable = 0x0;                  /* Default disabled */
		if (odt_wr_cfg || odt_rd_cfg) {
			/*
			 * Bits 6 and 2 in Extended MRS(1)
			 * Bit 2 == 0x04 == 75 Ohm, with 2 DIMM modules.
			 * Bit 6 == 0x40 == 150 Ohm, with 1 DIMM module.
			 */
			mode_odt_enable = 0x40;         /* 150 Ohm */
		}

		ddr->sdram_mode =
			(0
			 | (1 << (16 + 10))             /* DQS Differential disable */
			 | (add_lat << (16 + 3))        /* Additive Latency in EMRS1 */
			 | (mode_odt_enable << 16)      /* ODT Enable in EMRS1 */
			 | ((twr_clk - 1) << 9)         /* Write Recovery Autopre */
			 | (caslat << 4)                /* caslat */
			 | (burstlen << 0)              /* Burst length */
			);
	}
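	/*
	 * e.g. (sketch) a DDR2 DIMM with caslat = 4, twr_clk = 3, add_lat = 0,
	 * burstlen = 2 and mode_odt_enable = 0x40 yields sdram_mode =
	 * 0x04400442: EMRS1 (differential DQS disable + 150 Ohm ODT) in the
	 * upper half, MRS (WR = 3, CL = 4, BL = 4) in the lower half.
	 */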
	debug("DDR:sdram_mode=0x%08x\n", ddr->sdram_mode);

	/*
	 * Clear EMRS2 and EMRS3.
	 */
	ddr->sdram_mode2 = 0;
	debug("DDR: sdram_mode2 = 0x%08x\n", ddr->sdram_mode2);

	switch (spd.refresh) {
		case 0x00:
		case 0x80:
			refresh_clk = picos_to_clk(15625000);
			break;
		case 0x01:
		case 0x81:
			refresh_clk = picos_to_clk(3900000);
			break;
		case 0x02:
		case 0x82:
			refresh_clk = picos_to_clk(7800000);
			break;
		case 0x03:
		case 0x83:
			refresh_clk = picos_to_clk(31300000);
			break;
		case 0x04:
		case 0x84:
			refresh_clk = picos_to_clk(62500000);
			break;
		case 0x05:
		case 0x85:
			refresh_clk = picos_to_clk(125000000);
			break;
		default:
			refresh_clk = 0x512;
			break;
	}
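	/*
	 * e.g. (sketch) the common code 0x82 (7.8 us refresh) on a 166 MHz
	 * bus gives refresh_clk = picos_to_clk(7800000) = 1300 (0x514).
	 */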

	/*
	 * Set BSTOPRE to 0x100 for page mode.
	 * If auto-precharge is used, set BSTOPRE = 0.
	 */
	ddr->sdram_interval = ((refresh_clk & 0x3fff) << 16) | 0x100;
	debug("DDR:sdram_interval=0x%08x\n", ddr->sdram_interval);

	/*
	 * SDRAM Cfg 2
	 */
	odt_cfg = 0;
#ifndef CONFIG_NEVER_ASSERT_ODT_TO_CPU
	if (odt_rd_cfg || odt_wr_cfg) {
		odt_cfg = 0x2;		/* ODT to IOs during reads */
	}
#endif
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		ddr->sdram_cfg2 = (0
			    | (0 << 26)	/* True DQS */
			    | (odt_cfg << 21)	/* ODT only read */
			    | (1 << 12)	/* 1 refresh at a time */
			    );

		debug("DDR: sdram_cfg2  = 0x%08x\n", ddr->sdram_cfg2);
	}

#ifdef CONFIG_SYS_DDR_SDRAM_CLK_CNTL	/* Optional platform specific value */
	ddr->sdram_clk_cntl = CONFIG_SYS_DDR_SDRAM_CLK_CNTL;
#endif
	debug("DDR:sdram_clk_cntl=0x%08x\n", ddr->sdram_clk_cntl);

	asm("sync;isync");

	udelay(600);

	/*
	 * Figure out the settings for the sdram_cfg register. Build up
	 * the value in 'sdram_cfg' before writing, since the write into
	 * the register will actually enable the memory controller, and all
	 * settings must be done before enabling.
	 *
	 * sdram_cfg[0]   = 1 (ddr sdram logic enable)
	 * sdram_cfg[1]   = 1 (self-refresh-enable)
	 * sdram_cfg[5:7] = (SDRAM type = DDR SDRAM)
	 *			010 DDR 1 SDRAM
	 *			011 DDR 2 SDRAM
	 * sdram_cfg[12]  = 0 (32_BE = 0, 64-bit bus mode)
	 * sdram_cfg[13]  = 0 (8_BE = 0, 4-beat bursts)
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR)
		sdram_type = SDRAM_CFG_SDRAM_TYPE_DDR1;
	else
		sdram_type = SDRAM_CFG_SDRAM_TYPE_DDR2;

	sdram_cfg = (0
		     | SDRAM_CFG_MEM_EN		/* DDR enable */
		     | SDRAM_CFG_SREN		/* Self refresh */
		     | sdram_type		/* SDRAM type */
		     );

	/* sdram_cfg[3] = RD_EN - registered DIMM enable */
	if (spd.mod_attr & 0x02)
		sdram_cfg |= SDRAM_CFG_RD_EN;

	/* The DIMM is 32 bits wide */
	if (spd.dataw_lsb < 64) {
		if (spd.mem_type == SPD_MEMTYPE_DDR)
			sdram_cfg |= SDRAM_CFG_32_BE | SDRAM_CFG_8_BE;
		if (spd.mem_type == SPD_MEMTYPE_DDR2)
			sdram_cfg |= SDRAM_CFG_32_BE;
	}

	ddrc_ecc_enable = 0;

#if defined(CONFIG_DDR_ECC)
	/* Enable ECC with sdram_cfg[2] */
	if (spd.config == 0x02) {
		sdram_cfg |= 0x20000000;
		ddrc_ecc_enable = 1;
		/* disable error detection */
		ddr->err_disable = ~ECC_ERROR_ENABLE;
		/* set single bit error threshold to maximum value,
		 * reset counter to zero */
		ddr->err_sbe = (255 << ECC_ERROR_MAN_SBET_SHIFT) |
				(0 << ECC_ERROR_MAN_SBEC_SHIFT);
	}

	debug("DDR:err_disable=0x%08x\n", ddr->err_disable);
	debug("DDR:err_sbe=0x%08x\n", ddr->err_sbe);
#endif
	debug("   DDRC ECC mode: %s\n", ddrc_ecc_enable ? "ON" : "OFF");

#if defined(CONFIG_DDR_2T_TIMING)
	/*
	 * Enable 2T timing by setting sdram_cfg[16].
	 */
	sdram_cfg |= SDRAM_CFG_2T_EN;
#endif
	/* Enable the controller, and GO! */
	ddr->sdram_cfg = sdram_cfg;
	asm("sync;isync");
	udelay(500);

	debug("DDR:sdram_cfg=0x%08x\n", ddr->sdram_cfg);
	return memsize; /* in MiB */
}
#endif /* CONFIG_SPD_EEPROM */

#if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRCONTROLLER)
/*
 * Use the timebase counter; get_timer() is not available
 * at this point of initialization yet.
 */
static __inline__ unsigned long get_tbms (void)
{
	unsigned long tbl;
	unsigned long tbu1, tbu2;
	unsigned long ms;
	unsigned long long tmp;

	ulong tbclk = get_tbclk();

	/*
	 * Get the timebase ticks.  Re-read TBU until it is stable so that
	 * a TBL rollover between the two reads cannot yield a torn value.
	 */
	do {
		asm volatile ("mftbu %0":"=r" (tbu1):);
		asm volatile ("mftb %0":"=r" (tbl):);
		asm volatile ("mftbu %0":"=r" (tbu2):);
	} while (tbu1 != tbu2);

	/* convert ticks to ms */
	tmp = (unsigned long long)(tbu1);
	tmp = (tmp << 32);
	tmp += (unsigned long long)(tbl);
	ms = tmp / (tbclk / 1000);

	return ms;
}

/*
 * Initialize all of memory for ECC, then enable errors.
 */
void ddr_enable_ecc(unsigned int dram_size)
{
	volatile immap_t *immap = (immap_t *)CONFIG_SYS_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	unsigned long t_start, t_end;
	register u64 *p;
	register uint size;
	unsigned int pattern[2];

	icache_enable();
	t_start = get_tbms();
	pattern[0] = 0xdeadbeef;
	pattern[1] = 0xdeadbeef;

#if defined(CONFIG_DDR_ECC_INIT_VIA_DMA)
	dma_meminit(pattern[0], dram_size);
#else
	debug("ddr init: CPU FP write method\n");
	size = dram_size;
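	/*
	 * Note: the walk below starts at address 0, i.e. it assumes DDR is
	 * mapped at CONFIG_SYS_DDR_SDRAM_BASE == 0 and that [0, dram_size)
	 * is safe to overwrite at this point.
	 */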
	for (p = 0; p < (u64 *)(size); p++) {
		ppcDWstore((u32 *)p, pattern);
	}
	__asm__ __volatile__ ("sync");
#endif

	t_end = get_tbms();
	icache_disable();

	debug("\nREADY!!\n");
	debug("ddr init duration: %ld ms\n", t_end - t_start);

	/* Clear all ECC errors */
	if ((ddr->err_detect & ECC_ERROR_DETECT_MME) == ECC_ERROR_DETECT_MME)
		ddr->err_detect |= ECC_ERROR_DETECT_MME;
	if ((ddr->err_detect & ECC_ERROR_DETECT_MBE) == ECC_ERROR_DETECT_MBE)
		ddr->err_detect |= ECC_ERROR_DETECT_MBE;
	if ((ddr->err_detect & ECC_ERROR_DETECT_SBE) == ECC_ERROR_DETECT_SBE)
		ddr->err_detect |= ECC_ERROR_DETECT_SBE;
	if ((ddr->err_detect & ECC_ERROR_DETECT_MSE) == ECC_ERROR_DETECT_MSE)
		ddr->err_detect |= ECC_ERROR_DETECT_MSE;

	/* Disable ECC interrupts */
	ddr->err_int_en &= ECC_ERR_INT_DISABLE;

	/* Enable errors for ECC */
	ddr->err_disable &= ECC_ERROR_ENABLE;

	__asm__ __volatile__ ("sync");
	__asm__ __volatile__ ("isync");
}
#endif	/* CONFIG_DDR_ECC */