xref: /openbmc/u-boot/arch/arm/mach-omap2/am33xx/ddr.c (revision 2399e401)
/*
 * DDR Configuration for AM33xx devices.
 *
 * Copyright (C) 2011 Texas Instruments Incorporated - http://www.ti.com/
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <asm/arch/cpu.h>
#include <asm/arch/ddr_defs.h>
#include <asm/arch/sys_proto.h>
#include <asm/io.h>
#include <asm/emif.h>

/**
 * Base addresses for the EMIF instances
 */
static struct emif_reg_struct *emif_reg[2] = {
				(struct emif_reg_struct *)EMIF4_0_CFG_BASE,
				(struct emif_reg_struct *)EMIF4_1_CFG_BASE};

/**
 * Base addresses for DDR PHY cmd/data regs
 */
static struct ddr_cmd_regs *ddr_cmd_reg[2] = {
				(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR,
				(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR2};

static struct ddr_data_regs *ddr_data_reg[2] = {
				(struct ddr_data_regs *)DDR_PHY_DATA_ADDR,
				(struct ddr_data_regs *)DDR_PHY_DATA_ADDR2};

/**
 * Base address for the DDR I/O control registers
 */
static struct ddr_cmdtctrl *ioctrl_reg =
			(struct ddr_cmdtctrl *)DDR_CONTROL_BASE_ADDR;

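/**
 * Read an SDRAM mode register through the EMIF LPDDR2 mode register
 * interface.  If all four bytes of the value read back agree, only the
 * 8-bit mode register value is returned; otherwise the raw 32-bit value
 * is passed back to the caller.
 */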
static inline u32 get_mr(int nr, u32 cs, u32 mr_addr)
{
	u32 mr;

	mr_addr |= cs << EMIF_REG_CS_SHIFT;
	writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);

	mr = readl(&emif_reg[nr]->emif_lpddr2_mode_reg_data);
	debug("get_mr: EMIF1 cs %d mr %08x val 0x%x\n", cs, mr_addr, mr);
	if (((mr & 0x0000ff00) >>  8) == (mr & 0xff) &&
	    ((mr & 0x00ff0000) >> 16) == (mr & 0xff) &&
	    ((mr & 0xff000000) >> 24) == (mr & 0xff))
		return mr & 0xff;
	else
		return mr;
}

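/**
 * Write an SDRAM mode register for the given chip select through the
 * EMIF LPDDR2 mode register interface.
 */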
static inline void set_mr(int nr, u32 cs, u32 mr_addr, u32 mr_val)
{
	mr_addr |= cs << EMIF_REG_CS_SHIFT;
	writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);
	writel(mr_val, &emif_reg[nr]->emif_lpddr2_mode_reg_data);
}

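/**
 * Program the SDRAM mode registers for one chip select.  Used on the DDR2
 * init path: wait for device auto-initialization to complete, then issue
 * the calibration and mode register writes.
 */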
static void configure_mr(int nr, u32 cs)
{
	u32 mr_addr;

	/* Wait for device auto-initialization (DAI) to complete */
	while (get_mr(nr, cs, LPDDR2_MR0) & LPDDR2_MR0_DAI_MASK)
		;
	set_mr(nr, cs, LPDDR2_MR10, 0x56);

	set_mr(nr, cs, LPDDR2_MR1, 0x43);
	set_mr(nr, cs, LPDDR2_MR2, 0x2);

	mr_addr = LPDDR2_MR2 | EMIF_REG_REFRESH_EN_MASK;
	set_mr(nr, cs, mr_addr, 0x2);
}

/*
 * Configure EMIF4D5 registers and MR registers.  For details about these
 * magic values please see the EMIF registers section of the TRM.
 */
void config_sdram_emif4d5(const struct emif_regs *regs, int nr)
{
	writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl);
	writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl_shdw);
	writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);

	writel(regs->temp_alert_config, &emif_reg[nr]->emif_temp_alert_config);
	writel(regs->emif_rd_wr_lvl_rmp_win,
	       &emif_reg[nr]->emif_rd_wr_lvl_rmp_win);
	writel(regs->emif_rd_wr_lvl_rmp_ctl,
	       &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);
	writel(regs->emif_rd_wr_lvl_ctl, &emif_reg[nr]->emif_rd_wr_lvl_ctl);
	writel(regs->emif_rd_wr_exec_thresh,
	       &emif_reg[nr]->emif_rd_wr_exec_thresh);

	/*
	 * For most SoCs these registers do not need to be changed, so only
	 * write them if a value has been explicitly provided.
	 */
	if (regs->emif_cos_config) {
		writel(regs->emif_prio_class_serv_map,
		       &emif_reg[nr]->emif_prio_class_serv_map);
		writel(regs->emif_connect_id_serv_1_map,
		       &emif_reg[nr]->emif_connect_id_serv_1_map);
		writel(regs->emif_connect_id_serv_2_map,
		       &emif_reg[nr]->emif_connect_id_serv_2_map);
		writel(regs->emif_cos_config, &emif_reg[nr]->emif_cos_config);
	}

	/*
	 * Sequence to ensure that the PHY is in a known state prior to
	 * starting hardware leveling.  It also serves to latch some state
	 * from the EMIF into the PHY.
	 */
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);

	clrbits_le32(&emif_reg[nr]->emif_sdram_ref_ctrl,
			EMIF_REG_INITREF_DIS_MASK);

	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
	writel(regs->sdram_config, &cstat->secure_emif_sdram_config);

	/* Wait 1ms because of L3 timeout error */
	udelay(1000);

	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);

	/* Perform hardware leveling for DDR3 */
	if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3) {
		writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36) |
		       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
		writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw) |
		       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

		writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);

		/* Enable read leveling */
		writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_ctl);

		/*
		 * Enable full read and write leveling.  Wait for the
		 * RDWRLVLFULL_START bit (bit 31) to clear, which indicates
		 * that full leveling has completed.
		 */
		while ((readl(&emif_reg[nr]->emif_rd_wr_lvl_ctl) & 0x80000000)
		      != 0)
			;

		/* Check the timeout register to see if leveling is complete */
		if ((readl(&emif_reg[nr]->emif_status) & 0x70) != 0)
			puts("DDR3 H/W leveling incomplete with errors\n");

	} else {
		/* DDR2 */
		configure_mr(nr, 0);
		configure_mr(nr, 1);
	}
}

/**
 * Configure SDRAM
 */
void config_sdram(const struct emif_regs *regs, int nr)
{
	if (regs->zq_config) {
		writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);
		writel(regs->sdram_config, &cstat->secure_emif_sdram_config);
		writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);

		/* Trigger initialization */
		writel(0x00003100, &emif_reg[nr]->emif_sdram_ref_ctrl);
		/* Wait 1ms because of L3 timeout error */
		udelay(1000);

		/* Write proper sdram_ref_ctrl value */
		writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
		writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
	}
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);

	/* Write REG_COS_COUNT_1, REG_COS_COUNT_2, and REG_PR_OLD_COUNT. */
	if (regs->ocp_config)
		writel(regs->ocp_config, &emif_reg[nr]->emif_l3_config);
}

/**
 * Set SDRAM timings
 */
void set_sdram_timings(const struct emif_regs *regs, int nr)
{
	writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1);
	writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1_shdw);
	writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2);
	writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2_shdw);
	writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3);
	writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3_shdw);
}

/*
 * Configure EXT PHY registers for software leveling
 */
static void ext_phy_settings_swlvl(const struct emif_regs *regs, int nr)
{
	u32 *ext_phy_ctrl_base = 0;
	u32 *emif_ext_phy_ctrl_base = 0;
	__maybe_unused const u32 *ext_phy_ctrl_const_regs;
	u32 i = 0;
	__maybe_unused u32 size;

	ext_phy_ctrl_base = (u32 *)&(regs->emif_ddr_ext_phy_ctrl_1);
	emif_ext_phy_ctrl_base =
			(u32 *)&(emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);

	/* Configure external phy control timing registers */
	for (i = 0; i < EMIF_EXT_PHY_CTRL_TIMING_REG; i++) {
		writel(*ext_phy_ctrl_base, emif_ext_phy_ctrl_base++);
		/* Update shadow registers */
		writel(*ext_phy_ctrl_base++, emif_ext_phy_ctrl_base++);
	}

#ifdef CONFIG_AM43XX
	/*
	 * External phy 6-24 registers do not change with ddr frequency.
	 * These only need to be set on DDR2 on AM43xx.
	 */
	emif_get_ext_phy_ctrl_const_regs(&ext_phy_ctrl_const_regs, &size);

	if (!size)
		return;

	for (i = 0; i < size; i++) {
		writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
		/* Update shadow registers */
		writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
	}
#endif
}

/*
 * Configure EXT PHY registers for hardware leveling
 */
static void ext_phy_settings_hwlvl(const struct emif_regs *regs, int nr)
{
	/*
	 * Enable hardware leveling on the EMIF.  For details about these
	 * magic values please see the EMIF registers section of the TRM.
	 */
	writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
	writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22_shdw);
	writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23);
	writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23_shdw);
	writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24);
	writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24_shdw);
	writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25);
	writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35_shdw);
	writel(0x000000FF, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
	writel(0x000000FF, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

	/*
	 * Sequence to ensure that the PHY is again in a known state after
	 * hardware leveling.
	 */
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
}

/**
 * Configure DDR PHY
 */
void config_ddr_phy(const struct emif_regs *regs, int nr)
{
	/*
	 * Disable initialization and refreshes for now until we finish
	 * programming EMIF regs and set time between rising edge of
	 * DDR_RESET to rising edge of DDR_CKE to > 500us per memory spec.
	 * We currently hardcode a value based on a max expected frequency
	 * of 400MHz.
	 */
	writel(EMIF_REG_INITREF_DIS_MASK | 0x3100,
		&emif_reg[nr]->emif_sdram_ref_ctrl);

	writel(regs->emif_ddr_phy_ctlr_1,
		&emif_reg[nr]->emif_ddr_phy_ctrl_1);
	writel(regs->emif_ddr_phy_ctlr_1,
		&emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw);

	if (get_emif_rev((u32)emif_reg[nr]) == EMIF_4D5) {
		if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3)
			ext_phy_settings_hwlvl(regs, nr);
		else
			ext_phy_settings_swlvl(regs, nr);
	}
}

/**
 * Configure DDR CMD control registers
 */
void config_cmd_ctrl(const struct cmd_control *cmd, int nr)
{
	if (!cmd)
		return;

	writel(cmd->cmd0csratio, &ddr_cmd_reg[nr]->cm0csratio);
	writel(cmd->cmd0iclkout, &ddr_cmd_reg[nr]->cm0iclkout);

	writel(cmd->cmd1csratio, &ddr_cmd_reg[nr]->cm1csratio);
	writel(cmd->cmd1iclkout, &ddr_cmd_reg[nr]->cm1iclkout);

	writel(cmd->cmd2csratio, &ddr_cmd_reg[nr]->cm2csratio);
	writel(cmd->cmd2iclkout, &ddr_cmd_reg[nr]->cm2iclkout);
}

/**
 * Configure DDR DATA registers
 */
void config_ddr_data(const struct ddr_data *data, int nr)
{
	int i;

	if (!data)
		return;

	for (i = 0; i < DDR_DATA_REGS_NR; i++) {
		writel(data->datardsratio0,
			&(ddr_data_reg[nr]+i)->dt0rdsratio0);
		writel(data->datawdsratio0,
			&(ddr_data_reg[nr]+i)->dt0wdsratio0);
		writel(data->datawiratio0,
			&(ddr_data_reg[nr]+i)->dt0wiratio0);
		writel(data->datagiratio0,
			&(ddr_data_reg[nr]+i)->dt0giratio0);
		writel(data->datafwsratio0,
			&(ddr_data_reg[nr]+i)->dt0fwsratio0);
		writel(data->datawrsratio0,
			&(ddr_data_reg[nr]+i)->dt0wrsratio0);
	}
}

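/**
 * Configure DDR IO control registers
 */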
void config_io_ctrl(const struct ctrl_ioregs *ioregs)
{
	if (!ioregs)
		return;

	writel(ioregs->cm0ioctl, &ioctrl_reg->cm0ioctl);
	writel(ioregs->cm1ioctl, &ioctrl_reg->cm1ioctl);
	writel(ioregs->cm2ioctl, &ioctrl_reg->cm2ioctl);
	writel(ioregs->dt0ioctl, &ioctrl_reg->dt0ioctl);
	writel(ioregs->dt1ioctl, &ioctrl_reg->dt1ioctl);
#ifdef CONFIG_AM43XX
	writel(ioregs->dt2ioctrl, &ioctrl_reg->dt2ioctrl);
	writel(ioregs->dt3ioctrl, &ioctrl_reg->dt3ioctrl);
	writel(ioregs->emif_sdram_config_ext,
	       &ioctrl_reg->emif_sdram_config_ext);
#endif
}
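
/*
 * The functions above are low-level building blocks for SPL SDRAM bring-up;
 * the call ordering is owned by the SoC init code (e.g. config_ddr() in this
 * platform's emif4 code), not by this file.  The sketch below is illustrative
 * only and the board-specific parameter tables it names are hypothetical; a
 * typical sequence looks roughly like:
 *
 *	config_cmd_ctrl(&board_cmd_ctrl, 0);
 *	config_ddr_data(&board_ddr_data, 0);
 *	config_io_ctrl(&board_ioregs);
 *	config_ddr_phy(&board_emif_regs, 0);
 *	set_sdram_timings(&board_emif_regs, 0);
 *	config_sdram(&board_emif_regs, 0);   (config_sdram_emif4d5() on EMIF4D5)
 */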