// SPDX-License-Identifier: GPL-2.0+
/*
 * DDR Configuration for AM33xx devices.
 *
 * Copyright (C) 2011 Texas Instruments Incorporated - http://www.ti.com/
 */

#include <asm/arch/cpu.h>
#include <asm/arch/ddr_defs.h>
#include <asm/arch/sys_proto.h>
#include <asm/io.h>
#include <asm/emif.h>

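/*
 * Rough usage sketch (an assumed, typical single-EMIF init order; the real
 * sequencing lives in the SoC-level DDR init code):
 *
 *	config_cmd_ctrl(&cmd_ctrl_data, 0);
 *	config_ddr_data(&ddr_data_vals, 0);
 *	config_io_ctrl(&ioregs);
 *	config_ddr_phy(&emif_regs_vals, 0);
 *	set_sdram_timings(&emif_regs_vals, 0);
 *	config_sdram(&emif_regs_vals, 0);
 *
 * The variable names above are illustrative placeholders, not symbols
 * defined by this file.
 */
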
/**
 * Base address for EMIF instances
 */
static struct emif_reg_struct *emif_reg[2] = {
				(struct emif_reg_struct *)EMIF4_0_CFG_BASE,
				(struct emif_reg_struct *)EMIF4_1_CFG_BASE};

/**
 * Base addresses for DDR PHY cmd/data regs
 */
static struct ddr_cmd_regs *ddr_cmd_reg[2] = {
				(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR,
				(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR2};

static struct ddr_data_regs *ddr_data_reg[2] = {
				(struct ddr_data_regs *)DDR_PHY_DATA_ADDR,
				(struct ddr_data_regs *)DDR_PHY_DATA_ADDR2};

/**
 * Base address for ddr io control instances
 */
static struct ddr_cmdtctrl *ioctrl_reg =
			(struct ddr_cmdtctrl *)DDR_CONTROL_BASE_ADDR;

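/*
 * Read a mode register via the EMIF LPDDR2/DDR2 mode register interface:
 * program the register address (and chip select) into the MR config
 * register, then read the result back. When all four bytes of the result
 * are identical, only the low byte is returned.
 */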
static inline u32 get_mr(int nr, u32 cs, u32 mr_addr)
{
	u32 mr;

	mr_addr |= cs << EMIF_REG_CS_SHIFT;
	writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);

	mr = readl(&emif_reg[nr]->emif_lpddr2_mode_reg_data);
	debug("get_mr: EMIF1 cs %d mr %08x val 0x%x\n", cs, mr_addr, mr);
	if (((mr & 0x0000ff00) >>  8) == (mr & 0xff) &&
	    ((mr & 0x00ff0000) >> 16) == (mr & 0xff) &&
	    ((mr & 0xff000000) >> 24) == (mr & 0xff))
		return mr & 0xff;
	else
		return mr;
}

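/*
 * Write a mode register via the EMIF: program the register address (and
 * chip select) into the MR config register, then write the new value.
 */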
static inline void set_mr(int nr, u32 cs, u32 mr_addr, u32 mr_val)
{
	mr_addr |= cs << EMIF_REG_CS_SHIFT;
	writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);
	writel(mr_val, &emif_reg[nr]->emif_lpddr2_mode_reg_data);
}

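/*
 * Program the mode registers for the given chip select once the device
 * auto-initialization (DAI) reported in MR0 has completed. MR10, MR1 and
 * MR2 are written, and MR2 is written again with the refresh-enable bit
 * set in the MR address.
 */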
static void configure_mr(int nr, u32 cs)
{
	u32 mr_addr;

	while (get_mr(nr, cs, LPDDR2_MR0) & LPDDR2_MR0_DAI_MASK)
		;
	set_mr(nr, cs, LPDDR2_MR10, 0x56);

	set_mr(nr, cs, LPDDR2_MR1, 0x43);
	set_mr(nr, cs, LPDDR2_MR2, 0x2);

	mr_addr = LPDDR2_MR2 | EMIF_REG_REFRESH_EN_MASK;
	set_mr(nr, cs, mr_addr, 0x2);
}

/*
 * Configure EMIF4D5 registers and MR registers. For details about these
 * magic values please see the EMIF registers section of the TRM.
 */
void config_sdram_emif4d5(const struct emif_regs *regs, int nr)
{
	writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl);
	writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl_shdw);
	writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);

	writel(regs->temp_alert_config, &emif_reg[nr]->emif_temp_alert_config);
	writel(regs->emif_rd_wr_lvl_rmp_win,
	       &emif_reg[nr]->emif_rd_wr_lvl_rmp_win);
	writel(regs->emif_rd_wr_lvl_rmp_ctl,
	       &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);
	writel(regs->emif_rd_wr_lvl_ctl, &emif_reg[nr]->emif_rd_wr_lvl_ctl);
	writel(regs->emif_rd_wr_exec_thresh,
	       &emif_reg[nr]->emif_rd_wr_exec_thresh);

	/*
	 * For most SoCs these registers do not need to be changed, so only
	 * write them if a value has been explicitly set.
	 */
	if (regs->emif_cos_config) {
		writel(regs->emif_prio_class_serv_map,
		       &emif_reg[nr]->emif_prio_class_serv_map);
		writel(regs->emif_connect_id_serv_1_map,
		       &emif_reg[nr]->emif_connect_id_serv_1_map);
		writel(regs->emif_connect_id_serv_2_map,
		       &emif_reg[nr]->emif_connect_id_serv_2_map);
		writel(regs->emif_cos_config, &emif_reg[nr]->emif_cos_config);
	}

	/*
	 * Sequence to ensure that the PHY is in a known state prior to
	 * starting hardware leveling. Also acts to latch some state from
	 * the EMIF into the PHY.
	 */
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);

	clrbits_le32(&emif_reg[nr]->emif_sdram_ref_ctrl,
			EMIF_REG_INITREF_DIS_MASK);

	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
	writel(regs->sdram_config, &cstat->secure_emif_sdram_config);

	/* Wait 1ms because of L3 timeout error */
	udelay(1000);

	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);

	/* Perform hardware leveling for DDR3 */
	if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3) {
		writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36) |
		       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
		writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw) |
		       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

		writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);

		/* Enable read leveling */
		writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_ctl);

		/*
		 * Full read and write leveling is now running; wait for the
		 * RDWRLVLFULL_START bit (bit 31) to clear.
		 */
		while ((readl(&emif_reg[nr]->emif_rd_wr_lvl_ctl) & 0x80000000)
		      != 0)
			;

		/* Check the timeout register to see if leveling is complete */
		if ((readl(&emif_reg[nr]->emif_status) & 0x70) != 0)
			puts("DDR3 H/W leveling incomplete with errors\n");

	} else {
		/* DDR2 */
		configure_mr(nr, 0);
		configure_mr(nr, 1);
	}
}

/**
 * Configure SDRAM
 */
void config_sdram(const struct emif_regs *regs, int nr)
{
#ifdef CONFIG_TI816X
	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
	writel(regs->emif_ddr_phy_ctlr_1, &emif_reg[nr]->emif_ddr_phy_ctrl_1);
	writel(regs->emif_ddr_phy_ctlr_1, &emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw);
	writel(0x0000613B, &emif_reg[nr]->emif_sdram_ref_ctrl);   /* initially a large refresh period */
	writel(0x1000613B, &emif_reg[nr]->emif_sdram_ref_ctrl);   /* trigger initialization           */
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
#else
	if (regs->zq_config) {
		writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);
		writel(regs->sdram_config, &cstat->secure_emif_sdram_config);
		writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);

		/* Trigger initialization */
		writel(0x00003100, &emif_reg[nr]->emif_sdram_ref_ctrl);
		/* Wait 1ms because of L3 timeout error */
		udelay(1000);

		/* Write the proper sdram_ref_ctrl value */
		writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
		writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
	}
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);

	/* Write REG_COS_COUNT_1, REG_COS_COUNT_2, and REG_PR_OLD_COUNT. */
	if (regs->ocp_config)
		writel(regs->ocp_config, &emif_reg[nr]->emif_l3_config);
#endif
}

/**
 * Set SDRAM timings
 */
void set_sdram_timings(const struct emif_regs *regs, int nr)
{
	writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1);
	writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1_shdw);
	writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2);
	writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2_shdw);
	writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3);
	writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3_shdw);
}

/*
 * Configure EXT PHY registers for software leveling
 */
static void ext_phy_settings_swlvl(const struct emif_regs *regs, int nr)
{
	u32 *ext_phy_ctrl_base = 0;
	u32 *emif_ext_phy_ctrl_base = 0;
	__maybe_unused const u32 *ext_phy_ctrl_const_regs;
	u32 i = 0;
	__maybe_unused u32 size;

	ext_phy_ctrl_base = (u32 *)&(regs->emif_ddr_ext_phy_ctrl_1);
	emif_ext_phy_ctrl_base =
			(u32 *)&(emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);

	/* Configure external phy control timing registers */
	for (i = 0; i < EMIF_EXT_PHY_CTRL_TIMING_REG; i++) {
		writel(*ext_phy_ctrl_base, emif_ext_phy_ctrl_base++);
		/* Update shadow registers */
		writel(*ext_phy_ctrl_base++, emif_ext_phy_ctrl_base++);
	}

#ifdef CONFIG_AM43XX
	/*
	 * External PHY control registers 6-24 do not change with DDR
	 * frequency. These only need to be set for DDR2 on AM43xx.
	 */
	emif_get_ext_phy_ctrl_const_regs(&ext_phy_ctrl_const_regs, &size);

	if (!size)
		return;

	for (i = 0; i < size; i++) {
		writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
		/* Update shadow registers */
		writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
	}
#endif
}

/*
 * Configure EXT PHY registers for hardware leveling
 */
static void ext_phy_settings_hwlvl(const struct emif_regs *regs, int nr)
{
	/*
	 * Enable hardware leveling on the EMIF.  For details about these
	 * magic values please see the EMIF registers section of the TRM.
	 */
	writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
	writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22_shdw);
	writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23);
	writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23_shdw);
	writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24);
	writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24_shdw);
	writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25);
	writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35_shdw);
	writel(0x000000FF, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
	writel(0x000000FF, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

	/*
	 * Sequence to ensure that the PHY is again in a known state after
	 * hardware leveling.
	 */
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
}

/**
 * Configure DDR PHY
 */
void config_ddr_phy(const struct emif_regs *regs, int nr)
{
	/*
	 * Disable initialization and refreshes for now until we finish
	 * programming the EMIF registers, and set the time between the
	 * rising edge of DDR_RESET and the rising edge of DDR_CKE to
	 * > 500us per the memory spec. We currently hardcode a value based
	 * on a maximum expected frequency of 400MHz.
	 */
	writel(EMIF_REG_INITREF_DIS_MASK | 0x3100,
		&emif_reg[nr]->emif_sdram_ref_ctrl);

	writel(regs->emif_ddr_phy_ctlr_1,
		&emif_reg[nr]->emif_ddr_phy_ctrl_1);
	writel(regs->emif_ddr_phy_ctlr_1,
		&emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw);

	if (get_emif_rev((u32)emif_reg[nr]) == EMIF_4D5) {
		if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3)
			ext_phy_settings_hwlvl(regs, nr);
		else
			ext_phy_settings_swlvl(regs, nr);
	}
}

/**
 * Configure DDR CMD control registers
 */
void config_cmd_ctrl(const struct cmd_control *cmd, int nr)
{
	if (!cmd)
		return;

	writel(cmd->cmd0csratio, &ddr_cmd_reg[nr]->cm0csratio);
	writel(cmd->cmd0iclkout, &ddr_cmd_reg[nr]->cm0iclkout);

	writel(cmd->cmd1csratio, &ddr_cmd_reg[nr]->cm1csratio);
	writel(cmd->cmd1iclkout, &ddr_cmd_reg[nr]->cm1iclkout);

	writel(cmd->cmd2csratio, &ddr_cmd_reg[nr]->cm2csratio);
	writel(cmd->cmd2iclkout, &ddr_cmd_reg[nr]->cm2iclkout);
}

/**
 * Configure DDR DATA registers
 */
void config_ddr_data(const struct ddr_data *data, int nr)
{
	int i;

	if (!data)
		return;

	for (i = 0; i < DDR_DATA_REGS_NR; i++) {
		writel(data->datardsratio0,
			&(ddr_data_reg[nr]+i)->dt0rdsratio0);
		writel(data->datawdsratio0,
			&(ddr_data_reg[nr]+i)->dt0wdsratio0);
		writel(data->datawiratio0,
			&(ddr_data_reg[nr]+i)->dt0wiratio0);
		writel(data->datagiratio0,
			&(ddr_data_reg[nr]+i)->dt0giratio0);
		writel(data->datafwsratio0,
			&(ddr_data_reg[nr]+i)->dt0fwsratio0);
		writel(data->datawrsratio0,
			&(ddr_data_reg[nr]+i)->dt0wrsratio0);
	}
}

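/**
 * Configure DDR I/O control registers
 *
 * Programs the command and data macro I/O control registers from the given
 * ctrl_ioregs values; on AM43xx the extra data macros and the EMIF SDRAM
 * config extension register are written as well.
 */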
void config_io_ctrl(const struct ctrl_ioregs *ioregs)
{
	if (!ioregs)
		return;

	writel(ioregs->cm0ioctl, &ioctrl_reg->cm0ioctl);
	writel(ioregs->cm1ioctl, &ioctrl_reg->cm1ioctl);
	writel(ioregs->cm2ioctl, &ioctrl_reg->cm2ioctl);
	writel(ioregs->dt0ioctl, &ioctrl_reg->dt0ioctl);
	writel(ioregs->dt1ioctl, &ioctrl_reg->dt1ioctl);
#ifdef CONFIG_AM43XX
	writel(ioregs->dt2ioctrl, &ioctrl_reg->dt2ioctrl);
	writel(ioregs->dt3ioctrl, &ioctrl_reg->dt3ioctrl);
	writel(ioregs->emif_sdram_config_ext,
	       &ioctrl_reg->emif_sdram_config_ext);
#endif
}