1 // SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
2 /*
3  * (C) Copyright 2016-2017 Rockchip Inc.
4  *
5  * Adapted from coreboot.
6  */
7 
8 #include <common.h>
9 #include <clk.h>
10 #include <dm.h>
11 #include <dt-structs.h>
12 #include <ram.h>
13 #include <regmap.h>
14 #include <syscon.h>
15 #include <asm/io.h>
16 #include <asm/arch/clock.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rk3399.h>
19 #include <asm/arch/cru_rk3399.h>
20 #include <asm/arch/grf_rk3399.h>
21 #include <asm/arch/hardware.h>
22 #include <linux/err.h>
23 #include <time.h>
24 
25 struct chan_info {
26 	struct rk3399_ddr_pctl_regs *pctl;
27 	struct rk3399_ddr_pi_regs *pi;
28 	struct rk3399_ddr_publ_regs *publ;
29 	struct rk3399_msch_regs *msch;
30 };
31 
32 struct dram_info {
33 #ifdef CONFIG_SPL_BUILD
34 	struct chan_info chan[2];
35 	struct clk ddr_clk;
36 	struct rk3399_cru *cru;
37 	struct rk3399_pmucru *pmucru;
38 	struct rk3399_pmusgrf_regs *pmusgrf;
39 	struct rk3399_ddr_cic_regs *cic;
40 #endif
41 	struct ram_info info;
42 	struct rk3399_pmugrf_regs *pmugrf;
43 };
44 
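/*
 * These hold registers appear to use the usual Rockchip write-enable layout:
 * bits [31:16] act as a write mask for the corresponding bits in [15:0], so
 * each macro sets the mask bit together with the payload bit it guards,
 * e.g. PRESET_SGRF_HOLD(1) expands to (1 << 22) | (1 << 6).
 */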
45 #define PRESET_SGRF_HOLD(n)	((0x1 << (6 + 16)) | ((n) << 6))
46 #define PRESET_GPIO0_HOLD(n)	((0x1 << (7 + 16)) | ((n) << 7))
47 #define PRESET_GPIO1_HOLD(n)	((0x1 << (8 + 16)) | ((n) << 8))
48 
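/*
 * Drive-strength/ODT select codes used for the PHY tsel fields below; the
 * names give the nominal impedance in ohms (PHY_DRV_ODT_34_3 is 34.3 ohms,
 * PHY_DRV_ODT_Hi_Z is high impedance).
 */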
49 #define PHY_DRV_ODT_Hi_Z	0x0
50 #define PHY_DRV_ODT_240		0x1
51 #define PHY_DRV_ODT_120		0x8
52 #define PHY_DRV_ODT_80		0x9
53 #define PHY_DRV_ODT_60		0xc
54 #define PHY_DRV_ODT_48		0xd
55 #define PHY_DRV_ODT_40		0xe
56 #define PHY_DRV_ODT_34_3	0xf
57 
58 #ifdef CONFIG_SPL_BUILD
59 
60 struct rockchip_dmc_plat {
61 #if CONFIG_IS_ENABLED(OF_PLATDATA)
62 	struct dtd_rockchip_rk3399_dmc dtplat;
63 #else
64 	struct rk3399_sdram_params sdram_params;
65 #endif
66 	struct regmap *map;
67 };
68 
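/*
 * Copy @n bytes (callers pass register-block sizes, so a multiple of 4) from
 * @src into the memory-mapped registers at @dest using 32-bit writes.
 */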
69 static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
70 {
71 	int i;
72 
73 	for (i = 0; i < n / sizeof(u32); i++) {
74 		writel(*src, dest);
75 		src++;
76 		dest++;
77 	}
78 }
79 
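/*
 * Switch the PHY data and address slices into DLL bypass mode when the
 * target frequency is 125MHz or lower (presumably because the DLLs cannot
 * lock at such low clock rates), and take them out of bypass otherwise.
 */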
80 static void phy_dll_bypass_set(struct rk3399_ddr_publ_regs *ddr_publ_regs,
81 			       u32 freq)
82 {
83 	u32 *denali_phy = ddr_publ_regs->denali_phy;
84 
85 	/* Per the IP spec, only frequencies at or below 125MHz can enter DLL bypass mode */
86 	if (freq <= 125) {
87 		/* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
88 		setbits_le32(&denali_phy[86], (0x3 << 2) << 8);
89 		setbits_le32(&denali_phy[214], (0x3 << 2) << 8);
90 		setbits_le32(&denali_phy[342], (0x3 << 2) << 8);
91 		setbits_le32(&denali_phy[470], (0x3 << 2) << 8);
92 
93 		/* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
94 		setbits_le32(&denali_phy[547], (0x3 << 2) << 16);
95 		setbits_le32(&denali_phy[675], (0x3 << 2) << 16);
96 		setbits_le32(&denali_phy[803], (0x3 << 2) << 16);
97 	} else {
98 		/* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
99 		clrbits_le32(&denali_phy[86], (0x3 << 2) << 8);
100 		clrbits_le32(&denali_phy[214], (0x3 << 2) << 8);
101 		clrbits_le32(&denali_phy[342], (0x3 << 2) << 8);
102 		clrbits_le32(&denali_phy[470], (0x3 << 2) << 8);
103 
104 		/* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
105 		clrbits_le32(&denali_phy[547], (0x3 << 2) << 16);
106 		clrbits_le32(&denali_phy[675], (0x3 << 2) << 16);
107 		clrbits_le32(&denali_phy[803], (0x3 << 2) << 16);
108 	}
109 }
110 
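/*
 * Program the per-channel DRAM geometry into the controller and the PI:
 * the column/bank/row "diff" fields (relative to the 12-column/3-bank/16-row
 * maximums), the chip-select map and the half-bus-width flag, all derived
 * from the ddrconfig and channel parameters in @sdram_params.
 */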
111 static void set_memory_map(const struct chan_info *chan, u32 channel,
112 			   const struct rk3399_sdram_params *sdram_params)
113 {
114 	const struct rk3399_sdram_channel *sdram_ch =
115 		&sdram_params->ch[channel];
116 	u32 *denali_ctl = chan->pctl->denali_ctl;
117 	u32 *denali_pi = chan->pi->denali_pi;
118 	u32 cs_map;
119 	u32 reduc;
120 	u32 row;
121 
122 	/* Get row number from ddrconfig setting */
123 	if (sdram_ch->ddrconfig < 2 || sdram_ch->ddrconfig == 4)
124 		row = 16;
125 	else if (sdram_ch->ddrconfig == 3)
126 		row = 14;
127 	else
128 		row = 15;
129 
130 	cs_map = (sdram_ch->rank > 1) ? 3 : 1;
131 	reduc = (sdram_ch->bw == 2) ? 0 : 1;
132 
133 	/* Set the dram configuration to ctrl */
134 	clrsetbits_le32(&denali_ctl[191], 0xF, (12 - sdram_ch->col));
135 	clrsetbits_le32(&denali_ctl[190], (0x3 << 16) | (0x7 << 24),
136 			((3 - sdram_ch->bk) << 16) |
137 			((16 - row) << 24));
138 
139 	clrsetbits_le32(&denali_ctl[196], 0x3 | (1 << 16),
140 			cs_map | (reduc << 16));
141 
142 	/* PI_199 PI_COL_DIFF:RW:0:4 */
143 	clrsetbits_le32(&denali_pi[199], 0xF, (12 - sdram_ch->col));
144 
145 	/* PI_155 PI_ROW_DIFF:RW:24:3 PI_BANK_DIFF:RW:16:2 */
146 	clrsetbits_le32(&denali_pi[155], (0x3 << 16) | (0x7 << 24),
147 			((3 - sdram_ch->bk) << 16) |
148 			((16 - row) << 24));
149 	/* PI_41 PI_CS_MAP:RW:24:4 */
150 	clrsetbits_le32(&denali_pi[41], 0xf << 24, cs_map << 24);
151 	if ((sdram_ch->rank == 1) && (sdram_params->base.dramtype == DDR3))
152 		writel(0x2EC7FFFF, &denali_pi[34]);
153 }
154 
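/*
 * Select drive-strength and ODT codes for the current DRAM type and write
 * them into the DQ/DQS/CA tsel fields and pad drive registers. Read
 * termination is only enabled when ODT is enabled in @sdram_params; write
 * and idle termination are left disabled here.
 */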
155 static void set_ds_odt(const struct chan_info *chan,
156 		       const struct rk3399_sdram_params *sdram_params)
157 {
158 	u32 *denali_phy = chan->publ->denali_phy;
159 
160 	u32 tsel_idle_en, tsel_wr_en, tsel_rd_en;
161 	u32 tsel_idle_select_p, tsel_wr_select_p, tsel_rd_select_p;
162 	u32 ca_tsel_wr_select_p, ca_tsel_wr_select_n;
163 	u32 tsel_idle_select_n, tsel_wr_select_n, tsel_rd_select_n;
164 	u32 reg_value;
165 
166 	if (sdram_params->base.dramtype == LPDDR4) {
167 		tsel_rd_select_p = PHY_DRV_ODT_Hi_Z;
168 		tsel_wr_select_p = PHY_DRV_ODT_40;
169 		ca_tsel_wr_select_p = PHY_DRV_ODT_40;
170 		tsel_idle_select_p = PHY_DRV_ODT_Hi_Z;
171 
172 		tsel_rd_select_n = PHY_DRV_ODT_240;
173 		tsel_wr_select_n = PHY_DRV_ODT_40;
174 		ca_tsel_wr_select_n = PHY_DRV_ODT_40;
175 		tsel_idle_select_n = PHY_DRV_ODT_240;
176 	} else if (sdram_params->base.dramtype == LPDDR3) {
177 		tsel_rd_select_p = PHY_DRV_ODT_240;
178 		tsel_wr_select_p = PHY_DRV_ODT_34_3;
179 		ca_tsel_wr_select_p = PHY_DRV_ODT_48;
180 		tsel_idle_select_p = PHY_DRV_ODT_240;
181 
182 		tsel_rd_select_n = PHY_DRV_ODT_Hi_Z;
183 		tsel_wr_select_n = PHY_DRV_ODT_34_3;
184 		ca_tsel_wr_select_n = PHY_DRV_ODT_48;
185 		tsel_idle_select_n = PHY_DRV_ODT_Hi_Z;
186 	} else {
187 		tsel_rd_select_p = PHY_DRV_ODT_240;
188 		tsel_wr_select_p = PHY_DRV_ODT_34_3;
189 		ca_tsel_wr_select_p = PHY_DRV_ODT_34_3;
190 		tsel_idle_select_p = PHY_DRV_ODT_240;
191 
192 		tsel_rd_select_n = PHY_DRV_ODT_240;
193 		tsel_wr_select_n = PHY_DRV_ODT_34_3;
194 		ca_tsel_wr_select_n = PHY_DRV_ODT_34_3;
195 		tsel_idle_select_n = PHY_DRV_ODT_240;
196 	}
197 
198 	if (sdram_params->base.odt == 1)
199 		tsel_rd_en = 1;
200 	else
201 		tsel_rd_en = 0;
202 
203 	tsel_wr_en = 0;
204 	tsel_idle_en = 0;
205 
206 	/*
207 	 * phy_dq_tsel_select_X 24bits DENALI_PHY_6/134/262/390 offset_0
208 	 * sets termination values for read/idle cycles and drive strength
209 	 * for write cycles for DQ/DM
210 	 */
211 	reg_value = tsel_rd_select_n | (tsel_rd_select_p << 0x4) |
212 		    (tsel_wr_select_n << 8) | (tsel_wr_select_p << 12) |
213 		    (tsel_idle_select_n << 16) | (tsel_idle_select_p << 20);
214 	clrsetbits_le32(&denali_phy[6], 0xffffff, reg_value);
215 	clrsetbits_le32(&denali_phy[134], 0xffffff, reg_value);
216 	clrsetbits_le32(&denali_phy[262], 0xffffff, reg_value);
217 	clrsetbits_le32(&denali_phy[390], 0xffffff, reg_value);
218 
219 	/*
220 	 * phy_dqs_tsel_select_X 24bits DENALI_PHY_7/135/263/391 offset_0
221 	 * sets termination values for read/idle cycles and drive strength
222 	 * for write cycles for DQS
223 	 */
224 	clrsetbits_le32(&denali_phy[7], 0xffffff, reg_value);
225 	clrsetbits_le32(&denali_phy[135], 0xffffff, reg_value);
226 	clrsetbits_le32(&denali_phy[263], 0xffffff, reg_value);
227 	clrsetbits_le32(&denali_phy[391], 0xffffff, reg_value);
228 
229 	/* phy_adr_tsel_select_ 8bits DENALI_PHY_544/672/800 offset_0 */
230 	reg_value = ca_tsel_wr_select_n | (ca_tsel_wr_select_p << 0x4);
231 	clrsetbits_le32(&denali_phy[544], 0xff, reg_value);
232 	clrsetbits_le32(&denali_phy[672], 0xff, reg_value);
233 	clrsetbits_le32(&denali_phy[800], 0xff, reg_value);
234 
235 	/* phy_pad_addr_drive 8bits DENALI_PHY_928 offset_0 */
236 	clrsetbits_le32(&denali_phy[928], 0xff, reg_value);
237 
238 	/* phy_pad_rst_drive 8bits DENALI_PHY_937 offset_0 */
239 	clrsetbits_le32(&denali_phy[937], 0xff, reg_value);
240 
241 	/* phy_pad_cke_drive 8bits DENALI_PHY_935 offset_0 */
242 	clrsetbits_le32(&denali_phy[935], 0xff, reg_value);
243 
244 	/* phy_pad_cs_drive 8bits DENALI_PHY_939 offset_0 */
245 	clrsetbits_le32(&denali_phy[939], 0xff, reg_value);
246 
247 	/* phy_pad_clk_drive 8bits DENALI_PHY_929 offset_0 */
248 	clrsetbits_le32(&denali_phy[929], 0xff, reg_value);
249 
250 	/* phy_pad_fdbk_drive 23bit DENALI_PHY_924/925 */
251 	clrsetbits_le32(&denali_phy[924], 0xff,
252 			tsel_wr_select_n | (tsel_wr_select_p << 4));
253 	clrsetbits_le32(&denali_phy[925], 0xff,
254 			tsel_rd_select_n | (tsel_rd_select_p << 4));
255 
256 	/* phy_dq_tsel_enable_X 3bits DENALI_PHY_5/133/261/389 offset_16 */
257 	reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
258 		<< 16;
259 	clrsetbits_le32(&denali_phy[5], 0x7 << 16, reg_value);
260 	clrsetbits_le32(&denali_phy[133], 0x7 << 16, reg_value);
261 	clrsetbits_le32(&denali_phy[261], 0x7 << 16, reg_value);
262 	clrsetbits_le32(&denali_phy[389], 0x7 << 16, reg_value);
263 
264 	/* phy_dqs_tsel_enable_X 3bits DENALI_PHY_6/134/262/390 offset_24 */
265 	reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
266 		<< 24;
267 	clrsetbits_le32(&denali_phy[6], 0x7 << 24, reg_value);
268 	clrsetbits_le32(&denali_phy[134], 0x7 << 24, reg_value);
269 	clrsetbits_le32(&denali_phy[262], 0x7 << 24, reg_value);
270 	clrsetbits_le32(&denali_phy[390], 0x7 << 24, reg_value);
271 
272 	/* phy_adr_tsel_enable_ 1bit DENALI_PHY_518/646/774 offset_8 */
273 	reg_value = tsel_wr_en << 8;
274 	clrsetbits_le32(&denali_phy[518], 0x1 << 8, reg_value);
275 	clrsetbits_le32(&denali_phy[646], 0x1 << 8, reg_value);
276 	clrsetbits_le32(&denali_phy[774], 0x1 << 8, reg_value);
277 
278 	/* phy_pad_addr_term tsel 1bit DENALI_PHY_933 offset_17 */
279 	reg_value = tsel_wr_en << 17;
280 	clrsetbits_le32(&denali_phy[933], 0x1 << 17, reg_value);
281 	/*
282 	 * pad_rst/cke/cs/clk_term tsel 1bits
283 	 * DENALI_PHY_938/936/940/934 offset_17
284 	 */
285 	clrsetbits_le32(&denali_phy[938], 0x1 << 17, reg_value);
286 	clrsetbits_le32(&denali_phy[936], 0x1 << 17, reg_value);
287 	clrsetbits_le32(&denali_phy[940], 0x1 << 17, reg_value);
288 	clrsetbits_le32(&denali_phy[934], 0x1 << 17, reg_value);
289 
290 	/* phy_pad_fdbk_term 1bit DENALI_PHY_930 offset_17 */
291 	clrsetbits_le32(&denali_phy[930], 0x1 << 17, reg_value);
292 }
293 
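/*
 * Configure the PHY pad I/O: pick the vref mode/value per DRAM type (for
 * LPDDR3 with ODT enabled, the DQ vref is looked up from the drive/ODT codes
 * that set_ds_odt() programmed into PHY_6), then set the pad mode select and
 * a speed code derived from the DDR frequency.
 */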
294 static int phy_io_config(const struct chan_info *chan,
295 			  const struct rk3399_sdram_params *sdram_params)
296 {
297 	u32 *denali_phy = chan->publ->denali_phy;
298 	u32 vref_mode_dq, vref_value_dq, vref_mode_ac, vref_value_ac;
299 	u32 mode_sel;
300 	u32 reg_value;
301 	u32 drv_value, odt_value;
302 	u32 speed;
303 
304 	/* vref setting */
305 	if (sdram_params->base.dramtype == LPDDR4) {
306 		/* LPDDR4 */
307 		vref_mode_dq = 0x6;
308 		vref_value_dq = 0x1f;
309 		vref_mode_ac = 0x6;
310 		vref_value_ac = 0x1f;
311 	} else if (sdram_params->base.dramtype == LPDDR3) {
312 		if (sdram_params->base.odt == 1) {
313 			vref_mode_dq = 0x5;  /* LPDDR3 ODT */
314 			drv_value = (readl(&denali_phy[6]) >> 12) & 0xf;
315 			odt_value = (readl(&denali_phy[6]) >> 4) & 0xf;
316 			if (drv_value == PHY_DRV_ODT_48) {
317 				switch (odt_value) {
318 				case PHY_DRV_ODT_240:
319 					vref_value_dq = 0x16;
320 					break;
321 				case PHY_DRV_ODT_120:
322 					vref_value_dq = 0x26;
323 					break;
324 				case PHY_DRV_ODT_60:
325 					vref_value_dq = 0x36;
326 					break;
327 				default:
328 					debug("Invalid ODT value.\n");
329 					return -EINVAL;
330 				}
331 			} else if (drv_value == PHY_DRV_ODT_40) {
332 				switch (odt_value) {
333 				case PHY_DRV_ODT_240:
334 					vref_value_dq = 0x19;
335 					break;
336 				case PHY_DRV_ODT_120:
337 					vref_value_dq = 0x23;
338 					break;
339 				case PHY_DRV_ODT_60:
340 					vref_value_dq = 0x31;
341 					break;
342 				default:
343 					debug("Invalid ODT value.\n");
344 					return -EINVAL;
345 				}
346 			} else if (drv_value == PHY_DRV_ODT_34_3) {
347 				switch (odt_value) {
348 				case PHY_DRV_ODT_240:
349 					vref_value_dq = 0x17;
350 					break;
351 				case PHY_DRV_ODT_120:
352 					vref_value_dq = 0x20;
353 					break;
354 				case PHY_DRV_ODT_60:
355 					vref_value_dq = 0x2e;
356 					break;
357 				default:
358 					debug("Invalid ODT value.\n");
359 					return -EINVAL;
360 				}
361 			} else {
362 				debug("Invalid DRV value.\n");
363 				return -EINVAL;
364 			}
365 		} else {
366 			vref_mode_dq = 0x2;  /* LPDDR3 */
367 			vref_value_dq = 0x1f;
368 		}
369 		vref_mode_ac = 0x2;
370 		vref_value_ac = 0x1f;
371 	} else if (sdram_params->base.dramtype == DDR3) {
372 		/* DDR3L */
373 		vref_mode_dq = 0x1;
374 		vref_value_dq = 0x1f;
375 		vref_mode_ac = 0x1;
376 		vref_value_ac = 0x1f;
377 	} else {
378 		debug("Unknown DRAM type.\n");
379 		return -EINVAL;
380 	}
381 
382 	reg_value = (vref_mode_dq << 9) | (0x1 << 8) | vref_value_dq;
383 
384 	/* PHY_913 PHY_PAD_VREF_CTRL_DQ_0 12bits offset_8 */
385 	clrsetbits_le32(&denali_phy[913], 0xfff << 8, reg_value << 8);
386 	/* PHY_914 PHY_PAD_VREF_CTRL_DQ_1 12bits offset_0 */
387 	clrsetbits_le32(&denali_phy[914], 0xfff, reg_value);
388 	/* PHY_914 PHY_PAD_VREF_CTRL_DQ_2 12bits offset_16 */
389 	clrsetbits_le32(&denali_phy[914], 0xfff << 16, reg_value << 16);
390 	/* PHY_915 PHY_PAD_VREF_CTRL_DQ_3 12bits offset_0 */
391 	clrsetbits_le32(&denali_phy[915], 0xfff, reg_value);
392 
393 	reg_value = (vref_mode_ac << 9) | (0x1 << 8) | vref_value_ac;
394 
395 	/* PHY_915 PHY_PAD_VREF_CTRL_AC 12bits offset_16 */
396 	clrsetbits_le32(&denali_phy[915], 0xfff << 16, reg_value << 16);
397 
398 	if (sdram_params->base.dramtype == LPDDR4)
399 		mode_sel = 0x6;
400 	else if (sdram_params->base.dramtype == LPDDR3)
401 		mode_sel = 0x0;
402 	else if (sdram_params->base.dramtype == DDR3)
403 		mode_sel = 0x1;
404 	else
405 		return -EINVAL;
406 
407 	/* PHY_924 PHY_PAD_FDBK_DRIVE */
408 	clrsetbits_le32(&denali_phy[924], 0x7 << 15, mode_sel << 15);
409 	/* PHY_926 PHY_PAD_DATA_DRIVE */
410 	clrsetbits_le32(&denali_phy[926], 0x7 << 6, mode_sel << 6);
411 	/* PHY_927 PHY_PAD_DQS_DRIVE */
412 	clrsetbits_le32(&denali_phy[927], 0x7 << 6, mode_sel << 6);
413 	/* PHY_928 PHY_PAD_ADDR_DRIVE */
414 	clrsetbits_le32(&denali_phy[928], 0x7 << 14, mode_sel << 14);
415 	/* PHY_929 PHY_PAD_CLK_DRIVE */
416 	clrsetbits_le32(&denali_phy[929], 0x7 << 14, mode_sel << 14);
417 	/* PHY_935 PHY_PAD_CKE_DRIVE */
418 	clrsetbits_le32(&denali_phy[935], 0x7 << 14, mode_sel << 14);
419 	/* PHY_937 PHY_PAD_RST_DRIVE */
420 	clrsetbits_le32(&denali_phy[937], 0x7 << 14, mode_sel << 14);
421 	/* PHY_939 PHY_PAD_CS_DRIVE */
422 	clrsetbits_le32(&denali_phy[939], 0x7 << 14, mode_sel << 14);
423 
424 
425 	/* speed setting */
426 	if (sdram_params->base.ddr_freq < 400)
427 		speed = 0x0;
428 	else if (sdram_params->base.ddr_freq < 800)
429 		speed = 0x1;
430 	else if (sdram_params->base.ddr_freq < 1200)
431 		speed = 0x2;
432 	else
433 		speed = 0x3;
434 
435 	/* PHY_924 PHY_PAD_FDBK_DRIVE */
436 	clrsetbits_le32(&denali_phy[924], 0x3 << 21, speed << 21);
437 	/* PHY_926 PHY_PAD_DATA_DRIVE */
438 	clrsetbits_le32(&denali_phy[926], 0x3 << 9, speed << 9);
439 	/* PHY_927 PHY_PAD_DQS_DRIVE */
440 	clrsetbits_le32(&denali_phy[927], 0x3 << 9, speed << 9);
441 	/* PHY_928 PHY_PAD_ADDR_DRIVE */
442 	clrsetbits_le32(&denali_phy[928], 0x3 << 17, speed << 17);
443 	/* PHY_929 PHY_PAD_CLK_DRIVE */
444 	clrsetbits_le32(&denali_phy[929], 0x3 << 17, speed << 17);
445 	/* PHY_935 PHY_PAD_CKE_DRIVE */
446 	clrsetbits_le32(&denali_phy[935], 0x3 << 17, speed << 17);
447 	/* PHY_937 PHY_PAD_RST_DRIVE */
448 	clrsetbits_le32(&denali_phy[937], 0x3 << 17, speed << 17);
449 	/* PHY_939 PHY_PAD_CS_DRIVE */
450 	clrsetbits_le32(&denali_phy[939], 0x3 << 17, speed << 17);
451 
452 	return 0;
453 }
454 
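/*
 * Bring up the controller and PHY for one channel: load the CTL and PI
 * register sets from @sdram_params (writing DRAM_CLASS last, per the
 * workaround note below), set the memory map, start the PI and controller,
 * wait for the PHY DLLs to lock, load the PHY register set, nudge the DQ/DQS
 * write tsel end timing up by half a cycle, apply the pad I/O configuration,
 * and finally poll (200ms timeout) for init complete before restoring
 * PWRUP_SREFRESH_EXIT.
 */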
455 static int pctl_cfg(const struct chan_info *chan, u32 channel,
456 		    const struct rk3399_sdram_params *sdram_params)
457 {
458 	u32 *denali_ctl = chan->pctl->denali_ctl;
459 	u32 *denali_pi = chan->pi->denali_pi;
460 	u32 *denali_phy = chan->publ->denali_phy;
461 	const u32 *params_ctl = sdram_params->pctl_regs.denali_ctl;
462 	const u32 *params_phy = sdram_params->phy_regs.denali_phy;
463 	u32 tmp, tmp1, tmp2;
464 	u32 pwrup_srefresh_exit;
465 	int ret;
466 	const ulong timeout_ms = 200;
467 
468 	/*
469 	 * work around controller bug:
470 	 * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
471 	 */
472 	copy_to_reg(&denali_ctl[1], &params_ctl[1],
473 		    sizeof(struct rk3399_ddr_pctl_regs) - 4);
474 	writel(params_ctl[0], &denali_ctl[0]);
475 	copy_to_reg(denali_pi, &sdram_params->pi_regs.denali_pi[0],
476 		    sizeof(struct rk3399_ddr_pi_regs));
477 	/* rank count needs to be set for init */
478 	set_memory_map(chan, channel, sdram_params);
479 
480 	writel(sdram_params->phy_regs.denali_phy[910], &denali_phy[910]);
481 	writel(sdram_params->phy_regs.denali_phy[911], &denali_phy[911]);
482 	writel(sdram_params->phy_regs.denali_phy[912], &denali_phy[912]);
483 
484 	pwrup_srefresh_exit = readl(&denali_ctl[68]) & PWRUP_SREFRESH_EXIT;
485 	clrbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT);
486 
487 	/* PHY_DLL_RST_EN */
488 	clrsetbits_le32(&denali_phy[957], 0x3 << 24, 1 << 24);
489 
490 	setbits_le32(&denali_pi[0], START);
491 	setbits_le32(&denali_ctl[0], START);
492 
493 	/* Wait for PHY DLL lock */
494 	while (1) {
495 		tmp = readl(&denali_phy[920]);
496 		tmp1 = readl(&denali_phy[921]);
497 		tmp2 = readl(&denali_phy[922]);
498 		if ((((tmp >> 16) & 0x1) == 0x1) &&
499 		    (((tmp1 >> 16) & 0x1) == 0x1) &&
500 		    (((tmp1 >> 0) & 0x1) == 0x1) &&
501 		    (((tmp2 >> 0) & 0x1) == 0x1))
502 			break;
503 	}
504 
505 	copy_to_reg(&denali_phy[896], &params_phy[896], (958 - 895) * 4);
506 	copy_to_reg(&denali_phy[0], &params_phy[0], (90 - 0 + 1) * 4);
507 	copy_to_reg(&denali_phy[128], &params_phy[128], (218 - 128 + 1) * 4);
508 	copy_to_reg(&denali_phy[256], &params_phy[256], (346 - 256 + 1) * 4);
509 	copy_to_reg(&denali_phy[384], &params_phy[384], (474 - 384 + 1) * 4);
510 	copy_to_reg(&denali_phy[512], &params_phy[512], (549 - 512 + 1) * 4);
511 	copy_to_reg(&denali_phy[640], &params_phy[640], (677 - 640 + 1) * 4);
512 	copy_to_reg(&denali_phy[768], &params_phy[768], (805 - 768 + 1) * 4);
513 	set_ds_odt(chan, sdram_params);
514 
515 	/*
516 	 * phy_dqs_tsel_wr_timing_X 8bits DENALI_PHY_84/212/340/468 offset_8
517 	 * dqs_tsel_wr_end[7:4] add Half cycle
518 	 */
519 	tmp = (readl(&denali_phy[84]) >> 8) & 0xff;
520 	clrsetbits_le32(&denali_phy[84], 0xff << 8, (tmp + 0x10) << 8);
521 	tmp = (readl(&denali_phy[212]) >> 8) & 0xff;
522 	clrsetbits_le32(&denali_phy[212], 0xff << 8, (tmp + 0x10) << 8);
523 	tmp = (readl(&denali_phy[340]) >> 8) & 0xff;
524 	clrsetbits_le32(&denali_phy[340], 0xff << 8, (tmp + 0x10) << 8);
525 	tmp = (readl(&denali_phy[468]) >> 8) & 0xff;
526 	clrsetbits_le32(&denali_phy[468], 0xff << 8, (tmp + 0x10) << 8);
527 
528 	/*
529 	 * phy_dq_tsel_wr_timing_X 8bits DENALI_PHY_83/211/339/467 offset_16
530 	 * dq_tsel_wr_end[7:4] add half cycle
531 	 */
532 	tmp = (readl(&denali_phy[83]) >> 16) & 0xff;
533 	clrsetbits_le32(&denali_phy[83], 0xff << 16, (tmp + 0x10) << 16);
534 	tmp = (readl(&denali_phy[211]) >> 16) & 0xff;
535 	clrsetbits_le32(&denali_phy[211], 0xff << 16, (tmp + 0x10) << 16);
536 	tmp = (readl(&denali_phy[339]) >> 16) & 0xff;
537 	clrsetbits_le32(&denali_phy[339], 0xff << 16, (tmp + 0x10) << 16);
538 	tmp = (readl(&denali_phy[467]) >> 16) & 0xff;
539 	clrsetbits_le32(&denali_phy[467], 0xff << 16, (tmp + 0x10) << 16);
540 
541 	ret = phy_io_config(chan, sdram_params);
542 	if (ret)
543 		return ret;
544 
545 	/* PHY_DLL_RST_EN */
546 	clrsetbits_le32(&denali_phy[957], 0x3 << 24, 0x2 << 24);
547 
548 	/* Wait for PHY and DRAM init to complete */
549 	tmp = get_timer(0);
550 	do {
551 		if (get_timer(tmp) > timeout_ms) {
552 			pr_err("DRAM (%s): phy failed to lock within %ld ms\n",
553 			      __func__, timeout_ms);
554 			return -ETIME;
555 		}
556 	} while (!(readl(&denali_ctl[203]) & (1 << 3)));
557 	debug("DRAM (%s): phy locked after %ld ms\n", __func__, get_timer(tmp));
558 
559 	clrsetbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT,
560 			pwrup_srefresh_exit);
561 	return 0;
562 }
563 
564 static void select_per_cs_training_index(const struct chan_info *chan,
565 					 u32 rank)
566 {
567 	u32 *denali_phy = chan->publ->denali_phy;
568 
569 	/* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
570 	if ((readl(&denali_phy[84]) >> 16) & 1) {
571 		/*
572 		 * PHY_8/136/264/392
573 		 * phy_per_cs_training_index_X 1bit offset_24
574 		 */
575 		clrsetbits_le32(&denali_phy[8], 0x1 << 24, rank << 24);
576 		clrsetbits_le32(&denali_phy[136], 0x1 << 24, rank << 24);
577 		clrsetbits_le32(&denali_phy[264], 0x1 << 24, rank << 24);
578 		clrsetbits_le32(&denali_phy[392], 0x1 << 24, rank << 24);
579 	}
580 }
581 
582 static void override_write_leveling_value(const struct chan_info *chan)
583 {
584 	u32 *denali_ctl = chan->pctl->denali_ctl;
585 	u32 *denali_phy = chan->publ->denali_phy;
586 	u32 byte;
587 
588 	/* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
589 	setbits_le32(&denali_phy[896], 1);
590 
591 	/*
592 	 * PHY_8/136/264/392
593 	 * phy_per_cs_training_multicast_en_X 1bit offset_16
594 	 */
595 	clrsetbits_le32(&denali_phy[8], 0x1 << 16, 1 << 16);
596 	clrsetbits_le32(&denali_phy[136], 0x1 << 16, 1 << 16);
597 	clrsetbits_le32(&denali_phy[264], 0x1 << 16, 1 << 16);
598 	clrsetbits_le32(&denali_phy[392], 0x1 << 16, 1 << 16);
599 
600 	for (byte = 0; byte < 4; byte++)
601 		clrsetbits_le32(&denali_phy[63 + (128 * byte)], 0xffff << 16,
602 				0x200 << 16);
603 
604 	/* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
605 	clrbits_le32(&denali_phy[896], 1);
606 
607 	/* CTL_200 ctrlupd_req 1bit offset_8 */
608 	clrsetbits_le32(&denali_ctl[200], 0x1 << 8, 0x1 << 8);
609 }
610 
611 static int data_training_ca(const struct chan_info *chan, u32 channel,
612 			    const struct rk3399_sdram_params *sdram_params)
613 {
614 	u32 *denali_pi = chan->pi->denali_pi;
615 	u32 *denali_phy = chan->publ->denali_phy;
616 	u32 i, tmp;
617 	u32 obs_0, obs_1, obs_2, obs_err = 0;
618 	u32 rank = sdram_params->ch[channel].rank;
619 
620 	for (i = 0; i < rank; i++) {
621 		select_per_cs_training_index(chan, i);
622 		/* PI_100 PI_CALVL_EN:RW:8:2 */
623 		clrsetbits_le32(&denali_pi[100], 0x3 << 8, 0x2 << 8);
624 		/* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
625 		clrsetbits_le32(&denali_pi[92],
626 				(0x1 << 16) | (0x3 << 24),
627 				(0x1 << 16) | (i << 24));
628 
629 		/* Wait for training to complete */
630 		while (1) {
631 			/* PI_174 PI_INT_STATUS:RD:8:18 */
632 			tmp = readl(&denali_pi[174]) >> 8;
633 			/*
634 			 * check status obs
635 			 * PHY_532/660/788 phy_adr_calvl_obs1_:0:32
636 			 */
637 			obs_0 = readl(&denali_phy[532]);
638 			obs_1 = readl(&denali_phy[660]);
639 			obs_2 = readl(&denali_phy[788]);
640 			if (((obs_0 >> 30) & 0x3) ||
641 			    ((obs_1 >> 30) & 0x3) ||
642 			    ((obs_2 >> 30) & 0x3))
643 				obs_err = 1;
644 			if ((((tmp >> 11) & 0x1) == 0x1) &&
645 			    (((tmp >> 13) & 0x1) == 0x1) &&
646 			    (((tmp >> 5) & 0x1) == 0x0) &&
647 			    (obs_err == 0))
648 				break;
649 			else if ((((tmp >> 5) & 0x1) == 0x1) ||
650 				 (obs_err == 1))
651 				return -EIO;
652 		}
653 		/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
654 		writel(0x00003f7c, (&denali_pi[175]));
655 	}
656 	clrbits_le32(&denali_pi[100], 0x3 << 8);
657 
658 	return 0;
659 }
660 
661 static int data_training_wl(const struct chan_info *chan, u32 channel,
662 			    const struct rk3399_sdram_params *sdram_params)
663 {
664 	u32 *denali_pi = chan->pi->denali_pi;
665 	u32 *denali_phy = chan->publ->denali_phy;
666 	u32 i, tmp;
667 	u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
668 	u32 rank = sdram_params->ch[channel].rank;
669 
670 	for (i = 0; i < rank; i++) {
671 		select_per_cs_training_index(chan, i);
672 		/* PI_60 PI_WRLVL_EN:RW:8:2 */
673 		clrsetbits_le32(&denali_pi[60], 0x3 << 8, 0x2 << 8);
674 		/* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
675 		clrsetbits_le32(&denali_pi[59],
676 				(0x1 << 8) | (0x3 << 16),
677 				(0x1 << 8) | (i << 16));
678 
679 		/* Wait for training to complete */
680 		while (1) {
681 			/* PI_174 PI_INT_STATUS:RD:8:18 */
682 			tmp = readl(&denali_pi[174]) >> 8;
683 
684 			/*
685 			 * check status obs; if an error is reported, write
686 			 * leveling may not complete. PHY_40/168/296/424
687 			 * phy_wrlvl_status_obs_X:0:13
688 			 */
689 			obs_0 = readl(&denali_phy[40]);
690 			obs_1 = readl(&denali_phy[168]);
691 			obs_2 = readl(&denali_phy[296]);
692 			obs_3 = readl(&denali_phy[424]);
693 			if (((obs_0 >> 12) & 0x1) ||
694 			    ((obs_1 >> 12) & 0x1) ||
695 			    ((obs_2 >> 12) & 0x1) ||
696 			    ((obs_3 >> 12) & 0x1))
697 				obs_err = 1;
698 			if ((((tmp >> 10) & 0x1) == 0x1) &&
699 			    (((tmp >> 13) & 0x1) == 0x1) &&
700 			    (((tmp >> 4) & 0x1) == 0x0) &&
701 			    (obs_err == 0))
702 				break;
703 			else if ((((tmp >> 4) & 0x1) == 0x1) ||
704 				 (obs_err == 1))
705 				return -EIO;
706 		}
707 		/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
708 		writel(0x00003f7c, (&denali_pi[175]));
709 	}
710 
711 	override_write_leveling_value(chan);
712 	clrbits_le32(&denali_pi[60], 0x3 << 8);
713 
714 	return 0;
715 }
716 
717 static int data_training_rg(const struct chan_info *chan, u32 channel,
718 			    const struct rk3399_sdram_params *sdram_params)
719 {
720 	u32 *denali_pi = chan->pi->denali_pi;
721 	u32 *denali_phy = chan->publ->denali_phy;
722 	u32 i, tmp;
723 	u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
724 	u32 rank = sdram_params->ch[channel].rank;
725 
726 	for (i = 0; i < rank; i++) {
727 		select_per_cs_training_index(chan, i);
728 		/* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
729 		clrsetbits_le32(&denali_pi[80], 0x3 << 24, 0x2 << 24);
730 		/*
731 		 * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
732 		 * PI_RDLVL_CS:RW:24:2
733 		 */
734 		clrsetbits_le32(&denali_pi[74],
735 				(0x1 << 16) | (0x3 << 24),
736 				(0x1 << 16) | (i << 24));
737 
738 		/* Wait for training to complete */
739 		while (1) {
740 			/* PI_174 PI_INT_STATUS:RD:8:18 */
741 			tmp = readl(&denali_pi[174]) >> 8;
742 
743 			/*
744 			 * check status obs
745 			 * PHY_43/171/299/427
746 			 *     PHY_GTLVL_STATUS_OBS_x:16:8
747 			 */
748 			obs_0 = readl(&denali_phy[43]);
749 			obs_1 = readl(&denali_phy[171]);
750 			obs_2 = readl(&denali_phy[299]);
751 			obs_3 = readl(&denali_phy[427]);
752 			if (((obs_0 >> (16 + 6)) & 0x3) ||
753 			    ((obs_1 >> (16 + 6)) & 0x3) ||
754 			    ((obs_2 >> (16 + 6)) & 0x3) ||
755 			    ((obs_3 >> (16 + 6)) & 0x3))
756 				obs_err = 1;
757 			if ((((tmp >> 9) & 0x1) == 0x1) &&
758 			    (((tmp >> 13) & 0x1) == 0x1) &&
759 			    (((tmp >> 3) & 0x1) == 0x0) &&
760 			    (obs_err == 0))
761 				break;
762 			else if ((((tmp >> 3) & 0x1) == 0x1) ||
763 				 (obs_err == 1))
764 				return -EIO;
765 		}
766 		/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
767 		writel(0x00003f7c, (&denali_pi[175]));
768 	}
769 	clrbits_le32(&denali_pi[80], 0x3 << 24);
770 
771 	return 0;
772 }
773 
774 static int data_training_rl(const struct chan_info *chan, u32 channel,
775 			    const struct rk3399_sdram_params *sdram_params)
776 {
777 	u32 *denali_pi = chan->pi->denali_pi;
778 	u32 i, tmp;
779 	u32 rank = sdram_params->ch[channel].rank;
780 
781 	for (i = 0; i < rank; i++) {
782 		select_per_cs_training_index(chan, i);
783 		/* PI_80 PI_RDLVL_EN:RW:16:2 */
784 		clrsetbits_le32(&denali_pi[80], 0x3 << 16, 0x2 << 16);
785 		/* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
786 		clrsetbits_le32(&denali_pi[74],
787 				(0x1 << 8) | (0x3 << 24),
788 				(0x1 << 8) | (i << 24));
789 
790 		/* Wait for training to complete */
791 		while (1) {
792 			/* PI_174 PI_INT_STATUS:RD:8:18 */
793 			tmp = readl(&denali_pi[174]) >> 8;
794 
795 			/*
796 			 * make sure status obs does not report an error bit
797 			 * PHY_46/174/302/430
798 			 *     phy_rdlvl_status_obs_X:16:8
799 			 */
800 			if ((((tmp >> 8) & 0x1) == 0x1) &&
801 			    (((tmp >> 13) & 0x1) == 0x1) &&
802 			    (((tmp >> 2) & 0x1) == 0x0))
803 				break;
804 			else if (((tmp >> 2) & 0x1) == 0x1)
805 				return -EIO;
806 		}
807 		/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
808 		writel(0x00003f7c, (&denali_pi[175]));
809 	}
810 	clrbits_le32(&denali_pi[80], 0x3 << 16);
811 
812 	return 0;
813 }
814 
815 static int data_training_wdql(const struct chan_info *chan, u32 channel,
816 			      const struct rk3399_sdram_params *sdram_params)
817 {
818 	u32 *denali_pi = chan->pi->denali_pi;
819 	u32 i, tmp;
820 	u32 rank = sdram_params->ch[channel].rank;
821 
822 	for (i = 0; i < rank; i++) {
823 		select_per_cs_training_index(chan, i);
824 		/*
825 		 * Disable PI_WDQLVL_VREF_EN before wdq leveling.
826 		 * PI_181 PI_WDQLVL_VREF_EN:RW:8:1
827 		 */
828 		clrbits_le32(&denali_pi[181], 0x1 << 8);
829 		/* PI_124 PI_WDQLVL_EN:RW:16:2 */
830 		clrsetbits_le32(&denali_pi[124], 0x3 << 16, 0x2 << 16);
831 		/* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
832 		clrsetbits_le32(&denali_pi[121],
833 				(0x1 << 8) | (0x3 << 16),
834 				(0x1 << 8) | (i << 16));
835 
836 		/* Wait for training to complete */
837 		while (1) {
838 			/* PI_174 PI_INT_STATUS:RD:8:18 */
839 			tmp = readl(&denali_pi[174]) >> 8;
840 			if ((((tmp >> 12) & 0x1) == 0x1) &&
841 			    (((tmp >> 13) & 0x1) == 0x1) &&
842 			    (((tmp >> 6) & 0x1) == 0x0))
843 				break;
844 			else if (((tmp >> 6) & 0x1) == 0x1)
845 				return -EIO;
846 		}
847 		/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
848 		writel(0x00003f7c, (&denali_pi[175]));
849 	}
850 	clrbits_le32(&denali_pi[124], 0x3 << 16);
851 
852 	return 0;
853 }
854 
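/*
 * Run the requested training steps on one channel. PI_FULL_TRAINING is
 * expanded into the set of steps the current DRAM type supports, and the
 * DQS pad RPULL bit is held set for the duration of training. Note that
 * errors from the individual steps are not propagated; this function always
 * returns 0.
 */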
855 static int data_training(const struct chan_info *chan, u32 channel,
856 			 const struct rk3399_sdram_params *sdram_params,
857 			 u32 training_flag)
858 {
859 	u32 *denali_phy = chan->publ->denali_phy;
860 
861 	/* PHY_927 PHY_PAD_DQS_DRIVE  RPULL offset_22 */
862 	setbits_le32(&denali_phy[927], (1 << 22));
863 
864 	if (training_flag == PI_FULL_TRAINING) {
865 		if (sdram_params->base.dramtype == LPDDR4) {
866 			training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
867 					PI_READ_GATE_TRAINING |
868 					PI_READ_LEVELING | PI_WDQ_LEVELING;
869 		} else if (sdram_params->base.dramtype == LPDDR3) {
870 			training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
871 					PI_READ_GATE_TRAINING;
872 		} else if (sdram_params->base.dramtype == DDR3) {
873 			training_flag = PI_WRITE_LEVELING |
874 					PI_READ_GATE_TRAINING |
875 					PI_READ_LEVELING;
876 		}
877 	}
878 
879 	/* ca training(LPDDR4,LPDDR3 support) */
880 	if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING)
881 		data_training_ca(chan, channel, sdram_params);
882 
883 	/* write leveling(LPDDR4,LPDDR3,DDR3 support) */
884 	if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING)
885 		data_training_wl(chan, channel, sdram_params);
886 
887 	/* read gate training(LPDDR4,LPDDR3,DDR3 support) */
888 	if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING)
889 		data_training_rg(chan, channel, sdram_params);
890 
891 	/* read leveling(LPDDR4,LPDDR3,DDR3 support) */
892 	if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING)
893 		data_training_rl(chan, channel, sdram_params);
894 
895 	/* wdq leveling(LPDDR4 support) */
896 	if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING)
897 		data_training_wdql(chan, channel, sdram_params);
898 
899 	/* PHY_927 PHY_PAD_DQS_DRIVE  RPULL offset_22 */
900 	clrbits_le32(&denali_phy[927], (1 << 22));
901 
902 	return 0;
903 }
904 
905 static void set_ddrconfig(const struct chan_info *chan,
906 			  const struct rk3399_sdram_params *sdram_params,
907 			  unsigned char channel, u32 ddrconfig)
908 {
909 	/* only need to set ddrconfig */
910 	struct rk3399_msch_regs *ddr_msch_regs = chan->msch;
911 	unsigned int cs0_cap = 0;
912 	unsigned int cs1_cap = 0;
913 
914 	cs0_cap = (1 << (sdram_params->ch[channel].cs0_row
915 			+ sdram_params->ch[channel].col
916 			+ sdram_params->ch[channel].bk
917 			+ sdram_params->ch[channel].bw - 20));
918 	if (sdram_params->ch[channel].rank > 1)
919 		cs1_cap = cs0_cap >> (sdram_params->ch[channel].cs0_row
920 				- sdram_params->ch[channel].cs1_row);
921 	if (sdram_params->ch[channel].row_3_4) {
922 		cs0_cap = cs0_cap * 3 / 4;
923 		cs1_cap = cs1_cap * 3 / 4;
924 	}
925 
926 	writel(ddrconfig | (ddrconfig << 8), &ddr_msch_regs->ddrconf);
927 	writel(((cs0_cap / 32) & 0xff) | (((cs1_cap / 32) & 0xff) << 8),
928 	       &ddr_msch_regs->ddrsize);
929 }
930 
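/*
 * Apply the cross-channel configuration: encode the per-channel geometry
 * into sys_reg (stored in PMU GRF os_reg2, where rockchip_sdram_size()
 * decodes it later), program the NOC/MSCH timing registers, the channel
 * stride in PMU SGRF soc_con4, and the reboot-hold/global-reset settings.
 */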
931 static void dram_all_config(struct dram_info *dram,
932 			    const struct rk3399_sdram_params *sdram_params)
933 {
934 	u32 sys_reg = 0;
935 	unsigned int channel, idx;
936 
937 	sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
938 	sys_reg |= (sdram_params->base.num_channels - 1)
939 		    << SYS_REG_NUM_CH_SHIFT;
940 	for (channel = 0, idx = 0;
941 	     (idx < sdram_params->base.num_channels) && (channel < 2);
942 	     channel++) {
943 		const struct rk3399_sdram_channel *info =
944 			&sdram_params->ch[channel];
945 		struct rk3399_msch_regs *ddr_msch_regs;
946 		const struct rk3399_msch_timings *noc_timing;
947 
948 		if (sdram_params->ch[channel].col == 0)
949 			continue;
950 		idx++;
951 		sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(channel);
952 		sys_reg |= 1 << SYS_REG_CHINFO_SHIFT(channel);
953 		sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(channel);
954 		sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(channel);
955 		sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(channel);
956 		sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(channel);
957 		sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(channel);
958 		sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(channel);
959 		sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(channel);
960 
961 		ddr_msch_regs = dram->chan[channel].msch;
962 		noc_timing = &sdram_params->ch[channel].noc_timings;
963 		writel(noc_timing->ddrtiminga0,
964 		       &ddr_msch_regs->ddrtiminga0);
965 		writel(noc_timing->ddrtimingb0,
966 		       &ddr_msch_regs->ddrtimingb0);
967 		writel(noc_timing->ddrtimingc0,
968 		       &ddr_msch_regs->ddrtimingc0);
969 		writel(noc_timing->devtodev0,
970 		       &ddr_msch_regs->devtodev0);
971 		writel(noc_timing->ddrmode,
972 		       &ddr_msch_regs->ddrmode);
973 
974 		/* rank 1 memory clock disable (dfi_dram_clk_disable = 1) */
975 		if (sdram_params->ch[channel].rank == 1)
976 			setbits_le32(&dram->chan[channel].pctl->denali_ctl[276],
977 				     1 << 17);
978 	}
979 
980 	writel(sys_reg, &dram->pmugrf->os_reg2);
981 	rk_clrsetreg(&dram->pmusgrf->soc_con4, 0x1f << 10,
982 		     sdram_params->base.stride << 10);
983 
984 	/* reboot hold register set */
985 	writel(PRESET_SGRF_HOLD(0) | PRESET_GPIO0_HOLD(1) |
986 		PRESET_GPIO1_HOLD(1),
987 		&dram->pmucru->pmucru_rstnhold_con[1]);
988 	clrsetbits_le32(&dram->cru->glb_rst_con, 0x3, 0x3);
989 }
990 
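/*
 * Request the switch to frequency index 1 through the CIC, polling
 * cic_status0 with a roughly 100ms budget for both the request and the
 * completion flags, then point PHY_896 at index 1 and re-run full data
 * training on every channel at the new operating point.
 */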
991 static int switch_to_phy_index1(struct dram_info *dram,
992 				 const struct rk3399_sdram_params *sdram_params)
993 {
994 	u32 channel;
995 	u32 *denali_phy;
996 	u32 ch_count = sdram_params->base.num_channels;
997 	int ret;
998 	int i = 0;
999 
1000 	writel(RK_CLRSETBITS(0x03 << 4 | 1 << 2 | 1,
1001 			     1 << 4 | 1 << 2 | 1),
1002 			&dram->cic->cic_ctrl0);
1003 	while (!(readl(&dram->cic->cic_status0) & (1 << 2))) {
1004 		mdelay(10);
1005 		i++;
1006 		if (i > 10) {
1007 			debug("index1 frequency change request timed out\n");
1008 			return -ETIME;
1009 		}
1010 	}
1011 
1012 	i = 0;
1013 	writel(RK_CLRSETBITS(1 << 1, 1 << 1), &dram->cic->cic_ctrl0);
1014 	while (!(readl(&dram->cic->cic_status0) & (1 << 0))) {
1015 		mdelay(10);
1016 		i++;
1017 		if (i > 10) {
1018 			debug("index1 frequency change completion timed out\n");
1019 			return -ETIME;
1020 		}
1021 	}
1022 
1023 	for (channel = 0; channel < ch_count; channel++) {
1024 		denali_phy = dram->chan[channel].publ->denali_phy;
1025 		clrsetbits_le32(&denali_phy[896], (0x3 << 8) | 1, 1 << 8);
1026 		ret = data_training(&dram->chan[channel], channel,
1027 				  sdram_params, PI_FULL_TRAINING);
1028 		if (ret) {
1029 			debug("index1 training failed\n");
1030 			return ret;
1031 		}
1032 	}
1033 
1034 	return 0;
1035 }
1036 
1037 static int sdram_init(struct dram_info *dram,
1038 		      const struct rk3399_sdram_params *sdram_params)
1039 {
1040 	unsigned char dramtype = sdram_params->base.dramtype;
1041 	unsigned int ddr_freq = sdram_params->base.ddr_freq;
1042 	int channel;
1043 
1044 	debug("Starting SDRAM initialization...\n");
1045 
1046 	if ((dramtype == DDR3 && ddr_freq > 933) ||
1047 	    (dramtype == LPDDR3 && ddr_freq > 933) ||
1048 	    (dramtype == LPDDR4 && ddr_freq > 800)) {
1049 		debug("SDRAM frequency is too high!\n");
1050 		return -E2BIG;
1051 	}
1052 
1053 	for (channel = 0; channel < 2; channel++) {
1054 		const struct chan_info *chan = &dram->chan[channel];
1055 		struct rk3399_ddr_publ_regs *publ = chan->publ;
1056 
1057 		phy_dll_bypass_set(publ, ddr_freq);
1058 
1059 		if (channel >= sdram_params->base.num_channels)
1060 			continue;
1061 
1062 		if (pctl_cfg(chan, channel, sdram_params) != 0) {
1063 			printf("pctl_cfg failed, reset\n");
1064 			return -EIO;
1065 		}
1066 
1067 		/* LPDDR2/LPDDR3 need to wait for DAI to complete, max 10us */
1068 		if (dramtype == LPDDR3)
1069 			udelay(10);
1070 
1071 		if (data_training(chan, channel,
1072 				  sdram_params, PI_FULL_TRAINING)) {
1073 			printf("SDRAM initialization failed, reset\n");
1074 			return -EIO;
1075 		}
1076 
1077 		set_ddrconfig(chan, sdram_params, channel,
1078 			      sdram_params->ch[channel].ddrconfig);
1079 	}
1080 	dram_all_config(dram, sdram_params);
1081 	switch_to_phy_index1(dram, sdram_params);
1082 
1083 	debug("Finished SDRAM initialization\n");
1084 	return 0;
1085 }
1086 
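/*
 * When OF_PLATDATA is not in use, the flattened "rockchip,sdram-params"
 * property is read straight into struct rk3399_sdram_params, which relies
 * on the property layout matching the struct layout; a regmap is also set
 * up over the controller register ranges used later by rk3399_dmc_init().
 */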
1087 static int rk3399_dmc_ofdata_to_platdata(struct udevice *dev)
1088 {
1089 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
1090 	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
1091 	int ret;
1092 
1093 	ret = dev_read_u32_array(dev, "rockchip,sdram-params",
1094 				 (u32 *)&plat->sdram_params,
1095 				 sizeof(plat->sdram_params) / sizeof(u32));
1096 	if (ret) {
1097 		printf("%s: Cannot read rockchip,sdram-params %d\n",
1098 		       __func__, ret);
1099 		return ret;
1100 	}
1101 	ret = regmap_init_mem(dev_ofnode(dev), &plat->map);
1102 	if (ret)
1103 		printf("%s: regmap failed %d\n", __func__, ret);
1104 
1105 #endif
1106 	return 0;
1107 }
1108 
1109 #if CONFIG_IS_ENABLED(OF_PLATDATA)
1110 static int conv_of_platdata(struct udevice *dev)
1111 {
1112 	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
1113 	struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
1114 	int ret;
1115 
1116 	ret = regmap_init_mem_platdata(dev, dtplat->reg,
1117 			ARRAY_SIZE(dtplat->reg) / 2,
1118 			&plat->map);
1119 	if (ret)
1120 		return ret;
1121 
1122 	return 0;
1123 }
1124 #endif
1125 
1126 static int rk3399_dmc_init(struct udevice *dev)
1127 {
1128 	struct dram_info *priv = dev_get_priv(dev);
1129 	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
1130 	int ret;
1131 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
1132 	struct rk3399_sdram_params *params = &plat->sdram_params;
1133 #else
1134 	struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
1135 	struct rk3399_sdram_params *params =
1136 					(void *)dtplat->rockchip_sdram_params;
1137 
1138 	ret = conv_of_platdata(dev);
1139 	if (ret)
1140 		return ret;
1141 #endif
1142 
1143 	priv->cic = syscon_get_first_range(ROCKCHIP_SYSCON_CIC);
1144 	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
1145 	priv->pmusgrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUSGRF);
1146 	priv->pmucru = rockchip_get_pmucru();
1147 	priv->cru = rockchip_get_cru();
1148 	priv->chan[0].pctl = regmap_get_range(plat->map, 0);
1149 	priv->chan[0].pi = regmap_get_range(plat->map, 1);
1150 	priv->chan[0].publ = regmap_get_range(plat->map, 2);
1151 	priv->chan[0].msch = regmap_get_range(plat->map, 3);
1152 	priv->chan[1].pctl = regmap_get_range(plat->map, 4);
1153 	priv->chan[1].pi = regmap_get_range(plat->map, 5);
1154 	priv->chan[1].publ = regmap_get_range(plat->map, 6);
1155 	priv->chan[1].msch = regmap_get_range(plat->map, 7);
1156 
1157 	debug("con reg %p %p %p %p %p %p %p %p\n",
1158 	      priv->chan[0].pctl, priv->chan[0].pi,
1159 	      priv->chan[0].publ, priv->chan[0].msch,
1160 	      priv->chan[1].pctl, priv->chan[1].pi,
1161 	      priv->chan[1].publ, priv->chan[1].msch);
1162 	debug("cru %p, cic %p, grf %p, sgrf %p, pmucru %p\n", priv->cru,
1163 	      priv->cic, priv->pmugrf, priv->pmusgrf, priv->pmucru);
1164 #if CONFIG_IS_ENABLED(OF_PLATDATA)
1165 	ret = clk_get_by_index_platdata(dev, 0, dtplat->clocks, &priv->ddr_clk);
1166 #else
1167 	ret = clk_get_by_index(dev, 0, &priv->ddr_clk);
1168 #endif
1169 	if (ret) {
1170 		printf("%s clk get failed %d\n", __func__, ret);
1171 		return ret;
1172 	}
1173 	ret = clk_set_rate(&priv->ddr_clk, params->base.ddr_freq * MHz);
1174 	if (ret < 0) {
1175 		printf("%s clk set failed %d\n", __func__, ret);
1176 		return ret;
1177 	}
1178 	ret = sdram_init(priv, params);
1179 	if (ret < 0) {
1180 		printf("%s DRAM init failed %d\n", __func__, ret);
1181 		return ret;
1182 	}
1183 
1184 	return 0;
1185 }
1186 #endif
1187 
1188 static int rk3399_dmc_probe(struct udevice *dev)
1189 {
1190 #ifdef CONFIG_SPL_BUILD
1191 	if (rk3399_dmc_init(dev))
1192 		return 0;
1193 #else
1194 	struct dram_info *priv = dev_get_priv(dev);
1195 
1196 	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
1197 	debug("%s: pmugrf=%p\n", __func__, priv->pmugrf);
1198 	priv->info.base = CONFIG_SYS_SDRAM_BASE;
1199 	priv->info.size = rockchip_sdram_size(
1200 			(phys_addr_t)&priv->pmugrf->os_reg2);
1201 #endif
1202 	return 0;
1203 }
1204 
1205 static int rk3399_dmc_get_info(struct udevice *dev, struct ram_info *info)
1206 {
1207 	struct dram_info *priv = dev_get_priv(dev);
1208 
1209 	*info = priv->info;
1210 
1211 	return 0;
1212 }
1213 
1214 static struct ram_ops rk3399_dmc_ops = {
1215 	.get_info = rk3399_dmc_get_info,
1216 };
1217 
1218 
1219 static const struct udevice_id rk3399_dmc_ids[] = {
1220 	{ .compatible = "rockchip,rk3399-dmc" },
1221 	{ }
1222 };
1223 
1224 U_BOOT_DRIVER(dmc_rk3399) = {
1225 	.name = "rockchip_rk3399_dmc",
1226 	.id = UCLASS_RAM,
1227 	.of_match = rk3399_dmc_ids,
1228 	.ops = &rk3399_dmc_ops,
1229 #ifdef CONFIG_SPL_BUILD
1230 	.ofdata_to_platdata = rk3399_dmc_ofdata_to_platdata,
1231 #endif
1232 	.probe = rk3399_dmc_probe,
1233 	.priv_auto_alloc_size = sizeof(struct dram_info),
1234 #ifdef CONFIG_SPL_BUILD
1235 	.platdata_auto_alloc_size = sizeof(struct rockchip_dmc_plat),
1236 #endif
1237 };
1238