/*
 * (C) Copyright 2015 Google, Inc
 * Copyright 2014 Rockchip Inc.
 *
 * SPDX-License-Identifier:     GPL-2.0
 *
 * Adapted from coreboot.
 */

#include <common.h>
#include <clk.h>
#include <dm.h>
#include <dt-structs.h>
#include <errno.h>
#include <ram.h>
#include <regmap.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/cru_rk3288.h>
#include <asm/arch/ddr_rk3288.h>
#include <asm/arch/grf_rk3288.h>
#include <asm/arch/pmu_rk3288.h>
#include <asm/arch/sdram.h>
#include <asm/arch/sdram_common.h>
#include <linux/err.h>
#include <power/regulator.h>
#include <power/rk8xx_pmic.h>

DECLARE_GLOBAL_DATA_PTR;

struct chan_info {
	struct rk3288_ddr_pctl *pctl;
	struct rk3288_ddr_publ *publ;
	struct rk3288_msch *msch;
};

struct dram_info {
	struct chan_info chan[2];
	struct ram_info info;
	struct clk ddr_clk;
	struct rk3288_cru *cru;
	struct rk3288_grf *grf;
	struct rk3288_sgrf *sgrf;
	struct rk3288_pmu *pmu;
	bool is_veyron;
};

struct rk3288_sdram_params {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3288_dmc of_plat;
#endif
	struct rk3288_sdram_channel ch[2];
	struct rk3288_sdram_pctl_timing pctl_timing;
	struct rk3288_sdram_phy_timing phy_timing;
	struct rk3288_base_params base;
	int num_channels;
	struct regmap *map;
};

const int ddrconf_table[] = {
	/* row	    col,bw */
	0,
	((1 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((2 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((3 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((4 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((1 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
	((2 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
	((3 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
	((1 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),
	((2 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),
	((3 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),
	0,
	0,
	0,
	0,
	((4 << 4) | 2),
};

#define TEST_PATTERN	0x5aa5f00f
#define DQS_GATE_TRAINING_ERROR_RANK0	(1 << 4)
#define DQS_GATE_TRAINING_ERROR_RANK1	(2 << 4)

#ifdef CONFIG_SPL_BUILD
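/* Copy @n bytes of 32-bit values from @src to the registers starting at @dest */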
static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
{
	int i;

	for (i = 0; i < n / sizeof(u32); i++) {
		writel(*src, dest);
		src++;
		dest++;
	}
}

static void ddr_reset(struct rk3288_cru *cru, u32 ch, u32 ctl, u32 phy)
{
	u32 phy_ctl_srstn_shift = 4 + 5 * ch;
	u32 ctl_psrstn_shift = 3 + 5 * ch;
	u32 ctl_srstn_shift = 2 + 5 * ch;
	u32 phy_psrstn_shift = 1 + 5 * ch;
	u32 phy_srstn_shift = 5 * ch;

	rk_clrsetreg(&cru->cru_softrst_con[10],
		     1 << phy_ctl_srstn_shift | 1 << ctl_psrstn_shift |
		     1 << ctl_srstn_shift | 1 << phy_psrstn_shift |
		     1 << phy_srstn_shift,
		     phy << phy_ctl_srstn_shift | ctl << ctl_psrstn_shift |
		     ctl << ctl_srstn_shift | phy << phy_psrstn_shift |
		     phy << phy_srstn_shift);
}

static void ddr_phy_ctl_reset(struct rk3288_cru *cru, u32 ch, u32 n)
{
	u32 phy_ctl_srstn_shift = 4 + 5 * ch;

	rk_clrsetreg(&cru->cru_softrst_con[10],
		     1 << phy_ctl_srstn_shift, n << phy_ctl_srstn_shift);
}

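/*
 * Reset the DDR PHY and protocol controller for @channel via the CRU
 * soft-reset bits, cycling the AC and DATX8 DLL soft-resets in between.
 */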
static void phy_pctrl_reset(struct rk3288_cru *cru,
			    struct rk3288_ddr_publ *publ,
			    int channel)
{
	int i;

	ddr_reset(cru, channel, 1, 1);
	udelay(1);
	clrbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
	for (i = 0; i < 4; i++)
		clrbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);

	udelay(10);
	setbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
	for (i = 0; i < 4; i++)
		setbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);

	udelay(10);
	ddr_reset(cru, channel, 1, 0);
	udelay(10);
	ddr_reset(cru, channel, 0, 0);
	udelay(10);
}

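/* Bypass the PHY DLLs for DDR clock rates of 250 MHz or below, else enable them */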
static void phy_dll_bypass_set(struct rk3288_ddr_publ *publ,
			       u32 freq)
{
	int i;

	if (freq <= 250000000) {
		if (freq <= 150000000)
			clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		else
			setbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		setbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
		for (i = 0; i < 4; i++)
			setbits_le32(&publ->datx8[i].dxdllcr,
				     DXDLLCR_DLLDIS);

		setbits_le32(&publ->pir, PIR_DLLBYP);
	} else {
		clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		clrbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
		for (i = 0; i < 4; i++) {
			clrbits_le32(&publ->datx8[i].dxdllcr,
				     DXDLLCR_DLLDIS);
		}

		clrbits_le32(&publ->pir, PIR_DLLBYP);
	}
}

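/* Set up the DFI (DDR PHY interface) state, timing and ODT registers */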
static void dfi_cfg(struct rk3288_ddr_pctl *pctl, u32 dramtype)
{
	writel(DFI_INIT_START, &pctl->dfistcfg0);
	writel(DFI_DRAM_CLK_SR_EN | DFI_DRAM_CLK_DPD_EN,
	       &pctl->dfistcfg1);
	writel(DFI_PARITY_INTR_EN | DFI_PARITY_EN, &pctl->dfistcfg2);
	writel(7 << TLP_RESP_TIME_SHIFT | LP_SR_EN | LP_PD_EN,
	       &pctl->dfilpcfg0);

	writel(2 << TCTRL_DELAY_TIME_SHIFT, &pctl->dfitctrldelay);
	writel(1 << TPHY_WRDATA_TIME_SHIFT, &pctl->dfitphywrdata);
	writel(0xf << TPHY_RDLAT_TIME_SHIFT, &pctl->dfitphyrdlat);
	writel(2 << TDRAM_CLK_DIS_TIME_SHIFT, &pctl->dfitdramclkdis);
	writel(2 << TDRAM_CLK_EN_TIME_SHIFT, &pctl->dfitdramclken);
	writel(1, &pctl->dfitphyupdtype0);

	/* cs0 and cs1 write ODT enable */
	writel((RANK0_ODT_WRITE_SEL | RANK1_ODT_WRITE_SEL),
	       &pctl->dfiodtcfg);
	/* ODT write length */
	writel(7 << ODT_LEN_BL8_W_SHIFT, &pctl->dfiodtcfg1);
	/* phyupd and ctrlupd disabled */
	writel(0, &pctl->dfiupdcfg);
}

static void ddr_set_enable(struct rk3288_grf *grf, uint channel, bool enable)
{
	uint val = 0;

	if (enable) {
		val = 1 << (channel ? DDR1_16BIT_EN_SHIFT :
				DDR0_16BIT_EN_SHIFT);
	}
	rk_clrsetreg(&grf->soc_con0,
		     1 << (channel ? DDR1_16BIT_EN_SHIFT : DDR0_16BIT_EN_SHIFT),
		     val);
}

static void ddr_set_ddr3_mode(struct rk3288_grf *grf, uint channel,
			      bool ddr3_mode)
{
	uint mask, val;

	mask = 1 << (channel ? MSCH1_MAINDDR3_SHIFT : MSCH0_MAINDDR3_SHIFT);
	val = ddr3_mode << (channel ? MSCH1_MAINDDR3_SHIFT :
					MSCH0_MAINDDR3_SHIFT);
	rk_clrsetreg(&grf->soc_con0, mask, val);
}

static void ddr_set_en_bst_odt(struct rk3288_grf *grf, uint channel,
			       bool enable, bool enable_bst, bool enable_odt)
{
	uint mask;
	bool disable_bst = !enable_bst;

	mask = channel ?
		(1 << LPDDR3_EN1_SHIFT | 1 << UPCTL1_BST_DIABLE_SHIFT |
			1 << UPCTL1_LPDDR3_ODT_EN_SHIFT) :
		(1 << LPDDR3_EN0_SHIFT | 1 << UPCTL0_BST_DIABLE_SHIFT |
			1 << UPCTL0_LPDDR3_ODT_EN_SHIFT);
	rk_clrsetreg(&grf->soc_con2, mask,
		     enable << (channel ? LPDDR3_EN1_SHIFT : LPDDR3_EN0_SHIFT) |
		     disable_bst << (channel ? UPCTL1_BST_DIABLE_SHIFT :
				UPCTL0_BST_DIABLE_SHIFT) |
		     enable_odt << (channel ? UPCTL1_LPDDR3_ODT_EN_SHIFT :
				UPCTL0_LPDDR3_ODT_EN_SHIFT));
}

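/*
 * Program the protocol controller: copy the pctl timings, set up the
 * read/write latencies and mcfg, and select the DRAM mode in the GRF.
 */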
static void pctl_cfg(int channel, struct rk3288_ddr_pctl *pctl,
		     struct rk3288_sdram_params *sdram_params,
		     struct rk3288_grf *grf)
{
	unsigned int burstlen;

	burstlen = (sdram_params->base.noc_timing >> 18) & 0x7;
	copy_to_reg(&pctl->togcnt1u, &sdram_params->pctl_timing.togcnt1u,
		    sizeof(sdram_params->pctl_timing));
	switch (sdram_params->base.dramtype) {
	case LPDDR3:
		writel(sdram_params->pctl_timing.tcl - 1,
		       &pctl->dfitrddataen);
		writel(sdram_params->pctl_timing.tcwl,
		       &pctl->dfitphywrlat);
		burstlen >>= 1;
		writel(LPDDR2_S4 | 0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT |
		       LPDDR2_EN | burstlen << BURSTLENGTH_SHIFT |
		       (6 - 4) << TFAW_SHIFT | PD_EXIT_FAST |
		       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
		       &pctl->mcfg);
		ddr_set_ddr3_mode(grf, channel, false);
		ddr_set_enable(grf, channel, true);
		ddr_set_en_bst_odt(grf, channel, true, false,
				   sdram_params->base.odt);
		break;
	case DDR3:
		if (sdram_params->phy_timing.mr[1] & DDR3_DLL_DISABLE) {
			writel(sdram_params->pctl_timing.tcl - 3,
			       &pctl->dfitrddataen);
		} else {
			writel(sdram_params->pctl_timing.tcl - 2,
			       &pctl->dfitrddataen);
		}
		writel(sdram_params->pctl_timing.tcwl - 1,
		       &pctl->dfitphywrlat);
		writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT | DDR3_EN |
		       DDR2_DDR3_BL_8 | (6 - 4) << TFAW_SHIFT | PD_EXIT_SLOW |
		       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
		       &pctl->mcfg);
		ddr_set_ddr3_mode(grf, channel, true);
		ddr_set_enable(grf, channel, true);

		ddr_set_en_bst_odt(grf, channel, false, true, false);
		break;
	}

	setbits_le32(&pctl->scfg, 1);
}

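/* Program the PUBL PHY timing registers and the memory scheduler (MSCH) */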
static void phy_cfg(const struct chan_info *chan, int channel,
		    struct rk3288_sdram_params *sdram_params)
{
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_msch *msch = chan->msch;
	uint ddr_freq_mhz = sdram_params->base.ddr_freq / 1000000;
	u32 dinit2, tmp;
	int i;

	dinit2 = DIV_ROUND_UP(ddr_freq_mhz * 200000, 1000);
	/* DDR PHY Timing */
	copy_to_reg(&publ->dtpr[0], &sdram_params->phy_timing.dtpr0,
		    sizeof(sdram_params->phy_timing));
	writel(sdram_params->base.noc_timing, &msch->ddrtiming);
	writel(0x3f, &msch->readlatency);
	writel(sdram_params->base.noc_activate, &msch->activate);
	writel(2 << BUSWRTORD_SHIFT | 2 << BUSRDTOWR_SHIFT |
	       1 << BUSRDTORD_SHIFT, &msch->devtodev);
	writel(DIV_ROUND_UP(ddr_freq_mhz * 5120, 1000) << PRT_DLLLOCK_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 50, 1000) << PRT_DLLSRST_SHIFT |
	       8 << PRT_ITMSRST_SHIFT, &publ->ptr[0]);
	writel(DIV_ROUND_UP(ddr_freq_mhz * 500000, 1000) << PRT_DINIT0_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 400, 1000) << PRT_DINIT1_SHIFT,
	       &publ->ptr[1]);
	writel(min(dinit2, 0x1ffffU) << PRT_DINIT2_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 1000, 1000) << PRT_DINIT3_SHIFT,
	       &publ->ptr[2]);

	switch (sdram_params->base.dramtype) {
	case LPDDR3:
		clrsetbits_le32(&publ->pgcr, 0x1F,
				0 << PGCR_DFTLMT_SHIFT |
				0 << PGCR_DFTCMP_SHIFT |
				1 << PGCR_DQSCFG_SHIFT |
				0 << PGCR_ITMDMD_SHIFT);
		/* DDRMODE select LPDDR3 */
		clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
				DDRMD_LPDDR2_LPDDR3 << DDRMD_SHIFT);
		clrsetbits_le32(&publ->dxccr,
				DQSNRES_MASK << DQSNRES_SHIFT |
				DQSRES_MASK << DQSRES_SHIFT,
				4 << DQSRES_SHIFT | 0xc << DQSNRES_SHIFT);
		tmp = readl(&publ->dtpr[1]);
		tmp = ((tmp >> TDQSCKMAX_SHIFT) & TDQSCKMAX_MASK) -
			((tmp >> TDQSCK_SHIFT) & TDQSCK_MASK);
		clrsetbits_le32(&publ->dsgcr,
				DQSGE_MASK << DQSGE_SHIFT |
				DQSGX_MASK << DQSGX_SHIFT,
				tmp << DQSGE_SHIFT | tmp << DQSGX_SHIFT);
		break;
	case DDR3:
		clrbits_le32(&publ->pgcr, 0x1f);
		clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
				DDRMD_DDR3 << DDRMD_SHIFT);
		break;
	}
	if (sdram_params->base.odt) {
		/* dynamic RTT enable */
		for (i = 0; i < 4; i++)
			setbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
	} else {
		/* dynamic RTT disable */
		for (i = 0; i < 4; i++)
			clrbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
	}
}

static void phy_init(struct rk3288_ddr_publ *publ)
{
	setbits_le32(&publ->pir, PIR_INIT | PIR_DLLSRST
		| PIR_DLLLOCK | PIR_ZCAL | PIR_ITMSRST | PIR_CLRSR);
	udelay(1);
	while ((readl(&publ->pgsr) &
		(PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE)) !=
		(PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE))
		;
}

static void send_command(struct rk3288_ddr_pctl *pctl, u32 rank,
			 u32 cmd, u32 arg)
{
	writel((START_CMD | (rank << 20) | arg | cmd), &pctl->mcmd);
	udelay(1);
	while (readl(&pctl->mcmd) & START_CMD)
		;
}

static inline void send_command_op(struct rk3288_ddr_pctl *pctl,
				   u32 rank, u32 cmd, u32 ma, u32 op)
{
	send_command(pctl, rank, cmd, (ma & LPDDR2_MA_MASK) << LPDDR2_MA_SHIFT |
		     (op & LPDDR2_OP_MASK) << LPDDR2_OP_SHIFT);
}

static void memory_init(struct rk3288_ddr_publ *publ,
			u32 dramtype)
{
	setbits_le32(&publ->pir,
		     (PIR_INIT | PIR_DRAMINIT | PIR_LOCKBYP
		      | PIR_ZCALBYP | PIR_CLRSR | PIR_ICPC
		      | (dramtype == DDR3 ? PIR_DRAMRST : 0)));
	udelay(1);
	while ((readl(&publ->pgsr) & (PGSR_IDONE | PGSR_DLDONE))
		!= (PGSR_IDONE | PGSR_DLDONE))
		;
}

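/* Step the protocol controller state machine into the CONFIG state */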
static void move_to_config_state(struct rk3288_ddr_publ *publ,
				 struct rk3288_ddr_pctl *pctl)
{
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MSK;

		switch (state) {
		case LOW_POWER:
			writel(WAKEUP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK)
				!= ACCESS)
				;
			/* wait for DLL lock */
			while ((readl(&publ->pgsr) & PGSR_DLDONE)
				!= PGSR_DLDONE)
				;
			/*
			 * If in low-power state we must wake up first and
			 * then enter config state, so no break here.
			 */
		case ACCESS:
			/* fall through */
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
				;
			break;
		case CONFIG:
			return;
		default:
			break;
		}
	}
}

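/*
 * Select 16-bit (n == 1) or 32-bit (n == 2) bus width for @channel by
 * enabling or disabling the upper two data bytes and their DLLs.
 */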
static void set_bandwidth_ratio(const struct chan_info *chan, int channel,
				u32 n, struct rk3288_grf *grf)
{
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_msch *msch = chan->msch;

	if (n == 1) {
		setbits_le32(&pctl->ppcfg, 1);
		rk_setreg(&grf->soc_con0, 1 << (8 + channel));
		setbits_le32(&msch->ddrtiming, 1 << 31);
		/* Data byte disable */
		clrbits_le32(&publ->datx8[2].dxgcr, 1);
		clrbits_le32(&publ->datx8[3].dxgcr, 1);
		/* disable DLL */
		setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
		setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
	} else {
		clrbits_le32(&pctl->ppcfg, 1);
		rk_clrreg(&grf->soc_con0, 1 << (8 + channel));
		clrbits_le32(&msch->ddrtiming, 1 << 31);
		/* Data byte enable */
		setbits_le32(&publ->datx8[2].dxgcr, 1);
		setbits_le32(&publ->datx8[3].dxgcr, 1);

		/* enable DLL */
		clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
		clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
		/* reset DLL */
		clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
		clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
		udelay(10);
		setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
		setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
	}
	setbits_le32(&pctl->dfistcfg0, 1 << 2);
}

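/*
 * Run DQS gate training (QSTRN) and read valid training (RVTRN) on all
 * active data bytes. Returns 0 on success, -1 on a training error.
 */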
static int data_training(const struct chan_info *chan, int channel,
			 struct rk3288_sdram_params *sdram_params)
{
	unsigned int j;
	int ret = 0;
	u32 rank;
	int i;
	u32 step[2] = { PIR_QSTRN, PIR_RVTRN };
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_ddr_pctl *pctl = chan->pctl;

	/* disable auto refresh */
	writel(0, &pctl->trefi);

	if (sdram_params->base.dramtype != LPDDR3)
		setbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);
	rank = sdram_params->ch[channel].rank | 1;
	for (j = 0; j < ARRAY_SIZE(step); j++) {
		/*
		 * trigger QSTRN and RVTRN
		 * clear DTDONE status
		 */
		setbits_le32(&publ->pir, PIR_CLRSR);

		/* trigger DTT */
		setbits_le32(&publ->pir,
			     PIR_INIT | step[j] | PIR_LOCKBYP | PIR_ZCALBYP |
			     PIR_CLRSR);
		udelay(1);
		/* wait for the echo byte DTDONE */
		while ((readl(&publ->datx8[0].dxgsr[0]) & rank)
			!= rank)
			;
		while ((readl(&publ->datx8[1].dxgsr[0]) & rank)
			!= rank)
			;
		if (!(readl(&pctl->ppcfg) & 1)) {
			while ((readl(&publ->datx8[2].dxgsr[0])
				& rank) != rank)
				;
			while ((readl(&publ->datx8[3].dxgsr[0])
				& rank) != rank)
				;
		}
		if (readl(&publ->pgsr) &
		    (PGSR_DTERR | PGSR_RVERR | PGSR_RVEIRR)) {
			ret = -1;
			break;
		}
	}
	/* send a few auto-refreshes to make up for those lost during DTT */
	for (i = 0; i < (rank > 1 ? 8 : 4); i++)
		send_command(pctl, rank, REF_CMD, 0);

	if (sdram_params->base.dramtype != LPDDR3)
		clrbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);

	/* resume auto refresh */
	writel(sdram_params->pctl_timing.trefi, &pctl->trefi);

	return ret;
}

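/* Step the protocol controller state machine into the ACCESS state */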
static void move_to_access_state(const struct chan_info *chan)
{
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MSK;

		switch (state) {
		case LOW_POWER:
			if (((readl(&pctl->stat) >> LP_TRIG_SHIFT) &
					LP_TRIG_MASK) == 1)
				return;

			writel(WAKEUP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != ACCESS)
				;
			/* wait for DLL lock */
			while ((readl(&publ->pgsr) & PGSR_DLDONE)
				!= PGSR_DLDONE)
				;
			break;
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
				;
			/* fall through */
		case CONFIG:
			writel(GO_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) == CONFIG)
				;
			break;
		case ACCESS:
			return;
		default:
			break;
		}
	}
}

static void dram_cfg_rbc(const struct chan_info *chan, u32 chnum,
			 struct rk3288_sdram_params *sdram_params)
{
	struct rk3288_ddr_publ *publ = chan->publ;

	if (sdram_params->ch[chnum].bk == 3)
		clrsetbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT,
				1 << PDQ_SHIFT);
	else
		clrbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT);

	writel(sdram_params->base.ddrconfig, &chan->msch->ddrconf);
}

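/*
 * Record the detected DRAM geometry in PMU SYS_REG2, apply each channel's
 * ddrconfig, and program the channel stride in the secure GRF.
 */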
static void dram_all_config(const struct dram_info *dram,
			    struct rk3288_sdram_params *sdram_params)
{
	unsigned int chan;
	u32 sys_reg = 0;

	sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
	sys_reg |= (sdram_params->num_channels - 1) << SYS_REG_NUM_CH_SHIFT;
	for (chan = 0; chan < sdram_params->num_channels; chan++) {
		const struct rk3288_sdram_channel *info =
			&sdram_params->ch[chan];

		sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(chan);
		sys_reg |= 1 << SYS_REG_CHINFO_SHIFT(chan);
		sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(chan);
		sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(chan);
		sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(chan);
		sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(chan);
		sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(chan);
		sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(chan);
		sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(chan);

		dram_cfg_rbc(&dram->chan[chan], chan, sdram_params);
	}
	writel(sys_reg, &dram->pmu->sys_reg[2]);
	rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, sdram_params->base.stride);
}

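/*
 * Use the data-training result to detect the rank count and bus width of
 * @channel, then retrain with the detected settings.
 */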
static int sdram_rank_bw_detect(struct dram_info *dram, int channel,
				struct rk3288_sdram_params *sdram_params)
{
	int reg;
	int need_training = 0;
	const struct chan_info *chan = &dram->chan[channel];
	struct rk3288_ddr_publ *publ = chan->publ;

	if (data_training(chan, channel, sdram_params) < 0) {
		reg = readl(&publ->datx8[0].dxgsr[0]);
		/* Check the result for rank 0 */
		if ((channel == 0) && (reg & DQS_GATE_TRAINING_ERROR_RANK0)) {
			debug("data training failed!\n");
			return -EIO;
		} else if ((channel == 1) &&
			   (reg & DQS_GATE_TRAINING_ERROR_RANK0)) {
			sdram_params->num_channels = 1;
		}

		/* Check the result for rank 1 */
		if (reg & DQS_GATE_TRAINING_ERROR_RANK1) {
			sdram_params->ch[channel].rank = 1;
			clrsetbits_le32(&publ->pgcr, 0xF << 18,
					sdram_params->ch[channel].rank << 18);
			need_training = 1;
		}
		reg = readl(&publ->datx8[2].dxgsr[0]);
		if (reg & (1 << 4)) {
			sdram_params->ch[channel].bw = 1;
			set_bandwidth_ratio(chan, channel,
					    sdram_params->ch[channel].bw,
					    dram->grf);
			need_training = 1;
		}
	}
	/* Assume the die bit width is the same as the chip bit width */
	sdram_params->ch[channel].dbw = sdram_params->ch[channel].bw;

	if (need_training &&
	    (data_training(chan, channel, sdram_params) < 0)) {
		if (sdram_params->base.dramtype == LPDDR3) {
			ddr_phy_ctl_reset(dram->cru, channel, 1);
			udelay(10);
			ddr_phy_ctl_reset(dram->cru, channel, 0);
			udelay(10);
		}
		debug("2nd data training failed!\n");
		return -EIO;
	}

	return 0;
}

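/*
 * Detect the column and row count of @channel by writing a test pattern
 * at power-of-two address offsets and checking for aliasing back to the
 * base of SDRAM.
 */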
static int sdram_col_row_detect(struct dram_info *dram, int channel,
				struct rk3288_sdram_params *sdram_params)
{
	int row, col;
	unsigned int addr;
	const struct chan_info *chan = &dram->chan[channel];
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	struct rk3288_ddr_publ *publ = chan->publ;
	int ret = 0;

	/* Detect col */
	for (col = 11; col >= 9; col--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		addr = CONFIG_SYS_SDRAM_BASE +
			(1 << (col + sdram_params->ch[channel].bw - 1));
		writel(TEST_PATTERN, addr);
		if ((readl(addr) == TEST_PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (col == 8) {
		printf("Col detect error\n");
		ret = -EINVAL;
		goto out;
	} else {
		sdram_params->ch[channel].col = col;
	}

	move_to_config_state(publ, pctl);
	writel(4, &chan->msch->ddrconf);
	move_to_access_state(chan);
	/* Detect row */
	for (row = 16; row >= 12; row--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		addr = CONFIG_SYS_SDRAM_BASE + (1 << (row + 15 - 1));
		writel(TEST_PATTERN, addr);
		if ((readl(addr) == TEST_PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (row == 11) {
		printf("Row detect error\n");
		ret = -EINVAL;
	} else {
		sdram_params->ch[channel].cs1_row = row;
		sdram_params->ch[channel].row_3_4 = 0;
		debug("chn %d col %d, row %d\n", channel, col, row);
		sdram_params->ch[channel].cs0_row = row;
	}

out:
	return ret;
}

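/* Look up the NIU ddrconfig index matching channel 0's col/row/bw layout */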
static int sdram_get_niu_config(struct rk3288_sdram_params *sdram_params)
{
	int i, tmp, size, ret = 0;

	tmp = sdram_params->ch[0].col - 9;
	tmp -= (sdram_params->ch[0].bw == 2) ? 0 : 1;
	tmp |= ((sdram_params->ch[0].cs0_row - 12) << 4);
	size = ARRAY_SIZE(ddrconf_table);
	for (i = 0; i < size; i++)
		if (tmp == ddrconf_table[i])
			break;
	if (i >= size) {
		printf("NIU config not found\n");
		ret = -EINVAL;
	} else {
		sdram_params->base.ddrconfig = i;
	}

	return ret;
}

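/* Derive the channel-stride setting from the total capacity in MB */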
static int sdram_get_stride(struct rk3288_sdram_params *sdram_params)
{
	int stride = -1;
	int ret = 0;
	long cap = sdram_params->num_channels * (1u <<
			(sdram_params->ch[0].cs0_row +
			 sdram_params->ch[0].col +
			 (sdram_params->ch[0].rank - 1) +
			 sdram_params->ch[0].bw +
			 3 - 20));

	switch (cap) {
	case 512:
		stride = 0;
		break;
	case 1024:
		stride = 5;
		break;
	case 2048:
		stride = 9;
		break;
	case 4096:
		stride = 0xd;
		break;
	default:
		stride = -1;
		printf("could not find correct stride, capacity error!\n");
		ret = -EINVAL;
		break;
	}
	sdram_params->base.stride = stride;

	return ret;
}

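/*
 * Bring up both DDR channels: reset and configure the PHY and protocol
 * controller, run data training, then detect the rank, bus width, column
 * and row configuration. Hangs on a fatal detection error.
 */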
static int sdram_init(struct dram_info *dram,
		      struct rk3288_sdram_params *sdram_params)
{
	int channel;
	int zqcr;
	int ret;

	debug("%s start\n", __func__);
	if ((sdram_params->base.dramtype == DDR3 &&
	     sdram_params->base.ddr_freq > 800000000) ||
	    (sdram_params->base.dramtype == LPDDR3 &&
	     sdram_params->base.ddr_freq > 533000000)) {
		debug("SDRAM frequency is too high!\n");
		return -E2BIG;
	}

	debug("ddr clk dpll\n");
	ret = clk_set_rate(&dram->ddr_clk, sdram_params->base.ddr_freq);
	debug("ret=%d\n", ret);
	if (ret) {
		debug("Could not set DDR clock\n");
		return ret;
	}

	for (channel = 0; channel < 2; channel++) {
		const struct chan_info *chan = &dram->chan[channel];
		struct rk3288_ddr_pctl *pctl = chan->pctl;
		struct rk3288_ddr_publ *publ = chan->publ;

		/* map all the 4GB space to the current channel */
		if (channel)
			rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, 0x17);
		else
			rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, 0x1a);
		phy_pctrl_reset(dram->cru, publ, channel);
		phy_dll_bypass_set(publ, sdram_params->base.ddr_freq);

		dfi_cfg(pctl, sdram_params->base.dramtype);

		pctl_cfg(channel, pctl, sdram_params, dram->grf);

		phy_cfg(chan, channel, sdram_params);

		phy_init(publ);

		writel(POWER_UP_START, &pctl->powctl);
		while (!(readl(&pctl->powstat) & POWER_UP_DONE))
			;

		memory_init(publ, sdram_params->base.dramtype);
		move_to_config_state(publ, pctl);

		if (sdram_params->base.dramtype == LPDDR3) {
			send_command(pctl, 3, DESELECT_CMD, 0);
			udelay(1);
			send_command(pctl, 3, PREA_CMD, 0);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 63, 0xfc);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 1,
					sdram_params->phy_timing.mr[1]);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 2,
					sdram_params->phy_timing.mr[2]);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 3,
					sdram_params->phy_timing.mr[3]);
			udelay(1);
		}

		/* Use 32-bit bus width for detection */
		sdram_params->ch[channel].bw = 2;
		set_bandwidth_ratio(chan, channel,
				    sdram_params->ch[channel].bw, dram->grf);
		/*
		 * Set the chip selects; use n = 3 for detection:
		 * CS0: n = 1
		 * CS1: n = 2
		 * CS0 & CS1: n = 3
		 */
		sdram_params->ch[channel].rank = 2;
		clrsetbits_le32(&publ->pgcr, 0xF << 18,
				(sdram_params->ch[channel].rank | 1) << 18);

		/* DS = 40 ohm, ODT = 155 ohm */
		zqcr = 1 << ZDEN_SHIFT | 2 << PU_ONDIE_SHIFT |
			2 << PD_ONDIE_SHIFT | 0x19 << PU_OUTPUT_SHIFT |
			0x19 << PD_OUTPUT_SHIFT;
		writel(zqcr, &publ->zq1cr[0]);
		writel(zqcr, &publ->zq0cr[0]);

		if (sdram_params->base.dramtype == LPDDR3) {
			/* LPDDR2/LPDDR3 must wait for DAI to complete, max 10us */
			udelay(10);
			send_command_op(pctl,
					sdram_params->ch[channel].rank | 1,
					MRS_CMD, 11,
					sdram_params->base.odt ? 3 : 0);
			if (channel == 0) {
				writel(0, &pctl->mrrcfg0);
				send_command_op(pctl, 1, MRR_CMD, 8, 0);
				/* S8 */
				if ((readl(&pctl->mrrstat0) & 0x3) != 3) {
					debug("failed!\n");
					return -EREMOTEIO;
				}
			}
		}

		/* Detect the rank and bit-width with data training */
		sdram_rank_bw_detect(dram, channel, sdram_params);

		if (sdram_params->base.dramtype == LPDDR3) {
			u32 i;

			writel(0, &pctl->mrrcfg0);
			for (i = 0; i < 17; i++)
				send_command_op(pctl, 1, MRR_CMD, i, 0);
		}
		writel(15, &chan->msch->ddrconf);
		move_to_access_state(chan);
		/* DDR3 and LPDDR3 always have 8 banks, no need to detect */
		sdram_params->ch[channel].bk = 3;
		/* Detect the column and row count */
		ret = sdram_col_row_detect(dram, channel, sdram_params);
		if (ret)
			goto error;
	}
	/* Find NIU DDR configuration */
	ret = sdram_get_niu_config(sdram_params);
	if (ret)
		goto error;
	/* Find stride setting */
	ret = sdram_get_stride(sdram_params);
	if (ret)
		goto error;

	dram_all_config(dram, sdram_params);
	debug("%s done\n", __func__);

	return 0;
error:
	printf("DRAM init failed!\n");
	hang();
}

# ifdef CONFIG_ROCKCHIP_FAST_SPL
static int veyron_init(struct dram_info *priv)
{
	struct udevice *pmic;
	int ret;

	ret = uclass_first_device_err(UCLASS_PMIC, &pmic);
	if (ret)
		return ret;

	/* Slowly raise to max CPU voltage to prevent overshoot */
	ret = rk8xx_spl_configure_buck(pmic, 1, 1200000);
	if (ret)
		return ret;
	udelay(175); /* Must wait for voltage to stabilize, 2mV/us */
	ret = rk8xx_spl_configure_buck(pmic, 1, 1400000);
	if (ret)
		return ret;
	udelay(100); /* Must wait for voltage to stabilize, 2mV/us */

	rk3288_clk_configure_cpu(priv->cru, priv->grf);

	return 0;
}
# endif

static int setup_sdram(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);
	struct rk3288_sdram_params *params = dev_get_platdata(dev);

# ifdef CONFIG_ROCKCHIP_FAST_SPL
	if (priv->is_veyron) {
		int ret;

		ret = veyron_init(priv);
		if (ret)
			return ret;
	}
# endif

	return sdram_init(priv, params);
}

static int rk3288_dmc_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3288_sdram_params *params = dev_get_platdata(dev);
	int ret;

	/* RK3288 supports dual channels; default to 2 */
	params->num_channels = 2;
	ret = dev_read_u32_array(dev, "rockchip,pctl-timing",
				 (u32 *)&params->pctl_timing,
				 sizeof(params->pctl_timing) / sizeof(u32));
	if (ret) {
		debug("%s: Cannot read rockchip,pctl-timing\n", __func__);
		return -EINVAL;
	}
	ret = dev_read_u32_array(dev, "rockchip,phy-timing",
				 (u32 *)&params->phy_timing,
				 sizeof(params->phy_timing) / sizeof(u32));
	if (ret) {
		debug("%s: Cannot read rockchip,phy-timing\n", __func__);
		return -EINVAL;
	}
	ret = dev_read_u32_array(dev, "rockchip,sdram-params",
				 (u32 *)&params->base,
				 sizeof(params->base) / sizeof(u32));
	if (ret) {
		debug("%s: Cannot read rockchip,sdram-params\n", __func__);
		return -EINVAL;
	}
#ifdef CONFIG_ROCKCHIP_FAST_SPL
	struct dram_info *priv = dev_get_priv(dev);

	priv->is_veyron = !fdt_node_check_compatible(gd->fdt_blob, 0,
						     "google,veyron");
#endif
	ret = regmap_init_mem(dev, &params->map);
	if (ret)
		return ret;
#endif

	return 0;
}
#endif /* CONFIG_SPL_BUILD */

#if CONFIG_IS_ENABLED(OF_PLATDATA)
static int conv_of_platdata(struct udevice *dev)
{
	struct rk3288_sdram_params *plat = dev_get_platdata(dev);
	struct dtd_rockchip_rk3288_dmc *of_plat = &plat->of_plat;
	int ret;

	memcpy(&plat->pctl_timing, of_plat->rockchip_pctl_timing,
	       sizeof(plat->pctl_timing));
	memcpy(&plat->phy_timing, of_plat->rockchip_phy_timing,
	       sizeof(plat->phy_timing));
	memcpy(&plat->base, of_plat->rockchip_sdram_params, sizeof(plat->base));
	/* RK3288 supports dual channels; default to 2 */
	plat->num_channels = 2;
	ret = regmap_init_mem_platdata(dev, of_plat->reg,
				       ARRAY_SIZE(of_plat->reg) / 2,
				       &plat->map);
	if (ret)
		return ret;

	return 0;
}
#endif

static int rk3288_dmc_probe(struct udevice *dev)
{
#ifdef CONFIG_SPL_BUILD
	struct rk3288_sdram_params *plat = dev_get_platdata(dev);
	struct udevice *dev_clk;
	struct regmap *map;
	int ret;
#endif
	struct dram_info *priv = dev_get_priv(dev);

	priv->pmu = syscon_get_first_range(ROCKCHIP_SYSCON_PMU);
#ifdef CONFIG_SPL_BUILD
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	ret = conv_of_platdata(dev);
	if (ret)
		return ret;
#endif
	map = syscon_get_regmap_by_driver_data(ROCKCHIP_SYSCON_NOC);
	if (IS_ERR(map))
		return PTR_ERR(map);
	priv->chan[0].msch = regmap_get_range(map, 0);
	priv->chan[1].msch = (struct rk3288_msch *)
			(regmap_get_range(map, 0) + 0x80);

	priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
	priv->sgrf = syscon_get_first_range(ROCKCHIP_SYSCON_SGRF);

	priv->chan[0].pctl = regmap_get_range(plat->map, 0);
	priv->chan[0].publ = regmap_get_range(plat->map, 1);
	priv->chan[1].pctl = regmap_get_range(plat->map, 2);
	priv->chan[1].publ = regmap_get_range(plat->map, 3);

	ret = rockchip_get_clk(&dev_clk);
	if (ret)
		return ret;
	priv->ddr_clk.id = CLK_DDR;
	ret = clk_request(dev_clk, &priv->ddr_clk);
	if (ret)
		return ret;

	priv->cru = rockchip_get_cru();
	if (IS_ERR(priv->cru))
		return PTR_ERR(priv->cru);
	ret = setup_sdram(dev);
	if (ret)
		return ret;
#else
	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size = rockchip_sdram_size(
			(phys_addr_t)&priv->pmu->sys_reg[2]);
#endif

	return 0;
}

static int rk3288_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}

static struct ram_ops rk3288_dmc_ops = {
	.get_info = rk3288_dmc_get_info,
};

static const struct udevice_id rk3288_dmc_ids[] = {
	{ .compatible = "rockchip,rk3288-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_rk3288) = {
	.name = "rockchip_rk3288_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3288_dmc_ids,
	.ops = &rk3288_dmc_ops,
#ifdef CONFIG_SPL_BUILD
	.ofdata_to_platdata = rk3288_dmc_ofdata_to_platdata,
#endif
	.probe = rk3288_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_SPL_BUILD
	.platdata_auto_alloc_size = sizeof(struct rk3288_sdram_params),
#endif
};