// SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
/*
 * Copyright (C) 2018, STMicroelectronics - All Rights Reserved
 */

#include <common.h>
#include <clk.h>
#include <ram.h>
#include <reset.h>
#include <timer.h>
#include <asm/io.h>
#include <asm/arch/ddr.h>
#include <linux/iopoll.h>
#include "stm32mp1_ddr.h"
#include "stm32mp1_ddr_regs.h"

#define RCC_DDRITFCR		0xD8

#define RCC_DDRITFCR_DDRCAPBRST		(BIT(14))
#define RCC_DDRITFCR_DDRCAXIRST		(BIT(15))
#define RCC_DDRITFCR_DDRCORERST		(BIT(16))
#define RCC_DDRITFCR_DPHYAPBRST		(BIT(17))
#define RCC_DDRITFCR_DPHYRST		(BIT(18))
#define RCC_DDRITFCR_DPHYCTLRST		(BIT(19))

struct reg_desc {
	const char *name;
	u16 offset;	/* offset for base address */
	u8 par_offset;	/* offset for parameter array */
};

#define INVALID_OFFSET	0xFF

#define DDRCTL_REG(x, y) \
	{#x,\
	 offsetof(struct stm32mp1_ddrctl, x),\
	 offsetof(struct y, x)}

#define DDRPHY_REG(x, y) \
	{#x,\
	 offsetof(struct stm32mp1_ddrphy, x),\
	 offsetof(struct y, x)}
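/*
 * Each descriptor entry maps one register name to its offset inside the
 * hardware block (struct stm32mp1_ddrctl or stm32mp1_ddrphy) and to its
 * offset inside the matching board parameter struct. For example,
 * DDRCTL_REG(mstr, stm32mp1_ddrctrl_reg) expands to
 * {"mstr", offsetof(struct stm32mp1_ddrctl, mstr),
 *  offsetof(struct stm32mp1_ddrctrl_reg, mstr)}.
 */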

#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
static const struct reg_desc ddr_reg[] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};

#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
static const struct reg_desc ddr_timing[] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};

#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
static const struct reg_desc ddr_map[] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
static const struct reg_desc ddr_perf[] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
};

#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
static const struct reg_desc ddrphy_reg[] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
};

#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
static const struct reg_desc ddrphy_timing[] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};

#define DDRPHY_REG_CAL(x)	DDRPHY_REG(x, stm32mp1_ddrphy_cal)
static const struct reg_desc ddrphy_cal[] = {
	DDRPHY_REG_CAL(dx0dllcr),
	DDRPHY_REG_CAL(dx0dqtr),
	DDRPHY_REG_CAL(dx0dqstr),
	DDRPHY_REG_CAL(dx1dllcr),
	DDRPHY_REG_CAL(dx1dqtr),
	DDRPHY_REG_CAL(dx1dqstr),
	DDRPHY_REG_CAL(dx2dllcr),
	DDRPHY_REG_CAL(dx2dqtr),
	DDRPHY_REG_CAL(dx2dqstr),
	DDRPHY_REG_CAL(dx3dllcr),
	DDRPHY_REG_CAL(dx3dqtr),
	DDRPHY_REG_CAL(dx3dqstr),
};

enum reg_type {
	REG_REG,
	REG_TIMING,
	REG_PERF,
	REG_MAP,
	REGPHY_REG,
	REGPHY_TIMING,
	REGPHY_CAL,
	REG_TYPE_NB
};

enum base_type {
	DDR_BASE,
	DDRPHY_BASE,
	NONE_BASE
};

struct ddr_reg_info {
	const char *name;
	const struct reg_desc *desc;
	u8 size;
	enum base_type base;
};

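/*
 * ddr_registers[] associates each register group with its descriptor
 * table, the number of entries and the base address it belongs to
 * (controller or PHY); set_reg() walks this table to program a whole
 * group from the corresponding parameter structure.
 */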
const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
[REG_REG] = {
	"static", ddr_reg, ARRAY_SIZE(ddr_reg), DDR_BASE},
[REG_TIMING] = {
	"timing", ddr_timing, ARRAY_SIZE(ddr_timing), DDR_BASE},
[REG_PERF] = {
	"perf", ddr_perf, ARRAY_SIZE(ddr_perf), DDR_BASE},
[REG_MAP] = {
	"map", ddr_map, ARRAY_SIZE(ddr_map), DDR_BASE},
[REGPHY_REG] = {
	"static", ddrphy_reg, ARRAY_SIZE(ddrphy_reg), DDRPHY_BASE},
[REGPHY_TIMING] = {
	"timing", ddrphy_timing, ARRAY_SIZE(ddrphy_timing), DDRPHY_BASE},
[REGPHY_CAL] = {
	"cal", ddrphy_cal, ARRAY_SIZE(ddrphy_cal), DDRPHY_BASE},
};

const char *base_name[] = {
	[DDR_BASE] = "ctl",
	[DDRPHY_BASE] = "phy",
};

static u32 get_base_addr(const struct ddr_info *priv, enum base_type base)
{
	if (base == DDRPHY_BASE)
		return (u32)priv->phy;
	else
		return (u32)priv->ctl;
}

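/*
 * Program one register group: for each descriptor, the value located at
 * param + par_offset is written to the register at base + offset, so the
 * caller passes a pointer to the matching parameter struct, e.g.
 * set_reg(priv, REG_TIMING, &config->c_timing).
 */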
static void set_reg(const struct ddr_info *priv,
		    enum reg_type type,
		    const void *param)
{
	unsigned int i;
	unsigned int *ptr, value;
	enum base_type base = ddr_registers[type].base;
	u32 base_addr = get_base_addr(priv, base);
	const struct reg_desc *desc = ddr_registers[type].desc;

	debug("init %s\n", ddr_registers[type].name);
	for (i = 0; i < ddr_registers[type].size; i++) {
		ptr = (unsigned int *)(base_addr + desc[i].offset);
		if (desc[i].par_offset == INVALID_OFFSET) {
			pr_err("invalid parameter offset for %s\n",
			       desc[i].name);
		} else {
			value = *((u32 *)((u32)param +
					       desc[i].par_offset));
			writel(value, ptr);
			debug("[0x%x] %s= 0x%08x\n",
			      (u32)ptr, desc[i].name, value);
		}
	}
}

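/*
 * Poll the PHY general status register until the current sequence is done
 * (PGSR.IDONE) or one of the training/calibration error flags is raised;
 * the final PGSR value is traced for debug.
 */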
static void ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
{
	u32 pgsr;
	int ret;

	ret = readl_poll_timeout(&phy->pgsr, pgsr,
				 pgsr & (DDRPHYC_PGSR_IDONE |
					 DDRPHYC_PGSR_DTERR |
					 DDRPHYC_PGSR_DTIERR |
					 DDRPHYC_PGSR_DFTERR |
					 DDRPHYC_PGSR_RVERR |
					 DDRPHYC_PGSR_RVEIRR),
				 1000000);
	debug("\n[0x%08x] pgsr = 0x%08x ret=%d\n",
	      (u32)&phy->pgsr, pgsr, ret);
}

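/*
 * Launch a PHY initialization sequence: PIR.INIT is added to the requested
 * PIR value, then PGSR.IDONE is polled after the mandatory delay of
 * 10 configuration clocks.
 */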
void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, u32 pir)
{
	pir |= DDRPHYC_PIR_INIT;
	writel(pir, &phy->pir);
	debug("[0x%08x] pir = 0x%08x -> 0x%08x\n",
	      (u32)&phy->pir, pir, readl(&phy->pir));

	/* wait 10 configuration clocks before starting to poll */
	udelay(10);

	/* wait for DRAM init and Gate Training Evaluation to complete */
	ddrphy_idone_wait(phy);
}

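/*
 * Quasi-dynamic registers may only be modified while SWCTL.sw_done is
 * cleared: start_sw_done() opens the update window, the caller programs
 * the registers, then wait_sw_done_ack() sets SWCTL.sw_done again and
 * polls SWSTAT.sw_done_ack until the update is taken into account.
 */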
/* start quasi dynamic register update */
static void start_sw_done(struct stm32mp1_ddrctl *ctl)
{
	clrbits_le32(&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
}

/* wait for completion of the quasi dynamic register update */
static void wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
{
	int ret;
	u32 swstat;

	setbits_le32(&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);

	ret = readl_poll_timeout(&ctl->swstat, swstat,
				 swstat & DDRCTRL_SWSTAT_SW_DONE_ACK,
				 1000000);
	if (ret)
		panic("Timeout initialising DRAM : DDR->swstat = %x\n",
		      swstat);

	debug("[0x%08x] swstat = 0x%08x\n", (u32)&ctl->swstat, swstat);
}

/* wait for the controller to reach the requested operating mode */
static void wait_operating_mode(struct ddr_info *priv, int mode)
{
	u32 stat, val, mask, val2 = 0, mask2 = 0;
	int ret;

	mask = DDRCTRL_STAT_OPERATING_MODE_MASK;
	val = mode;
	/* self-refresh due to software => check also STAT.selfref_type */
	if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
		mask |= DDRCTRL_STAT_SELFREF_TYPE_MASK;
		val |= DDRCTRL_STAT_SELFREF_TYPE_SR;
	} else if (mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) {
		/* normal mode: handle also automatic self refresh */
		mask2 = DDRCTRL_STAT_OPERATING_MODE_MASK |
			DDRCTRL_STAT_SELFREF_TYPE_MASK;
		val2 = DDRCTRL_STAT_OPERATING_MODE_SR |
		       DDRCTRL_STAT_SELFREF_TYPE_ASR;
	}

	ret = readl_poll_timeout(&priv->ctl->stat, stat,
				 ((stat & mask) == val) ||
				 (mask2 && ((stat & mask2) == val2)),
				 1000000);

	if (ret)
		panic("Timeout DRAM : DDR->stat = %x\n", stat);

	debug("[0x%08x] stat = 0x%08x\n", (u32)&priv->ctl->stat, stat);
}

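/*
 * Disable auto-refresh and power-down before DQS training (step 8 of the
 * init sequence below): set RFSHCTL3.dis_auto_refresh, clear
 * PWRCTL.powerdown_en and DFIMISC.dfi_init_complete_en, all within one
 * quasi-dynamic update window.
 */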
void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
{
	start_sw_done(ctl);
	/* quasi-dynamic register update */
	setbits_le32(&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	clrbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	clrbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(ctl);
}

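/*
 * Restore the refresh and power-down settings changed for training: the
 * bits modified by stm32mp1_refresh_disable() are put back according to
 * the original rfshctl3 and pwrctl values supplied by the caller.
 */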
void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
			      u32 rfshctl3, u32 pwrctl)
{
	start_sw_done(ctl);
	if (!(rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH))
		clrbits_le32(&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	if (pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN)
		setbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	setbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(ctl);
}

/* board-specific DDR power initializations. */
__weak int board_ddr_power_init(void)
{
	return 0;
}

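/*
 * Full DDR bring-up: the numbered steps below follow the DWC_ddr_umctl2
 * controller and PUBL PHY initialization flow (resets and clock, controller
 * register programming, PHY init, DQS training, then AXI port enable).
 */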
__maybe_unused
void stm32mp1_ddr_init(struct ddr_info *priv,
		       const struct stm32mp1_ddr_config *config)
{
	u32 pir;
	int ret;

	ret = board_ddr_power_init();

	if (ret)
		panic("ddr power init failed\n");

	debug("name = %s\n", config->info.name);
	debug("speed = %d MHz\n", config->info.speed);
	debug("size  = 0x%x\n", config->info.size);
/*
 * 1. Program the DWC_ddr_umctl2 registers
 * 1.1 RESETS: presetn, core_ddrc_rstn, aresetn
 */
	/* Assert reset on all DDR interface blocks */
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed))
		panic("invalid DRAM clock : %d MHz\n",
		      config->info.speed);

/* 1.3. deassert reset */
	/* de-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/* De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit
	 */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

/* 1.4. wait 4 cycles for synchronization */
	asm(" nop");
	asm(" nop");
	asm(" nop");
	asm(" nop");

/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	clrbits_le32(&priv->ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	debug("[0x%08x] dfimisc = 0x%08x\n",
	      (u32)&priv->ctl->dfimisc, readl(&priv->ctl->dfimisc));

	set_reg(priv, REG_REG, &config->c_reg);
	set_reg(priv, REG_TIMING, &config->c_timing);
	set_reg(priv, REG_MAP, &config->c_map);

	/* skip CTRL init, SDRAM init is done by PHY PUBL */
	clrsetbits_le32(&priv->ctl->init0,
			DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);

	set_reg(priv, REG_PERF, &config->c_perf);

/*  2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

/*  3. start PHY init by accessing relevant PUBL registers
 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
 */
	set_reg(priv, REGPHY_REG, &config->p_reg);
	set_reg(priv, REGPHY_TIMING, &config->p_timing);
	set_reg(priv, REGPHY_CAL, &config->p_cal);

/*  4. Monitor PHY init status by polling PUBL register PGSR.IDONE
 *     Perform DDR PHY DRAM initialization and Gate Training Evaluation
 */
	ddrphy_idone_wait(priv->phy);

/*  5. Indicate to PUBL that controller performs SDRAM initialization
 *     by setting PIR.INIT and PIR.CTLDINIT and poll PGSR.IDONE
 *     DRAM init is done by PHY, INIT0.skip_dram_init = 1
 */
	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if (config->c_reg.mstr & DDRCTRL_MSTR_DDR3)
		pir |= DDRPHYC_PIR_DRAMRST; /* only for DDR3 */

	stm32mp1_ddrphy_init(priv->phy, pir);

/*  6. SET DFIMISC.dfi_init_complete_en to 1 */
	/* Enable quasi-dynamic register programming */
	start_sw_done(priv->ctl);
	setbits_le32(&priv->ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(priv->ctl);

/*  7. Wait for DWC_ddr_umctl2 to move to normal operation mode
 *     by monitoring STAT.operating_mode signal
 */
	/* wait uMCTL2 ready */

	wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	debug("DDR DQS training : ");
/*  8. Disable Auto refresh and power down by setting
 *    - RFSHCTL3.dis_auto_refresh = 1
 *    - PWRCTL.powerdown_en = 0
 *    - DFIMISC.dfi_init_complete_en = 0
 */
	stm32mp1_refresh_disable(priv->ctl);

/*  9. Program PUBL PGCR to enable refresh during training and rank to train
 *     not done => keep the programmed value in PGCR
 */

/* 10. configure PUBL PIR register to specify which training step to run */
	/* warning: RVTRN is not supported by this PUBL */
	stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_QSTRN);

/* 11. monitor PUBL PGSR.IDONE to poll completion of the training sequence */
	ddrphy_idone_wait(priv->phy);

/* 12. set back the registers of step 8 to their original values if desired */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	/* enable uMCTL2 AXI port 0 and 1 */
	setbits_le32(&priv->ctl->pctrl_0, DDRCTRL_PCTRL_N_PORT_EN);
	setbits_le32(&priv->ctl->pctrl_1, DDRCTRL_PCTRL_N_PORT_EN);
}