/*
 * Copyright (C) 2018, STMicroelectronics - All Rights Reserved
 *
 * SPDX-License-Identifier:	GPL-2.0+ OR BSD-3-Clause
 */

#include <common.h>
#include <clk.h>
#include <ram.h>
#include <reset.h>
#include <timer.h>
#include <asm/io.h>
#include <asm/arch/ddr.h>
#include <linux/iopoll.h>
#include "stm32mp1_ddr.h"
#include "stm32mp1_ddr_regs.h"

#define RCC_DDRITFCR		0xD8

#define RCC_DDRITFCR_DDRCAPBRST		(BIT(14))
#define RCC_DDRITFCR_DDRCAXIRST		(BIT(15))
#define RCC_DDRITFCR_DDRCORERST		(BIT(16))
#define RCC_DDRITFCR_DPHYAPBRST		(BIT(17))
#define RCC_DDRITFCR_DPHYRST		(BIT(18))
#define RCC_DDRITFCR_DPHYCTLRST		(BIT(19))

struct reg_desc {
	const char *name;
	u16 offset;	/* offset from the IP base address */
	u8 par_offset;	/* offset in the parameter structure */
};

#define INVALID_OFFSET	0xFF

#define DDRCTL_REG(x, y) \
	{#x,\
	 offsetof(struct stm32mp1_ddrctl, x),\
	 offsetof(struct y, x)}

#define DDRPHY_REG(x, y) \
	{#x,\
	 offsetof(struct stm32mp1_ddrphy, x),\
	 offsetof(struct y, x)}

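/*
 * Illustration (not compiled): DDRCTL_REG(mstr, stm32mp1_ddrctrl_reg)
 * expands to
 *	{"mstr",
 *	 offsetof(struct stm32mp1_ddrctl, mstr),
 *	 offsetof(struct stm32mp1_ddrctrl_reg, mstr)}
 * so each table entry below ties a hardware register offset to the offset
 * of the matching field in the parameter structure passed to set_reg().
 */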
#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
static const struct reg_desc ddr_reg[] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};

#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
static const struct reg_desc ddr_timing[] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};

#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
static const struct reg_desc ddr_map[] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
static const struct reg_desc ddr_perf[] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
};

#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
static const struct reg_desc ddrphy_reg[] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
};

#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
static const struct reg_desc ddrphy_timing[] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};

#define DDRPHY_REG_CAL(x)	DDRPHY_REG(x, stm32mp1_ddrphy_cal)
static const struct reg_desc ddrphy_cal[] = {
	DDRPHY_REG_CAL(dx0dllcr),
	DDRPHY_REG_CAL(dx0dqtr),
	DDRPHY_REG_CAL(dx0dqstr),
	DDRPHY_REG_CAL(dx1dllcr),
	DDRPHY_REG_CAL(dx1dqtr),
	DDRPHY_REG_CAL(dx1dqstr),
	DDRPHY_REG_CAL(dx2dllcr),
	DDRPHY_REG_CAL(dx2dqtr),
	DDRPHY_REG_CAL(dx2dqstr),
	DDRPHY_REG_CAL(dx3dllcr),
	DDRPHY_REG_CAL(dx3dqtr),
	DDRPHY_REG_CAL(dx3dqstr),
};

enum reg_type {
	REG_REG,
	REG_TIMING,
	REG_PERF,
	REG_MAP,
	REGPHY_REG,
	REGPHY_TIMING,
	REGPHY_CAL,
	REG_TYPE_NB
};

enum base_type {
	DDR_BASE,
	DDRPHY_BASE,
	NONE_BASE
};

struct ddr_reg_info {
	const char *name;
	const struct reg_desc *desc;
	u8 size;
	enum base_type base;
};

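/*
 * Register groups handled by set_reg(): each entry associates a group
 * name, its descriptor table, the table size and the base (controller
 * or PHY) the register offsets refer to.
 */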
const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
[REG_REG] = {
	"static", ddr_reg, ARRAY_SIZE(ddr_reg), DDR_BASE},
[REG_TIMING] = {
	"timing", ddr_timing, ARRAY_SIZE(ddr_timing), DDR_BASE},
[REG_PERF] = {
	"perf", ddr_perf, ARRAY_SIZE(ddr_perf), DDR_BASE},
[REG_MAP] = {
	"map", ddr_map, ARRAY_SIZE(ddr_map), DDR_BASE},
[REGPHY_REG] = {
	"static", ddrphy_reg, ARRAY_SIZE(ddrphy_reg), DDRPHY_BASE},
[REGPHY_TIMING] = {
	"timing", ddrphy_timing, ARRAY_SIZE(ddrphy_timing), DDRPHY_BASE},
[REGPHY_CAL] = {
	"cal", ddrphy_cal, ARRAY_SIZE(ddrphy_cal), DDRPHY_BASE},
};

const char *base_name[] = {
	[DDR_BASE] = "ctl",
	[DDRPHY_BASE] = "phy",
};

static u32 get_base_addr(const struct ddr_info *priv, enum base_type base)
{
	if (base == DDRPHY_BASE)
		return (u32)priv->phy;
	else
		return (u32)priv->ctl;
}

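/*
 * Program one register group: for each descriptor of the group, the
 * value is read at 'par_offset' inside the parameter structure and
 * written at 'offset' from the selected controller/PHY base, e.g.
 *	set_reg(priv, REG_TIMING, &config->c_timing);
 * as done in stm32mp1_ddr_init() below.
 */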
static void set_reg(const struct ddr_info *priv,
		    enum reg_type type,
		    const void *param)
{
	unsigned int i;
	unsigned int *ptr, value;
	enum base_type base = ddr_registers[type].base;
	u32 base_addr = get_base_addr(priv, base);
	const struct reg_desc *desc = ddr_registers[type].desc;

	debug("init %s\n", ddr_registers[type].name);
	for (i = 0; i < ddr_registers[type].size; i++) {
		ptr = (unsigned int *)(base_addr + desc[i].offset);
		if (desc[i].par_offset == INVALID_OFFSET) {
			pr_err("invalid parameter offset for %s\n",
			       desc[i].name);
		} else {
			value = *((u32 *)((u32)param +
					       desc[i].par_offset));
			writel(value, ptr);
			debug("[0x%x] %s= 0x%08x\n",
			      (u32)ptr, desc[i].name, value);
		}
	}
}

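/*
 * Poll the PHY general status register until initialization is done
 * (IDONE) or one of the error flags is raised; the raw PGSR value and
 * the poll status are only reported through the debug trace.
 */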
static void ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
{
	u32 pgsr;
	int ret;

	ret = readl_poll_timeout(&phy->pgsr, pgsr,
				 pgsr & (DDRPHYC_PGSR_IDONE |
					 DDRPHYC_PGSR_DTERR |
					 DDRPHYC_PGSR_DTIERR |
					 DDRPHYC_PGSR_DFTERR |
					 DDRPHYC_PGSR_RVERR |
					 DDRPHYC_PGSR_RVEIRR),
				 1000000);
	debug("\n[0x%08x] pgsr = 0x%08x ret=%d\n",
	      (u32)&phy->pgsr, pgsr, ret);
}

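/*
 * Launch a PHY (PUBL) initialization sequence: the requested steps are
 * passed in 'pir', the INIT trigger bit is added here and completion is
 * then monitored through PGSR.IDONE.
 */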
void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, u32 pir)
{
	pir |= DDRPHYC_PIR_INIT;
	writel(pir, &phy->pir);
	debug("[0x%08x] pir = 0x%08x -> 0x%08x\n",
	      (u32)&phy->pir, pir, readl(&phy->pir));

	/* need to wait 10 configuration clock cycles before polling */
	udelay(10);

	/* wait for DRAM initialization and gate training evaluation */
	ddrphy_idone_wait(phy);
}

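/*
 * Quasi-dynamic controller registers are updated with SWCTL.sw_done
 * cleared; the sequence used throughout this driver is:
 *	start_sw_done(ctl);
 *	... update the quasi-dynamic registers ...
 *	wait_sw_done_ack(ctl);
 */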
/* start quasi dynamic register update */
static void start_sw_done(struct stm32mp1_ddrctl *ctl)
{
	clrbits_le32(&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
}

/* wait quasi dynamic register update */
static void wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
{
	int ret;
	u32 swstat;

	setbits_le32(&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);

	ret = readl_poll_timeout(&ctl->swstat, swstat,
				 swstat & DDRCTRL_SWSTAT_SW_DONE_ACK,
				 1000000);
	if (ret)
		panic("Timeout initialising DRAM : DDR->swstat = %x\n",
		      swstat);

	debug("[0x%08x] swstat = 0x%08x\n", (u32)&ctl->swstat, swstat);
}

/* wait for the controller to reach the requested operating mode */
static void wait_operating_mode(struct ddr_info *priv, int mode)
{
	u32 stat, val, mask, val2 = 0, mask2 = 0;
	int ret;

	mask = DDRCTRL_STAT_OPERATING_MODE_MASK;
	val = mode;
	/* self-refresh due to software => check also STAT.selfref_type */
	if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
		mask |= DDRCTRL_STAT_SELFREF_TYPE_MASK;
		val |= DDRCTRL_STAT_SELFREF_TYPE_SR;
	} else if (mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) {
		/* normal mode: handle also automatic self refresh */
		mask2 = DDRCTRL_STAT_OPERATING_MODE_MASK |
			DDRCTRL_STAT_SELFREF_TYPE_MASK;
		val2 = DDRCTRL_STAT_OPERATING_MODE_SR |
		       DDRCTRL_STAT_SELFREF_TYPE_ASR;
	}

	ret = readl_poll_timeout(&priv->ctl->stat, stat,
				 ((stat & mask) == val) ||
				 (mask2 && ((stat & mask2) == val2)),
				 1000000);

	if (ret)
		panic("Timeout DRAM : DDR->stat = %x\n", stat);

	debug("[0x%08x] stat = 0x%08x\n", (u32)&priv->ctl->stat, stat);
}

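/*
 * Disable automatic refresh and power-down before DQS training; the
 * original RFSHCTL3/PWRCTL settings are restored afterwards by
 * stm32mp1_refresh_restore() (see steps 8 and 12 of stm32mp1_ddr_init()).
 */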
void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
{
	start_sw_done(ctl);
	/* quasi-dynamic register update */
	setbits_le32(&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	clrbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	clrbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(ctl);
}

void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
			      u32 rfshctl3, u32 pwrctl)
{
	start_sw_done(ctl);
	if (!(rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH))
		clrbits_le32(&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	if (pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN)
		setbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	setbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(ctl);
}

/* board-specific DDR power initialization */
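/*
 * A board can override this weak hook to power up its DDR supplies
 * before the controller is initialized, e.g. (sketch only; the
 * PMIC/regulator handling is board-specific and not part of this
 * driver):
 *	int board_ddr_power_init(void)
 *	{
 *		... enable the VDD_DDR supply and wait for it to settle ...
 *		return 0;
 *	}
 */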
__weak int board_ddr_power_init(void)
{
	return 0;
}

__maybe_unused
void stm32mp1_ddr_init(struct ddr_info *priv,
		       const struct stm32mp1_ddr_config *config)
{
	u32 pir;
	int ret;

	ret = board_ddr_power_init();

	if (ret)
		panic("ddr power init failed\n");

	debug("name = %s\n", config->info.name);
	debug("speed = %d MHz\n", config->info.speed);
	debug("size  = 0x%x\n", config->info.size);
/*
 * 1. Program the DWC_ddr_umctl2 registers
 * 1.1 RESETS: presetn, core_ddrc_rstn, aresetn
 */
	/* Assert reset of all DDR blocks */
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed))
		panic("invalid DRAM clock : %d MHz\n",
		      config->info.speed);

/* 1.3. deassert reset */
	/* de-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/* De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit
	 */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

/* 1.4. wait 4 cycles for synchronization */
	asm(" nop");
	asm(" nop");
	asm(" nop");
	asm(" nop");

/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	clrbits_le32(&priv->ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	debug("[0x%08x] dfimisc = 0x%08x\n",
	      (u32)&priv->ctl->dfimisc, readl(&priv->ctl->dfimisc));

	set_reg(priv, REG_REG, &config->c_reg);
	set_reg(priv, REG_TIMING, &config->c_timing);
	set_reg(priv, REG_MAP, &config->c_map);

	/* skip CTRL init, SDRAM init is done by PHY PUBL */
	clrsetbits_le32(&priv->ctl->init0,
			DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);

	set_reg(priv, REG_PERF, &config->c_perf);

/*  2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

/*  3. start PHY init by accessing relevant PUBL registers
 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
 */
	set_reg(priv, REGPHY_REG, &config->p_reg);
	set_reg(priv, REGPHY_TIMING, &config->p_timing);
	set_reg(priv, REGPHY_CAL, &config->p_cal);

/*  4. Monitor PHY init status by polling PUBL register PGSR.IDONE
 *     Perform DDR PHY DRAM initialization and Gate Training Evaluation
 */
	ddrphy_idone_wait(priv->phy);

/*  5. Indicate to PUBL that controller performs SDRAM initialization
 *     by setting PIR.INIT and PIR.CTLDINIT and poll PGSR.IDONE
 *     DRAM init is done by PHY, INIT0.skip_dram_init = 1
 */
	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if (config->c_reg.mstr & DDRCTRL_MSTR_DDR3)
		pir |= DDRPHYC_PIR_DRAMRST; /* only for DDR3 */

	stm32mp1_ddrphy_init(priv->phy, pir);

/*  6. SET DFIMISC.dfi_init_complete_en to 1 */
	/* Enable quasi-dynamic register programming */
	start_sw_done(priv->ctl);
	setbits_le32(&priv->ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(priv->ctl);

/*  7. Wait for DWC_ddr_umctl2 to move to normal operation mode
 *     by monitoring STAT.operating_mode signal
 */
	/* wait uMCTL2 ready */
	wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	debug("DDR DQS training : ");
/*  8. Disable Auto refresh and power down by setting
 *    - RFSHCTL3.dis_auto_refresh = 1
 *    - PWRCTL.powerdown_en = 0
 *    - DFIMISC.dfi_init_complete_en = 0
 */
	stm32mp1_refresh_disable(priv->ctl);

/*  9. Program PUBL PGCR to enable refresh during training and rank to train
 *     not done => keep the programmed value in PGCR
 */

/* 10. configure PUBL PIR register to specify which training step to run */
	/* warning : RVTRN is not supported by this PUBL */
	stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_QSTRN);

/* 11. monitor PUBL PGSR.IDONE to poll completion of training sequence */
	ddrphy_idone_wait(priv->phy);

/* 12. set back registers in step 8 to the original values if desired */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	/* enable uMCTL2 AXI port 0 and 1 */
	setbits_le32(&priv->ctl->pctrl_0, DDRCTRL_PCTRL_N_PORT_EN);
	setbits_le32(&priv->ctl->pctrl_1, DDRCTRL_PCTRL_N_PORT_EN);
}