1 /*
2  * Copyright (C) Marvell International Ltd. and its affiliates
3  *
4  * SPDX-License-Identifier:	GPL-2.0
5  */
6 
7 #include <common.h>
8 #include <i2c.h>
9 #include <spl.h>
10 #include <asm/io.h>
11 #include <asm/arch/cpu.h>
12 #include <asm/arch/soc.h>
13 
14 #include "ddr3_hw_training.h"
15 
16 /*
17  * Debug
18  */
19 #define DEBUG_DFS_C(s, d, l) \
20 	do { DEBUG_DFS_S(s); DEBUG_DFS_D(d, l); DEBUG_DFS_S("\n"); } while (0)
21 #define DEBUG_DFS_FULL_C(s, d, l) \
22 	do { DEBUG_DFS_FULL_S(s); DEBUG_DFS_FULL_D(d, l); DEBUG_DFS_FULL_S("\n"); } while (0)
23 
24 #ifdef MV_DEBUG_DFS
25 #define DEBUG_DFS_S(s)			puts(s)
26 #define DEBUG_DFS_D(d, l)		printf("%x", d)
27 #else
28 #define DEBUG_DFS_S(s)
29 #define DEBUG_DFS_D(d, l)
30 #endif
31 
32 #ifdef MV_DEBUG_DFS_FULL
33 #define DEBUG_DFS_FULL_S(s)		puts(s)
34 #define DEBUG_DFS_FULL_D(d, l)		printf("%x", d)
35 #else
36 #define DEBUG_DFS_FULL_S(s)
37 #define DEBUG_DFS_FULL_D(d, l)
38 #endif
39 
40 #if defined(MV88F672X)
41 extern u8 div_ratio[CLK_VCO][CLK_DDR];
42 extern void get_target_freq(u32 freq_mode, u32 *ddr_freq, u32 *hclk_ps);
43 #else
44 extern u16 odt_dynamic[ODT_OPT][MAX_CS];
45 extern u8 div_ratio1to1[CLK_CPU][CLK_DDR];
46 extern u8 div_ratio2to1[CLK_CPU][CLK_DDR];
47 #endif
48 extern u16 odt_static[ODT_OPT][MAX_CS];
49 
50 extern u32 cpu_fab_clk_to_hclk[FAB_OPT][CLK_CPU];
51 
52 extern u32 ddr3_get_vco_freq(void);
53 
54 u32 ddr3_get_freq_parameter(u32 target_freq, int ratio_2to1);
55 
56 #ifdef MV_DEBUG_DFS
57 static inline void dfs_reg_write(u32 addr, u32 val)
58 {
59 	printf("\n write reg 0x%08x = 0x%08x", addr, val);
60 	writel(val, INTER_REGS_BASE + addr);
61 }
62 #else
63 static inline void dfs_reg_write(u32 addr, u32 val)
64 {
65 	writel(val, INTER_REGS_BASE + addr);
66 }
67 #endif
68 
69 static void wait_refresh_op_complete(void)
70 {
71 	u32 reg;
72 
73 	/* Poll - Wait for Refresh operation completion */
74 	do {
75 		reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
76 			REG_SDRAM_OPERATION_CMD_RFRS_DONE;
77 	} while (reg);		/* Wait for '0' */
78 }
79 
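/*
 * Note: this helper polls the refresh-done indication of the SDRAM Operation
 * register (0x1418); the DFS flows below also reuse it to wait for completion
 * of the MRS commands they issue through that same register.
 */
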
80 /*
81  * Name:     ddr3_get_freq_parameter
82  * Desc:     Finds CPU/DDR frequency ratio according to Sample@reset and table.
83  * Args:     target_freq - target frequency
84  * Notes:
85  * Returns:  freq_par - the ratio parameter
86  */
87 u32 ddr3_get_freq_parameter(u32 target_freq, int ratio_2to1)
88 {
89 	u32 ui_vco_freq, freq_par;
90 
91 	ui_vco_freq = ddr3_get_vco_freq();
92 
93 #if defined(MV88F672X)
94 	freq_par = div_ratio[ui_vco_freq][target_freq];
95 #else
96 	/* Find the ratio between PLL frequency and ddr-clk */
97 	if (ratio_2to1)
98 		freq_par = div_ratio2to1[ui_vco_freq][target_freq];
99 	else
100 		freq_par = div_ratio1to1[ui_vco_freq][target_freq];
101 #endif
102 
103 	return freq_par;
104 }
105 
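/*
 * Illustrative use (a sketch only): before reprogramming the clock dividers,
 * the DFS flows below translate the target frequency index into a divider
 * value, e.g.
 *
 *	freq_par = ddr3_get_freq_parameter(DDR_400, 0);
 *
 * where DDR_400 is assumed to be one of the CLK_DDR frequency indices used by
 * the div_ratio tables and '0' selects the 1:1 (Dunit:PHY) ratio table, where
 * two tables exist.
 */
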
106 /*
107  * Name:     ddr3_dfs_high_2_low
108  * Desc:     Switch the Dunit and DRAM down to the low (100MHz) DFS frequency
109  * Args:     freq - target frequency
110  * Notes:
111  * Returns:  MV_OK - success, MV_FAIL - fail
112  */
113 int ddr3_dfs_high_2_low(u32 freq, MV_DRAM_INFO *dram_info)
114 {
115 #if defined(MV88F78X60) || defined(MV88F672X)
116 	/* This Flow is relevant for ArmadaXP A0 and AvantaLP (MV88F672X) */
117 	u32 reg, freq_par, tmp;
118 	u32 cs = 0;
119 
120 	DEBUG_DFS_C("DDR3 - DFS - High To Low - Starting DFS procedure to Frequency - ",
121 		    freq, 1);
122 
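	/*
	 * Outline of the procedure below: block new external transactions,
	 * move the DRAM into self refresh, reprogram the clock dividers for
	 * the low (training) frequency, switch the PHY to a 1:1 ratio and to
	 * DLL-off CL/CWL values, exit self refresh, issue a refresh, and
	 * finally update MR0/MR2 for every enabled chip select.
	 */
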
123 	/* target frequency - 100MHz */
124 	freq_par = ddr3_get_freq_parameter(freq, 0);
125 
126 #if defined(MV88F672X)
127 	u32 hclk;
128 	u32 cpu_freq = ddr3_get_cpu_freq();
129 	get_target_freq(cpu_freq, &tmp, &hclk);
130 #endif
131 
132 	/* Configure - DRAM DLL final state after DFS is complete - Disable */
133 	reg = reg_read(REG_DFS_ADDR);
134 	/* [0] - DfsDllNextState - Disable */
135 	reg |= (1 << REG_DFS_DLLNEXTSTATE_OFFS);
136 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
137 
138 	/*
139 	 * Configure - XBAR Retry response during Block to enable internal
140 	 * access - Disable
141 	 */
142 	reg = reg_read(REG_METAL_MASK_ADDR);
143 	/* [0] - RetryMask - Disable */
144 	reg &= ~(1 << REG_METAL_MASK_RETRY_OFFS);
145 	/* 0x14B0 - Dunit MMask Register */
146 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
147 
148 	/* Configure - Block new external transactions - Enable */
149 	reg = reg_read(REG_DFS_ADDR);
150 	reg |= (1 << REG_DFS_BLOCK_OFFS);	/* [1] - DfsBlock - Enable  */
151 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
152 
153 	/* Registered DIMM support */
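	/*
	 * For registered DIMMs the Dunit issues a Control Word Access (CWA)
	 * command, which programs the DIMM's register chip rather than the
	 * DRAM devices; the RC index/data pair written below disables the
	 * register's CKE power-down mode before entering self refresh.
	 */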
154 	if (dram_info->reg_dimm) {
155 		/*
156 		 * Configure - Disable Register DIMM CKE Power
157 		 * Down mode - CWA_RC
158 		 */
159 		reg = (0x9 & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
160 			REG_SDRAM_OPERATION_CWA_RC_OFFS;
161 		/*
162 		 * Configure - Disable Register DIMM CKE Power
163 		 * Down mode - CWA_DATA
164 		 */
165 		reg |= ((0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
166 			REG_SDRAM_OPERATION_CWA_DATA_OFFS);
167 
168 		/*
169 		 * Configure - Disable Register DIMM CKE Power
170 		 * Down mode - Set Delay - tMRD
171 		 */
172 		reg |= (0 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
173 
174 		/* Configure - Issue CWA command with the above parameters */
175 		reg |= (REG_SDRAM_OPERATION_CMD_CWA &
176 			~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
177 
178 		/* 0x1418 - SDRAM Operation Register */
179 		dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
180 
181 		/* Poll - Wait for CWA operation completion */
182 		do {
183 			reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
184 			       (REG_SDRAM_OPERATION_CMD_MASK);
185 		} while (reg);
186 
187 		/* Configure - Disable outputs floating during Self Refresh */
188 		reg = reg_read(REG_REGISTERED_DRAM_CTRL_ADDR);
189 		/* [15] - SRFloatEn - Disable */
190 		reg &= ~(1 << REG_REGISTERED_DRAM_CTRL_SR_FLOAT_OFFS);
191 		/* 0x16D0 - DDR3 Registered DRAM Control */
192 		dfs_reg_write(REG_REGISTERED_DRAM_CTRL_ADDR, reg);
193 	}
194 
195 	/* Optional - Configure - DDR3_Rtt_nom_CS# */
196 	for (cs = 0; cs < MAX_CS; cs++) {
197 		if (dram_info->cs_ena & (1 << cs)) {
198 			reg = reg_read(REG_DDR3_MR1_CS_ADDR +
199 				       (cs << MR_CS_ADDR_OFFS));
200 			reg &= REG_DDR3_MR1_RTT_MASK;
201 			dfs_reg_write(REG_DDR3_MR1_CS_ADDR +
202 				      (cs << MR_CS_ADDR_OFFS), reg);
203 		}
204 	}
205 
206 	/* Configure - Move DRAM into Self Refresh */
207 	reg = reg_read(REG_DFS_ADDR);
208 	reg |= (1 << REG_DFS_SR_OFFS);	/* [2] - DfsSR - Enable */
209 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
210 
211 	/* Poll - Wait for Self Refresh indication */
212 	do {
213 		reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
214 	} while (reg == 0x0);	/* 0x1528 [3] - DfsAtSR - Wait for '1' */
215 
216 	/* Start of clock change procedure (PLL) */
217 #if defined(MV88F672X)
218 	/* avantaLP */
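	/*
	 * Divider reload handshake on this SoC: mask the divider resets,
	 * enable smooth reload and relax mode, program the new PLL-to-DDR
	 * divide ratio, then pulse the reload-ratio trigger and let the
	 * dividers settle.
	 */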
219 	/* Configure    cpupll_clkdiv_reset_mask */
220 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
221 	reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL0_MASK;
222 	/* 0xE8264[7:0]   0xff CPU Clock Dividers Reset mask */
223 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, (reg + 0xFF));
224 
225 	/* Configure    cpu_clkdiv_reload_smooth    */
226 	reg = reg_read(CPU_PLL_CNTRL0);
227 	reg &= CPU_PLL_CNTRL0_RELOAD_SMOOTH_MASK;
228 	/* 0xE8260   [15:8]  0x2 CPU Clock Dividers Reload Smooth enable */
229 	dfs_reg_write(CPU_PLL_CNTRL0,
230 		      (reg + (2 << CPU_PLL_CNTRL0_RELOAD_SMOOTH_OFFS)));
231 
232 	/* Configure    cpupll_clkdiv_relax_en */
233 	reg = reg_read(CPU_PLL_CNTRL0);
234 	reg &= CPU_PLL_CNTRL0_RELAX_EN_MASK;
235 	/* 0xE8260 [31:24] 0x2 Relax Enable */
236 	dfs_reg_write(CPU_PLL_CNTRL0,
237 		      (reg + (2 << CPU_PLL_CNTRL0_RELAX_EN_OFFS)));
238 
239 	/* Configure    cpupll_clkdiv_ddr_clk_ratio */
240 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL1);
241 	/*
242 	 * 0xE8268  [13:8]  N   Set Training clock:
243 	 * APLL Out Clock (VCO freq) / N = 100 MHz
244 	 */
245 	reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL1_MASK;
246 	reg |= (freq_par << 8);	/* full Integer ratio from PLL-out to ddr-clk */
247 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL1, reg);
248 
249 	/* Configure    cpupll_clkdiv_reload_ratio  */
250 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
251 	reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
252 	/* 0xE8264 [8]=0x1 CPU Clock Dividers Reload Ratio trigger set */
253 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0,
254 		      (reg + (1 << CPU_PLL_CLOCK_RELOAD_RATIO_OFFS)));
255 
256 	udelay(1);
257 
258 	/* Configure    cpupll_clkdiv_reload_ratio  */
259 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
260 	reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
261 	/* 0xE8264 [8]=0x0 CPU Clock Dividers Reload Ratio trigger clear */
262 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, reg);
263 
264 	udelay(5);
265 
266 #else
267 	/*
268 	 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
269 	 * and in the same chance, block reassertions of reset [15:8] and
270 	 * force reserved bits[7:0].
271 	 */
272 	reg = 0x0000FDFF;
273 	/* 0x18700 - CPU Div CLK control 0 */
274 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
275 
276 	/*
277 	 * RelaX whenever reset is asserted to that channel
278 	 * (good for any case)
279 	 */
280 	reg = 0x0000FF00;
281 	/* 0x18704 - CPU Div CLK control 1 */
282 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
283 
284 	reg = reg_read(REG_CPU_DIV_CLK_CTRL_2_ADDR) &
285 		REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
286 
287 	/* full Integer ratio from PLL-out to ddr-clk */
288 	reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
289 	/* 0x1870C - CPU Div CLK control 3 register */
290 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_2_ADDR, reg);
291 
292 	/*
293 	 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
294 	 * All the rest are kept as is (forced, but could be read-modify-write).
295 	 * This is done now by RMW above.
296 	 */
297 
298 	/* Clock is not shut off gracefully - keep it running */
299 	reg = 0x000FFF02;
300 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);
301 
302 	/* Wait before replacing the clock on the DDR Phy Channel. */
303 	udelay(1);
304 
305 	/*
306 	 * This is for triggering the frequency update. Bit[24] is the
307 	 * central control
308 	 * bits [23:16] == which channels to change ==2 ==>
309 	 *                 only DDR Phy (smooth transition)
310 	 * bits [15:8] == mask reset reassertion due to clock modification
311 	 *                to these channels.
312 	 * bits [7:0] == not in use
313 	 */
314 	reg = 0x0102FDFF;
315 	/* 0x18700 - CPU Div CLK control 0 register */
316 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
317 
318 	udelay(1);		/* Wait 1usec */
319 
320 	/*
321 	 * Poll Div CLK status 0 register - indication that the clocks
322 	 * are active - 0x18718 [8]
323 	 */
324 	do {
325 		reg = (reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR)) &
326 			(1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
327 	} while (reg == 0);
328 
329 	/*
330 	 * Clean the CTRL0, to be ready for next resets and next requests
331 	 * of ratio modifications.
332 	 */
333 	reg = 0x000000FF;
334 	/* 0x18700 - CPU Div CLK control 0 register */
335 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
336 
337 	udelay(5);
338 #endif
339 	/* End of clock change procedure (PLL) */
340 
341 	/* Configure - Select normal clock for the DDR PHY - Enable */
342 	reg = reg_read(REG_DRAM_INIT_CTRL_STATUS_ADDR);
343 	/* [16] - ddr_phy_trn_clk_sel - Enable  */
344 	reg |= (1 << REG_DRAM_INIT_CTRL_TRN_CLK_OFFS);
345 	/* 0x18488 - DRAM Init control status register */
346 	dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
347 
348 	/* Configure - Set Correct Ratio - 1:1 */
349 	/* [15] - Phy2UnitClkRatio = 0 - Set 1:1 Ratio between Dunit and Phy */
350 
351 	reg = reg_read(REG_DDR_IO_ADDR) & ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
352 	dfs_reg_write(REG_DDR_IO_ADDR, reg);	/* 0x1524 - DDR IO Register */
353 
354 	/* Configure - 2T Mode - Restore original configuration */
355 	reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
356 	/* [3:4] 2T - 1T Mode - low freq */
357 	reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
358 	/* 0x1404 - DDR Controller Control Low Register */
359 	dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
360 
361 	/* Configure - Restore CL and CWL - MRS Commands */
362 	reg = reg_read(REG_DFS_ADDR);
363 	reg &= ~(REG_DFS_CL_NEXT_STATE_MASK << REG_DFS_CL_NEXT_STATE_OFFS);
364 	reg &= ~(REG_DFS_CWL_NEXT_STATE_MASK << REG_DFS_CWL_NEXT_STATE_OFFS);
365 	/* [8] - DfsCLNextState - MRS CL=6 after DFS (due to DLL-off mode) */
366 	reg |= (0x4 << REG_DFS_CL_NEXT_STATE_OFFS);
367 	/* [12] - DfsCWLNextState - MRS CWL=6 after DFS (due to DLL-off mode) */
368 	reg |= (0x1 << REG_DFS_CWL_NEXT_STATE_OFFS);
369 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
370 
371 	/* Poll - Wait for APLL + ADLLs lock on new frequency */
372 	do {
373 		reg = (reg_read(REG_PHY_LOCK_STATUS_ADDR)) &
374 			REG_PHY_LOCK_APLL_ADLL_STATUS_MASK;
375 		/* 0x1674 [10:0] - Phy lock status Register */
376 	} while (reg != REG_PHY_LOCK_APLL_ADLL_STATUS_MASK);
377 
378 	/* Configure - Reset the PHY Read FIFO and Write channels - Set Reset */
379 	reg = (reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK);
380 	/* [30:29] = 0 - Data Pup R/W path reset */
381 	/* 0x1400 - SDRAM Configuration register */
382 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
383 
384 	/*
385 	 * Configure - DRAM Data PHY Read [30], Write [29] path
386 	 * reset - Release Reset
387 	 */
388 	reg = (reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK);
389 	/* [30:29] = '11' - Data Pup R/W path reset */
390 	/* 0x1400 - SDRAM Configuration register */
391 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
392 
393 	/* Registered DIMM support */
394 	if (dram_info->reg_dimm) {
395 		/*
396 		 * Configure - Change register DRAM operating speed
397 		 * (below 400MHz) - CWA_RC
398 		 */
399 		reg = (0xA & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
400 			REG_SDRAM_OPERATION_CWA_RC_OFFS;
401 
402 		/*
403 		 * Configure - Change register DRAM operating speed
404 		 * (below 400MHz) - CWA_DATA
405 		 */
406 		reg |= ((0x0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
407 			REG_SDRAM_OPERATION_CWA_DATA_OFFS);
408 
409 		/* Configure - Set Delay - tSTAB */
410 		reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
411 
412 		/* Configure - Issue CWA command with the above parameters */
413 		reg |= (REG_SDRAM_OPERATION_CMD_CWA &
414 			~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
415 
416 		/* 0x1418 - SDRAM Operation Register */
417 		dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
418 
419 		/* Poll - Wait for CWA operation completion */
420 		do {
421 			reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
422 				(REG_SDRAM_OPERATION_CMD_MASK);
423 		} while (reg);
424 	}
425 
426 	/* Configure - Exit Self Refresh */
427 	/* [2] - DfsSR  */
428 	reg = (reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS));
429 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
430 
431 	/*
432 	 * Poll - DFS Register - 0x1528 [3] - DfsAtSR - All DRAM devices
433 	 * on all ranks are NOT in self refresh mode
434 	 */
435 	do {
436 		reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
437 	} while (reg);		/* Wait for '0' */
438 
439 	/* Configure - Issue Refresh command */
440 	/* [3-0] = 0x2 - Refresh Command, [11-8] - enabled Cs */
441 	reg = REG_SDRAM_OPERATION_CMD_RFRS;
442 	for (cs = 0; cs < MAX_CS; cs++) {
443 		if (dram_info->cs_ena & (1 << cs))
444 			reg &= ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
445 	}
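
	/*
	 * Note: the CS field ([11:8]) of the SDRAM Operation register is
	 * active low - a chip select is targeted by clearing its
	 * (REG_SDRAM_OPERATION_CS_OFFS + cs) bit, as done above.
	 */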
446 
447 	/* 0x1418 - SDRAM Operation Register */
448 	dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
449 
450 	/* Poll - Wait for Refresh operation completion */
451 	wait_refresh_op_complete();
452 
453 	/* Configure - Block new external transactions - Disable */
454 	reg = reg_read(REG_DFS_ADDR);
455 	reg &= ~(1 << REG_DFS_BLOCK_OFFS);	/* [1] - DfsBlock - Disable  */
456 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
457 
458 	/*
459 	 * Configure -  XBAR Retry response during Block to enable
460 	 * internal access - Enable
461 	 */
462 	reg = reg_read(REG_METAL_MASK_ADDR);
463 	/* [0] - RetryMask - Enable */
464 	reg |= (1 << REG_METAL_MASK_RETRY_OFFS);
465 	/* 0x14B0 - Dunit MMask Register */
466 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
467 
468 	for (cs = 0; cs < MAX_CS; cs++) {
469 		if (dram_info->cs_ena & (1 << cs)) {
470 			/* Configure - Set CL */
471 			reg = reg_read(REG_DDR3_MR0_CS_ADDR +
472 				       (cs << MR_CS_ADDR_OFFS)) &
473 				~REG_DDR3_MR0_CL_MASK;
474 			tmp = 0x4;	/* CL=6 - 0x4 */
475 			reg |= ((tmp & 0x1) << REG_DDR3_MR0_CL_OFFS);
476 			reg |= ((tmp & 0xE) << REG_DDR3_MR0_CL_HIGH_OFFS);
477 			dfs_reg_write(REG_DDR3_MR0_CS_ADDR +
478 				      (cs << MR_CS_ADDR_OFFS), reg);
479 
480 			/* Configure - Set CWL */
481 			reg = reg_read(REG_DDR3_MR2_CS_ADDR +
482 				       (cs << MR_CS_ADDR_OFFS))
483 				& ~(REG_DDR3_MR2_CWL_MASK << REG_DDR3_MR2_CWL_OFFS);
484 			/* CWL=6 - 0x1 */
485 			reg |= ((0x1) << REG_DDR3_MR2_CWL_OFFS);
486 			dfs_reg_write(REG_DDR3_MR2_CS_ADDR +
487 				      (cs << MR_CS_ADDR_OFFS), reg);
488 		}
489 	}
490 
491 	DEBUG_DFS_C("DDR3 - DFS - High To Low - Ended successfully - new Frequency - ",
492 		    freq, 1);
493 
494 	return MV_OK;
495 #else
496 	/* This Flow is relevant for Armada370 A0 and ArmadaXP Z1 */
497 
498 	u32 reg, freq_par;
499 	u32 cs = 0;
500 
501 	DEBUG_DFS_C("DDR3 - DFS - High To Low - Starting DFS procedure to Frequency - ",
502 		    freq, 1);
503 
504 	/* target frequency - 100MHz */
505 	freq_par = ddr3_get_freq_parameter(freq, 0);
506 
507 	reg = 0x0000FF00;
508 	/* 0x18704 - CPU Div CLK control 1 */
509 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
510 
511 	/* 0x1600 - ODPG Control register */
512 	reg = reg_read(REG_ODPG_CNTRL_ADDR);
513 	/* [21] = 1 - auto refresh disable */
514 	reg |= (1 << REG_ODPG_CNTRL_OFFS);
515 	dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);
516 
517 	/* 0x1670 - PHY lock mask register */
518 	reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
519 	reg &= REG_PHY_LOCK_MASK_MASK;	/* [11:0] = 0 */
520 	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
521 
522 	reg = reg_read(REG_DFS_ADDR);	/* 0x1528 - DFS register */
523 
524 	/* Disable reconfig */
525 	reg &= ~0x10;	/* [4] = 0 - do not reconfig MR registers after DFS */
526 	reg |= 0x1;	/* [0] - DRAM DLL disabled after DFS */
527 
528 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
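
	/*
	 * Note: since the automatic MR reconfig is disabled above, CL and CWL
	 * (MR0/MR2) are rewritten manually for every enabled chip select
	 * after the clock switch, further below in this flow.
	 */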
529 
530 	reg = reg_read(REG_METAL_MASK_ADDR) & ~(1 << 0); /* [0] - disable */
531 	/* 0x14B0 - Dunit MMask Register */
532 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
533 
534 	/* [1] - DFS Block enable  */
535 	reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_BLOCK_OFFS);
536 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
537 
538 	/* [2] - DFS Self refresh enable  */
539 	reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_SR_OFFS);
540 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
541 
542 	/*
543 	 * Poll DFS Register - 0x1528 [3] - DfsAtSR -
544 	 * All DRAM devices on all ranks are in self refresh mode -
545 	 * DFS can be executed afterwards
546 	 */
547 	do {
548 		reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
549 	} while (reg == 0x0);	/* Wait for '1' */
550 
551 	/* Disable ODT on DLL-off mode */
552 	dfs_reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR,
553 		      REG_SDRAM_ODT_CTRL_HIGH_OVRD_MASK);
554 
555 	/* [11:0] = 0 */
556 	reg = (reg_read(REG_PHY_LOCK_MASK_ADDR) & REG_PHY_LOCK_MASK_MASK);
557 	/* 0x1670 - PHY lock mask register */
558 	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
559 
560 	/* Add delay between entering SR and start ratio modification */
561 	udelay(1);
562 
563 	/*
564 	 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
565 	 * and in the same chance, block reassertions of reset [15:8] and
566 	 * force reserved bits[7:0].
567 	 */
568 	reg = 0x0000FDFF;
569 	/* 0x18700 - CPU Div CLK control 0 */
570 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
571 
572 	/*
573 	 * RelaX whenever reset is asserted to that channel (good for any case)
574 	 */
575 	reg = 0x0000FF00;
576 	/* 0x18704 - CPU Div CLK control 1 */
577 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
578 
579 	reg = reg_read(REG_CPU_DIV_CLK_CTRL_3_ADDR) &
580 		REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
581 	/* Full Integer ratio from PLL-out to ddr-clk */
582 	reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
583 	/* 0x1870C - CPU Div CLK control 3 register */
584 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_3_ADDR, reg);
585 
586 	/*
587 	 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
588 	 * All the rest are kept as is (forced, but could be read-modify-write).
589 	 * This is done now by RMW above.
590 	 */
591 
592 	/* Clock is not shut off gracefully - keep it running */
593 	reg = 0x000FFF02;
594 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);
595 
596 	/* Wait before replacing the clock on the DDR Phy Channel. */
597 	udelay(1);
598 
599 	/*
600 	 * This is for triggering the frequency update. Bit[24] is the
601 	 * central control
602 	 * bits [23:16] == which channels to change ==2 ==> only DDR Phy
603 	 *                 (smooth transition)
604 	 * bits [15:8] == mask reset reassertion due to clock modification
605 	 *                to these channels.
606 	 * bits [7:0] == not in use
607 	 */
608 	reg = 0x0102FDFF;
609 	/* 0x18700 - CPU Div CLK control 0 register */
610 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
611 
612 	udelay(1);		/* Wait 1usec */
613 
614 	/*
615 	 * Poll Div CLK status 0 register - indication that the clocks
616 	 * are active - 0x18718 [8]
617 	 */
618 	do {
619 		reg = (reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR)) &
620 			(1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
621 	} while (reg == 0);
622 
623 	/*
624 	 * Clean the CTRL0, to be ready for next resets and next requests of
625 	 * ratio modifications.
626 	 */
627 	reg = 0x000000FF;
628 	/* 0x18700 - CPU Div CLK control 0 register */
629 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
630 
631 	udelay(5);
632 
633 	/* Switch HCLK Mux to the training clock (100MHz), keep DFS request bit */
634 	reg = 0x20050000;
635 	/* 0x18488 - DRAM Init control status register */
636 	dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
637 
638 	reg = reg_read(REG_DDR_IO_ADDR) & ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
639 	/* [15] = 0 - Set 1:1 Ratio between Dunit and Phy */
640 	dfs_reg_write(REG_DDR_IO_ADDR, reg);	/* 0x1524 - DDR IO Register */
641 
642 	reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK;
643 	/* [31:30] - reset pup data ctrl ADLL */
644 	/* 0x15EC - DRAM PHY Config register */
645 	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
646 
647 	reg = (reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK);
648 	/* [31:30] - normal pup data ctrl ADLL */
649 	/* 0x15EC - DRAM PHY Config register */
650 	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
651 
652 	udelay(1);		/* Wait 1usec */
653 
654 	/* 0x1404 */
655 	reg = (reg_read(REG_DUNIT_CTRL_LOW_ADDR) & 0xFFFFFFE7);
656 	dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
657 
658 	/* Poll Phy lock status register - APLL lock indication - 0x1674 */
659 	do {
660 		reg = (reg_read(REG_PHY_LOCK_STATUS_ADDR)) &
661 			REG_PHY_LOCK_STATUS_LOCK_MASK;
662 	} while (reg != REG_PHY_LOCK_STATUS_LOCK_MASK);	/* Wait for '0xFFF' */
663 
664 	reg = (reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK);
665 	/* [30:29] = 0 - Data Pup R/W path reset */
666 	/* 0x1400 - SDRAM Configuration register */
667 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
668 
669 	reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
670 	/* [30:29] = '11' - Data Pup R/W path reset */
671 	/* 0x1400 - SDRAM Configuration register */
672 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
673 
674 	udelay(1000);		/* Wait 1msec */
675 
676 	for (cs = 0; cs < MAX_CS; cs++) {
677 		if (dram_info->cs_ena & (1 << cs)) {
678 			/* Poll - Wait for Refresh operation completion */
679 			wait_refresh_op_complete();
680 
681 			/* Config CL and CWL with MR0 and MR2 registers */
682 			reg = reg_read(REG_DDR3_MR0_ADDR);
683 			reg &= ~0x74;	/* CL [3:0]; [6:4],[2] */
684 			reg |= (1 << 5);	/* CL code = 4 -> CAS latency 6 */
685 			dfs_reg_write(REG_DDR3_MR0_ADDR, reg);
686 			reg = REG_SDRAM_OPERATION_CMD_MR0 &
687 				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
688 			/* 0x1418 - SDRAM Operation Register */
689 			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
690 
691 			/* Poll - Wait for Refresh operation completion */
692 			wait_refresh_op_complete();
693 
694 			reg = reg_read(REG_DDR3_MR2_ADDR);
695 			reg &= ~0x38;	/* CWL [5:3] */
696 			reg |= (1 << 3);	/* CWL code = 1 -> CWL = 6 */
697 			dfs_reg_write(REG_DDR3_MR2_ADDR, reg);
698 
699 			reg = REG_SDRAM_OPERATION_CMD_MR2 &
700 				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
701 			/* 0x1418 - SDRAM Operation Register */
702 			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
703 
704 			/* Poll - Wait for Refresh operation completion */
705 			wait_refresh_op_complete();
706 
707 			/* Set current rd_sample_delay  */
708 			reg = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
709 			reg &= ~(REG_READ_DATA_SAMPLE_DELAYS_MASK <<
710 				 (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
711 			reg |= (5 << (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
712 			dfs_reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);
713 
714 			/* Set current rd_ready_delay  */
715 			reg = reg_read(REG_READ_DATA_READY_DELAYS_ADDR);
716 			reg &= ~(REG_READ_DATA_READY_DELAYS_MASK <<
717 				 (REG_READ_DATA_READY_DELAYS_OFFS * cs));
718 			reg |= ((6) << (REG_READ_DATA_READY_DELAYS_OFFS * cs));
719 			dfs_reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
720 		}
721 	}
722 
723 	/* [2] - DFS Self refresh disable  */
724 	reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
725 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
726 
727 	/* [1] - DFS Block disable  */
728 	reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_BLOCK_OFFS);
729 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
730 
731 	/*
732 	 * Poll DFS Register - 0x1528 [3] - DfsAtSR -
733 	 * All DRAM devices on all ranks are NOT in self refresh mode -
734 	 * the frequency change is complete
735 	 */
736 	do {
737 		reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
738 	} while (reg);		/* Wait for '0' */
739 
740 	reg = (reg_read(REG_METAL_MASK_ADDR) | (1 << 0));
741 	/* [0] - Enable Dunit to crossbar retry */
742 	/* 0x14B0 - Dunit MMask Register */
743 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
744 
745 	/* 0x1600 - ODPG Control register */
746 	reg = reg_read(REG_ODPG_CNTRL_ADDR);
747 	reg &= ~(1 << REG_ODPG_CNTRL_OFFS);	/* [21] = 0 */
748 	dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);
749 
750 	/* 0x1670 - PHY lock mask register */
751 	reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
752 	reg |= ~REG_PHY_LOCK_MASK_MASK;	/* [11:0] = FFF */
753 	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
754 
755 	DEBUG_DFS_C("DDR3 - DFS - High To Low - Ended successfully - new Frequency - ",
756 		    freq, 1);
757 
758 	return MV_OK;
759 #endif
760 }
761 
762 /*
763  * Name:     ddr3_dfs_low_2_high
764  * Desc:     Switch the Dunit and DRAM up to the target (high) frequency
765  * Args:     freq - target frequency
766  * Notes:
767  * Returns:  MV_OK - success, MV_FAIL - fail
768  */
769 int ddr3_dfs_low_2_high(u32 freq, int ratio_2to1, MV_DRAM_INFO *dram_info)
770 {
771 #if defined(MV88F78X60) || defined(MV88F672X)
772 	/* This Flow is relevant for ArmadaXP A0 and AvantaLP (MV88F672X) */
773 	u32 reg, freq_par, tmp;
774 	u32 cs = 0;
775 
776 	DEBUG_DFS_C("DDR3 - DFS - Low To High - Starting DFS procedure to Frequency - ",
777 		    freq, 1);
778 
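	/*
	 * Outline of the procedure below: block new external transactions,
	 * move the DRAM into self refresh, reprogram the clock dividers for
	 * the target frequency, restore the Dunit:PHY clock ratio, the DLL-on
	 * CL/CWL values and Rtt_nom, exit self refresh, issue a refresh, and
	 * reprogram MR0/MR2 for every enabled chip select.
	 */
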
779 	/* target frequency - freq */
780 	freq_par = ddr3_get_freq_parameter(freq, ratio_2to1);
781 
782 #if defined(MV88F672X)
783 	u32 hclk;
784 	u32 cpu_freq = ddr3_get_cpu_freq();
785 	get_target_freq(cpu_freq, &tmp, &hclk);
786 #endif
787 
788 	/* Configure - DRAM DLL final state after DFS is complete - Enable */
789 	reg = reg_read(REG_DFS_ADDR);
790 	/* [0] - DfsDllNextState - Enable */
791 	reg &= ~(1 << REG_DFS_DLLNEXTSTATE_OFFS);
792 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
793 
794 	/*
795 	 * Configure -  XBAR Retry response during Block to enable
796 	 * internal access - Disable
797 	 */
798 	reg = reg_read(REG_METAL_MASK_ADDR);
799 	/* [0] - RetryMask - Disable */
800 	reg &= ~(1 << REG_METAL_MASK_RETRY_OFFS);
801 	/* 0x14B0 - Dunit MMask Register */
802 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
803 
804 	/* Configure - Block new external transactions - Enable */
805 	reg = reg_read(REG_DFS_ADDR);
806 	reg |= (1 << REG_DFS_BLOCK_OFFS);	/* [1] - DfsBlock - Enable  */
807 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
808 
809 	/* Configure - Move DRAM into Self Refresh */
810 	reg = reg_read(REG_DFS_ADDR);
811 	reg |= (1 << REG_DFS_SR_OFFS);	/* [2] - DfsSR - Enable */
812 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
813 
814 	/* Poll - Wait for Self Refresh indication */
815 	do {
816 		reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
817 	} while (reg == 0x0);	/* 0x1528 [3] - DfsAtSR - Wait for '1' */
818 
819 	/* Start of clock change procedure (PLL) */
820 #if defined(MV88F672X)
821 	/* avantaLP */
822 	/* Configure    cpupll_clkdiv_reset_mask */
823 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
824 	reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL0_MASK;
825 	/* 0xE8264[7:0]   0xff CPU Clock Dividers Reset mask */
826 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, (reg + 0xFF));
827 
828 	/* Configure    cpu_clkdiv_reload_smooth    */
829 	reg = reg_read(CPU_PLL_CNTRL0);
830 	reg &= CPU_PLL_CNTRL0_RELOAD_SMOOTH_MASK;
831 	/* 0xE8260   [15:8]  0x2 CPU Clock Dividers Reload Smooth enable */
832 	dfs_reg_write(CPU_PLL_CNTRL0,
833 		      reg + (2 << CPU_PLL_CNTRL0_RELOAD_SMOOTH_OFFS));
834 
835 	/* Configure    cpupll_clkdiv_relax_en */
836 	reg = reg_read(CPU_PLL_CNTRL0);
837 	reg &= CPU_PLL_CNTRL0_RELAX_EN_MASK;
838 	/* 0xE8260 [31:24] 0x2 Relax Enable */
839 	dfs_reg_write(CPU_PLL_CNTRL0,
840 		      reg + (2 << CPU_PLL_CNTRL0_RELAX_EN_OFFS));
841 
842 	/* Configure    cpupll_clkdiv_ddr_clk_ratio */
843 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL1);
844 	/*
845 	 * 0xE8268  [13:8]  N   Set Training clock:
846 	 * APLL Out Clock (VCO freq) / N = 100 MHz
847 	 */
848 	reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL1_MASK;
849 	reg |= (freq_par << 8);	/* full Integer ratio from PLL-out to ddr-clk */
850 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL1, reg);
851 	/* Configure    cpupll_clkdiv_reload_ratio  */
852 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
853 	reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
854 	/* 0xE8264 [8]=0x1 CPU Clock Dividers Reload Ratio trigger set */
855 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0,
856 		      reg + (1 << CPU_PLL_CLOCK_RELOAD_RATIO_OFFS));
857 
858 	udelay(1);
859 
860 	/* Configure    cpupll_clkdiv_reload_ratio  */
861 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
862 	reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
863 	/* 0xE8264 [8]=0x0 CPU Clock Dividers Reload Ratio trigger clear */
864 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, reg);
865 
866 	udelay(5);
867 
868 #else
869 	/*
870 	 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
871 	 * and in the same chance, block reassertions of reset [15:8]
872 	 * and force reserved bits[7:0].
873 	 */
874 	reg = 0x0000FFFF;
875 
876 	/* 0x18700 - CPU Div CLK control 0 */
877 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
878 
879 	/*
880 	 * RelaX whenever reset is asserted to that channel (good for any case)
881 	 */
882 	reg = 0x0000FF00;
883 	/* 0x18704 - CPU Div CLK control 1 */
884 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
885 
886 	reg = reg_read(REG_CPU_DIV_CLK_CTRL_2_ADDR) &
887 		REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
888 	reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
889 	/* full Integer ratio from PLL-out to ddr-clk */
890 	/* 0x1870C - CPU Div CLK control 3 register */
891 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_2_ADDR, reg);
892 
893 	/*
894 	 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
895 	 * All the rest are kept as is (forced, but could be read-modify-write).
896 	 * This is done now by RMW above.
897 	 */
898 	reg = 0x000FFF02;
899 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);
900 
901 	/* Wait before replacing the clock on the DDR Phy Channel. */
902 	udelay(1);
903 
904 	reg = 0x0102FDFF;
905 	/*
906 	 * This is for triggering the frequency update. Bit[24] is the
907 	 * central control
908 	 * bits [23:16] == which channels to change ==2 ==> only DDR Phy
909 	 *                 (smooth transition)
910 	 * bits [15:8] == mask reset reassertion due to clock modification
911 	 *                to these channels.
912 	 * bits [7:0] == not in use
913 	 */
914 	/* 0x18700 - CPU Div CLK control 0 register */
915 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
916 
917 	udelay(1);
918 
919 	/*
920 	 * Poll Div CLK status 0 register - indication that the clocks
921 	 * are active - 0x18718 [8]
922 	 */
923 	do {
924 		reg = reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR) &
925 			(1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
926 	} while (reg == 0);
927 
928 	reg = 0x000000FF;
929 	/*
930 	 * Clean the CTRL0, to be ready for next resets and next requests
931 	 * of ratio modifications.
932 	 */
933 	/* 0x18700 - CPU Div CLK control 0 register */
934 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
935 #endif
936 	/* End of clock change procedure (PLL) */
937 
938 	if (ratio_2to1) {
939 		/* Configure - Select normal clock for the DDR PHY - Disable */
940 		reg = reg_read(REG_DRAM_INIT_CTRL_STATUS_ADDR);
941 		/* [16] - ddr_phy_trn_clk_sel - Disable  */
942 		reg &= ~(1 << REG_DRAM_INIT_CTRL_TRN_CLK_OFFS);
943 		/* 0x18488 - DRAM Init control status register */
944 		dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
945 	}
946 
947 	/*
948 	 * Configure - Set Correct Ratio - according to target ratio
949 	 * parameter - 2:1/1:1
950 	 */
951 	if (ratio_2to1) {
952 		/*
953 		 * [15] - Phy2UnitClkRatio = 1 - Set 2:1 Ratio between
954 		 * Dunit and Phy
955 		 */
956 		reg = reg_read(REG_DDR_IO_ADDR) |
957 			(1 << REG_DDR_IO_CLK_RATIO_OFFS);
958 	} else {
959 		/*
960 		 * [15] - Phy2UnitClkRatio = 0 - Set 1:1 Ratio between
961 		 * Dunit and Phy
962 		 */
963 		reg = reg_read(REG_DDR_IO_ADDR) &
964 			~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
965 	}
966 	dfs_reg_write(REG_DDR_IO_ADDR, reg);	/* 0x1524 - DDR IO Register */
967 
968 	/* Configure - 2T Mode - Restore original configuration */
969 	reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
970 	/* [3:4] 2T - Restore value */
971 	reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
972 	reg |= ((dram_info->mode_2t & REG_DUNIT_CTRL_LOW_2T_MASK) <<
973 		REG_DUNIT_CTRL_LOW_2T_OFFS);
974 	/* 0x1404 - DDR Controller Control Low Register */
975 	dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
976 
977 	/* Configure - Restore CL and CWL - MRS Commands */
978 	reg = reg_read(REG_DFS_ADDR);
979 	reg &= ~(REG_DFS_CL_NEXT_STATE_MASK << REG_DFS_CL_NEXT_STATE_OFFS);
980 	reg &= ~(REG_DFS_CWL_NEXT_STATE_MASK << REG_DFS_CWL_NEXT_STATE_OFFS);
981 
982 	if (freq == DDR_400) {
983 		if (dram_info->target_frequency == 0x8)
984 			tmp = ddr3_cl_to_valid_cl(5);
985 		else
986 			tmp = ddr3_cl_to_valid_cl(6);
987 	} else {
988 		tmp = ddr3_cl_to_valid_cl(dram_info->cl);
989 	}
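
	/*
	 * ddr3_cl_to_valid_cl() is assumed to translate a CAS latency in
	 * clock cycles into the encoding expected by the DFS and MR0
	 * registers (e.g. CL = 6 -> code 0x4, as used in the high-to-low
	 * flow above).
	 */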
990 
991 	/* [8] - DfsCLNextState */
992 	reg |= ((tmp & REG_DFS_CL_NEXT_STATE_MASK) << REG_DFS_CL_NEXT_STATE_OFFS);
993 	if (freq == DDR_400) {
994 		/* [12] - DfsCWLNextState */
995 		reg |= (((0) & REG_DFS_CWL_NEXT_STATE_MASK) <<
996 			REG_DFS_CWL_NEXT_STATE_OFFS);
997 	} else {
998 		/* [12] - DfsCWLNextState */
999 		reg |= (((dram_info->cwl) & REG_DFS_CWL_NEXT_STATE_MASK) <<
1000 			REG_DFS_CWL_NEXT_STATE_OFFS);
1001 	}
1002 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1003 
1004 	/* Optional - Configure - DDR3_Rtt_nom_CS# */
1005 	for (cs = 0; cs < MAX_CS; cs++) {
1006 		if (dram_info->cs_ena & (1 << cs)) {
1007 			reg = reg_read(REG_DDR3_MR1_CS_ADDR +
1008 				       (cs << MR_CS_ADDR_OFFS));
1009 			reg &= REG_DDR3_MR1_RTT_MASK;
1010 			reg |= odt_static[dram_info->cs_ena][cs];
1011 			dfs_reg_write(REG_DDR3_MR1_CS_ADDR +
1012 				      (cs << MR_CS_ADDR_OFFS), reg);
1013 		}
1014 	}
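
	/*
	 * odt_static[][] is indexed by the populated chip-select mask
	 * (cs_ena) and by the chip select being programmed, and supplies the
	 * MR1 Rtt_nom value used for normal DLL-on operation.
	 */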
1015 
1016 	/* Configure - Reset ADLLs - Set Reset */
1017 	reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK;
1018 	/* [31:30] - reset pup data ctrl ADLL */
1019 	/* 0x15EC - DRAM PHY Config Register */
1020 	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1021 
1022 	/* Configure - Reset ADLLs - Release Reset */
1023 	reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK;
1024 	/* [31:30] - normal pup data ctrl ADLL */
1025 	/* 0x15EC - DRAM PHY Config register */
1026 	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1027 
1028 	/* Poll - Wait for APLL + ADLLs lock on new frequency */
1029 	do {
1030 		reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
1031 			REG_PHY_LOCK_APLL_ADLL_STATUS_MASK;
1032 		/* 0x1674 [10:0] - Phy lock status Register */
1033 	} while (reg != REG_PHY_LOCK_APLL_ADLL_STATUS_MASK);
1034 
1035 	/* Configure - Reset the PHY SDR clock divider */
1036 	if (ratio_2to1) {
1037 		/* Pup Reset Divider B - Set Reset */
1038 		/* [28] - DataPupRdRST = 0 */
1039 		reg = reg_read(REG_SDRAM_CONFIG_ADDR) &
1040 			~(1 << REG_SDRAM_CONFIG_PUPRSTDIV_OFFS);
1041 		/* [28] - DataPupRdRST = 1 */
1042 		tmp = reg_read(REG_SDRAM_CONFIG_ADDR) |
1043 			(1 << REG_SDRAM_CONFIG_PUPRSTDIV_OFFS);
1044 		/* 0x1400 - SDRAM Configuration register */
1045 		dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1046 
1047 		/* Pup Reset Divider B - Release Reset */
1048 		/* 0x1400 - SDRAM Configuration register */
1049 		dfs_reg_write(REG_SDRAM_CONFIG_ADDR, tmp);
1050 	}
1051 
1052 	/* Configure - Reset the PHY Read FIFO and Write channels - Set Reset */
1053 	reg = reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK;
1054 	/* [30:29] = 0 - Data Pup R/W path reset */
1055 	/* 0x1400 - SDRAM Configuration register */
1056 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1057 
1058 	/*
1059 	 * Configure - DRAM Data PHY Read [30], Write [29] path reset -
1060 	 * Release Reset
1061 	 */
1062 	reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
1063 	/* [30:29] = '11' - Data Pup R/W path reset */
1064 	/* 0x1400 - SDRAM Configuration register */
1065 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1066 
1067 	/* Registered DIMM support */
1068 	if (dram_info->reg_dimm) {
1069 		/*
1070 		 * Configure - Change register DRAM operating speed
1071 		 * (DDR3-1333 / DDR3-1600) - CWA_RC
1072 		 */
1073 		reg = (0xA & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
1074 			REG_SDRAM_OPERATION_CWA_RC_OFFS;
1075 		if (freq <= DDR_400) {
1076 			/*
1077 			 * Configure - Change register DRAM operating speed
1078 			 * (DDR3-800) - CWA_DATA
1079 			 */
1080 			reg |= ((0x0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1081 				REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1082 		} else if ((freq > DDR_400) && (freq <= DDR_533)) {
1083 			/*
1084 			 * Configure - Change register DRAM operating speed
1085 			 * (DDR3-1066) - CWA_DATA
1086 			 */
1087 			reg |= ((0x1 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1088 				REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1089 		} else if ((freq > DDR_533) && (freq <= DDR_666)) {
1090 			/*
1091 			 * Configure - Change register DRAM operating speed
1092 			 * (DDR3-1333) - CWA_DATA
1093 			 */
1094 			reg |= ((0x2 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1095 				REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1096 		} else {
1097 			/*
1098 			 * Configure - Change register DRAM operating speed
1099 			 * (DDR3-1600) - CWA_DATA
1100 			 */
1101 			reg |= ((0x3 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1102 				REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1103 		}
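
		/*
		 * The CWA data value chosen above selects the register
		 * chip's operating-speed control word (RC10): 0 = DDR3-800,
		 * 1 = DDR3-1066, 2 = DDR3-1333, 3 = DDR3-1600.
		 */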
1104 
1105 		/* Configure - Set Delay - tSTAB */
1106 		reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
1107 		/* Configure - Issue CWA command with the above parameters */
1108 		reg |= (REG_SDRAM_OPERATION_CMD_CWA &
1109 			~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
1110 
1111 		/* 0x1418 - SDRAM Operation Register */
1112 		dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1113 
1114 		/* Poll - Wait for CWA operation completion */
1115 		do {
1116 			reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
1117 				REG_SDRAM_OPERATION_CMD_MASK;
1118 		} while (reg);
1119 	}
1120 
1121 	/* Configure - Exit Self Refresh */
1122 	/* [2] - DfsSR  */
1123 	reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
1124 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1125 
1126 	/*
1127 	 * Poll - DFS Register - 0x1528 [3] - DfsAtSR - All DRAM
1128 	 * devices on all ranks are NOT in self refresh mode
1129 	 */
1130 	do {
1131 		reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
1132 	} while (reg);		/* Wait for '0' */
1133 
1134 	/* Configure - Issue Refresh command */
1135 	/* [3-0] = 0x2 - Refresh Command, [11-8] - enabled Cs */
1136 	reg = REG_SDRAM_OPERATION_CMD_RFRS;
1137 	for (cs = 0; cs < MAX_CS; cs++) {
1138 		if (dram_info->cs_ena & (1 << cs))
1139 			reg &= ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1140 	}
1141 
1142 	/* 0x1418 - SDRAM Operation Register */
1143 	dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1144 
1145 	/* Poll - Wait for Refresh operation completion */
1146 	wait_refresh_op_complete();
1147 
1148 	/* Configure - Block new external transactions - Disable */
1149 	reg = reg_read(REG_DFS_ADDR);
1150 	reg &= ~(1 << REG_DFS_BLOCK_OFFS);	/* [1] - DfsBlock - Disable  */
1151 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1152 
1153 	/*
1154 	 * Configure -  XBAR Retry response during Block to enable
1155 	 * internal access - Enable
1156 	 */
1157 	reg = reg_read(REG_METAL_MASK_ADDR);
1158 	/* [0] - RetryMask - Enable */
1159 	reg |= (1 << REG_METAL_MASK_RETRY_OFFS);
1160 	/* 0x14B0 - Dunit MMask Register */
1161 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
1162 
1163 	for (cs = 0; cs < MAX_CS; cs++) {
1164 		if (dram_info->cs_ena & (1 << cs)) {
1165 			/* Configure - Set CL */
1166 			reg = reg_read(REG_DDR3_MR0_CS_ADDR +
1167 				       (cs << MR_CS_ADDR_OFFS)) &
1168 				~REG_DDR3_MR0_CL_MASK;
1169 			if (freq == DDR_400)
1170 				tmp = ddr3_cl_to_valid_cl(6);
1171 			else
1172 				tmp = ddr3_cl_to_valid_cl(dram_info->cl);
1173 			reg |= ((tmp & 0x1) << REG_DDR3_MR0_CL_OFFS);
1174 			reg |= ((tmp & 0xE) << REG_DDR3_MR0_CL_HIGH_OFFS);
1175 			dfs_reg_write(REG_DDR3_MR0_CS_ADDR +
1176 				      (cs << MR_CS_ADDR_OFFS), reg);
1177 
1178 			/* Configure - Set CWL */
1179 			reg = reg_read(REG_DDR3_MR2_CS_ADDR +
1180 				       (cs << MR_CS_ADDR_OFFS)) &
1181 				~(REG_DDR3_MR2_CWL_MASK << REG_DDR3_MR2_CWL_OFFS);
1182 			if (freq == DDR_400)
1183 				reg |= ((0) << REG_DDR3_MR2_CWL_OFFS);
1184 			else
1185 				reg |= ((dram_info->cwl) << REG_DDR3_MR2_CWL_OFFS);
1186 			dfs_reg_write(REG_DDR3_MR2_CS_ADDR +
1187 				      (cs << MR_CS_ADDR_OFFS), reg);
1188 		}
1189 	}
1190 
1191 	DEBUG_DFS_C("DDR3 - DFS - Low To High - Ended successfully - new Frequency - ",
1192 		    freq, 1);
1193 
1194 	return MV_OK;
1195 
1196 #else
1197 
1198 	/* This Flow is relevant for Armada370 A0 and ArmadaXP Z1 */
1199 
1200 	u32 reg, freq_par, tmp;
1201 	u32 cs = 0;
1202 
1203 	DEBUG_DFS_C("DDR3 - DFS - Low To High - Starting DFS procedure to Frequency - ",
1204 		    freq, 1);
1205 
1206 	/* target frequency - freq */
1207 	freq_par = ddr3_get_freq_parameter(freq, ratio_2to1);
1208 
1209 	reg = 0x0000FF00;
1210 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
1211 
1212 	/* 0x1600 - ODPG Control register */
1213 	reg = reg_read(REG_ODPG_CNTRL_ADDR);
1214 	reg |= (1 << REG_ODPG_CNTRL_OFFS);	/* [21] = 1 */
1215 	dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);
1216 
1217 	/* 0x1670 - PHY lock mask register */
1218 	reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
1219 	reg &= REG_PHY_LOCK_MASK_MASK;	/* [11:0] = 0 */
1220 	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
1221 
1222 	/* Enable reconfig MR Registers after DFS */
1223 	reg = reg_read(REG_DFS_ADDR);	/* 0x1528 - DFS register */
1224 	/* [4] - Disable - reconfig MR registers after DFS_ERG */
1225 	reg &= ~0x11;
1226 	/* [0] - Enable - DRAM DLL after DFS */
1227 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1228 
1229 	/* Disable DRAM Controller to crossbar retry */
1230 	/* [0] - disable */
1231 	reg = reg_read(REG_METAL_MASK_ADDR) & ~(1 << 0);
1232 	/* 0x14B0 - Dunit MMask Register */
1233 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
1234 
1235 	/* Enable DRAM Blocking */
1236 	/* [1] - DFS Block enable  */
1237 	reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_BLOCK_OFFS);
1238 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1239 
1240 	/* Enable Self refresh */
1241 	/* [2] - DFS Self refresh enable  */
1242 	reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_SR_OFFS);
1243 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1244 
1245 	/*
1246 	 * Poll DFS Register - All DRAM devices on all ranks are in
1247 	 * self refresh mode - DFS can be executed afterwards
1248 	 */
1249 	/* 0x1528 [3] - DfsAtSR  */
1250 	do {
1251 		reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
1252 	} while (reg == 0x0);	/* Wait for '1' */
1253 
1254 	/*
1255 	 * Set Correct Ratio - if freq>MARGIN_FREQ use 2:1 ratio
1256 	 * else use 1:1 ratio
1257 	 */
1258 	if (ratio_2to1) {
1259 		/* [15] = 1 - Set 2:1 Ratio between Dunit and Phy */
1260 		reg = reg_read(REG_DDR_IO_ADDR) |
1261 			(1 << REG_DDR_IO_CLK_RATIO_OFFS);
1262 	} else {
1263 		/* [15] = 0 - Set 1:1 Ratio between Dunit and Phy */
1264 		reg = reg_read(REG_DDR_IO_ADDR) &
1265 			~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
1266 	}
1267 	dfs_reg_write(REG_DDR_IO_ADDR, reg);	/* 0x1524 - DDR IO Register */
1268 
1269 	/* Switch HCLK Mux from the training clock (100MHz), [16] = 0, keep DFS request bit */
1270 	reg = 0x20040000;
1271 	/*
1272 	 * [29] - training logic request DFS, [28:27] -
1273 	 * preload patterns frequency [18]
1274 	 */
1275 
1276 	/* 0x18488 - DRAM Init control status register */
1277 	dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
1278 
1279 	/* Add delay between entering SR and start ratio modification */
1280 	udelay(1);
1281 
1282 	/*
1283 	 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
1284 	 * and in the same chance, block reassertions of reset [15:8] and
1285 	 * force reserved bits[7:0].
1286 	 */
1287 	reg = 0x0000FFFF;
1288 	/* 0x18700 - CPU Div CLK control 0 */
1289 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
1290 
1291 	/*
1292 	 * RelaX whenever reset is asserted to that channel (good for any case)
1293 	 */
1294 	reg = 0x0000FF00;
1295 	/* 0x18704 - CPU Div CLK control 0 */
1296 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
1297 
1298 	reg = reg_read(REG_CPU_DIV_CLK_CTRL_3_ADDR) &
1299 		REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
1300 	reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
1301 	/* Full Integer ratio from PLL-out to ddr-clk */
1302 	/* 0x1870C - CPU Div CLK control 3 register */
1303 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_3_ADDR, reg);
1304 
1305 	/*
1306 	 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
1307 	 * All the rest are kept as is (forced, but could be read-modify-write).
1308 	 * This is done now by RMW above.
1309 	 */
1310 
1311 	reg = 0x000FFF02;
1312 
1313 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);
1314 
1315 	/* Wait before replacing the clock on the DDR Phy Channel. */
1316 	udelay(1);
1317 
1318 	reg = 0x0102FDFF;
1319 	/*
1320 	 * This is for triggering the frequency update. Bit[24] is the
1321 	 * central control
1322 	 * bits [23:16] == which channels to change ==2 ==> only DDR Phy
1323 	 *                 (smooth transition)
1324 	 * bits [15:8] == mask reset reassertion due to clock modification
1325 	 *                to these channels.
1326 	 * bits [7:0] == not in use
1327 	 */
1328 	/* 0x18700 - CPU Div CLK control 0 register */
1329 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
1330 
1331 	udelay(1);
1332 
1333 	/*
1334 	 * Poll Div CLK status 0 register - indication that the clocks are
1335 	 * active - 0x18718 [8]
1336 	 */
1337 	do {
1338 		reg = reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR) &
1339 			(1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
1340 	} while (reg == 0);
1341 
1342 	reg = 0x000000FF;
1343 	/*
1344 	 * Clean the CTRL0, to be ready for next resets and next requests of
1345 	 * ratio modifications.
1346 	 */
1347 	/* 0x18700 - CPU Div CLK control 0 register */
1348 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
1349 
1350 	udelay(5);
1351 
1352 	if (ratio_2to1) {
1353 		/* Pup Reset Divider B - Set Reset */
1354 		/* [28] = 0 - Pup Reset Divider B */
1355 		reg = reg_read(REG_SDRAM_CONFIG_ADDR) & ~(1 << 28);
1356 		/* [28] = 1 - Pup Reset Divider B */
1357 		tmp = reg_read(REG_SDRAM_CONFIG_ADDR) | (1 << 28);
1358 		/* 0x1400 - SDRAM Configuration register */
1359 		dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1360 
1361 		/* Pup Reset Divider B - Release Reset */
1362 		/* 0x1400 - SDRAM Configuration register */
1363 		dfs_reg_write(REG_SDRAM_CONFIG_ADDR, tmp);
1364 	}
1365 
1366 	/* DRAM Data PHYs ADLL Reset - Set Reset */
1367 	reg = (reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK);
1368 	/* [31:30] - reset pup data ctrl ADLL */
1369 	/* 0x15EC - DRAM PHY Config Register */
1370 	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1371 
1372 	udelay(25);
1373 
1374 	/* APLL lock indication - Poll Phy lock status Register - 0x1674 [9] */
1375 	do {
1376 		reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
1377 			(1 << REG_PHY_LOCK_STATUS_LOCK_OFFS);
1378 	} while (reg == 0);
1379 
1380 	/* DRAM Data PHYs ADLL Reset - Release Reset */
1381 	reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK;
1382 	/* [31:30] - normal pup data ctrl ADLL */
1383 	/* 0x15EC - DRAM PHY Config register */
1384 	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1385 
1386 	udelay(10000);		/* Wait 10msec */
1387 
1388 	/*
1389 	 * APLL lock indication - Poll Phy lock status Register - 0x1674 [11:0]
1390 	 */
1391 	do {
1392 		reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
1393 			REG_PHY_LOCK_STATUS_LOCK_MASK;
1394 	} while (reg != REG_PHY_LOCK_STATUS_LOCK_MASK);
1395 
1396 	/* DRAM Data PHY Read [30], Write [29] path reset - Set Reset */
1397 	reg = reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK;
1398 	/* [30:29] = 0 - Data Pup R/W path reset */
1399 	/* 0x1400 - SDRAM Configuration register */
1400 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1401 
1402 	/* DRAM Data PHY Read [30], Write [29] path reset - Release Reset */
1403 	reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
1404 	/* [30:29] = '11' - Data Pup R/W path reset */
1405 	/* 0x1400 - SDRAM Configuration register */
1406 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1407 
1408 	/* Disable DFS Reconfig */
1409 	reg = reg_read(REG_DFS_ADDR) & ~(1 << 4);
1410 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1411 
1412 	/* [2] - DFS Self refresh disable  */
1413 	reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
1414 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1415 
1416 	/*
1417 	 * Poll DFS Register - 0x1528 [3] - DfsAtSR - All DRAM devices on
1418 	 * all ranks are NOT in self refresh mode
1419 	 */
1420 	do {
1421 		reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
1422 	} while (reg);		/* Wait for '0' */
1423 
1424 	/* 0x1404 */
1425 	reg = (reg_read(REG_DUNIT_CTRL_LOW_ADDR) & 0xFFFFFFE7) | 0x2;
1426 
1427 	/* Configure - 2T Mode - Restore original configuration */
1428 	/* [3:4] 2T - Restore value */
1429 	reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
1430 	reg |= ((dram_info->mode_2t & REG_DUNIT_CTRL_LOW_2T_MASK) <<
1431 		REG_DUNIT_CTRL_LOW_2T_OFFS);
1432 	dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
1433 
1434 	udelay(1);		/* Wait 1us */
1435 
1436 	for (cs = 0; cs < MAX_CS; cs++) {
1437 		if (dram_info->cs_ena & (1 << cs)) {
1438 			reg = (reg_read(REG_DDR3_MR1_ADDR));
1439 			/* DLL Enable */
1440 			reg &= ~(1 << REG_DDR3_MR1_DLL_ENA_OFFS);
1441 			dfs_reg_write(REG_DDR3_MR1_ADDR, reg);
1442 
1443 			/* Issue MRS Command to current cs */
1444 			reg = REG_SDRAM_OPERATION_CMD_MR1 &
1445 				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1446 			/*
1447 			 * [3-0] = 0x4 - MR1 Command, [11-8] -
1448 			 * enable current cs
1449 			 */
1450 			/* 0x1418 - SDRAM Operation Register */
1451 			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1452 
1453 			/* Poll - Wait for Refresh operation completion */
1454 			wait_refresh_op_complete();
1455 
1456 			/* DLL Reset - MR0 */
1457 			reg = reg_read(REG_DDR3_MR0_ADDR);
1458 			dfs_reg_write(REG_DDR3_MR0_ADDR, reg);
1459 
1460 			/* Issue MRS Command to current cs */
1461 			reg = REG_SDRAM_OPERATION_CMD_MR0 &
1462 				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1463 			/*
1464 			 * [3-0] - MR0 Command, [11-8] -
1465 			 * enable current cs
1466 			 */
1467 			/* 0x1418 - SDRAM Operation Register */
1468 			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1469 
1470 			/* Poll - Wait for Refresh operation completion */
1471 			wait_refresh_op_complete();
1472 
1473 			reg = reg_read(REG_DDR3_MR0_ADDR);
1474 			reg &= ~0x74;	/* CL [3:0]; [6:4],[2] */
1475 
1476 			if (freq == DDR_400)
1477 				tmp = ddr3_cl_to_valid_cl(6) & 0xF;
1478 			else
1479 				tmp = ddr3_cl_to_valid_cl(dram_info->cl) & 0xF;
1480 
1481 			reg |= ((tmp & 0x1) << 2);
1482 			reg |= ((tmp >> 1) << 4);	/* to bit 4 */
1483 			dfs_reg_write(REG_DDR3_MR0_ADDR, reg);
1484 
1485 			reg = REG_SDRAM_OPERATION_CMD_MR0 &
1486 				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1487 			/* 0x1418 - SDRAM Operation Register */
1488 			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1489 
1490 			/* Poll - Wait for Refresh operation completion */
1491 			wait_refresh_op_complete();
1492 
1493 			reg = reg_read(REG_DDR3_MR2_ADDR);
1494 			reg &= ~0x38;	/* CWL [5:3] */
1495 			/* CWL code = 0; for 400 MHz the CWL is 5 */
1496 			if (freq != DDR_400)
1497 				reg |= dram_info->cwl << REG_DDR3_MR2_CWL_OFFS;
1498 			dfs_reg_write(REG_DDR3_MR2_ADDR, reg);
1499 			reg = REG_SDRAM_OPERATION_CMD_MR2 &
1500 				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1501 			/* 0x1418 - SDRAM Operation Register */
1502 			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1503 
1504 			/* Poll - Wait for Refresh operation completion */
1505 			wait_refresh_op_complete();
1506 
1507 			/* Set current rd_sample_delay  */
1508 			reg = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
1509 			reg &= ~(REG_READ_DATA_SAMPLE_DELAYS_MASK <<
1510 				 (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
1511 			reg |= (dram_info->cl <<
1512 				(REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
1513 			dfs_reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);
1514 
1515 			/* Set current rd_ready_delay  */
1516 			reg = reg_read(REG_READ_DATA_READY_DELAYS_ADDR);
1517 			reg &= ~(REG_READ_DATA_READY_DELAYS_MASK <<
1518 				 (REG_READ_DATA_READY_DELAYS_OFFS * cs));
1519 			reg |= ((dram_info->cl + 1) <<
1520 				(REG_READ_DATA_READY_DELAYS_OFFS * cs));
1521 			dfs_reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
1522 		}
1523 	}
1524 
1525 	/* Enable ODT on DLL-on mode */
1526 	dfs_reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR, 0);
1527 
1528 	/* [1] - DFS Block disable  */
1529 	reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_BLOCK_OFFS);
1530 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1531 
1532 	/* Re-enable auto refresh (end of training-clock settings) */
1533 	/* 0x1600 - ODPG Control register */
1534 	reg = reg_read(REG_ODPG_CNTRL_ADDR);
1535 	reg &= ~(1 << REG_ODPG_CNTRL_OFFS);	/* [21] = 0 */
1536 	dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);
1537 
1538 	/* Restore the PHY lock mask */
1539 	/* 0x1670 - PHY lock mask register */
1540 	reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
1541 	reg |= ~REG_PHY_LOCK_MASK_MASK;	/* [11:0] = FFF */
1542 	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
1543 
1544 	reg = reg_read(REG_METAL_MASK_ADDR) | (1 << 0);	/* [0] - enable */
1545 	/* 0x14B0 - Dunit MMask Register */
1546 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
1547 
1548 	DEBUG_DFS_C("DDR3 - DFS - Low To High - Ended successfully - new Frequency - ",
1549 		    freq, 1);
1550 	return MV_OK;
1551 #endif
1552 }
1553