1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) Marvell International Ltd. and its affiliates
4  */
5 
6 #include <common.h>
7 #include <i2c.h>
8 #include <spl.h>
9 #include <asm/io.h>
10 #include <asm/arch/cpu.h>
11 #include <asm/arch/soc.h>
12 
13 #include "ddr3_hw_training.h"
14 
15 /*
16  * Debug
17  */
#define DEBUG_DFS_C(s, d, l) \
	do { DEBUG_DFS_S(s); DEBUG_DFS_D(d, l); DEBUG_DFS_S("\n"); } while (0)
#define DEBUG_DFS_FULL_C(s, d, l) \
	do { DEBUG_DFS_FULL_S(s); DEBUG_DFS_FULL_D(d, l); \
	     DEBUG_DFS_FULL_S("\n"); } while (0)
22 
23 #ifdef MV_DEBUG_DFS
24 #define DEBUG_DFS_S(s)			puts(s)
25 #define DEBUG_DFS_D(d, l)		printf("%x", d)
26 #else
27 #define DEBUG_DFS_S(s)
28 #define DEBUG_DFS_D(d, l)
29 #endif
30 
31 #ifdef MV_DEBUG_DFS_FULL
32 #define DEBUG_DFS_FULL_S(s)		puts(s)
33 #define DEBUG_DFS_FULL_D(d, l)		printf("%x", d)
34 #else
35 #define DEBUG_DFS_FULL_S(s)
36 #define DEBUG_DFS_FULL_D(d, l)
37 #endif
38 
39 #if defined(MV88F672X)
40 extern u8 div_ratio[CLK_VCO][CLK_DDR];
41 extern void get_target_freq(u32 freq_mode, u32 *ddr_freq, u32 *hclk_ps);
42 #else
43 extern u16 odt_dynamic[ODT_OPT][MAX_CS];
44 extern u8 div_ratio1to1[CLK_CPU][CLK_DDR];
45 extern u8 div_ratio2to1[CLK_CPU][CLK_DDR];
46 #endif
47 extern u16 odt_static[ODT_OPT][MAX_CS];
48 
49 extern u32 cpu_fab_clk_to_hclk[FAB_OPT][CLK_CPU];
50 
51 extern u32 ddr3_get_vco_freq(void);
52 
53 u32 ddr3_get_freq_parameter(u32 target_freq, int ratio_2to1);
54 
55 #ifdef MV_DEBUG_DFS
56 static inline void dfs_reg_write(u32 addr, u32 val)
57 {
58 	printf("\n write reg 0x%08x = 0x%08x", addr, val);
59 	writel(val, INTER_REGS_BASE + addr);
60 }
61 #else
62 static inline void dfs_reg_write(u32 addr, u32 val)
63 {
64 	writel(val, INTER_REGS_BASE + addr);
65 }
66 #endif
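
/*
 * All Dunit/PHY accesses in this file go through reg_read()/dfs_reg_write()
 * with offsets relative to INTER_REGS_BASE. A minimal sketch of the
 * read-modify-write pattern used throughout (register and bit names taken
 * from the code below):
 *
 *	u32 reg;
 *
 *	reg = reg_read(REG_DFS_ADDR);		// read current value
 *	reg |= (1 << REG_DFS_BLOCK_OFFS);	// update the field
 *	dfs_reg_write(REG_DFS_ADDR, reg);	// write back (traced when
 *						// MV_DEBUG_DFS is defined)
 */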
67 
68 static void wait_refresh_op_complete(void)
69 {
70 	u32 reg;
71 
72 	/* Poll - Wait for Refresh operation completion */
73 	do {
74 		reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
75 			REG_SDRAM_OPERATION_CMD_RFRS_DONE;
76 	} while (reg);		/* Wait for '0' */
77 }
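
/*
 * Note: wait_refresh_op_complete(), like the other polling loops in this
 * file, polls without a timeout. A bounded variant, if one were ever needed,
 * could look like the sketch below (MAX_POLL_RETRY is an assumed constant,
 * not defined in this code base):
 *
 *	int retry = MAX_POLL_RETRY;
 *
 *	while ((reg_read(REG_SDRAM_OPERATION_ADDR) &
 *		REG_SDRAM_OPERATION_CMD_RFRS_DONE) && --retry)
 *		udelay(1);
 */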
78 
79 /*
80  * Name:     ddr3_get_freq_parameter
81  * Desc:     Finds CPU/DDR frequency ratio according to Sample@reset and table.
 * Args:     target_freq - target frequency index
 *           ratio_2to1  - use the 2:1 divider table when set, 1:1 otherwise
83  * Notes:
84  * Returns:  freq_par - the ratio parameter
85  */
86 u32 ddr3_get_freq_parameter(u32 target_freq, int ratio_2to1)
87 {
88 	u32 ui_vco_freq, freq_par;
89 
90 	ui_vco_freq = ddr3_get_vco_freq();
91 
92 #if defined(MV88F672X)
93 	freq_par = div_ratio[ui_vco_freq][target_freq];
94 #else
95 	/* Find the ratio between PLL frequency and ddr-clk */
96 	if (ratio_2to1)
97 		freq_par = div_ratio2to1[ui_vco_freq][target_freq];
98 	else
99 		freq_par = div_ratio1to1[ui_vco_freq][target_freq];
100 #endif
101 
102 	return freq_par;
103 }
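
/*
 * Usage sketch (illustrative): the DFS entry points below use this helper to
 * translate a target frequency index into the divider value that is
 * programmed into the clock divider control registers, e.g.
 *
 *	freq_par = ddr3_get_freq_parameter(freq, ratio_2to1);
 *
 * where 'freq' is a DDR_* frequency index and 'ratio_2to1' selects the 2:1
 * or 1:1 Dunit-to-PHY divider table (the parameter is ignored on MV88F672X).
 */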
104 
105 /*
106  * Name:     ddr3_dfs_high_2_low
 * Desc:     Performs DFS from the current (high) frequency down to the low
 *           training frequency.
 * Args:     freq - target frequency index
 *           dram_info - DRAM information structure
109  * Notes:
110  * Returns:  MV_OK - success, MV_FAIL - fail
111  */
112 int ddr3_dfs_high_2_low(u32 freq, MV_DRAM_INFO *dram_info)
113 {
114 #if defined(MV88F78X60) || defined(MV88F672X)
	/* This flow is relevant for Armada XP A0 and Avanta LP (MV88F672X) */
116 	u32 reg, freq_par, tmp;
117 	u32 cs = 0;
118 
119 	DEBUG_DFS_C("DDR3 - DFS - High To Low - Starting DFS procedure to Frequency - ",
120 		    freq, 1);
121 
122 	/* target frequency - 100MHz */
123 	freq_par = ddr3_get_freq_parameter(freq, 0);
124 
125 #if defined(MV88F672X)
126 	u32 hclk;
127 	u32 cpu_freq = ddr3_get_cpu_freq();
128 	get_target_freq(cpu_freq, &tmp, &hclk);
129 #endif
130 
	/* Configure - DRAM DLL final state after DFS is complete - Disable */
	reg = reg_read(REG_DFS_ADDR);
	/* [0] - DfsDllNextState = 1 - DLL disabled after DFS (DLL-off mode) */
	reg |= (1 << REG_DFS_DLLNEXTSTATE_OFFS);
135 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
136 
137 	/*
138 	 * Configure - XBAR Retry response during Block to enable internal
139 	 * access - Disable
140 	 */
141 	reg = reg_read(REG_METAL_MASK_ADDR);
142 	/* [0] - RetryMask - Disable */
143 	reg &= ~(1 << REG_METAL_MASK_RETRY_OFFS);
144 	/* 0x14B0 - Dunit MMask Register */
145 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
146 
147 	/* Configure - Block new external transactions - Enable */
148 	reg = reg_read(REG_DFS_ADDR);
149 	reg |= (1 << REG_DFS_BLOCK_OFFS);	/* [1] - DfsBlock - Enable  */
150 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
151 
152 	/* Registered DIMM support */
153 	if (dram_info->reg_dimm) {
154 		/*
155 		 * Configure - Disable Register DIMM CKE Power
156 		 * Down mode - CWA_RC
157 		 */
158 		reg = (0x9 & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
159 			REG_SDRAM_OPERATION_CWA_RC_OFFS;
160 		/*
161 		 * Configure - Disable Register DIMM CKE Power
162 		 * Down mode - CWA_DATA
163 		 */
164 		reg |= ((0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
165 			REG_SDRAM_OPERATION_CWA_DATA_OFFS);
166 
167 		/*
168 		 * Configure - Disable Register DIMM CKE Power
169 		 * Down mode - Set Delay - tMRD
170 		 */
171 		reg |= (0 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
172 
173 		/* Configure - Issue CWA command with the above parameters */
174 		reg |= (REG_SDRAM_OPERATION_CMD_CWA &
175 			~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
176 
177 		/* 0x1418 - SDRAM Operation Register */
178 		dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
179 
180 		/* Poll - Wait for CWA operation completion */
181 		do {
182 			reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
183 			       (REG_SDRAM_OPERATION_CMD_MASK);
184 		} while (reg);
185 
186 		/* Configure - Disable outputs floating during Self Refresh */
187 		reg = reg_read(REG_REGISTERED_DRAM_CTRL_ADDR);
188 		/* [15] - SRFloatEn - Disable */
189 		reg &= ~(1 << REG_REGISTERED_DRAM_CTRL_SR_FLOAT_OFFS);
190 		/* 0x16D0 - DDR3 Registered DRAM Control */
191 		dfs_reg_write(REG_REGISTERED_DRAM_CTRL_ADDR, reg);
192 	}
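
	/*
	 * CWA command layout used above (sketch, following the
	 * REG_SDRAM_OPERATION_* macros): the RC index goes into CWA_RC, the
	 * RC payload into CWA_DATA, the delay select into CWA_DELAY_SEL, and
	 * the command code plus CS mask come from the CMD_CWA define. For
	 * example, "RC9 = 0, tMRD delay" is assembled as:
	 *
	 *	reg = (0x9 << REG_SDRAM_OPERATION_CWA_RC_OFFS) |
	 *	      (0x0 << REG_SDRAM_OPERATION_CWA_DATA_OFFS) |
	 *	      (0x0 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS) |
	 *	      (REG_SDRAM_OPERATION_CMD_CWA &
	 *	       ~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
	 */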
193 
194 	/* Optional - Configure - DDR3_Rtt_nom_CS# */
195 	for (cs = 0; cs < MAX_CS; cs++) {
196 		if (dram_info->cs_ena & (1 << cs)) {
197 			reg = reg_read(REG_DDR3_MR1_CS_ADDR +
198 				       (cs << MR_CS_ADDR_OFFS));
199 			reg &= REG_DDR3_MR1_RTT_MASK;
200 			dfs_reg_write(REG_DDR3_MR1_CS_ADDR +
201 				      (cs << MR_CS_ADDR_OFFS), reg);
202 		}
203 	}
204 
205 	/* Configure - Move DRAM into Self Refresh */
206 	reg = reg_read(REG_DFS_ADDR);
207 	reg |= (1 << REG_DFS_SR_OFFS);	/* [2] - DfsSR - Enable */
208 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
209 
210 	/* Poll - Wait for Self Refresh indication */
211 	do {
212 		reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
213 	} while (reg == 0x0);	/* 0x1528 [3] - DfsAtSR - Wait for '1' */
214 
215 	/* Start of clock change procedure (PLL) */
216 #if defined(MV88F672X)
217 	/* avantaLP */
218 	/* Configure    cpupll_clkdiv_reset_mask */
219 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
220 	reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL0_MASK;
221 	/* 0xE8264[7:0]   0xff CPU Clock Dividers Reset mask */
222 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, (reg + 0xFF));
223 
224 	/* Configure    cpu_clkdiv_reload_smooth    */
225 	reg = reg_read(CPU_PLL_CNTRL0);
226 	reg &= CPU_PLL_CNTRL0_RELOAD_SMOOTH_MASK;
227 	/* 0xE8260   [15:8]  0x2 CPU Clock Dividers Reload Smooth enable */
228 	dfs_reg_write(CPU_PLL_CNTRL0,
229 		      (reg + (2 << CPU_PLL_CNTRL0_RELOAD_SMOOTH_OFFS)));
230 
231 	/* Configure    cpupll_clkdiv_relax_en */
232 	reg = reg_read(CPU_PLL_CNTRL0);
233 	reg &= CPU_PLL_CNTRL0_RELAX_EN_MASK;
234 	/* 0xE8260 [31:24] 0x2 Relax Enable */
235 	dfs_reg_write(CPU_PLL_CNTRL0,
236 		      (reg + (2 << CPU_PLL_CNTRL0_RELAX_EN_OFFS)));
237 
238 	/* Configure    cpupll_clkdiv_ddr_clk_ratio */
239 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL1);
240 	/*
241 	 * 0xE8268  [13:8]  N   Set Training clock:
242 	 * APLL Out Clock (VCO freq) / N = 100 MHz
243 	 */
244 	reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL1_MASK;
245 	reg |= (freq_par << 8);	/* full Integer ratio from PLL-out to ddr-clk */
246 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL1, reg);
247 
248 	/* Configure    cpupll_clkdiv_reload_ratio  */
249 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
250 	reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
251 	/* 0xE8264 [8]=0x1 CPU Clock Dividers Reload Ratio trigger set */
252 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0,
253 		      (reg + (1 << CPU_PLL_CLOCK_RELOAD_RATIO_OFFS)));
254 
255 	udelay(1);
256 
257 	/* Configure    cpupll_clkdiv_reload_ratio  */
258 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
259 	reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
260 	/* 0xE8264 [8]=0x0 CPU Clock Dividers Reload Ratio trigger clear */
261 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, reg);
262 
263 	udelay(5);
264 
265 #else
266 	/*
267 	 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
268 	 * and in the same chance, block reassertions of reset [15:8] and
269 	 * force reserved bits[7:0].
270 	 */
271 	reg = 0x0000FDFF;
272 	/* 0x18700 - CPU Div CLK control 0 */
273 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
274 
275 	/*
276 	 * RelaX whenever reset is asserted to that channel
277 	 * (good for any case)
278 	 */
279 	reg = 0x0000FF00;
	/* 0x18704 - CPU Div CLK control 1 */
281 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
282 
283 	reg = reg_read(REG_CPU_DIV_CLK_CTRL_2_ADDR) &
284 		REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
285 
286 	/* full Integer ratio from PLL-out to ddr-clk */
287 	reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
288 	/* 0x1870C - CPU Div CLK control 3 register */
289 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_2_ADDR, reg);
290 
291 	/*
292 	 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
293 	 * All the rest are kept as is (forced, but could be read-modify-write).
294 	 * This is done now by RMW above.
295 	 */
296 
297 	/* Clock is not shut off gracefully - keep it running */
298 	reg = 0x000FFF02;
299 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);
300 
301 	/* Wait before replacing the clock on the DDR Phy Channel. */
302 	udelay(1);
303 
304 	/*
	 * This is for triggering the frequency update. Bit[24] is the
306 	 * central control
307 	 * bits [23:16] == which channels to change ==2 ==>
308 	 *                 only DDR Phy (smooth transition)
309 	 * bits [15:8] == mask reset reassertion due to clock modification
310 	 *                to these channels.
311 	 * bits [7:0] == not in use
312 	 */
313 	reg = 0x0102FDFF;
314 	/* 0x18700 - CPU Div CLK control 0 register */
315 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
316 
317 	udelay(1);		/* Wait 1usec */
318 
319 	/*
320 	 * Poll Div CLK status 0 register - indication that the clocks
321 	 * are active - 0x18718 [8]
322 	 */
323 	do {
324 		reg = (reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR)) &
325 			(1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
326 	} while (reg == 0);
327 
328 	/*
329 	 * Clean the CTRL0, to be ready for next resets and next requests
330 	 * of ratio modifications.
331 	 */
332 	reg = 0x000000FF;
333 	/* 0x18700 - CPU Div CLK control 0 register */
334 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
335 
336 	udelay(5);
337 #endif
338 	/* End of clock change procedure (PLL) */
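
	/*
	 * Both branches above follow the same generic divider-change
	 * sequence (summary, not additional code):
	 *	1. Mask divider resets / enable smooth reload
	 *	2. Program the new PLL-out to ddr-clk divider (freq_par)
	 *	3. Pulse the "reload ratio" trigger
	 *	4. Poll until the dividers report stable clocks
	 *	5. Clear the trigger/control bits for the next request
	 */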
339 
340 	/* Configure - Select normal clock for the DDR PHY - Enable */
341 	reg = reg_read(REG_DRAM_INIT_CTRL_STATUS_ADDR);
342 	/* [16] - ddr_phy_trn_clk_sel - Enable  */
343 	reg |= (1 << REG_DRAM_INIT_CTRL_TRN_CLK_OFFS);
344 	/* 0x18488 - DRAM Init control status register */
345 	dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
346 
347 	/* Configure - Set Correct Ratio - 1:1 */
348 	/* [15] - Phy2UnitClkRatio = 0 - Set 1:1 Ratio between Dunit and Phy */
349 
350 	reg = reg_read(REG_DDR_IO_ADDR) & ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
351 	dfs_reg_write(REG_DDR_IO_ADDR, reg);	/* 0x1524 - DDR IO Register */
352 
353 	/* Configure - 2T Mode - Restore original configuration */
354 	reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
355 	/* [3:4] 2T - 1T Mode - low freq */
356 	reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
357 	/* 0x1404 - DDR Controller Control Low Register */
358 	dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
359 
360 	/* Configure - Restore CL and CWL - MRS Commands */
361 	reg = reg_read(REG_DFS_ADDR);
362 	reg &= ~(REG_DFS_CL_NEXT_STATE_MASK << REG_DFS_CL_NEXT_STATE_OFFS);
363 	reg &= ~(REG_DFS_CWL_NEXT_STATE_MASK << REG_DFS_CWL_NEXT_STATE_OFFS);
364 	/* [8] - DfsCLNextState - MRS CL=6 after DFS (due to DLL-off mode) */
365 	reg |= (0x4 << REG_DFS_CL_NEXT_STATE_OFFS);
366 	/* [12] - DfsCWLNextState - MRS CWL=6 after DFS (due to DLL-off mode) */
367 	reg |= (0x1 << REG_DFS_CWL_NEXT_STATE_OFFS);
368 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
369 
370 	/* Poll - Wait for APLL + ADLLs lock on new frequency */
371 	do {
372 		reg = (reg_read(REG_PHY_LOCK_STATUS_ADDR)) &
373 			REG_PHY_LOCK_APLL_ADLL_STATUS_MASK;
374 		/* 0x1674 [10:0] - Phy lock status Register */
375 	} while (reg != REG_PHY_LOCK_APLL_ADLL_STATUS_MASK);
376 
377 	/* Configure - Reset the PHY Read FIFO and Write channels - Set Reset */
378 	reg = (reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK);
379 	/* [30:29] = 0 - Data Pup R/W path reset */
380 	/* 0x1400 - SDRAM Configuration register */
381 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
382 
383 	/*
384 	 * Configure - DRAM Data PHY Read [30], Write [29] path
385 	 * reset - Release Reset
386 	 */
387 	reg = (reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK);
388 	/* [30:29] = '11' - Data Pup R/W path reset */
389 	/* 0x1400 - SDRAM Configuration register */
390 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
391 
392 	/* Registered DIMM support */
393 	if (dram_info->reg_dimm) {
394 		/*
395 		 * Configure - Change register DRAM operating speed
396 		 * (below 400MHz) - CWA_RC
397 		 */
398 		reg = (0xA & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
399 			REG_SDRAM_OPERATION_CWA_RC_OFFS;
400 
401 		/*
402 		 * Configure - Change register DRAM operating speed
403 		 * (below 400MHz) - CWA_DATA
404 		 */
405 		reg |= ((0x0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
406 			REG_SDRAM_OPERATION_CWA_DATA_OFFS);
407 
408 		/* Configure - Set Delay - tSTAB */
409 		reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
410 
411 		/* Configure - Issue CWA command with the above parameters */
412 		reg |= (REG_SDRAM_OPERATION_CMD_CWA &
413 			~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
414 
415 		/* 0x1418 - SDRAM Operation Register */
416 		dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
417 
418 		/* Poll - Wait for CWA operation completion */
419 		do {
420 			reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
421 				(REG_SDRAM_OPERATION_CMD_MASK);
422 		} while (reg);
423 	}
424 
425 	/* Configure - Exit Self Refresh */
426 	/* [2] - DfsSR  */
427 	reg = (reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS));
428 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
429 
430 	/*
431 	 * Poll - DFS Register - 0x1528 [3] - DfsAtSR - All DRAM devices
432 	 * on all ranks are NOT in self refresh mode
433 	 */
434 	do {
435 		reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
436 	} while (reg);		/* Wait for '0' */
437 
438 	/* Configure - Issue Refresh command */
439 	/* [3-0] = 0x2 - Refresh Command, [11-8] - enabled Cs */
440 	reg = REG_SDRAM_OPERATION_CMD_RFRS;
441 	for (cs = 0; cs < MAX_CS; cs++) {
442 		if (dram_info->cs_ena & (1 << cs))
443 			reg &= ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
444 	}
445 
446 	/* 0x1418 - SDRAM Operation Register */
447 	dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
448 
449 	/* Poll - Wait for Refresh operation completion */
450 	wait_refresh_op_complete();
451 
452 	/* Configure - Block new external transactions - Disable */
453 	reg = reg_read(REG_DFS_ADDR);
454 	reg &= ~(1 << REG_DFS_BLOCK_OFFS);	/* [1] - DfsBlock - Disable  */
455 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
456 
457 	/*
	 * Configure - XBAR Retry response during Block to enable
	 * internal access - Enable
460 	 */
461 	reg = reg_read(REG_METAL_MASK_ADDR);
462 	/* [0] - RetryMask - Enable */
463 	reg |= (1 << REG_METAL_MASK_RETRY_OFFS);
464 	/* 0x14B0 - Dunit MMask Register */
465 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
466 
467 	for (cs = 0; cs < MAX_CS; cs++) {
468 		if (dram_info->cs_ena & (1 << cs)) {
469 			/* Configure - Set CL */
470 			reg = reg_read(REG_DDR3_MR0_CS_ADDR +
471 				       (cs << MR_CS_ADDR_OFFS)) &
472 				~REG_DDR3_MR0_CL_MASK;
473 			tmp = 0x4;	/* CL=6 - 0x4 */
474 			reg |= ((tmp & 0x1) << REG_DDR3_MR0_CL_OFFS);
475 			reg |= ((tmp & 0xE) << REG_DDR3_MR0_CL_HIGH_OFFS);
476 			dfs_reg_write(REG_DDR3_MR0_CS_ADDR +
477 				      (cs << MR_CS_ADDR_OFFS), reg);
478 
479 			/* Configure - Set CWL */
480 			reg = reg_read(REG_DDR3_MR2_CS_ADDR +
481 				       (cs << MR_CS_ADDR_OFFS))
482 				& ~(REG_DDR3_MR2_CWL_MASK << REG_DDR3_MR2_CWL_OFFS);
483 			/* CWL=6 - 0x1 */
484 			reg |= ((0x1) << REG_DDR3_MR2_CWL_OFFS);
485 			dfs_reg_write(REG_DDR3_MR2_CS_ADDR +
486 				      (cs << MR_CS_ADDR_OFFS), reg);
487 		}
488 	}
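
	/*
	 * MR0 CAS latency encoding used above (sketch): the 4-bit CL code is
	 * split so that its bit 0 lands on MR0 A2 and its bits [3:1] land on
	 * MR0 A[6:4], assuming the REG_DDR3_MR0_CL_* offsets follow the JEDEC
	 * DDR3 MR0 layout. For the DLL-off value programmed here (code 0x4,
	 * i.e. CL = 6):
	 *
	 *	reg |= ((0x4 & 0x1) << REG_DDR3_MR0_CL_OFFS);      // A2 = 0
	 *	reg |= ((0x4 & 0xE) << REG_DDR3_MR0_CL_HIGH_OFFS); // A[6:4] = 010
	 */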
489 
	DEBUG_DFS_C("DDR3 - DFS - High To Low - Ended successfully - new Frequency - ",
491 		    freq, 1);
492 
493 	return MV_OK;
494 #else
495 	/* This Flow is relevant for Armada370 A0 and ArmadaXP Z1 */
496 
497 	u32 reg, freq_par;
498 	u32 cs = 0;
499 
500 	DEBUG_DFS_C("DDR3 - DFS - High To Low - Starting DFS procedure to Frequency - ",
501 		    freq, 1);
502 
503 	/* target frequency - 100MHz */
504 	freq_par = ddr3_get_freq_parameter(freq, 0);
505 
506 	reg = 0x0000FF00;
	/* 0x18704 - CPU Div CLK control 1 */
508 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
509 
	/* 0x1600 - ODPG Control register */
511 	reg = reg_read(REG_ODPG_CNTRL_ADDR);
512 	/* [21] = 1 - auto refresh disable */
513 	reg |= (1 << REG_ODPG_CNTRL_OFFS);
514 	dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);
515 
516 	/* 0x1670 - PHY lock mask register */
517 	reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
518 	reg &= REG_PHY_LOCK_MASK_MASK;	/* [11:0] = 0 */
519 	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
520 
521 	reg = reg_read(REG_DFS_ADDR);	/* 0x1528 - DFS register */
522 
	/* Disable reconfig of MR registers after DFS, keep DRAM DLL disabled */
	reg &= ~0x10;	/* [4] = 0 - Do not reconfig MR registers after DFS */
	reg |= 0x1;	/* [0] = 1 - DRAM DLL disabled after DFS */
526 
527 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
528 
529 	reg = reg_read(REG_METAL_MASK_ADDR) & ~(1 << 0); /* [0] - disable */
530 	/* 0x14B0 - Dunit MMask Register */
531 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
532 
533 	/* [1] - DFS Block enable  */
534 	reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_BLOCK_OFFS);
535 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
536 
537 	/* [2] - DFS Self refresh enable  */
538 	reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_SR_OFFS);
539 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
540 
541 	/*
542 	 * Poll DFS Register - 0x1528 [3] - DfsAtSR -
543 	 * All DRAM devices on all ranks are in self refresh mode -
544 	 * DFS can be executed afterwards
545 	 */
546 	do {
547 		reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
548 	} while (reg == 0x0);	/* Wait for '1' */
549 
	/* Disable ODT in DLL-off mode */
551 	dfs_reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR,
552 		      REG_SDRAM_ODT_CTRL_HIGH_OVRD_MASK);
553 
554 	/* [11:0] = 0 */
555 	reg = (reg_read(REG_PHY_LOCK_MASK_ADDR) & REG_PHY_LOCK_MASK_MASK);
556 	/* 0x1670 - PHY lock mask register */
557 	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
558 
559 	/* Add delay between entering SR and start ratio modification */
560 	udelay(1);
561 
562 	/*
563 	 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
564 	 * and in the same chance, block reassertions of reset [15:8] and
565 	 * force reserved bits[7:0].
566 	 */
567 	reg = 0x0000FDFF;
568 	/* 0x18700 - CPU Div CLK control 0 */
569 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
570 
571 	/*
572 	 * RelaX whenever reset is asserted to that channel (good for any case)
573 	 */
574 	reg = 0x0000FF00;
	/* 0x18704 - CPU Div CLK control 1 */
576 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
577 
578 	reg = reg_read(REG_CPU_DIV_CLK_CTRL_3_ADDR) &
579 		REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
580 	/* Full Integer ratio from PLL-out to ddr-clk */
581 	reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
582 	/* 0x1870C - CPU Div CLK control 3 register */
583 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_3_ADDR, reg);
584 
585 	/*
586 	 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
587 	 * All the rest are kept as is (forced, but could be read-modify-write).
588 	 * This is done now by RMW above.
589 	 */
590 
591 	/* Clock is not shut off gracefully - keep it running */
592 	reg = 0x000FFF02;
593 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);
594 
595 	/* Wait before replacing the clock on the DDR Phy Channel. */
596 	udelay(1);
597 
598 	/*
	 * This is for triggering the frequency update. Bit[24] is the
600 	 * central control
601 	 * bits [23:16] == which channels to change ==2 ==> only DDR Phy
602 	 *                 (smooth transition)
603 	 * bits [15:8] == mask reset reassertion due to clock modification
604 	 *                to these channels.
605 	 * bits [7:0] == not in use
606 	 */
607 	reg = 0x0102FDFF;
608 	/* 0x18700 - CPU Div CLK control 0 register */
609 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
610 
611 	udelay(1);		/* Wait 1usec */
612 
613 	/*
614 	 * Poll Div CLK status 0 register - indication that the clocks
615 	 * are active - 0x18718 [8]
616 	 */
617 	do {
618 		reg = (reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR)) &
619 			(1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
620 	} while (reg == 0);
621 
622 	/*
623 	 * Clean the CTRL0, to be ready for next resets and next requests of
624 	 * ratio modifications.
625 	 */
626 	reg = 0x000000FF;
627 	/* 0x18700 - CPU Div CLK control 0 register */
628 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
629 
630 	udelay(5);
631 
	/* Switch HCLK Mux to training clk (100MHz), keep DFS request bit */
633 	reg = 0x20050000;
634 	/* 0x18488 - DRAM Init control status register */
635 	dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
636 
637 	reg = reg_read(REG_DDR_IO_ADDR) & ~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
638 	/* [15] = 0 - Set 1:1 Ratio between Dunit and Phy */
	dfs_reg_write(REG_DDR_IO_ADDR, reg);	/* 0x1524 - DDR IO Register */
640 
641 	reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK;
	/* [31:30] - reset pup data ctrl ADLL */
643 	/* 0x15EC - DRAM PHY Config register */
644 	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
645 
646 	reg = (reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK);
647 	/* [31:30] - normal pup data ctrl ADLL */
648 	/* 0x15EC - DRAM PHY Config register */
649 	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
650 
651 	udelay(1);		/* Wait 1usec */
652 
653 	/* 0x1404 */
654 	reg = (reg_read(REG_DUNIT_CTRL_LOW_ADDR) & 0xFFFFFFE7);
655 	dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
656 
657 	/* Poll Phy lock status register - APLL lock indication - 0x1674 */
658 	do {
659 		reg = (reg_read(REG_PHY_LOCK_STATUS_ADDR)) &
660 			REG_PHY_LOCK_STATUS_LOCK_MASK;
661 	} while (reg != REG_PHY_LOCK_STATUS_LOCK_MASK);	/* Wait for '0xFFF' */
662 
663 	reg = (reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK);
664 	/* [30:29] = 0 - Data Pup R/W path reset */
665 	/* 0x1400 - SDRAM Configuration register */
666 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
667 
668 	reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
669 	/* [30:29] = '11' - Data Pup R/W path reset */
670 	/* 0x1400 - SDRAM Configuration register */
671 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
672 
673 	udelay(1000);		/* Wait 1msec */
674 
675 	for (cs = 0; cs < MAX_CS; cs++) {
676 		if (dram_info->cs_ena & (1 << cs)) {
677 			/* Poll - Wait for Refresh operation completion */
678 			wait_refresh_op_complete();
679 
680 			/* Config CL and CWL with MR0 and MR2 registers */
681 			reg = reg_read(REG_DDR3_MR0_ADDR);
682 			reg &= ~0x74;	/* CL [3:0]; [6:4],[2] */
			reg |= (1 << 5);	/* CL code = 0x4 => CAS latency 6 */
684 			dfs_reg_write(REG_DDR3_MR0_ADDR, reg);
685 			reg = REG_SDRAM_OPERATION_CMD_MR0 &
686 				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
687 			/* 0x1418 - SDRAM Operation Register */
688 			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
689 
690 			/* Poll - Wait for Refresh operation completion */
691 			wait_refresh_op_complete();
692 
693 			reg = reg_read(REG_DDR3_MR2_ADDR);
694 			reg &= ~0x38;	/* CWL [5:3] */
			reg |= (1 << 3);	/* CWL code = 0x1 => CWL 6 */
696 			dfs_reg_write(REG_DDR3_MR2_ADDR, reg);
697 
698 			reg = REG_SDRAM_OPERATION_CMD_MR2 &
699 				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
700 			/* 0x1418 - SDRAM Operation Register */
701 			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
702 
703 			/* Poll - Wait for Refresh operation completion */
704 			wait_refresh_op_complete();
705 
706 			/* Set current rd_sample_delay  */
707 			reg = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
708 			reg &= ~(REG_READ_DATA_SAMPLE_DELAYS_MASK <<
709 				 (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
710 			reg |= (5 << (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
711 			dfs_reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);
712 
713 			/* Set current rd_ready_delay  */
714 			reg = reg_read(REG_READ_DATA_READY_DELAYS_ADDR);
715 			reg &= ~(REG_READ_DATA_READY_DELAYS_MASK <<
716 				 (REG_READ_DATA_READY_DELAYS_OFFS * cs));
717 			reg |= ((6) << (REG_READ_DATA_READY_DELAYS_OFFS * cs));
718 			dfs_reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
719 		}
720 	}
721 
722 	/* [2] - DFS Self refresh disable  */
723 	reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
724 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
725 
	/* [1] - DFS Block disable  */
727 	reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_BLOCK_OFFS);
728 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
729 
730 	/*
731 	 * Poll DFS Register - 0x1528 [3] - DfsAtSR -
	 * All DRAM devices on all ranks are NOT in self refresh mode -
	 * normal operation can be resumed afterwards
	 */
	do {
		reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
	} while (reg);		/* Wait for '0' */
738 
739 	reg = (reg_read(REG_METAL_MASK_ADDR) | (1 << 0));
740 	/* [0] - Enable Dunit to crossbar retry */
741 	/* 0x14B0 - Dunit MMask Register */
742 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
743 
	/* 0x1600 - ODPG Control register */
745 	reg = reg_read(REG_ODPG_CNTRL_ADDR);
746 	reg &= ~(1 << REG_ODPG_CNTRL_OFFS);	/* [21] = 0 */
747 	dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);
748 
749 	/* 0x1670 - PHY lock mask register */
750 	reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
751 	reg |= ~REG_PHY_LOCK_MASK_MASK;	/* [11:0] = FFF */
752 	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
753 
	DEBUG_DFS_C("DDR3 - DFS - High To Low - Ended successfully - new Frequency - ",
755 		    freq, 1);
756 
757 	return MV_OK;
758 #endif
759 }
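
/*
 * Typical call sequence (sketch, as used by the DDR3 training flow): drop to
 * the low training frequency, run the training stages, then return to the
 * target frequency. The 'DDR_100' index, 'ratio_2to1' flag and the error
 * handling below are illustrative assumptions, not taken from this file:
 *
 *	if (ddr3_dfs_high_2_low(DDR_100, dram_info) != MV_OK)
 *		return MV_FAIL;
 *	// ... run training stages at the low frequency ...
 *	if (ddr3_dfs_low_2_high(dram_info->target_frequency, ratio_2to1,
 *				dram_info) != MV_OK)
 *		return MV_FAIL;
 */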
760 
761 /*
762  * Name:     ddr3_dfs_low_2_high
 * Desc:     Performs DFS from the low training frequency up to the target
 *           (high) frequency.
 * Args:     freq - target frequency index
 *           ratio_2to1 - use a 2:1 Dunit to PHY clock ratio when set
 *           dram_info - DRAM information structure
765  * Notes:
766  * Returns:  MV_OK - success, MV_FAIL - fail
767  */
768 int ddr3_dfs_low_2_high(u32 freq, int ratio_2to1, MV_DRAM_INFO *dram_info)
769 {
770 #if defined(MV88F78X60) || defined(MV88F672X)
	/* This flow is relevant for Armada XP A0 and Avanta LP (MV88F672X) */
772 	u32 reg, freq_par, tmp;
773 	u32 cs = 0;
774 
775 	DEBUG_DFS_C("DDR3 - DFS - Low To High - Starting DFS procedure to Frequency - ",
776 		    freq, 1);
777 
778 	/* target frequency - freq */
779 	freq_par = ddr3_get_freq_parameter(freq, ratio_2to1);
780 
781 #if defined(MV88F672X)
782 	u32 hclk;
783 	u32 cpu_freq = ddr3_get_cpu_freq();
784 	get_target_freq(cpu_freq, &tmp, &hclk);
785 #endif
786 
787 	/* Configure - DRAM DLL final state after DFS is complete - Enable */
788 	reg = reg_read(REG_DFS_ADDR);
789 	/* [0] - DfsDllNextState - Enable */
790 	reg &= ~(1 << REG_DFS_DLLNEXTSTATE_OFFS);
791 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
792 
793 	/*
794 	 * Configure -  XBAR Retry response during Block to enable
795 	 * internal access - Disable
796 	 */
797 	reg = reg_read(REG_METAL_MASK_ADDR);
798 	/* [0] - RetryMask - Disable */
799 	reg &= ~(1 << REG_METAL_MASK_RETRY_OFFS);
800 	/* 0x14B0 - Dunit MMask Register */
801 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
802 
803 	/* Configure - Block new external transactions - Enable */
804 	reg = reg_read(REG_DFS_ADDR);
805 	reg |= (1 << REG_DFS_BLOCK_OFFS);	/* [1] - DfsBlock - Enable  */
806 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
807 
808 	/* Configure - Move DRAM into Self Refresh */
809 	reg = reg_read(REG_DFS_ADDR);
810 	reg |= (1 << REG_DFS_SR_OFFS);	/* [2] - DfsSR - Enable */
811 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
812 
813 	/* Poll - Wait for Self Refresh indication */
814 	do {
815 		reg = ((reg_read(REG_DFS_ADDR)) & (1 << REG_DFS_ATSR_OFFS));
816 	} while (reg == 0x0);	/* 0x1528 [3] - DfsAtSR - Wait for '1' */
817 
818 	/* Start of clock change procedure (PLL) */
819 #if defined(MV88F672X)
820 	/* avantaLP */
821 	/* Configure    cpupll_clkdiv_reset_mask */
822 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
823 	reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL0_MASK;
824 	/* 0xE8264[7:0]   0xff CPU Clock Dividers Reset mask */
825 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, (reg + 0xFF));
826 
827 	/* Configure    cpu_clkdiv_reload_smooth    */
828 	reg = reg_read(CPU_PLL_CNTRL0);
829 	reg &= CPU_PLL_CNTRL0_RELOAD_SMOOTH_MASK;
830 	/* 0xE8260   [15:8]  0x2 CPU Clock Dividers Reload Smooth enable */
831 	dfs_reg_write(CPU_PLL_CNTRL0,
832 		      reg + (2 << CPU_PLL_CNTRL0_RELOAD_SMOOTH_OFFS));
833 
834 	/* Configure    cpupll_clkdiv_relax_en */
835 	reg = reg_read(CPU_PLL_CNTRL0);
836 	reg &= CPU_PLL_CNTRL0_RELAX_EN_MASK;
837 	/* 0xE8260 [31:24] 0x2 Relax Enable */
838 	dfs_reg_write(CPU_PLL_CNTRL0,
839 		      reg + (2 << CPU_PLL_CNTRL0_RELAX_EN_OFFS));
840 
841 	/* Configure    cpupll_clkdiv_ddr_clk_ratio */
842 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL1);
843 	/*
844 	 * 0xE8268  [13:8]  N   Set Training clock:
845 	 * APLL Out Clock (VCO freq) / N = 100 MHz
846 	 */
847 	reg &= CPU_PLL_CLOCK_DIVIDER_CNTRL1_MASK;
848 	reg |= (freq_par << 8);	/* full Integer ratio from PLL-out to ddr-clk */
849 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL1, reg);
850 	/* Configure    cpupll_clkdiv_reload_ratio  */
851 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
852 	reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
853 	/* 0xE8264 [8]=0x1 CPU Clock Dividers Reload Ratio trigger set */
854 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0,
855 		      reg + (1 << CPU_PLL_CLOCK_RELOAD_RATIO_OFFS));
856 
857 	udelay(1);
858 
859 	/* Configure    cpupll_clkdiv_reload_ratio  */
860 	reg = reg_read(CPU_PLL_CLOCK_DIVIDER_CNTRL0);
861 	reg &= CPU_PLL_CLOCK_RELOAD_RATIO_MASK;
862 	/* 0xE8264 [8]=0x0 CPU Clock Dividers Reload Ratio trigger clear */
863 	dfs_reg_write(CPU_PLL_CLOCK_DIVIDER_CNTRL0, reg);
864 
865 	udelay(5);
866 
867 #else
868 	/*
869 	 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
870 	 * and in the same chance, block reassertions of reset [15:8]
871 	 * and force reserved bits[7:0].
872 	 */
873 	reg = 0x0000FFFF;
874 
875 	/* 0x18700 - CPU Div CLK control 0 */
876 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
877 
878 	/*
879 	 * RelaX whenever reset is asserted to that channel (good for any case)
880 	 */
881 	reg = 0x0000FF00;
	/* 0x18704 - CPU Div CLK control 1 */
883 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
884 
885 	reg = reg_read(REG_CPU_DIV_CLK_CTRL_2_ADDR) &
886 		REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
887 	reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
888 	/* full Integer ratio from PLL-out to ddr-clk */
889 	/* 0x1870C - CPU Div CLK control 3 register */
890 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_2_ADDR, reg);
891 
892 	/*
893 	 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
894 	 * All the rest are kept as is (forced, but could be read-modify-write).
895 	 * This is done now by RMW above.
896 	 */
897 	reg = 0x000FFF02;
898 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);
899 
900 	/* Wait before replacing the clock on the DDR Phy Channel. */
901 	udelay(1);
902 
903 	reg = 0x0102FDFF;
904 	/*
	 * This is for triggering the frequency update. Bit[24] is the
906 	 * central control
907 	 * bits [23:16] == which channels to change ==2 ==> only DDR Phy
908 	 *                 (smooth transition)
909 	 * bits [15:8] == mask reset reassertion due to clock modification
910 	 *                to these channels.
911 	 * bits [7:0] == not in use
912 	 */
913 	/* 0x18700 - CPU Div CLK control 0 register */
914 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
915 
916 	udelay(1);
917 
918 	/*
919 	 * Poll Div CLK status 0 register - indication that the clocks
920 	 * are active - 0x18718 [8]
921 	 */
922 	do {
923 		reg = reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR) &
924 			(1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
925 	} while (reg == 0);
926 
927 	reg = 0x000000FF;
928 	/*
929 	 * Clean the CTRL0, to be ready for next resets and next requests
930 	 * of ratio modifications.
931 	 */
932 	/* 0x18700 - CPU Div CLK control 0 register */
933 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
934 #endif
935 	/* End of clock change procedure (PLL) */
936 
937 	if (ratio_2to1) {
938 		/* Configure - Select normal clock for the DDR PHY - Disable */
939 		reg = reg_read(REG_DRAM_INIT_CTRL_STATUS_ADDR);
940 		/* [16] - ddr_phy_trn_clk_sel - Disable  */
941 		reg &= ~(1 << REG_DRAM_INIT_CTRL_TRN_CLK_OFFS);
942 		/* 0x18488 - DRAM Init control status register */
943 		dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
944 	}
945 
946 	/*
947 	 * Configure - Set Correct Ratio - according to target ratio
948 	 * parameter - 2:1/1:1
949 	 */
950 	if (ratio_2to1) {
951 		/*
952 		 * [15] - Phy2UnitClkRatio = 1 - Set 2:1 Ratio between
953 		 * Dunit and Phy
954 		 */
955 		reg = reg_read(REG_DDR_IO_ADDR) |
956 			(1 << REG_DDR_IO_CLK_RATIO_OFFS);
957 	} else {
958 		/*
959 		 * [15] - Phy2UnitClkRatio = 0 - Set 1:1 Ratio between
960 		 * Dunit and Phy
961 		 */
962 		reg = reg_read(REG_DDR_IO_ADDR) &
963 			~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
964 	}
965 	dfs_reg_write(REG_DDR_IO_ADDR, reg);	/* 0x1524 - DDR IO Register */
966 
967 	/* Configure - 2T Mode - Restore original configuration */
968 	reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
969 	/* [3:4] 2T - Restore value */
970 	reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
971 	reg |= ((dram_info->mode_2t & REG_DUNIT_CTRL_LOW_2T_MASK) <<
972 		REG_DUNIT_CTRL_LOW_2T_OFFS);
973 	/* 0x1404 - DDR Controller Control Low Register */
974 	dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
975 
976 	/* Configure - Restore CL and CWL - MRS Commands */
977 	reg = reg_read(REG_DFS_ADDR);
978 	reg &= ~(REG_DFS_CL_NEXT_STATE_MASK << REG_DFS_CL_NEXT_STATE_OFFS);
979 	reg &= ~(REG_DFS_CWL_NEXT_STATE_MASK << REG_DFS_CWL_NEXT_STATE_OFFS);
980 
981 	if (freq == DDR_400) {
982 		if (dram_info->target_frequency == 0x8)
983 			tmp = ddr3_cl_to_valid_cl(5);
984 		else
985 			tmp = ddr3_cl_to_valid_cl(6);
986 	} else {
987 		tmp = ddr3_cl_to_valid_cl(dram_info->cl);
988 	}
989 
990 	/* [8] - DfsCLNextState */
991 	reg |= ((tmp & REG_DFS_CL_NEXT_STATE_MASK) << REG_DFS_CL_NEXT_STATE_OFFS);
992 	if (freq == DDR_400) {
993 		/* [12] - DfsCWLNextState */
994 		reg |= (((0) & REG_DFS_CWL_NEXT_STATE_MASK) <<
995 			REG_DFS_CWL_NEXT_STATE_OFFS);
996 	} else {
997 		/* [12] - DfsCWLNextState */
998 		reg |= (((dram_info->cwl) & REG_DFS_CWL_NEXT_STATE_MASK) <<
999 			REG_DFS_CWL_NEXT_STATE_OFFS);
1000 	}
1001 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1002 
1003 	/* Optional - Configure - DDR3_Rtt_nom_CS# */
1004 	for (cs = 0; cs < MAX_CS; cs++) {
1005 		if (dram_info->cs_ena & (1 << cs)) {
1006 			reg = reg_read(REG_DDR3_MR1_CS_ADDR +
1007 				       (cs << MR_CS_ADDR_OFFS));
1008 			reg &= REG_DDR3_MR1_RTT_MASK;
1009 			reg |= odt_static[dram_info->cs_ena][cs];
1010 			dfs_reg_write(REG_DDR3_MR1_CS_ADDR +
1011 				      (cs << MR_CS_ADDR_OFFS), reg);
1012 		}
1013 	}
1014 
1015 	/* Configure - Reset ADLLs - Set Reset */
1016 	reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK;
	/* [31:30] - reset pup data ctrl ADLL */
1018 	/* 0x15EC - DRAM PHY Config Register */
1019 	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1020 
1021 	/* Configure - Reset ADLLs - Release Reset */
1022 	reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK;
1023 	/* [31:30] - normal pup data ctrl ADLL */
1024 	/* 0x15EC - DRAM PHY Config register */
1025 	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1026 
1027 	/* Poll - Wait for APLL + ADLLs lock on new frequency */
1028 	do {
1029 		reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
1030 			REG_PHY_LOCK_APLL_ADLL_STATUS_MASK;
1031 		/* 0x1674 [10:0] - Phy lock status Register */
1032 	} while (reg != REG_PHY_LOCK_APLL_ADLL_STATUS_MASK);
1033 
1034 	/* Configure - Reset the PHY SDR clock divider */
1035 	if (ratio_2to1) {
1036 		/* Pup Reset Divider B - Set Reset */
1037 		/* [28] - DataPupRdRST = 0 */
1038 		reg = reg_read(REG_SDRAM_CONFIG_ADDR) &
1039 			~(1 << REG_SDRAM_CONFIG_PUPRSTDIV_OFFS);
1040 		/* [28] - DataPupRdRST = 1 */
1041 		tmp = reg_read(REG_SDRAM_CONFIG_ADDR) |
1042 			(1 << REG_SDRAM_CONFIG_PUPRSTDIV_OFFS);
1043 		/* 0x1400 - SDRAM Configuration register */
1044 		dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1045 
1046 		/* Pup Reset Divider B - Release Reset */
1047 		/* 0x1400 - SDRAM Configuration register */
1048 		dfs_reg_write(REG_SDRAM_CONFIG_ADDR, tmp);
1049 	}
1050 
1051 	/* Configure - Reset the PHY Read FIFO and Write channels - Set Reset */
1052 	reg = reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK;
1053 	/* [30:29] = 0 - Data Pup R/W path reset */
1054 	/* 0x1400 - SDRAM Configuration register */
1055 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1056 
1057 	/*
1058 	 * Configure - DRAM Data PHY Read [30], Write [29] path reset -
1059 	 * Release Reset
1060 	 */
1061 	reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
1062 	/* [30:29] = '11' - Data Pup R/W path reset */
1063 	/* 0x1400 - SDRAM Configuration register */
1064 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1065 
1066 	/* Registered DIMM support */
1067 	if (dram_info->reg_dimm) {
1068 		/*
1069 		 * Configure - Change register DRAM operating speed
1070 		 * (DDR3-1333 / DDR3-1600) - CWA_RC
1071 		 */
1072 		reg = (0xA & REG_SDRAM_OPERATION_CWA_RC_MASK) <<
1073 			REG_SDRAM_OPERATION_CWA_RC_OFFS;
1074 		if (freq <= DDR_400) {
1075 			/*
1076 			 * Configure - Change register DRAM operating speed
1077 			 * (DDR3-800) - CWA_DATA
1078 			 */
1079 			reg |= ((0x0 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1080 				REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1081 		} else if ((freq > DDR_400) && (freq <= DDR_533)) {
1082 			/*
1083 			 * Configure - Change register DRAM operating speed
1084 			 * (DDR3-1066) - CWA_DATA
1085 			 */
1086 			reg |= ((0x1 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1087 				REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1088 		} else if ((freq > DDR_533) && (freq <= DDR_666)) {
1089 			/*
1090 			 * Configure - Change register DRAM operating speed
1091 			 * (DDR3-1333) - CWA_DATA
1092 			 */
1093 			reg |= ((0x2 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1094 				REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1095 		} else {
1096 			/*
1097 			 * Configure - Change register DRAM operating speed
1098 			 * (DDR3-1600) - CWA_DATA
1099 			 */
1100 			reg |= ((0x3 & REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1101 				REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1102 		}
1103 
1104 		/* Configure - Set Delay - tSTAB */
1105 		reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
1106 		/* Configure - Issue CWA command with the above parameters */
1107 		reg |= (REG_SDRAM_OPERATION_CMD_CWA &
1108 			~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
1109 
1110 		/* 0x1418 - SDRAM Operation Register */
1111 		dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1112 
1113 		/* Poll - Wait for CWA operation completion */
1114 		do {
1115 			reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
1116 				REG_SDRAM_OPERATION_CMD_MASK;
1117 		} while (reg);
1118 	}
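
	/*
	 * CWA_DATA selection above (sketch): RC10 (the 0xA RC index) encodes
	 * the RDIMM operating speed bin -
	 *
	 *	freq <= DDR_400            -> 0x0 (DDR3-800)
	 *	DDR_400 < freq <= DDR_533  -> 0x1 (DDR3-1066)
	 *	DDR_533 < freq <= DDR_666  -> 0x2 (DDR3-1333)
	 *	otherwise                  -> 0x3 (DDR3-1600)
	 */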
1119 
1120 	/* Configure - Exit Self Refresh */
1121 	/* [2] - DfsSR  */
1122 	reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
1123 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1124 
1125 	/*
1126 	 * Poll - DFS Register - 0x1528 [3] - DfsAtSR - All DRAM
1127 	 * devices on all ranks are NOT in self refresh mode
1128 	 */
1129 	do {
1130 		reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
1131 	} while (reg);		/* Wait for '0' */
1132 
1133 	/* Configure - Issue Refresh command */
1134 	/* [3-0] = 0x2 - Refresh Command, [11-8] - enabled Cs */
1135 	reg = REG_SDRAM_OPERATION_CMD_RFRS;
1136 	for (cs = 0; cs < MAX_CS; cs++) {
1137 		if (dram_info->cs_ena & (1 << cs))
1138 			reg &= ~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1139 	}
1140 
1141 	/* 0x1418 - SDRAM Operation Register */
1142 	dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1143 
1144 	/* Poll - Wait for Refresh operation completion */
1145 	wait_refresh_op_complete();
1146 
1147 	/* Configure - Block new external transactions - Disable */
1148 	reg = reg_read(REG_DFS_ADDR);
1149 	reg &= ~(1 << REG_DFS_BLOCK_OFFS);	/* [1] - DfsBlock - Disable  */
1150 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1151 
1152 	/*
	 * Configure - XBAR Retry response during Block to enable
	 * internal access - Enable
1155 	 */
1156 	reg = reg_read(REG_METAL_MASK_ADDR);
1157 	/* [0] - RetryMask - Enable */
1158 	reg |= (1 << REG_METAL_MASK_RETRY_OFFS);
1159 	/* 0x14B0 - Dunit MMask Register */
1160 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
1161 
1162 	for (cs = 0; cs < MAX_CS; cs++) {
1163 		if (dram_info->cs_ena & (1 << cs)) {
1164 			/* Configure - Set CL */
1165 			reg = reg_read(REG_DDR3_MR0_CS_ADDR +
1166 				       (cs << MR_CS_ADDR_OFFS)) &
1167 				~REG_DDR3_MR0_CL_MASK;
1168 			if (freq == DDR_400)
1169 				tmp = ddr3_cl_to_valid_cl(6);
1170 			else
1171 				tmp = ddr3_cl_to_valid_cl(dram_info->cl);
1172 			reg |= ((tmp & 0x1) << REG_DDR3_MR0_CL_OFFS);
1173 			reg |= ((tmp & 0xE) << REG_DDR3_MR0_CL_HIGH_OFFS);
1174 			dfs_reg_write(REG_DDR3_MR0_CS_ADDR +
1175 				      (cs << MR_CS_ADDR_OFFS), reg);
1176 
1177 			/* Configure - Set CWL */
1178 			reg = reg_read(REG_DDR3_MR2_CS_ADDR +
1179 				       (cs << MR_CS_ADDR_OFFS)) &
1180 				~(REG_DDR3_MR2_CWL_MASK << REG_DDR3_MR2_CWL_OFFS);
1181 			if (freq == DDR_400)
1182 				reg |= ((0) << REG_DDR3_MR2_CWL_OFFS);
1183 			else
1184 				reg |= ((dram_info->cwl) << REG_DDR3_MR2_CWL_OFFS);
1185 			dfs_reg_write(REG_DDR3_MR2_CS_ADDR +
1186 				      (cs << MR_CS_ADDR_OFFS), reg);
1187 		}
1188 	}
1189 
	DEBUG_DFS_C("DDR3 - DFS - Low To High - Ended successfully - new Frequency - ",
1191 		    freq, 1);
1192 
1193 	return MV_OK;
1194 
1195 #else
1196 
1197 	/* This Flow is relevant for Armada370 A0 and ArmadaXP Z1 */
1198 
1199 	u32 reg, freq_par, tmp;
1200 	u32 cs = 0;
1201 
1202 	DEBUG_DFS_C("DDR3 - DFS - Low To High - Starting DFS procedure to Frequency - ",
1203 		    freq, 1);
1204 
1205 	/* target frequency - freq */
1206 	freq_par = ddr3_get_freq_parameter(freq, ratio_2to1);
1207 
1208 	reg = 0x0000FF00;
1209 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
1210 
	/* 0x1600 - ODPG Control register */
1212 	reg = reg_read(REG_ODPG_CNTRL_ADDR);
1213 	reg |= (1 << REG_ODPG_CNTRL_OFFS);	/* [21] = 1 */
1214 	dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);
1215 
1216 	/* 0x1670 - PHY lock mask register */
1217 	reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
1218 	reg &= REG_PHY_LOCK_MASK_MASK;	/* [11:0] = 0 */
1219 	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
1220 
	/* Disable reconfig of MR registers and enable DRAM DLL after DFS */
	reg = reg_read(REG_DFS_ADDR);	/* 0x1528 - DFS register */
	/* [4] = 0 - Do not reconfig MR registers after DFS */
	reg &= ~0x11;
	/* [0] = 0 - DRAM DLL enabled after DFS */
1226 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1227 
1228 	/* Disable DRAM Controller to crossbar retry */
1229 	/* [0] - disable */
1230 	reg = reg_read(REG_METAL_MASK_ADDR) & ~(1 << 0);
1231 	/* 0x14B0 - Dunit MMask Register */
1232 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
1233 
1234 	/* Enable DRAM Blocking */
1235 	/* [1] - DFS Block enable  */
1236 	reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_BLOCK_OFFS);
1237 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1238 
1239 	/* Enable Self refresh */
1240 	/* [2] - DFS Self refresh enable  */
1241 	reg = reg_read(REG_DFS_ADDR) | (1 << REG_DFS_SR_OFFS);
1242 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1243 
1244 	/*
1245 	 * Poll DFS Register - All DRAM devices on all ranks are in
1246 	 * self refresh mode - DFS can be executed afterwards
1247 	 */
1248 	/* 0x1528 [3] - DfsAtSR  */
1249 	do {
1250 		reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
1251 	} while (reg == 0x0);	/* Wait for '1' */
1252 
1253 	/*
1254 	 * Set Correct Ratio - if freq>MARGIN_FREQ use 2:1 ratio
1255 	 * else use 1:1 ratio
1256 	 */
1257 	if (ratio_2to1) {
1258 		/* [15] = 1 - Set 2:1 Ratio between Dunit and Phy */
1259 		reg = reg_read(REG_DDR_IO_ADDR) |
1260 			(1 << REG_DDR_IO_CLK_RATIO_OFFS);
1261 	} else {
1262 		/* [15] = 0 - Set 1:1 Ratio between Dunit and Phy */
1263 		reg = reg_read(REG_DDR_IO_ADDR) &
1264 			~(1 << REG_DDR_IO_CLK_RATIO_OFFS);
1265 	}
1266 	dfs_reg_write(REG_DDR_IO_ADDR, reg);	/* 0x1524 - DDR IO Register */
1267 
	/* Switch HCLK Mux from (100MHz) [16]=0, keep DFS request bit */
1269 	reg = 0x20040000;
1270 	/*
1271 	 * [29] - training logic request DFS, [28:27] -
1272 	 * preload patterns frequency [18]
1273 	 */
1274 
1275 	/* 0x18488 - DRAM Init control status register */
1276 	dfs_reg_write(REG_DRAM_INIT_CTRL_STATUS_ADDR, reg);
1277 
1278 	/* Add delay between entering SR and start ratio modification */
1279 	udelay(1);
1280 
1281 	/*
1282 	 * Initial Setup - assure that the "load new ratio" is clear (bit 24)
1283 	 * and in the same chance, block reassertions of reset [15:8] and
1284 	 * force reserved bits[7:0].
1285 	 */
1286 	reg = 0x0000FFFF;
1287 	/* 0x18700 - CPU Div CLK control 0 */
1288 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
1289 
1290 	/*
1291 	 * RelaX whenever reset is asserted to that channel (good for any case)
1292 	 */
1293 	reg = 0x0000FF00;
	/* 0x18704 - CPU Div CLK control 1 */
1295 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_1_ADDR, reg);
1296 
1297 	reg = reg_read(REG_CPU_DIV_CLK_CTRL_3_ADDR) &
1298 		REG_CPU_DIV_CLK_CTRL_3_FREQ_MASK;
1299 	reg |= (freq_par << REG_CPU_DIV_CLK_CTRL_3_FREQ_OFFS);
1300 	/* Full Integer ratio from PLL-out to ddr-clk */
1301 	/* 0x1870C - CPU Div CLK control 3 register */
1302 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_3_ADDR, reg);
1303 
1304 	/*
1305 	 * Shut off clock enable to the DDRPHY clock channel (this is the "D").
1306 	 * All the rest are kept as is (forced, but could be read-modify-write).
1307 	 * This is done now by RMW above.
1308 	 */
1309 
1310 	reg = 0x000FFF02;
1311 
1312 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_4_ADDR, reg);
1313 
1314 	/* Wait before replacing the clock on the DDR Phy Channel. */
1315 	udelay(1);
1316 
1317 	reg = 0x0102FDFF;
1318 	/*
	 * This is for triggering the frequency update. Bit[24] is the
1320 	 * central control
1321 	 * bits [23:16] == which channels to change ==2 ==> only DDR Phy
1322 	 *                 (smooth transition)
1323 	 * bits [15:8] == mask reset reassertion due to clock modification
1324 	 *                to these channels.
1325 	 * bits [7:0] == not in use
1326 	 */
1327 	/* 0x18700 - CPU Div CLK control 0 register */
1328 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
1329 
1330 	udelay(1);
1331 
1332 	/*
1333 	 * Poll Div CLK status 0 register - indication that the clocks are
1334 	 * active - 0x18718 [8]
1335 	 */
1336 	do {
1337 		reg = reg_read(REG_CPU_DIV_CLK_STATUS_0_ADDR) &
1338 			(1 << REG_CPU_DIV_CLK_ALL_STABLE_OFFS);
1339 	} while (reg == 0);
1340 
1341 	reg = 0x000000FF;
1342 	/*
1343 	 * Clean the CTRL0, to be ready for next resets and next requests of
1344 	 * ratio modifications.
1345 	 */
1346 	/* 0x18700 - CPU Div CLK control 0 register */
1347 	dfs_reg_write(REG_CPU_DIV_CLK_CTRL_0_ADDR, reg);
1348 
1349 	udelay(5);
1350 
1351 	if (ratio_2to1) {
1352 		/* Pup Reset Divider B - Set Reset */
1353 		/* [28] = 0 - Pup Reset Divider B */
1354 		reg = reg_read(REG_SDRAM_CONFIG_ADDR) & ~(1 << 28);
1355 		/* [28] = 1 - Pup Reset Divider B */
1356 		tmp = reg_read(REG_SDRAM_CONFIG_ADDR) | (1 << 28);
1357 		/* 0x1400 - SDRAM Configuration register */
1358 		dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1359 
1360 		/* Pup Reset Divider B - Release Reset */
1361 		/* 0x1400 - SDRAM Configuration register */
1362 		dfs_reg_write(REG_SDRAM_CONFIG_ADDR, tmp);
1363 	}
1364 
1365 	/* DRAM Data PHYs ADLL Reset - Set Reset */
1366 	reg = (reg_read(REG_DRAM_PHY_CONFIG_ADDR) & REG_DRAM_PHY_CONFIG_MASK);
	/* [31:30] - reset pup data ctrl ADLL */
1368 	/* 0x15EC - DRAM PHY Config Register */
1369 	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1370 
1371 	udelay(25);
1372 
1373 	/* APLL lock indication - Poll Phy lock status Register - 0x1674 [9] */
1374 	do {
1375 		reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
1376 			(1 << REG_PHY_LOCK_STATUS_LOCK_OFFS);
1377 	} while (reg == 0);
1378 
1379 	/* DRAM Data PHYs ADLL Reset - Release Reset */
1380 	reg = reg_read(REG_DRAM_PHY_CONFIG_ADDR) | ~REG_DRAM_PHY_CONFIG_MASK;
1381 	/* [31:30] - normal pup data ctrl ADLL */
1382 	/* 0x15EC - DRAM PHY Config register */
1383 	dfs_reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1384 
1385 	udelay(10000);		/* Wait 10msec */
1386 
1387 	/*
1388 	 * APLL lock indication - Poll Phy lock status Register - 0x1674 [11:0]
1389 	 */
1390 	do {
1391 		reg = reg_read(REG_PHY_LOCK_STATUS_ADDR) &
1392 			REG_PHY_LOCK_STATUS_LOCK_MASK;
1393 	} while (reg != REG_PHY_LOCK_STATUS_LOCK_MASK);
1394 
1395 	/* DRAM Data PHY Read [30], Write [29] path reset - Set Reset */
1396 	reg = reg_read(REG_SDRAM_CONFIG_ADDR) & REG_SDRAM_CONFIG_MASK;
1397 	/* [30:29] = 0 - Data Pup R/W path reset */
1398 	/* 0x1400 - SDRAM Configuration register */
1399 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1400 
1401 	/* DRAM Data PHY Read [30], Write [29] path reset - Release Reset */
1402 	reg = reg_read(REG_SDRAM_CONFIG_ADDR) | ~REG_SDRAM_CONFIG_MASK;
1403 	/* [30:29] = '11' - Data Pup R/W path reset */
1404 	/* 0x1400 - SDRAM Configuration register */
1405 	dfs_reg_write(REG_SDRAM_CONFIG_ADDR, reg);
1406 
1407 	/* Disable DFS Reconfig */
1408 	reg = reg_read(REG_DFS_ADDR) & ~(1 << 4);
1409 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1410 
1411 	/* [2] - DFS Self refresh disable  */
1412 	reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_SR_OFFS);
1413 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1414 
1415 	/*
1416 	 * Poll DFS Register - 0x1528 [3] - DfsAtSR - All DRAM devices on
1417 	 * all ranks are NOT in self refresh mode
1418 	 */
1419 	do {
1420 		reg = reg_read(REG_DFS_ADDR) & (1 << REG_DFS_ATSR_OFFS);
1421 	} while (reg);		/* Wait for '0' */
1422 
1423 	/* 0x1404 */
1424 	reg = (reg_read(REG_DUNIT_CTRL_LOW_ADDR) & 0xFFFFFFE7) | 0x2;
1425 
1426 	/* Configure - 2T Mode - Restore original configuration */
1427 	/* [3:4] 2T - Restore value */
1428 	reg &= ~(REG_DUNIT_CTRL_LOW_2T_MASK << REG_DUNIT_CTRL_LOW_2T_OFFS);
1429 	reg |= ((dram_info->mode_2t & REG_DUNIT_CTRL_LOW_2T_MASK) <<
1430 		REG_DUNIT_CTRL_LOW_2T_OFFS);
1431 	dfs_reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
1432 
1433 	udelay(1);		/* Wait 1us */
1434 
1435 	for (cs = 0; cs < MAX_CS; cs++) {
1436 		if (dram_info->cs_ena & (1 << cs)) {
1437 			reg = (reg_read(REG_DDR3_MR1_ADDR));
1438 			/* DLL Enable */
1439 			reg &= ~(1 << REG_DDR3_MR1_DLL_ENA_OFFS);
1440 			dfs_reg_write(REG_DDR3_MR1_ADDR, reg);
1441 
1442 			/* Issue MRS Command to current cs */
1443 			reg = REG_SDRAM_OPERATION_CMD_MR1 &
1444 				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1445 			/*
1446 			 * [3-0] = 0x4 - MR1 Command, [11-8] -
1447 			 * enable current cs
1448 			 */
1449 			/* 0x1418 - SDRAM Operation Register */
1450 			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1451 
1452 			/* Poll - Wait for Refresh operation completion */
1453 			wait_refresh_op_complete();
1454 
1455 			/* DLL Reset - MR0 */
1456 			reg = reg_read(REG_DDR3_MR0_ADDR);
1457 			dfs_reg_write(REG_DDR3_MR0_ADDR, reg);
1458 
1459 			/* Issue MRS Command to current cs */
1460 			reg = REG_SDRAM_OPERATION_CMD_MR0 &
1461 				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1462 			/*
			 * [3-0] - MR0 Command, [11-8] -
1464 			 * enable current cs
1465 			 */
1466 			/* 0x1418 - SDRAM Operation Register */
1467 			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1468 
1469 			/* Poll - Wait for Refresh operation completion */
1470 			wait_refresh_op_complete();
1471 
1472 			reg = reg_read(REG_DDR3_MR0_ADDR);
1473 			reg &= ~0x74;	/* CL [3:0]; [6:4],[2] */
1474 
1475 			if (freq == DDR_400)
1476 				tmp = ddr3_cl_to_valid_cl(6) & 0xF;
1477 			else
1478 				tmp = ddr3_cl_to_valid_cl(dram_info->cl) & 0xF;
1479 
1480 			reg |= ((tmp & 0x1) << 2);
1481 			reg |= ((tmp >> 1) << 4);	/* to bit 4 */
1482 			dfs_reg_write(REG_DDR3_MR0_ADDR, reg);
1483 
1484 			reg = REG_SDRAM_OPERATION_CMD_MR0 &
1485 				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1486 			/* 0x1418 - SDRAM Operation Register */
1487 			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1488 
1489 			/* Poll - Wait for Refresh operation completion */
1490 			wait_refresh_op_complete();
1491 
1492 			reg = reg_read(REG_DDR3_MR2_ADDR);
1493 			reg &= ~0x38;	/* CWL [5:3] */
			/* CWL code = 0 (CWL 5) for DDR_400 */
1495 			if (freq != DDR_400)
1496 				reg |= dram_info->cwl << REG_DDR3_MR2_CWL_OFFS;
1497 			dfs_reg_write(REG_DDR3_MR2_ADDR, reg);
1498 			reg = REG_SDRAM_OPERATION_CMD_MR2 &
1499 				~(1 << (REG_SDRAM_OPERATION_CS_OFFS + cs));
1500 			/* 0x1418 - SDRAM Operation Register */
1501 			dfs_reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1502 
1503 			/* Poll - Wait for Refresh operation completion */
1504 			wait_refresh_op_complete();
1505 
1506 			/* Set current rd_sample_delay  */
1507 			reg = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
1508 			reg &= ~(REG_READ_DATA_SAMPLE_DELAYS_MASK <<
1509 				 (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
1510 			reg |= (dram_info->cl <<
1511 				(REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
1512 			dfs_reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);
1513 
1514 			/* Set current rd_ready_delay  */
1515 			reg = reg_read(REG_READ_DATA_READY_DELAYS_ADDR);
1516 			reg &= ~(REG_READ_DATA_READY_DELAYS_MASK <<
1517 				 (REG_READ_DATA_READY_DELAYS_OFFS * cs));
1518 			reg |= ((dram_info->cl + 1) <<
				(REG_READ_DATA_READY_DELAYS_OFFS * cs));
1520 			dfs_reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
1521 		}
1522 	}
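
	/*
	 * Read sample/ready delays programmed above (sketch): each CS owns
	 * its own field, spaced REG_READ_DATA_SAMPLE_DELAYS_OFFS bits apart.
	 * For example, with CL = 7 on CS1 (an illustrative value, not taken
	 * from this file):
	 *
	 *	rd_sample_delay[CS1] = CL     = 7
	 *	rd_ready_delay[CS1]  = CL + 1 = 8
	 */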
1523 
	/* Enable ODT in DLL-on mode */
1525 	dfs_reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR, 0);
1526 
1527 	/* [1] - DFS Block disable  */
1528 	reg = reg_read(REG_DFS_ADDR) & ~(1 << REG_DFS_BLOCK_OFFS);
1529 	dfs_reg_write(REG_DFS_ADDR, reg);	/* 0x1528 - DFS register */
1530 
1531 	/* Change DDR frequency to 100MHz procedure: */
	/* 0x1600 - ODPG Control register */
1533 	reg = reg_read(REG_ODPG_CNTRL_ADDR);
1534 	reg &= ~(1 << REG_ODPG_CNTRL_OFFS);	/* [21] = 0 */
1535 	dfs_reg_write(REG_ODPG_CNTRL_ADDR, reg);
1536 
1537 	/* Change DDR frequency to 100MHz procedure: */
1538 	/* 0x1670 - PHY lock mask register */
1539 	reg = reg_read(REG_PHY_LOCK_MASK_ADDR);
1540 	reg |= ~REG_PHY_LOCK_MASK_MASK;	/* [11:0] = FFF */
1541 	dfs_reg_write(REG_PHY_LOCK_MASK_ADDR, reg);
1542 
	reg = reg_read(REG_METAL_MASK_ADDR) | (1 << 0);	/* [0] - enable */
1544 	/* 0x14B0 - Dunit MMask Register */
1545 	dfs_reg_write(REG_METAL_MASK_ADDR, reg);
1546 
	DEBUG_DFS_C("DDR3 - DFS - Low To High - Ended successfully - new Frequency - ",
1548 		    freq, 1);
1549 	return MV_OK;
1550 #endif
1551 }
1552