1 /*
2  * Copyright (C) Marvell International Ltd. and its affiliates
3  *
4  * SPDX-License-Identifier:	GPL-2.0
5  */
6 
7 #include <common.h>
8 #include <i2c.h>
9 #include <spl.h>
10 #include <asm/io.h>
11 #include <asm/arch/cpu.h>
12 #include <asm/arch/soc.h>
13 
14 #include "ddr3_init.h"
15 
16 #include "../../../../arch/arm/mach-mvebu/serdes/a38x/sys_env_lib.h"
17 
18 static struct dlb_config ddr3_dlb_config_table[] = {
19 	{REG_STATIC_DRAM_DLB_CONTROL, 0x2000005c},
20 	{DLB_BUS_OPTIMIZATION_WEIGHTS_REG, 0x00880000},
21 	{DLB_AGING_REGISTER, 0x0f7f007f},
22 	{DLB_EVICTION_CONTROL_REG, 0x0000129f},
23 	{DLB_EVICTION_TIMERS_REGISTER_REG, 0x00ff0000},
24 	{DLB_BUS_WEIGHTS_DIFF_CS, 0x04030802},
25 	{DLB_BUS_WEIGHTS_DIFF_BG, 0x00000a02},
26 	{DLB_BUS_WEIGHTS_SAME_BG, 0x09000a01},
27 	{DLB_BUS_WEIGHTS_RD_WR, 0x00020005},
28 	{DLB_BUS_WEIGHTS_ATTR_SYS_PRIO, 0x00060f10},
29 	{DLB_MAIN_QUEUE_MAP, 0x00000543},
30 	{DLB_LINE_SPLIT, 0x00000000},
31 	{DLB_USER_COMMAND_REG, 0x00000000},
32 	{0x0, 0x0}
33 };
34 
35 static struct dlb_config ddr3_dlb_config_table_a0[] = {
36 	{REG_STATIC_DRAM_DLB_CONTROL, 0x2000005c},
37 	{DLB_BUS_OPTIMIZATION_WEIGHTS_REG, 0x00880000},
38 	{DLB_AGING_REGISTER, 0x0f7f007f},
39 	{DLB_EVICTION_CONTROL_REG, 0x0000129f},
40 	{DLB_EVICTION_TIMERS_REGISTER_REG, 0x00ff0000},
41 	{DLB_BUS_WEIGHTS_DIFF_CS, 0x04030802},
42 	{DLB_BUS_WEIGHTS_DIFF_BG, 0x00000a02},
43 	{DLB_BUS_WEIGHTS_SAME_BG, 0x09000a01},
44 	{DLB_BUS_WEIGHTS_RD_WR, 0x00020005},
45 	{DLB_BUS_WEIGHTS_ATTR_SYS_PRIO, 0x00060f10},
46 	{DLB_MAIN_QUEUE_MAP, 0x00000543},
47 	{DLB_LINE_SPLIT, 0x00000000},
48 	{DLB_USER_COMMAND_REG, 0x00000000},
49 	{0x0, 0x0}
50 };
51 
52 #if defined(CONFIG_ARMADA_38X)
53 struct dram_modes {
54 	char *mode_name;
55 	u8 cpu_freq;
56 	u8 fab_freq;
57 	u8 chip_id;
58 	u8 chip_board_rev;
59 	struct reg_data *regs;
60 };
61 
62 struct dram_modes ddr_modes[] = {
63 #ifdef SUPPORT_STATIC_DUNIT_CONFIG
	/* Conf name, CPU freq, Fab freq, Chip ID, Chip/Board, MC regs */
65 #ifdef CONFIG_CUSTOMER_BOARD_SUPPORT
66 	{"a38x_customer_0_800", DDR_FREQ_800, 0, 0x0, A38X_CUSTOMER_BOARD_ID0,
67 	 ddr3_customer_800},
68 	{"a38x_customer_1_800", DDR_FREQ_800, 0, 0x0, A38X_CUSTOMER_BOARD_ID1,
69 	 ddr3_customer_800},
70 #else
71 	{"a38x_533", DDR_FREQ_533, 0, 0x0, MARVELL_BOARD, ddr3_a38x_533},
72 	{"a38x_667", DDR_FREQ_667, 0, 0x0, MARVELL_BOARD, ddr3_a38x_667},
73 	{"a38x_800", DDR_FREQ_800, 0, 0x0, MARVELL_BOARD, ddr3_a38x_800},
74 	{"a38x_933", DDR_FREQ_933, 0, 0x0, MARVELL_BOARD, ddr3_a38x_933},
75 #endif
76 #endif
77 };
78 #endif /* defined(CONFIG_ARMADA_38X) */
79 
80 /* Translates topology map definitions to real memory size in bits */
81 u32 mem_size[] = {
82 	ADDR_SIZE_512MB, ADDR_SIZE_1GB, ADDR_SIZE_2GB, ADDR_SIZE_4GB,
83 	ADDR_SIZE_8GB
84 };
85 
86 static char *ddr_type = "DDR3";
87 
88 /*
 * Set to 1 to use dynamic DUNIT configuration;
 * set to 0 (supported for A380 and AC3) to configure the DUNIT with the
 * values set by ddr3_tip_init_specific_reg_config
92  */
93 u8 generic_init_controller = 1;
94 
95 #ifdef SUPPORT_STATIC_DUNIT_CONFIG
96 static u32 ddr3_get_static_ddr_mode(void);
97 #endif
98 static int ddr3_hws_tune_training_params(u8 dev_num);
99 
100 /* device revision */
101 #define DEV_VERSION_ID_REG		0x1823c
102 #define REVISON_ID_OFFS			8
103 #define REVISON_ID_MASK			0xf00
104 
105 /* A38x revisions */
106 #define MV_88F68XX_Z1_ID		0x0
107 #define MV_88F68XX_A0_ID		0x4
108 /* A39x revisions */
109 #define MV_88F69XX_Z1_ID		0x2
110 
111 /*
112  * sys_env_device_rev_get - Get Marvell controller device revision number
113  *
114  * DESCRIPTION:
 *       This function returns an 8-bit value describing the device revision
 *       as defined in the Revision ID Register.
117  *
118  * INPUT:
119  *       None.
120  *
121  * OUTPUT:
122  *       None.
123  *
124  * RETURN:
 *       8-bit value describing the Marvell controller revision number
126  */
127 u8 sys_env_device_rev_get(void)
128 {
129 	u32 value;
130 
131 	value = reg_read(DEV_VERSION_ID_REG);
132 	return (value & (REVISON_ID_MASK)) >> REVISON_ID_OFFS;
133 }
134 
135 /*
 * sys_env_dlb_config_ptr_get
 *
 * DESCRIPTION: Return a pointer to the DLB configuration table
 *
 * INPUT: none
 *
 * OUTPUT: pointer to the DLB configuration table
 *
 * RETURN:
 *       returns a pointer to the DLB configuration table
146  */
147 struct dlb_config *sys_env_dlb_config_ptr_get(void)
148 {
149 #ifdef CONFIG_ARMADA_39X
150 	return &ddr3_dlb_config_table_a0[0];
151 #else
152 	if (sys_env_device_rev_get() == MV_88F68XX_A0_ID)
153 		return &ddr3_dlb_config_table_a0[0];
154 	else
155 		return &ddr3_dlb_config_table[0];
156 #endif
157 }
158 
159 /*
160  * sys_env_get_cs_ena_from_reg
161  *
162  * DESCRIPTION: Get bit mask of enabled CS
163  *
164  * INPUT: None
165  *
166  * OUTPUT: None
167  *
168  * RETURN:
169  *       Bit mask of enabled CS, 1 if only CS0 enabled,
170  *       3 if both CS0 and CS1 enabled
171  */
172 u32 sys_env_get_cs_ena_from_reg(void)
173 {
174 	return reg_read(REG_DDR3_RANK_CTRL_ADDR) &
175 		REG_DDR3_RANK_CTRL_CS_ENA_MASK;
176 }
177 
178 static void ddr3_restore_and_set_final_windows(u32 *win)
179 {
180 	u32 win_ctrl_reg, num_of_win_regs;
181 	u32 cs_ena = sys_env_get_cs_ena_from_reg();
182 	u32 ui;
183 
184 	win_ctrl_reg = REG_XBAR_WIN_4_CTRL_ADDR;
185 	num_of_win_regs = 16;
186 
187 	/* Return XBAR windows 4-7 or 16-19 init configuration */
188 	for (ui = 0; ui < num_of_win_regs; ui++)
189 		reg_write((win_ctrl_reg + 0x4 * ui), win[ui]);
190 
191 	printf("%s Training Sequence - Switching XBAR Window to FastPath Window\n",
192 	       ddr_type);
193 
194 #if defined DYNAMIC_CS_SIZE_CONFIG
195 	if (ddr3_fast_path_dynamic_cs_size_config(cs_ena) != MV_OK)
196 		printf("ddr3_fast_path_dynamic_cs_size_config FAILED\n");
197 #else
198 	u32 reg, cs;
199 	reg = 0x1fffffe1;
200 	for (cs = 0; cs < MAX_CS; cs++) {
201 		if (cs_ena & (1 << cs)) {
202 			reg |= (cs << 2);
203 			break;
204 		}
205 	}
	/* Open fast path window to 0.5 GB */
207 	reg_write(REG_FASTPATH_WIN_0_CTRL_ADDR, reg);
208 #endif
209 }
210 
211 static int ddr3_save_and_set_training_windows(u32 *win)
212 {
213 	u32 cs_ena;
214 	u32 reg, tmp_count, cs, ui;
215 	u32 win_ctrl_reg, win_base_reg, win_remap_reg;
216 	u32 num_of_win_regs, win_jump_index;
217 	win_ctrl_reg = REG_XBAR_WIN_4_CTRL_ADDR;
218 	win_base_reg = REG_XBAR_WIN_4_BASE_ADDR;
219 	win_remap_reg = REG_XBAR_WIN_4_REMAP_ADDR;
220 	win_jump_index = 0x10;
221 	num_of_win_regs = 16;
222 	struct hws_topology_map *tm = ddr3_get_topology_map();
223 
224 #ifdef DISABLE_L2_FILTERING_DURING_DDR_TRAINING
225 	/*
226 	 * Disable L2 filtering during DDR training
227 	 * (when Cross Bar window is open)
228 	 */
229 	reg_write(ADDRESS_FILTERING_END_REGISTER, 0);
230 #endif
231 
232 	cs_ena = tm->interface_params[0].as_bus_params[0].cs_bitmask;
233 
234 	/* Close XBAR Window 19 - Not needed */
235 	/* {0x000200e8}  -   Open Mbus Window - 2G */
236 	reg_write(REG_XBAR_WIN_19_CTRL_ADDR, 0);
237 
238 	/* Save XBAR Windows 4-19 init configurations */
239 	for (ui = 0; ui < num_of_win_regs; ui++)
240 		win[ui] = reg_read(win_ctrl_reg + 0x4 * ui);
241 
242 	/* Open XBAR Windows 4-7 or 16-19 for other CS */
243 	reg = 0;
244 	tmp_count = 0;
245 	for (cs = 0; cs < MAX_CS; cs++) {
246 		if (cs_ena & (1 << cs)) {
247 			switch (cs) {
248 			case 0:
249 				reg = 0x0e00;
250 				break;
251 			case 1:
252 				reg = 0x0d00;
253 				break;
254 			case 2:
255 				reg = 0x0b00;
256 				break;
257 			case 3:
258 				reg = 0x0700;
259 				break;
260 			}
261 			reg |= (1 << 0);
262 			reg |= (SDRAM_CS_SIZE & 0xffff0000);
263 
264 			reg_write(win_ctrl_reg + win_jump_index * tmp_count,
265 				  reg);
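			/*
			 * Place each enabled CS window contiguously after
			 * the previous one
			 */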
266 			reg = (((SDRAM_CS_SIZE + 1) * (tmp_count)) &
267 			       0xffff0000);
268 			reg_write(win_base_reg + win_jump_index * tmp_count,
269 				  reg);
270 
271 			if (win_remap_reg <= REG_XBAR_WIN_7_REMAP_ADDR)
272 				reg_write(win_remap_reg +
273 					  win_jump_index * tmp_count, 0);
274 
275 			tmp_count++;
276 		}
277 	}
278 
279 	return MV_OK;
280 }
281 
282 /*
283  * Name:     ddr3_init - Main DDR3 Init function
 * Desc:     This routine initializes the DDR3 MC and runs HW training.
 * Args:     None.
 * Notes:
 * Returns:  MV_OK on success, error status otherwise.
288  */
289 int ddr3_init(void)
290 {
291 	u32 reg = 0;
292 	u32 soc_num;
293 	int status;
294 	u32 win[16];
295 
	/* SoC/Board special initializations */
297 	/* Get version from internal library */
298 	ddr3_print_version();
299 
	/* Add sub_version string */
301 	DEBUG_INIT_C("", SUB_VERSION, 1);
302 
303 	/* Switching CPU to MRVL ID */
304 	soc_num = (reg_read(REG_SAMPLE_RESET_HIGH_ADDR) & SAR1_CPU_CORE_MASK) >>
305 		SAR1_CPU_CORE_OFFSET;
306 	switch (soc_num) {
307 	case 0x3:
308 	case 0x1:
309 		reg_bit_set(CPU_CONFIGURATION_REG(1), CPU_MRVL_ID_OFFSET);
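		/* fall through - also set the Marvell ID for CPU core 0 */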
310 	case 0x0:
311 		reg_bit_set(CPU_CONFIGURATION_REG(0), CPU_MRVL_ID_OFFSET);
312 	default:
313 		break;
314 	}
315 
316 	/*
	 * Set the DRAM Reset Mask in case a GPIO indication of wakeup from
	 * suspend was detected, i.e. the DRAM values will not be
	 * overwritten / reset when waking from suspend
320 	 */
321 	if (sys_env_suspend_wakeup_check() ==
322 	    SUSPEND_WAKEUP_ENABLED_GPIO_DETECTED) {
323 		reg_bit_set(REG_SDRAM_INIT_CTRL_ADDR,
324 			    1 << REG_SDRAM_INIT_RESET_MASK_OFFS);
325 	}
326 
327 	/*
328 	 * Stage 0 - Set board configuration
329 	 */
330 
331 	/* Check if DRAM is already initialized  */
332 	if (reg_read(REG_BOOTROM_ROUTINE_ADDR) &
333 	    (1 << REG_BOOTROM_ROUTINE_DRAM_INIT_OFFS)) {
334 		printf("%s Training Sequence - 2nd boot - Skip\n", ddr_type);
335 		return MV_OK;
336 	}
337 
338 	/*
339 	 * Stage 1 - Dunit Setup
340 	 */
341 
342 	/* Fix read ready phases for all SOC in reg 0x15c8 */
343 	reg = reg_read(REG_TRAINING_DEBUG_3_ADDR);
344 	reg &= ~(REG_TRAINING_DEBUG_3_MASK);
345 	reg |= 0x4;		/* Phase 0 */
346 	reg &= ~(REG_TRAINING_DEBUG_3_MASK << REG_TRAINING_DEBUG_3_OFFS);
347 	reg |= (0x4 << (1 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 1 */
348 	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (3 * REG_TRAINING_DEBUG_3_OFFS));
349 	reg |= (0x6 << (3 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 3 */
350 	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (4 * REG_TRAINING_DEBUG_3_OFFS));
351 	reg |= (0x6 << (4 * REG_TRAINING_DEBUG_3_OFFS));
352 	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (5 * REG_TRAINING_DEBUG_3_OFFS));
353 	reg |= (0x6 << (5 * REG_TRAINING_DEBUG_3_OFFS));
354 	reg_write(REG_TRAINING_DEBUG_3_ADDR, reg);
355 
356 	/*
357 	 * Axi_bresp_mode[8] = Compliant,
358 	 * Axi_addr_decode_cntrl[11] = Internal,
359 	 * Axi_data_bus_width[0] = 128bit
360 	 * */
361 	/* 0x14a8 - AXI Control Register */
362 	reg_write(REG_DRAM_AXI_CTRL_ADDR, 0);
363 
364 	/*
365 	 * Stage 2 - Training Values Setup
366 	 */
367 	/* Set X-BAR windows for the training sequence */
368 	ddr3_save_and_set_training_windows(win);
369 
370 #ifdef SUPPORT_STATIC_DUNIT_CONFIG
371 	/*
372 	 * Load static controller configuration (in case dynamic/generic init
	 * is not enabled)
374 	 */
375 	if (generic_init_controller == 0) {
376 		ddr3_tip_init_specific_reg_config(0,
377 						  ddr_modes
378 						  [ddr3_get_static_ddr_mode
379 						   ()].regs);
380 	}
381 #endif
382 
	/* Tune training algorithm parameters */
384 	status = ddr3_hws_tune_training_params(0);
385 	if (MV_OK != status)
386 		return status;
387 
388 	/* Set log level for training lib */
389 	ddr3_hws_set_log_level(DEBUG_BLOCK_ALL, DEBUG_LEVEL_ERROR);
390 
391 	/* Start New Training IP */
392 	status = ddr3_hws_hw_training();
393 	if (MV_OK != status) {
394 		printf("%s Training Sequence - FAILED\n", ddr_type);
395 		return status;
396 	}
397 
398 	/*
399 	 * Stage 3 - Finish
400 	 */
401 	/* Restore and set windows */
402 	ddr3_restore_and_set_final_windows(win);
403 
404 	/* Update DRAM init indication in bootROM register */
405 	reg = reg_read(REG_BOOTROM_ROUTINE_ADDR);
406 	reg_write(REG_BOOTROM_ROUTINE_ADDR,
407 		  reg | (1 << REG_BOOTROM_ROUTINE_DRAM_INIT_OFFS));
408 
409 	/* DLB config */
410 	ddr3_new_tip_dlb_config();
411 
412 #if defined(ECC_SUPPORT)
413 	if (ddr3_if_ecc_enabled())
414 		ddr3_new_tip_ecc_scrub();
415 #endif
416 
417 	printf("%s Training Sequence - Ended Successfully\n", ddr_type);
418 
419 	return MV_OK;
420 }
421 
422 /*
423  * Name:     ddr3_get_cpu_freq
424  * Desc:     read S@R and return CPU frequency
425  * Args:
426  * Notes:
427  * Returns:  required value
428  */
429 u32 ddr3_get_cpu_freq(void)
430 {
431 	return ddr3_tip_get_init_freq();
432 }
433 
434 /*
435  * Name:     ddr3_get_fab_opt
 * Desc:     return the fabric frequency option
 * Args:
 * Notes:    this SoC has no fabric, so 0 is always returned
439  * Returns:  required value
440  */
441 u32 ddr3_get_fab_opt(void)
442 {
443 	return 0;		/* No fabric */
444 }
445 
446 /*
 * Name:     ddr3_get_static_mc_value
 * Desc:     Read a register and extract up to two bit fields from it,
 *           given the fields' offsets and masks.
 * Args:     reg_addr - register address
 *           offset1, mask1 - offset and mask of the first field
 *           offset2, mask2 - offset and mask of the second field
 *                            (mask2 = 0 skips the second field)
 * Notes:
 * Returns:  The extracted (OR-combined) field value.
455  */
456 u32 ddr3_get_static_mc_value(u32 reg_addr, u32 offset1, u32 mask1,
457 			     u32 offset2, u32 mask2)
458 {
459 	u32 reg, temp;
460 
461 	reg = reg_read(reg_addr);
462 
463 	temp = (reg >> offset1) & mask1;
464 	if (mask2)
465 		temp |= (reg >> offset2) & mask2;
466 
467 	return temp;
468 }
469 
470 /*
 * Name:     ddr3_get_static_ddr_mode
 * Desc:     Find the static DDR mode matching the current CPU frequency,
 *           fabric option and chip/board revision.
 * Args:     None.
 * Notes:    Used when the controller is configured from a static register
 *           set instead of the HW training procedure.
 * Returns:  Index into ddr_modes[], or 0 if no match is found.
479  */
480 u32 ddr3_get_static_ddr_mode(void)
481 {
482 	u32 chip_board_rev, i;
483 	u32 size;
484 
	/* Valid for A380 only; MSYS uses dynamic controller config */
486 #ifdef CONFIG_CUSTOMER_BOARD_SUPPORT
487 	/*
488 	 * Customer boards select DDR mode according to
489 	 * board ID & Sample@Reset
490 	 */
491 	chip_board_rev = mv_board_id_get();
492 #else
493 	/* Marvell boards select DDR mode according to Sample@Reset only */
494 	chip_board_rev = MARVELL_BOARD;
495 #endif
496 
497 	size = ARRAY_SIZE(ddr_modes);
498 	for (i = 0; i < size; i++) {
499 		if ((ddr3_get_cpu_freq() == ddr_modes[i].cpu_freq) &&
500 		    (ddr3_get_fab_opt() == ddr_modes[i].fab_freq) &&
501 		    (chip_board_rev == ddr_modes[i].chip_board_rev))
502 			return i;
503 	}
504 
505 	DEBUG_INIT_S("\n*** Error: ddr3_get_static_ddr_mode: No match for requested DDR mode. ***\n\n");
506 
507 	return 0;
508 }
509 
510 /******************************************************************************
511  * Name:     ddr3_get_cs_num_from_reg
512  * Desc:
513  * Args:
514  * Notes:
515  * Returns:
516  */
517 u32 ddr3_get_cs_num_from_reg(void)
518 {
519 	u32 cs_ena = sys_env_get_cs_ena_from_reg();
520 	u32 cs_count = 0;
521 	u32 cs;
522 
523 	for (cs = 0; cs < MAX_CS; cs++) {
524 		if (cs_ena & (1 << cs))
525 			cs_count++;
526 	}
527 
528 	return cs_count;
529 }
530 
531 void get_target_freq(u32 freq_mode, u32 *ddr_freq, u32 *hclk_ps)
532 {
533 	u32 tmp, hclk = 200;
534 
535 	switch (freq_mode) {
536 	case 4:
537 		tmp = 1;	/* DDR_400; */
538 		hclk = 200;
539 		break;
540 	case 0x8:
541 		tmp = 1;	/* DDR_666; */
542 		hclk = 333;
543 		break;
544 	case 0xc:
545 		tmp = 1;	/* DDR_800; */
546 		hclk = 400;
547 		break;
	default:
		/*
		 * Unknown freq_mode: report zeros and bail out before the
		 * calculation below uses uninitialized values
		 */
		*ddr_freq = 0;
		*hclk_ps = 0;
		return;
552 	}
553 
554 	*ddr_freq = tmp;		/* DDR freq define */
555 	*hclk_ps = 1000000 / hclk;	/* values are 1/HCLK in ps */
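	/* e.g. hclk = 200 MHz -> 1000000 / 200 = 5000 ps clock period */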
556 
557 	return;
558 }
559 
560 void ddr3_new_tip_dlb_config(void)
561 {
562 	u32 reg, i = 0;
563 	struct dlb_config *config_table_ptr = sys_env_dlb_config_ptr_get();
564 
565 	/* Write the configuration */
566 	while (config_table_ptr[i].reg_addr != 0) {
567 		reg_write(config_table_ptr[i].reg_addr,
568 			  config_table_ptr[i].reg_data);
569 		i++;
570 	}
571 
572 	/* Enable DLB */
573 	reg = reg_read(REG_STATIC_DRAM_DLB_CONTROL);
574 	reg |= DLB_ENABLE | DLB_WRITE_COALESING | DLB_AXI_PREFETCH_EN |
575 		DLB_MBUS_PREFETCH_EN | PREFETCH_N_LN_SZ_TR;
576 	reg_write(REG_STATIC_DRAM_DLB_CONTROL, reg);
577 }
578 
579 int ddr3_fast_path_dynamic_cs_size_config(u32 cs_ena)
580 {
581 	u32 reg, cs;
582 	u32 mem_total_size = 0;
583 	u32 cs_mem_size = 0;
584 	u32 mem_total_size_c, cs_mem_size_c;
585 
586 #ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
587 	u32 physical_mem_size;
588 	u32 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE;
589 	struct hws_topology_map *tm = ddr3_get_topology_map();
590 #endif
591 
592 	/* Open fast path windows */
593 	for (cs = 0; cs < MAX_CS; cs++) {
594 		if (cs_ena & (1 << cs)) {
595 			/* get CS size */
596 			if (ddr3_calc_mem_cs_size(cs, &cs_mem_size) != MV_OK)
597 				return MV_FAIL;
598 
599 #ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
600 			/*
			 * If the number of address pins does not allow using
			 * the max memory size defined in the topology, the
			 * memory size is limited by DEVICE_MAX_DRAM_ADDRESS_SIZE
604 			 */
605 			physical_mem_size = mem_size
606 				[tm->interface_params[0].memory_size];
607 
608 			if (ddr3_get_device_width(cs) == 16) {
609 				/*
				 * a 16-bit memory device can hold twice as
				 * much - the least significant address pin
				 * is not needed
612 				 */
613 				max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE * 2;
614 			}
615 
616 			if (physical_mem_size > max_mem_size) {
617 				cs_mem_size = max_mem_size *
618 					(ddr3_get_bus_width() /
619 					 ddr3_get_device_width(cs));
620 				printf("Updated Physical Mem size is from 0x%x to %x\n",
621 				       physical_mem_size,
622 				       DEVICE_MAX_DRAM_ADDRESS_SIZE);
623 			}
624 #endif
625 
626 			/* set fast path window control for the cs */
627 			reg = 0xffffe1;
628 			reg |= (cs << 2);
629 			reg |= (cs_mem_size - 1) & 0xffff0000;
630 			/*Open fast path Window */
631 			reg_write(REG_FASTPATH_WIN_CTRL_ADDR(cs), reg);
632 
633 			/* Set fast path window base address for the cs */
634 			reg = ((cs_mem_size) * cs) & 0xffff0000;
635 			/* Set base address */
636 			reg_write(REG_FASTPATH_WIN_BASE_ADDR(cs), reg);
637 
638 			/*
			 * Since the memory size may be bigger than 4 GB, the
			 * sum may not fit in a 32-bit word; to estimate the
			 * result, divide mem_total_size and cs_mem_size by
			 * 0x10000 (equivalent to >> 16)
643 			 */
644 			mem_total_size_c = mem_total_size >> 16;
645 			cs_mem_size_c = cs_mem_size >> 16;
			/* if the sum is less than 2 GB - calculate the value */
647 			if (mem_total_size_c + cs_mem_size_c < 0x10000)
648 				mem_total_size += cs_mem_size;
649 			else	/* put max possible size */
650 				mem_total_size = L2_FILTER_FOR_MAX_MEMORY_SIZE;
651 		}
652 	}
653 
654 	/* Set L2 filtering to Max Memory size */
655 	reg_write(ADDRESS_FILTERING_END_REGISTER, mem_total_size);
656 
657 	return MV_OK;
658 }
659 
660 u32 ddr3_get_bus_width(void)
661 {
662 	u32 bus_width;
663 
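	/*
	 * bit 15 of the SDRAM Configuration register (0x1400) selects the
	 * DRAM bus width
	 */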
664 	bus_width = (reg_read(REG_SDRAM_CONFIG_ADDR) & 0x8000) >>
665 		REG_SDRAM_CONFIG_WIDTH_OFFS;
666 
667 	return (bus_width == 0) ? 16 : 32;
668 }
669 
670 u32 ddr3_get_device_width(u32 cs)
671 {
672 	u32 device_width;
673 
674 	device_width = (reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR) &
675 			(0x3 << (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs))) >>
676 		(REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs);
677 
678 	return (device_width == 0) ? 8 : 16;
679 }
680 
681 float ddr3_get_device_size(u32 cs)
682 {
683 	u32 device_size_low, device_size_high, device_size;
684 	u32 data, cs_low_offset, cs_high_offset;
685 
686 	cs_low_offset = REG_SDRAM_ADDRESS_SIZE_OFFS + cs * 4;
687 	cs_high_offset = REG_SDRAM_ADDRESS_SIZE_OFFS +
688 		REG_SDRAM_ADDRESS_SIZE_HIGH_OFFS + cs;
689 
690 	data = reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR);
691 	device_size_low = (data >> cs_low_offset) & 0x3;
692 	device_size_high = (data >> cs_high_offset) & 0x1;
693 
694 	device_size = device_size_low | (device_size_high << 2);
695 
696 	switch (device_size) {
697 	case 0:
698 		return 2;
699 	case 2:
700 		return 0.5;
701 	case 3:
702 		return 1;
703 	case 4:
704 		return 4;
705 	case 5:
706 		return 8;
707 	case 1:
708 	default:
709 		DEBUG_INIT_C("Error: Wrong device size of Cs: ", cs, 1);
710 		/*
711 		 * Small value will give wrong emem size in
712 		 * ddr3_calc_mem_cs_size
713 		 */
714 		return 0.01;
715 	}
716 }
717 
718 int ddr3_calc_mem_cs_size(u32 cs, u32 *cs_size)
719 {
720 	float cs_mem_size;
721 
722 	/* Calculate in GiB */
723 	cs_mem_size = ((ddr3_get_bus_width() / ddr3_get_device_width(cs)) *
724 		       ddr3_get_device_size(cs)) / 8;
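	/*
	 * e.g. a 32-bit bus populated with 4 Gbit x8 devices:
	 * (32 / 8) * 4 / 8 = 2 GiB per CS (before the bus width multiplier)
	 */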
725 
726 	/*
	 * Multiply by the controller bus width: 2x for 64 bit.
	 * The SoC controller may be 32 or 64 bit wide, so bit 15 in
	 * register 0x1400, which indicates whether the whole bus or only
	 * half of it is used, has a different meaning.
731 	 */
732 	cs_mem_size *= DDR_CONTROLLER_BUS_WIDTH_MULTIPLIER;
733 
734 	if (cs_mem_size == 0.125) {
735 		*cs_size = 128 << 20;
736 	} else if (cs_mem_size == 0.25) {
737 		*cs_size = 256 << 20;
738 	} else if (cs_mem_size == 0.5) {
739 		*cs_size = 512 << 20;
740 	} else if (cs_mem_size == 1) {
741 		*cs_size = 1 << 30;
742 	} else if (cs_mem_size == 2) {
743 		*cs_size = 2 << 30;
744 	} else {
745 		DEBUG_INIT_C("Error: Wrong Memory size of Cs: ", cs, 1);
746 		return MV_BAD_VALUE;
747 	}
748 
749 	return MV_OK;
750 }
751 
752 /*
753  * Name:     ddr3_hws_tune_training_params
754  * Desc:
755  * Args:
756  * Notes: Tune internal training params
757  * Returns:
758  */
759 static int ddr3_hws_tune_training_params(u8 dev_num)
760 {
761 	struct tune_train_params params;
762 	int status;
763 
	/* NOTE: do not remove any field initialization */
765 	params.ck_delay = TUNE_TRAINING_PARAMS_CK_DELAY;
766 	params.ck_delay_16 = TUNE_TRAINING_PARAMS_CK_DELAY_16;
767 	params.p_finger = TUNE_TRAINING_PARAMS_PFINGER;
768 	params.n_finger = TUNE_TRAINING_PARAMS_NFINGER;
769 	params.phy_reg3_val = TUNE_TRAINING_PARAMS_PHYREG3VAL;
770 
771 	status = ddr3_tip_tune_training_params(dev_num, &params);
772 	if (MV_OK != status) {
773 		printf("%s Training Sequence - FAILED\n", ddr_type);
774 		return status;
775 	}
776 
777 	return MV_OK;
778 }
779