// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include <common.h>
#include <i2c.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

#include "../../../../arch/arm/mach-mvebu/serdes/a38x/sys_env_lib.h"

static struct dlb_config ddr3_dlb_config_table[] = {
	{REG_STATIC_DRAM_DLB_CONTROL, 0x2000005c},
	{DLB_BUS_OPTIMIZATION_WEIGHTS_REG, 0x00880000},
	{DLB_AGING_REGISTER, 0x0f7f007f},
	{DLB_EVICTION_CONTROL_REG, 0x0000129f},
	{DLB_EVICTION_TIMERS_REGISTER_REG, 0x00ff0000},
	{DLB_BUS_WEIGHTS_DIFF_CS, 0x04030802},
	{DLB_BUS_WEIGHTS_DIFF_BG, 0x00000a02},
	{DLB_BUS_WEIGHTS_SAME_BG, 0x09000a01},
	{DLB_BUS_WEIGHTS_RD_WR, 0x00020005},
	{DLB_BUS_WEIGHTS_ATTR_SYS_PRIO, 0x00060f10},
	{DLB_MAIN_QUEUE_MAP, 0x00000543},
	{DLB_LINE_SPLIT, 0x00000000},
	{DLB_USER_COMMAND_REG, 0x00000000},
	{0x0, 0x0}
};

static struct dlb_config ddr3_dlb_config_table_a0[] = {
	{REG_STATIC_DRAM_DLB_CONTROL, 0x2000005c},
	{DLB_BUS_OPTIMIZATION_WEIGHTS_REG, 0x00880000},
	{DLB_AGING_REGISTER, 0x0f7f007f},
	{DLB_EVICTION_CONTROL_REG, 0x0000129f},
	{DLB_EVICTION_TIMERS_REGISTER_REG, 0x00ff0000},
	{DLB_BUS_WEIGHTS_DIFF_CS, 0x04030802},
	{DLB_BUS_WEIGHTS_DIFF_BG, 0x00000a02},
	{DLB_BUS_WEIGHTS_SAME_BG, 0x09000a01},
	{DLB_BUS_WEIGHTS_RD_WR, 0x00020005},
	{DLB_BUS_WEIGHTS_ATTR_SYS_PRIO, 0x00060f10},
	{DLB_MAIN_QUEUE_MAP, 0x00000543},
	{DLB_LINE_SPLIT, 0x00000000},
	{DLB_USER_COMMAND_REG, 0x00000000},
	{0x0, 0x0}
};
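
/*
 * Note: both tables above are terminated by a {0x0, 0x0} sentinel entry,
 * which ddr3_new_tip_dlb_config() below uses as its end-of-table marker.
 * The Z1 and A0 tables currently carry identical values; presumably they are
 * kept separate so the A0 settings can diverge if needed.
 */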

#if defined(CONFIG_ARMADA_38X)
struct dram_modes {
	char *mode_name;
	u8 cpu_freq;
	u8 fab_freq;
	u8 chip_id;
	u8 chip_board_rev;
	struct reg_data *regs;
};

struct dram_modes ddr_modes[] = {
#ifdef SUPPORT_STATIC_DUNIT_CONFIG
	/* Conf name, CPU freq, Fab freq, Chip ID, Chip/Board, MC regs */
#ifdef CONFIG_CUSTOMER_BOARD_SUPPORT
	{"a38x_customer_0_800", DDR_FREQ_800, 0, 0x0, A38X_CUSTOMER_BOARD_ID0,
	 ddr3_customer_800},
	{"a38x_customer_1_800", DDR_FREQ_800, 0, 0x0, A38X_CUSTOMER_BOARD_ID1,
	 ddr3_customer_800},
#else
	{"a38x_533", DDR_FREQ_533, 0, 0x0, MARVELL_BOARD, ddr3_a38x_533},
	{"a38x_667", DDR_FREQ_667, 0, 0x0, MARVELL_BOARD, ddr3_a38x_667},
	{"a38x_800", DDR_FREQ_800, 0, 0x0, MARVELL_BOARD, ddr3_a38x_800},
	{"a38x_933", DDR_FREQ_933, 0, 0x0, MARVELL_BOARD, ddr3_a38x_933},
#endif
#endif
};
#endif /* defined(CONFIG_ARMADA_38X) */

/* Translates topology map definitions to real memory size in bits */
u32 mem_size[] = {
	ADDR_SIZE_512MB, ADDR_SIZE_1GB, ADDR_SIZE_2GB, ADDR_SIZE_4GB,
	ADDR_SIZE_8GB
};
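
/*
 * mem_size[] is indexed by the memory_size field of the topology map
 * (tm->interface_params[0].memory_size); see its use in
 * ddr3_fast_path_dynamic_cs_size_config() below.
 */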

static char *ddr_type = "DDR3";

/*
 * Set to 1 to use dynamic DUNIT configuration;
 * set to 0 (supported for A380 and AC3) to configure the DUNIT with the
 * values set by ddr3_tip_init_specific_reg_config.
 */
u8 generic_init_controller = 1;

#ifdef SUPPORT_STATIC_DUNIT_CONFIG
static u32 ddr3_get_static_ddr_mode(void);
#endif
static int ddr3_hws_tune_training_params(u8 dev_num);
/* device revision */
#define DEV_VERSION_ID_REG		0x1823c
#define REVISON_ID_OFFS			8
#define REVISON_ID_MASK			0xf00

/* A38x revisions */
#define MV_88F68XX_Z1_ID		0x0
#define MV_88F68XX_A0_ID		0x4
/* A39x revisions */
#define MV_88F69XX_Z1_ID		0x2

/*
 * sys_env_device_rev_get - Get Marvell controller device revision number
 *
 * DESCRIPTION:
 *       This function returns an 8-bit value describing the device revision
 *       as defined in the Revision ID Register.
 *
 * INPUT:
 *       None.
 *
 * OUTPUT:
 *       None.
 *
 * RETURN:
 *       8-bit value describing the Marvell controller revision number
 */
u8 sys_env_device_rev_get(void)
{
	u32 value;

	value = reg_read(DEV_VERSION_ID_REG);
	return (value & (REVISON_ID_MASK)) >> REVISON_ID_OFFS;
}
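
/*
 * Example: on an A38x A0 stepping the function above returns
 * MV_88F68XX_A0_ID (0x4), which makes sys_env_dlb_config_ptr_get() below
 * select the A0 DLB configuration table.
 */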

/*
 * sys_env_dlb_config_ptr_get
 *
 * DESCRIPTION: returns a pointer to the DLB configuration table
 *
 * INPUT: none
 *
 * OUTPUT: pointer to the DLB configuration table
 *
 * RETURN:
 *       returns a pointer to the DLB configuration table
 */
struct dlb_config *sys_env_dlb_config_ptr_get(void)
{
#ifdef CONFIG_ARMADA_39X
	return &ddr3_dlb_config_table_a0[0];
#else
	if (sys_env_device_rev_get() == MV_88F68XX_A0_ID)
		return &ddr3_dlb_config_table_a0[0];
	else
		return &ddr3_dlb_config_table[0];
#endif
}

/*
 * sys_env_get_cs_ena_from_reg
 *
 * DESCRIPTION: Get bit mask of enabled CS
 *
 * INPUT: None
 *
 * OUTPUT: None
 *
 * RETURN:
 *       Bit mask of enabled CS, 1 if only CS0 enabled,
 *       3 if both CS0 and CS1 enabled
 */
u32 sys_env_get_cs_ena_from_reg(void)
{
	return reg_read(REG_DDR3_RANK_CTRL_ADDR) &
		REG_DDR3_RANK_CTRL_CS_ENA_MASK;
}
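
/*
 * Example: a return value of 0x3 means both CS0 and CS1 are populated;
 * ddr3_get_cs_num_from_reg() below turns this mask into a rank count.
 */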

static void ddr3_restore_and_set_final_windows(u32 *win)
{
	u32 win_ctrl_reg, num_of_win_regs;
	u32 cs_ena = sys_env_get_cs_ena_from_reg();
	u32 ui;

	win_ctrl_reg = REG_XBAR_WIN_4_CTRL_ADDR;
	num_of_win_regs = 16;

	/* Return XBAR windows 4-7 or 16-19 init configuration */
	for (ui = 0; ui < num_of_win_regs; ui++)
		reg_write((win_ctrl_reg + 0x4 * ui), win[ui]);

	printf("%s Training Sequence - Switching XBAR Window to FastPath Window\n",
	       ddr_type);

#if defined DYNAMIC_CS_SIZE_CONFIG
	if (ddr3_fast_path_dynamic_cs_size_config(cs_ena) != MV_OK)
		printf("ddr3_fast_path_dynamic_cs_size_config FAILED\n");
#else
	u32 reg, cs;
	reg = 0x1fffffe1;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			reg |= (cs << 2);
			break;
		}
	}
	/* Open fast path Window to - 0.5G */
	reg_write(REG_FASTPATH_WIN_0_CTRL_ADDR, reg);
#endif
}

static int ddr3_save_and_set_training_windows(u32 *win)
{
	u32 cs_ena;
	u32 reg, tmp_count, cs, ui;
	u32 win_ctrl_reg, win_base_reg, win_remap_reg;
	u32 num_of_win_regs, win_jump_index;
	win_ctrl_reg = REG_XBAR_WIN_4_CTRL_ADDR;
	win_base_reg = REG_XBAR_WIN_4_BASE_ADDR;
	win_remap_reg = REG_XBAR_WIN_4_REMAP_ADDR;
	win_jump_index = 0x10;
	num_of_win_regs = 16;
	struct hws_topology_map *tm = ddr3_get_topology_map();

#ifdef DISABLE_L2_FILTERING_DURING_DDR_TRAINING
	/*
	 * Disable L2 filtering during DDR training
	 * (when Cross Bar window is open)
	 */
	reg_write(ADDRESS_FILTERING_END_REGISTER, 0);
#endif

	cs_ena = tm->interface_params[0].as_bus_params[0].cs_bitmask;

	/* Close XBAR Window 19 - Not needed */
	/* {0x000200e8}  -   Open Mbus Window - 2G */
	reg_write(REG_XBAR_WIN_19_CTRL_ADDR, 0);

	/* Save XBAR Windows 4-19 init configurations */
	for (ui = 0; ui < num_of_win_regs; ui++)
		win[ui] = reg_read(win_ctrl_reg + 0x4 * ui);

	/* Open XBAR Windows 4-7 or 16-19 for other CS */
	reg = 0;
	tmp_count = 0;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			switch (cs) {
			case 0:
				reg = 0x0e00;
				break;
			case 1:
				reg = 0x0d00;
				break;
			case 2:
				reg = 0x0b00;
				break;
			case 3:
				reg = 0x0700;
				break;
			}
			reg |= (1 << 0);
			reg |= (SDRAM_CS_SIZE & 0xffff0000);

			reg_write(win_ctrl_reg + win_jump_index * tmp_count,
				  reg);
			reg = (((SDRAM_CS_SIZE + 1) * (tmp_count)) &
			       0xffff0000);
			reg_write(win_base_reg + win_jump_index * tmp_count,
				  reg);

			if (win_remap_reg <= REG_XBAR_WIN_7_REMAP_ADDR)
				reg_write(win_remap_reg +
					  win_jump_index * tmp_count, 0);

			tmp_count++;
		}
	}

	return MV_OK;
}

/*
 * Name:     ddr3_init - Main DDR3 Init function
 * Desc:     This routine initializes the DDR3 MC and runs HW training.
 * Args:     None.
 * Notes:
 * Returns:  MV_OK on success, error code otherwise.
 */
int ddr3_init(void)
{
	u32 reg = 0;
	u32 soc_num;
	int status;
	u32 win[16];

	/* SoC/Board special initializations */
	/* Get version from internal library */
	ddr3_print_version();

	/* Add sub_version string */
	DEBUG_INIT_C("", SUB_VERSION, 1);

	/* Switching CPU to MRVL ID */
	soc_num = (reg_read(REG_SAMPLE_RESET_HIGH_ADDR) & SAR1_CPU_CORE_MASK) >>
		SAR1_CPU_CORE_OFFSET;
	switch (soc_num) {
	case 0x3:
	case 0x1:
		reg_bit_set(CPU_CONFIGURATION_REG(1), CPU_MRVL_ID_OFFSET);
		/* fall-through */
	case 0x0:
		reg_bit_set(CPU_CONFIGURATION_REG(0), CPU_MRVL_ID_OFFSET);
		/* fall-through */
	default:
		break;
	}

	/*
	 * Set DRAM Reset Mask in case detected GPIO indication of wakeup from
	 * suspend, i.e. the DRAM values will not be overwritten / reset when
	 * waking from suspend
	 */
	if (sys_env_suspend_wakeup_check() ==
	    SUSPEND_WAKEUP_ENABLED_GPIO_DETECTED) {
		reg_bit_set(REG_SDRAM_INIT_CTRL_ADDR,
			    1 << REG_SDRAM_INIT_RESET_MASK_OFFS);
	}

	/*
	 * Stage 0 - Set board configuration
	 */

	/* Check if DRAM is already initialized */
	if (reg_read(REG_BOOTROM_ROUTINE_ADDR) &
	    (1 << REG_BOOTROM_ROUTINE_DRAM_INIT_OFFS)) {
		printf("%s Training Sequence - 2nd boot - Skip\n", ddr_type);
		return MV_OK;
	}

	/*
	 * Stage 1 - Dunit Setup
	 */

	/* Fix read ready phases for all SOC in reg 0x15c8 */
	reg = reg_read(REG_TRAINING_DEBUG_3_ADDR);
	reg &= ~(REG_TRAINING_DEBUG_3_MASK);
	reg |= 0x4;		/* Phase 0 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << REG_TRAINING_DEBUG_3_OFFS);
	reg |= (0x4 << (1 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 1 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (3 * REG_TRAINING_DEBUG_3_OFFS));
	reg |= (0x6 << (3 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 3 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (4 * REG_TRAINING_DEBUG_3_OFFS));
	reg |= (0x6 << (4 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 4 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (5 * REG_TRAINING_DEBUG_3_OFFS));
	reg |= (0x6 << (5 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 5 */
	reg_write(REG_TRAINING_DEBUG_3_ADDR, reg);

	/*
	 * Axi_bresp_mode[8] = Compliant,
	 * Axi_addr_decode_cntrl[11] = Internal,
	 * Axi_data_bus_width[0] = 128bit
	 */
	/* 0x14a8 - AXI Control Register */
	reg_write(REG_DRAM_AXI_CTRL_ADDR, 0);

	/*
	 * Stage 2 - Training Values Setup
	 */
	/* Set X-BAR windows for the training sequence */
	ddr3_save_and_set_training_windows(win);

#ifdef SUPPORT_STATIC_DUNIT_CONFIG
	/*
	 * Load static controller configuration (in case dynamic/generic init
	 * is not enabled)
	 */
	if (generic_init_controller == 0) {
		ddr3_tip_init_specific_reg_config(0,
						  ddr_modes
						  [ddr3_get_static_ddr_mode
						   ()].regs);
	}
#endif

	/* Tune training algorithm parameters */
	status = ddr3_hws_tune_training_params(0);
	if (MV_OK != status)
		return status;

	/* Set log level for training lib */
	ddr3_hws_set_log_level(DEBUG_BLOCK_ALL, DEBUG_LEVEL_ERROR);

	/* Start New Training IP */
	status = ddr3_hws_hw_training();
	if (MV_OK != status) {
		printf("%s Training Sequence - FAILED\n", ddr_type);
		return status;
	}

	/*
	 * Stage 3 - Finish
	 */
	/* Restore and set windows */
	ddr3_restore_and_set_final_windows(win);

	/* Update DRAM init indication in bootROM register */
	reg = reg_read(REG_BOOTROM_ROUTINE_ADDR);
	reg_write(REG_BOOTROM_ROUTINE_ADDR,
		  reg | (1 << REG_BOOTROM_ROUTINE_DRAM_INIT_OFFS));

	/* DLB config */
	ddr3_new_tip_dlb_config();

#if defined(ECC_SUPPORT)
	if (ddr3_if_ecc_enabled())
		ddr3_new_tip_ecc_scrub();
#endif

	printf("%s Training Sequence - Ended Successfully\n", ddr_type);

	return MV_OK;
}

/*
 * Name:     ddr3_get_cpu_freq
 * Desc:     read S@R and return CPU frequency
 * Args:
 * Notes:
 * Returns:  required value
 */
u32 ddr3_get_cpu_freq(void)
{
	return ddr3_tip_get_init_freq();
}

/*
 * Name:     ddr3_get_fab_opt
 * Desc:     read S@R and return the fabric clock option
 * Args:
 * Notes:
 * Returns:  required value (always 0 on this SoC - no fabric)
 */
u32 ddr3_get_fab_opt(void)
{
	return 0;		/* No fabric */
}

/*
 * Name:     ddr3_get_static_mc_value
 * Desc:     Extract field(s) from a register, used when initializing the
 *           memory controller with static parameters.
 * Args:     reg_addr - register address
 *           offset1/mask1 - first field to extract
 *           offset2/mask2 - optional second field (mask2 == 0 to skip)
 * Notes:
 * Returns:  The extracted (and combined) field value.
 */
u32 ddr3_get_static_mc_value(u32 reg_addr, u32 offset1, u32 mask1,
			     u32 offset2, u32 mask2)
{
	u32 reg, temp;

	reg = reg_read(reg_addr);

	temp = (reg >> offset1) & mask1;
	if (mask2)
		temp |= (reg >> offset2) & mask2;

	return temp;
}

/*
 * Name:     ddr3_get_static_ddr_mode - Find the static DDR mode
 * Desc:     Look up the entry in ddr_modes[] that matches the current CPU
 *           frequency, fabric option and board/chip revision. Used to init
 *           the controller without the HW training procedure; the user must
 *           provide a compatible header file with register data.
 * Args:     None.
 * Notes:
 * Returns:  Index of the matching entry in ddr_modes[] (0 if no match).
 */
u32 ddr3_get_static_ddr_mode(void)
{
	u32 chip_board_rev, i;
	u32 size;

	/* Valid only for A380; MSYS uses dynamic controller config */
#ifdef CONFIG_CUSTOMER_BOARD_SUPPORT
	/*
	 * Customer boards select DDR mode according to
	 * board ID & Sample@Reset
	 */
	chip_board_rev = mv_board_id_get();
#else
	/* Marvell boards select DDR mode according to Sample@Reset only */
	chip_board_rev = MARVELL_BOARD;
#endif

	size = ARRAY_SIZE(ddr_modes);
	for (i = 0; i < size; i++) {
		if ((ddr3_get_cpu_freq() == ddr_modes[i].cpu_freq) &&
		    (ddr3_get_fab_opt() == ddr_modes[i].fab_freq) &&
		    (chip_board_rev == ddr_modes[i].chip_board_rev))
			return i;
	}

	DEBUG_INIT_S("\n*** Error: ddr3_get_static_ddr_mode: No match for requested DDR mode. ***\n\n");

	return 0;
}

/******************************************************************************
 * Name:     ddr3_get_cs_num_from_reg
 * Desc:     Count the enabled chip selects from the rank control register
 * Args:
 * Notes:
 * Returns:  Number of enabled CS
 */
u32 ddr3_get_cs_num_from_reg(void)
{
	u32 cs_ena = sys_env_get_cs_ena_from_reg();
	u32 cs_count = 0;
	u32 cs;

	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs))
			cs_count++;
	}

	return cs_count;
}

void get_target_freq(u32 freq_mode, u32 *ddr_freq, u32 *hclk_ps)
{
	u32 tmp, hclk = 200;

	switch (freq_mode) {
	case 4:
		tmp = 1;	/* DDR_400; */
		hclk = 200;
		break;
	case 0x8:
		tmp = 1;	/* DDR_666; */
		hclk = 333;
		break;
	case 0xc:
		tmp = 1;	/* DDR_800; */
		hclk = 400;
		break;
	default:
		/* Unknown mode - report zero and bail out */
		*ddr_freq = 0;
		*hclk_ps = 0;
		return;
	}

	*ddr_freq = tmp;		/* DDR freq define */
	*hclk_ps = 1000000 / hclk;	/* values are 1/HCLK in ps */

	return;
}
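
/*
 * Example: freq_mode 0xc selects the 800 MHz DDR setting with hclk = 400 MHz,
 * so *hclk_ps is reported as 1000000 / 400 = 2500 ps per hclk cycle.
 */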

void ddr3_new_tip_dlb_config(void)
{
	u32 reg, i = 0;
	struct dlb_config *config_table_ptr = sys_env_dlb_config_ptr_get();

	/* Write the configuration */
	while (config_table_ptr[i].reg_addr != 0) {
		reg_write(config_table_ptr[i].reg_addr,
			  config_table_ptr[i].reg_data);
		i++;
	}

	/* Enable DLB */
	reg = reg_read(REG_STATIC_DRAM_DLB_CONTROL);
	reg |= DLB_ENABLE | DLB_WRITE_COALESING | DLB_AXI_PREFETCH_EN |
		DLB_MBUS_PREFETCH_EN | PREFETCH_N_LN_SZ_TR;
	reg_write(REG_STATIC_DRAM_DLB_CONTROL, reg);
}

int ddr3_fast_path_dynamic_cs_size_config(u32 cs_ena)
{
	u32 reg, cs;
	u32 mem_total_size = 0;
	u32 cs_mem_size = 0;
	u32 mem_total_size_c, cs_mem_size_c;

#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
	u32 physical_mem_size;
	u32 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE;
	struct hws_topology_map *tm = ddr3_get_topology_map();
#endif

	/* Open fast path windows */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			/* get CS size */
			if (ddr3_calc_mem_cs_size(cs, &cs_mem_size) != MV_OK)
				return MV_FAIL;

#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
			/*
			 * If the number of address pins doesn't allow using
			 * the max mem size defined in the topology, the mem
			 * size is limited by DEVICE_MAX_DRAM_ADDRESS_SIZE
			 */
			physical_mem_size = mem_size
				[tm->interface_params[0].memory_size];

			if (ddr3_get_device_width(cs) == 16) {
				/*
				 * A 16-bit memory device can hold twice as
				 * much - the least significant address pin is
				 * not needed
				 */
				max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE * 2;
			}

			if (physical_mem_size > max_mem_size) {
				cs_mem_size = max_mem_size *
					(ddr3_get_bus_width() /
					 ddr3_get_device_width(cs));
				printf("Updated Physical Mem size from 0x%x to 0x%x\n",
				       physical_mem_size,
				       DEVICE_MAX_DRAM_ADDRESS_SIZE);
			}
#endif

			/* set fast path window control for the cs */
			reg = 0xffffe1;
			reg |= (cs << 2);
			reg |= (cs_mem_size - 1) & 0xffff0000;
			/* Open fast path Window */
			reg_write(REG_FASTPATH_WIN_CTRL_ADDR(cs), reg);

			/* Set fast path window base address for the cs */
			reg = ((cs_mem_size) * cs) & 0xffff0000;
			/* Set base address */
			reg_write(REG_FASTPATH_WIN_BASE_ADDR(cs), reg);

			/*
			 * Since memory size may be bigger than 4G the sum may
			 * not fit in a 32 bit word,
			 * so to estimate the result divide mem_total_size and
			 * cs_mem_size by 0x10000 (it is equal to >> 16)
			 */
			mem_total_size_c = mem_total_size >> 16;
			cs_mem_size_c = cs_mem_size >> 16;
			/* if the sum is less than 2 G - calculate the value */
			if (mem_total_size_c + cs_mem_size_c < 0x10000)
				mem_total_size += cs_mem_size;
			else	/* put max possible size */
				mem_total_size = L2_FILTER_FOR_MAX_MEMORY_SIZE;
		}
	}

	/* Set L2 filtering to Max Memory size */
	reg_write(ADDRESS_FILTERING_END_REGISTER, mem_total_size);

	return MV_OK;
}

u32 ddr3_get_bus_width(void)
{
	u32 bus_width;

	bus_width = (reg_read(REG_SDRAM_CONFIG_ADDR) & 0x8000) >>
		REG_SDRAM_CONFIG_WIDTH_OFFS;

	return (bus_width == 0) ? 16 : 32;
}

u32 ddr3_get_device_width(u32 cs)
{
	u32 device_width;

	device_width = (reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR) &
			(0x3 << (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs))) >>
		(REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs);

	return (device_width == 0) ? 8 : 16;
}

static int ddr3_get_device_size(u32 cs)
{
	u32 device_size_low, device_size_high, device_size;
	u32 data, cs_low_offset, cs_high_offset;

	cs_low_offset = REG_SDRAM_ADDRESS_SIZE_OFFS + cs * 4;
	cs_high_offset = REG_SDRAM_ADDRESS_SIZE_OFFS +
		REG_SDRAM_ADDRESS_SIZE_HIGH_OFFS + cs;

	data = reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR);
	device_size_low = (data >> cs_low_offset) & 0x3;
	device_size_high = (data >> cs_high_offset) & 0x1;

	device_size = device_size_low | (device_size_high << 2);

	switch (device_size) {
	case 0:
		return 2048;
	case 2:
		return 512;
	case 3:
		return 1024;
	case 4:
		return 4096;
	case 5:
		return 8192;
	case 1:
	default:
		DEBUG_INIT_C("Error: Wrong device size of Cs: ", cs, 1);
		/*
		 * Small value will give wrong emem size in
		 * ddr3_calc_mem_cs_size
		 */
		return 0;
	}
}

int ddr3_calc_mem_cs_size(u32 cs, u32 *cs_size)
{
	int cs_mem_size;

	/* Calculate in MiB */
	cs_mem_size = ((ddr3_get_bus_width() / ddr3_get_device_width(cs)) *
		       ddr3_get_device_size(cs)) / 8;

	/*
	 * Multiply by the controller bus width: 2x for 64 bit.
	 * (The SoC controller may be 32 or 64 bit wide, so bit 15 in register
	 * 0x1400, which tells whether the whole bus or only half of it is
	 * used, has a different meaning.)
	 */
	cs_mem_size *= DDR_CONTROLLER_BUS_WIDTH_MULTIPLIER;

	if (!cs_mem_size || (cs_mem_size == 64) || (cs_mem_size == 4096)) {
		DEBUG_INIT_C("Error: Wrong Memory size of Cs: ", cs, 1);
		return MV_BAD_VALUE;
	}

	*cs_size = cs_mem_size << 20;
	return MV_OK;
}
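
/*
 * Worked example (assumption for illustration only: 32-bit bus populated
 * with x8 devices of 4 Gbit density, i.e. ddr3_get_device_size() == 4096):
 * (32 / 8) * 4096 / 8 = 2048 MiB per CS, so *cs_size = 2048 << 20 bytes.
 * DDR_CONTROLLER_BUS_WIDTH_MULTIPLIER then scales this, 2x for a 64-bit
 * controller as noted in the comment above.
 */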

/*
 * Name:     ddr3_hws_tune_training_params
 * Desc:
 * Args:
 * Notes:    Tune internal training params
 * Returns:
 */
static int ddr3_hws_tune_training_params(u8 dev_num)
{
	struct tune_train_params params;
	int status;

	/* NOTE: do not remove any field initialization */
	params.ck_delay = TUNE_TRAINING_PARAMS_CK_DELAY;
	params.ck_delay_16 = TUNE_TRAINING_PARAMS_CK_DELAY_16;
	params.p_finger = TUNE_TRAINING_PARAMS_PFINGER;
	params.n_finger = TUNE_TRAINING_PARAMS_NFINGER;
	params.phy_reg3_val = TUNE_TRAINING_PARAMS_PHYREG3VAL;

	status = ddr3_tip_tune_training_params(dev_num, &params);
	if (MV_OK != status) {
		printf("%s Training Sequence - FAILED\n", ddr_type);
		return status;
	}

	return MV_OK;
}