1 /*
2  * Copyright (C) Marvell International Ltd. and its affiliates
3  *
4  * SPDX-License-Identifier:	GPL-2.0
5  */
6 
7 #include <common.h>
8 #include <i2c.h>
9 #include <spl.h>
10 #include <asm/io.h>
11 #include <asm/arch/cpu.h>
12 #include <asm/arch/soc.h>
13 
14 #include "ddr3_init.h"
15 
16 u8 is_reg_dump = 0;
17 u8 debug_pbs = DEBUG_LEVEL_ERROR;
18 
19 /*
20  * API to change flags outside of the lib
21  */
22 #ifndef SILENT_LIB
23 /* Debug flags for other Training modules */
24 u8 debug_training_static = DEBUG_LEVEL_ERROR;
25 u8 debug_training = DEBUG_LEVEL_ERROR;
26 u8 debug_leveling = DEBUG_LEVEL_ERROR;
27 u8 debug_centralization = DEBUG_LEVEL_ERROR;
28 u8 debug_training_ip = DEBUG_LEVEL_ERROR;
29 u8 debug_training_bist = DEBUG_LEVEL_ERROR;
30 u8 debug_training_hw_alg = DEBUG_LEVEL_ERROR;
31 u8 debug_training_access = DEBUG_LEVEL_ERROR;
32 u8 debug_training_a38x = DEBUG_LEVEL_ERROR;
33 
34 void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
35 {
36 	switch (block) {
37 	case DEBUG_BLOCK_STATIC:
38 		debug_training_static = level;
39 		break;
40 	case DEBUG_BLOCK_TRAINING_MAIN:
41 		debug_training = level;
42 		break;
43 	case DEBUG_BLOCK_LEVELING:
44 		debug_leveling = level;
45 		break;
46 	case DEBUG_BLOCK_CENTRALIZATION:
47 		debug_centralization = level;
48 		break;
49 	case DEBUG_BLOCK_PBS:
50 		debug_pbs = level;
51 		break;
52 	case DEBUG_BLOCK_ALG:
53 		debug_training_hw_alg = level;
54 		break;
55 	case DEBUG_BLOCK_DEVICE:
56 		debug_training_a38x = level;
57 		break;
58 	case DEBUG_BLOCK_ACCESS:
59 		debug_training_access = level;
60 		break;
61 	case DEBUG_STAGES_REG_DUMP:
62 		if (level == DEBUG_LEVEL_TRACE)
63 			is_reg_dump = 1;
64 		else
65 			is_reg_dump = 0;
66 		break;
67 	case DEBUG_BLOCK_ALL:
68 	default:
69 		debug_training_static = level;
70 		debug_training = level;
71 		debug_leveling = level;
72 		debug_centralization = level;
73 		debug_pbs = level;
74 		debug_training_hw_alg = level;
75 		debug_training_access = level;
76 		debug_training_a38x = level;
77 	}
78 }
79 #else
80 void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
81 {
82 	return;
83 }
84 #endif
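
/*
 * Illustrative use of the log-level API (a sketch, not called by the driver
 * itself): raise the verbosity of the main training flow and enable the
 * per-stage register dump.
 *
 *	ddr3_hws_set_log_level(DEBUG_BLOCK_TRAINING_MAIN, DEBUG_LEVEL_INFO);
 *	ddr3_hws_set_log_level(DEBUG_STAGES_REG_DUMP, DEBUG_LEVEL_TRACE);
 */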
85 
86 struct hws_tip_config_func_db config_func_info[HWS_MAX_DEVICE_NUM];
87 u8 is_default_centralization = 0;
88 u8 is_tune_result = 0;
89 u8 is_validate_window_per_if = 0;
90 u8 is_validate_window_per_pup = 0;
91 u8 sweep_cnt = 1;
92 u32 is_bist_reset_bit = 1;
93 static struct hws_xsb_info xsb_info[HWS_MAX_DEVICE_NUM];
94 
95 /*
96  * Dump Dunit & Phy registers
97  */
98 int ddr3_tip_reg_dump(u32 dev_num)
99 {
100 	u32 if_id, reg_addr, data_value, bus_id;
101 	u32 read_data[MAX_INTERFACE_NUM];
102 	struct hws_topology_map *tm = ddr3_get_topology_map();
103 
104 	printf("-- dunit registers --\n");
105 	for (reg_addr = 0x1400; reg_addr < 0x19f0; reg_addr += 4) {
106 		printf("0x%x ", reg_addr);
107 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
108 			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
109 			CHECK_STATUS(ddr3_tip_if_read
110 				     (dev_num, ACCESS_TYPE_UNICAST,
111 				      if_id, reg_addr, read_data,
112 				      MASK_ALL_BITS));
113 			printf("0x%x ", read_data[if_id]);
114 		}
115 		printf("\n");
116 	}
117 
118 	printf("-- Phy registers --\n");
119 	for (reg_addr = 0; reg_addr <= 0xff; reg_addr++) {
120 		printf("0x%x ", reg_addr);
121 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
122 			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
123 			for (bus_id = 0;
124 			     bus_id < tm->num_of_bus_per_interface;
125 			     bus_id++) {
126 				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
127 				CHECK_STATUS(ddr3_tip_bus_read
128 					     (dev_num, if_id,
129 					      ACCESS_TYPE_UNICAST, bus_id,
130 					      DDR_PHY_DATA, reg_addr,
131 					      &data_value));
132 				printf("0x%x ", data_value);
133 			}
134 			for (bus_id = 0;
135 			     bus_id < tm->num_of_bus_per_interface;
136 			     bus_id++) {
137 				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
138 				CHECK_STATUS(ddr3_tip_bus_read
139 					     (dev_num, if_id,
140 					      ACCESS_TYPE_UNICAST, bus_id,
141 					      DDR_PHY_CONTROL, reg_addr,
142 					      &data_value));
143 				printf("0x%x ", data_value);
144 			}
145 		}
146 		printf("\n");
147 	}
148 
149 	return MV_OK;
150 }
151 
152 /*
153  * Register access func registration
154  */
155 int ddr3_tip_init_config_func(u32 dev_num,
156 			      struct hws_tip_config_func_db *config_func)
157 {
158 	if (config_func == NULL)
159 		return MV_BAD_PARAM;
160 
161 	memcpy(&config_func_info[dev_num], config_func,
162 	       sizeof(struct hws_tip_config_func_db));
163 
164 	return MV_OK;
165 }
166 
167 /*
168  * Read training result table
169  */
170 int hws_ddr3_tip_read_training_result(
171 	u32 dev_num, enum hws_result result[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM])
172 {
173 	dev_num = dev_num;
174 
175 	if (result == NULL)
176 		return MV_BAD_PARAM;
	memcpy(result, training_result, sizeof(training_result));
178 
179 	return MV_OK;
180 }
181 
182 /*
183  * Get training result info pointer
184  */
185 enum hws_result *ddr3_tip_get_result_ptr(u32 stage)
186 {
187 	return training_result[stage];
188 }
189 
190 /*
191  * Device info read
192  */
193 int ddr3_tip_get_device_info(u32 dev_num, struct ddr3_device_info *info_ptr)
194 {
195 	if (config_func_info[dev_num].tip_get_device_info_func != NULL) {
196 		return config_func_info[dev_num].
197 			tip_get_device_info_func((u8) dev_num, info_ptr);
198 	}
199 
200 	return MV_FAIL;
201 }
202 
203 #ifndef EXCLUDE_SWITCH_DEBUG
204 /*
205  * Convert freq to character string
206  */
static char *convert_freq(enum hws_ddr_freq freq)
{
	switch (freq) {
	case DDR_FREQ_LOW_FREQ:
		return "DDR_FREQ_LOW_FREQ";
	case DDR_FREQ_400:
		return "400";
	case DDR_FREQ_533:
		return "533";
	case DDR_FREQ_667:
		return "667";
	case DDR_FREQ_800:
		return "800";
	case DDR_FREQ_933:
		return "933";
	case DDR_FREQ_1066:
		return "1066";
	case DDR_FREQ_311:
		return "311";
	case DDR_FREQ_333:
		return "333";
	case DDR_FREQ_467:
		return "467";
	case DDR_FREQ_850:
		return "850";
	case DDR_FREQ_900:
		return "900";
	case DDR_FREQ_360:
		return "DDR_FREQ_360";
	case DDR_FREQ_1000:
		return "DDR_FREQ_1000";
	default:
		return "Unknown Frequency";
	}
}
252 
253 /*
254  * Convert device ID to character string
255  */
256 static char *convert_dev_id(u32 dev_id)
257 {
258 	switch (dev_id) {
259 	case 0x6800:
260 		return "A38xx";
261 	case 0x6900:
262 		return "A39XX";
263 	case 0xf400:
264 		return "AC3";
265 	case 0xfc00:
266 		return "BC2";
267 
268 	default:
269 		return "Unknown Device";
270 	}
271 }
272 
273 /*
274  * Convert device ID to character string
275  */
276 static char *convert_mem_size(u32 dev_id)
277 {
278 	switch (dev_id) {
279 	case 0:
280 		return "512 MB";
281 	case 1:
282 		return "1 GB";
283 	case 2:
284 		return "2 GB";
285 	case 3:
286 		return "4 GB";
287 	case 4:
288 		return "8 GB";
289 
290 	default:
291 		return "wrong mem size";
292 	}
293 }
294 
295 int print_device_info(u8 dev_num)
296 {
297 	struct ddr3_device_info info_ptr;
298 	struct hws_topology_map *tm = ddr3_get_topology_map();
299 
300 	CHECK_STATUS(ddr3_tip_get_device_info(dev_num, &info_ptr));
301 	printf("=== DDR setup START===\n");
302 	printf("\tDevice ID: %s\n", convert_dev_id(info_ptr.device_id));
303 	printf("\tDDR3  CK delay: %d\n", info_ptr.ck_delay);
304 	print_topology(tm);
305 	printf("=== DDR setup END===\n");
306 
307 	return MV_OK;
308 }
309 
310 void hws_ddr3_tip_sweep_test(int enable)
311 {
312 	if (enable) {
313 		is_validate_window_per_if = 1;
314 		is_validate_window_per_pup = 1;
315 		debug_training = DEBUG_LEVEL_TRACE;
316 	} else {
317 		is_validate_window_per_if = 0;
318 		is_validate_window_per_pup = 0;
319 	}
320 }
321 #endif
322 
323 char *ddr3_tip_convert_tune_result(enum hws_result tune_result)
324 {
325 	switch (tune_result) {
326 	case TEST_FAILED:
327 		return "FAILED";
328 	case TEST_SUCCESS:
329 		return "PASS";
330 	case NO_TEST_DONE:
331 		return "NOT COMPLETED";
332 	default:
333 		return "Un-KNOWN";
334 	}
335 }
336 
337 /*
338  * Print log info
339  */
340 int ddr3_tip_print_log(u32 dev_num, u32 mem_addr)
341 {
342 	u32 if_id = 0;
343 	struct hws_topology_map *tm = ddr3_get_topology_map();
344 
345 	mem_addr = mem_addr;
346 
347 #ifndef EXCLUDE_SWITCH_DEBUG
348 	if ((is_validate_window_per_if != 0) ||
349 	    (is_validate_window_per_pup != 0)) {
350 		u32 is_pup_log = 0;
351 		enum hws_ddr_freq freq;
352 
353 		freq = tm->interface_params[first_active_if].memory_freq;
354 
355 		is_pup_log = (is_validate_window_per_pup != 0) ? 1 : 0;
356 		printf("===VALIDATE WINDOW LOG START===\n");
357 		printf("DDR Frequency: %s   ======\n", convert_freq(freq));
358 		/* print sweep windows */
359 		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
360 		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
361 		ddr3_tip_print_all_pbs_result(dev_num);
362 		ddr3_tip_print_wl_supp_result(dev_num);
363 		printf("===VALIDATE WINDOW LOG END ===\n");
364 		CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
365 		ddr3_tip_reg_dump(dev_num);
366 	}
367 #endif
368 
369 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
370 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
371 
372 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
373 				  ("IF %d Status:\n", if_id));
374 
375 		if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
376 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
377 					  ("\tInit Controller: %s\n",
378 					   ddr3_tip_convert_tune_result
379 					   (training_result[INIT_CONTROLLER]
380 					    [if_id])));
381 		}
382 		if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
383 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
384 					  ("\tLow freq Config: %s\n",
385 					   ddr3_tip_convert_tune_result
386 					   (training_result[SET_LOW_FREQ]
387 					    [if_id])));
388 		}
389 		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
390 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
391 					  ("\tLoad Pattern: %s\n",
392 					   ddr3_tip_convert_tune_result
393 					   (training_result[LOAD_PATTERN]
394 					    [if_id])));
395 		}
396 		if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
397 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
398 					  ("\tMedium freq Config: %s\n",
399 					   ddr3_tip_convert_tune_result
400 					   (training_result[SET_MEDIUM_FREQ]
401 					    [if_id])));
402 		}
403 		if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
404 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
405 					  ("\tWL: %s\n",
406 					   ddr3_tip_convert_tune_result
407 					   (training_result[WRITE_LEVELING]
408 					    [if_id])));
409 		}
410 		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
411 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
412 					  ("\tLoad Pattern: %s\n",
413 					   ddr3_tip_convert_tune_result
414 					   (training_result[LOAD_PATTERN_2]
415 					    [if_id])));
416 		}
417 		if (mask_tune_func & READ_LEVELING_MASK_BIT) {
418 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
419 					  ("\tRL: %s\n",
420 					   ddr3_tip_convert_tune_result
421 					   (training_result[READ_LEVELING]
422 					    [if_id])));
423 		}
424 		if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
425 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
426 					  ("\tWL Supp: %s\n",
427 					   ddr3_tip_convert_tune_result
428 					   (training_result[WRITE_LEVELING_SUPP]
429 					    [if_id])));
430 		}
431 		if (mask_tune_func & PBS_RX_MASK_BIT) {
432 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
433 					  ("\tPBS RX: %s\n",
434 					   ddr3_tip_convert_tune_result
435 					   (training_result[PBS_RX]
436 					    [if_id])));
437 		}
438 		if (mask_tune_func & PBS_TX_MASK_BIT) {
439 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
440 					  ("\tPBS TX: %s\n",
441 					   ddr3_tip_convert_tune_result
442 					   (training_result[PBS_TX]
443 					    [if_id])));
444 		}
445 		if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
446 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
447 					  ("\tTarget freq Config: %s\n",
448 					   ddr3_tip_convert_tune_result
449 					   (training_result[SET_TARGET_FREQ]
450 					    [if_id])));
451 		}
452 		if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
453 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
454 					  ("\tWL TF: %s\n",
455 					   ddr3_tip_convert_tune_result
456 					   (training_result[WRITE_LEVELING_TF]
457 					    [if_id])));
458 		}
459 		if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
460 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
461 					  ("\tRL TF: %s\n",
462 					   ddr3_tip_convert_tune_result
463 					   (training_result[READ_LEVELING_TF]
464 					    [if_id])));
465 		}
466 		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
467 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
468 					  ("\tWL TF Supp: %s\n",
469 					   ddr3_tip_convert_tune_result
470 					   (training_result
471 					    [WRITE_LEVELING_SUPP_TF]
472 					    [if_id])));
473 		}
474 		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
475 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
476 					  ("\tCentr RX: %s\n",
477 					   ddr3_tip_convert_tune_result
478 					   (training_result[CENTRALIZATION_RX]
479 					    [if_id])));
480 		}
481 		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
482 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
483 					  ("\tVREF_CALIBRATION: %s\n",
484 					   ddr3_tip_convert_tune_result
485 					   (training_result[VREF_CALIBRATION]
486 					    [if_id])));
487 		}
488 		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
489 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
490 					  ("\tCentr TX: %s\n",
491 					   ddr3_tip_convert_tune_result
492 					   (training_result[CENTRALIZATION_TX]
493 					    [if_id])));
494 		}
495 	}
496 
497 	return MV_OK;
498 }
499 
500 /*
501  * Print stability log info
502  */
503 int ddr3_tip_print_stability_log(u32 dev_num)
504 {
505 	u8 if_id = 0, csindex = 0, bus_id = 0, idx = 0;
506 	u32 reg_data;
507 	u32 read_data[MAX_INTERFACE_NUM];
508 	u32 max_cs = hws_ddr3_tip_max_cs_get();
509 	struct hws_topology_map *tm = ddr3_get_topology_map();
510 
511 	/* Title print */
512 	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
513 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
514 		printf("Title: I/F# , Tj, Calibration_n0, Calibration_p0, Calibration_n1, Calibration_p1, Calibration_n2, Calibration_p2,");
515 		for (csindex = 0; csindex < max_cs; csindex++) {
516 			printf("CS%d , ", csindex);
517 			printf("\n");
518 			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
519 			printf("VWTx, VWRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, Cen_tx, Cen_rx, Vref, DQVref,");
520 			printf("\t\t");
521 			for (idx = 0; idx < 11; idx++)
522 				printf("PBSTx-Pad%d,", idx);
523 			printf("\t\t");
524 			for (idx = 0; idx < 11; idx++)
525 				printf("PBSRx-Pad%d,", idx);
526 		}
527 	}
528 	printf("\n");
529 
530 	/* Data print */
531 	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
532 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
533 
534 		printf("Data: %d,%d,", if_id,
535 		       (config_func_info[dev_num].tip_get_temperature != NULL)
536 		       ? (config_func_info[dev_num].
537 			  tip_get_temperature(dev_num)) : (0));
538 
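		/*
		 * Calibration n/p values, matching the "Calibration_*" columns
		 * of the title line printed above.
		 */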
539 		CHECK_STATUS(ddr3_tip_if_read
540 			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8,
541 			      read_data, MASK_ALL_BITS));
542 		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
543 		       ((read_data[if_id] & 0xfc00) >> 10));
544 		CHECK_STATUS(ddr3_tip_if_read
545 			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8,
546 			      read_data, MASK_ALL_BITS));
547 		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
548 		       ((read_data[if_id] & 0xfc00) >> 10));
549 		CHECK_STATUS(ddr3_tip_if_read
550 			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
551 			      read_data, MASK_ALL_BITS));
552 		printf("%d,%d,", ((read_data[if_id] & 0x3f0000) >> 16),
553 		       ((read_data[if_id] & 0xfc00000) >> 22));
554 
555 		for (csindex = 0; csindex < max_cs; csindex++) {
556 			printf("CS%d , ", csindex);
557 			for (bus_id = 0; bus_id < MAX_BUS_NUM; bus_id++) {
558 				printf("\n");
559 				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
560 				ddr3_tip_bus_read(dev_num, if_id,
561 						  ACCESS_TYPE_UNICAST,
562 						  bus_id, DDR_PHY_DATA,
563 						  RESULT_DB_PHY_REG_ADDR +
564 						  csindex, &reg_data);
565 				printf("%d,%d,", (reg_data & 0x1f),
566 				       ((reg_data & 0x3e0) >> 5));
567 				/* WL */
568 				ddr3_tip_bus_read(dev_num, if_id,
569 						  ACCESS_TYPE_UNICAST,
570 						  bus_id, DDR_PHY_DATA,
571 						  WL_PHY_REG +
572 						  csindex * 4, &reg_data);
573 				printf("%d,%d,%d,",
574 				       (reg_data & 0x1f) +
575 				       ((reg_data & 0x1c0) >> 6) * 32,
576 				       (reg_data & 0x1f),
577 				       (reg_data & 0x1c0) >> 6);
578 				/* RL */
579 				CHECK_STATUS(ddr3_tip_if_read
580 					     (dev_num, ACCESS_TYPE_UNICAST,
581 					      if_id,
582 					      READ_DATA_SAMPLE_DELAY,
583 					      read_data, MASK_ALL_BITS));
584 				read_data[if_id] =
585 					(read_data[if_id] &
586 					 (0xf << (4 * csindex))) >>
587 					(4 * csindex);
588 				ddr3_tip_bus_read(dev_num, if_id,
589 						  ACCESS_TYPE_UNICAST, bus_id,
590 						  DDR_PHY_DATA,
591 						  RL_PHY_REG + csindex * 4,
592 						  &reg_data);
593 				printf("%d,%d,%d,%d,",
594 				       (reg_data & 0x1f) +
595 				       ((reg_data & 0x1c0) >> 6) * 32 +
596 				       read_data[if_id] * 64,
597 				       (reg_data & 0x1f),
598 				       ((reg_data & 0x1c0) >> 6),
599 				       read_data[if_id]);
600 				/* Centralization */
601 				ddr3_tip_bus_read(dev_num, if_id,
602 						  ACCESS_TYPE_UNICAST, bus_id,
603 						  DDR_PHY_DATA,
604 						  WRITE_CENTRALIZATION_PHY_REG
605 						  + csindex * 4, &reg_data);
606 				printf("%d,", (reg_data & 0x3f));
607 				ddr3_tip_bus_read(dev_num, if_id,
608 						  ACCESS_TYPE_UNICAST, bus_id,
609 						  DDR_PHY_DATA,
610 						  READ_CENTRALIZATION_PHY_REG
611 						  + csindex * 4, &reg_data);
612 				printf("%d,", (reg_data & 0x1f));
613 				/* Vref */
614 				ddr3_tip_bus_read(dev_num, if_id,
615 						  ACCESS_TYPE_UNICAST, bus_id,
616 						  DDR_PHY_DATA,
617 						  PAD_CONFIG_PHY_REG,
618 						  &reg_data);
619 				printf("%d,", (reg_data & 0x7));
620 				/* DQVref */
621 				/* Need to add the Read Function from device */
622 				printf("%d,", 0);
623 				printf("\t\t");
624 				for (idx = 0; idx < 11; idx++) {
625 					ddr3_tip_bus_read(dev_num, if_id,
626 							  ACCESS_TYPE_UNICAST,
627 							  bus_id, DDR_PHY_DATA,
628 							  0xd0 +
629 							  12 * csindex +
630 							  idx, &reg_data);
631 					printf("%d,", (reg_data & 0x3f));
632 				}
633 				printf("\t\t");
634 				for (idx = 0; idx < 11; idx++) {
635 					ddr3_tip_bus_read(dev_num, if_id,
636 							  ACCESS_TYPE_UNICAST,
637 							  bus_id, DDR_PHY_DATA,
638 							  0x10 +
639 							  16 * csindex +
640 							  idx, &reg_data);
641 					printf("%d,", (reg_data & 0x3f));
642 				}
643 				printf("\t\t");
644 				for (idx = 0; idx < 11; idx++) {
645 					ddr3_tip_bus_read(dev_num, if_id,
646 							  ACCESS_TYPE_UNICAST,
647 							  bus_id, DDR_PHY_DATA,
648 							  0x50 +
649 							  16 * csindex +
650 							  idx, &reg_data);
651 					printf("%d,", (reg_data & 0x3f));
652 				}
653 			}
654 		}
655 	}
656 	printf("\n");
657 
658 	return MV_OK;
659 }
660 
661 /*
662  * Register XSB information
663  */
664 int ddr3_tip_register_xsb_info(u32 dev_num, struct hws_xsb_info *xsb_info_table)
665 {
666 	memcpy(&xsb_info[dev_num], xsb_info_table, sizeof(struct hws_xsb_info));
667 	return MV_OK;
668 }
669 
670 /*
671  * Read ADLL Value
672  */
673 int read_adll_value(u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
674 		    int reg_addr, u32 mask)
675 {
676 	u32 data_value;
677 	u32 if_id = 0, bus_id = 0;
678 	u32 dev_num = 0;
679 	struct hws_topology_map *tm = ddr3_get_topology_map();
680 
681 	/*
682 	 * multi CS support - reg_addr is calucalated in calling function
683 	 * with CS offset
684 	 */
685 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
686 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
687 		for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
688 		     bus_id++) {
689 			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
690 			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
691 						       ACCESS_TYPE_UNICAST,
692 						       bus_id,
693 						       DDR_PHY_DATA, reg_addr,
694 						       &data_value));
695 			pup_values[if_id *
696 				   tm->num_of_bus_per_interface + bus_id] =
697 				data_value & mask;
698 		}
699 	}
700 
701 	return 0;
702 }
703 
704 /*
705  * Write ADLL Value
706  */
707 int write_adll_value(u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
708 		     int reg_addr)
709 {
710 	u32 if_id = 0, bus_id = 0;
711 	u32 dev_num = 0, data;
712 	struct hws_topology_map *tm = ddr3_get_topology_map();
713 
714 	/*
715 	 * multi CS support - reg_addr is calucalated in calling function
716 	 * with CS offset
717 	 */
718 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
719 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
720 		for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
721 		     bus_id++) {
722 			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
723 			data = pup_values[if_id *
724 					  tm->num_of_bus_per_interface +
725 					  bus_id];
726 			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
727 							ACCESS_TYPE_UNICAST,
728 							if_id,
729 							ACCESS_TYPE_UNICAST,
730 							bus_id, DDR_PHY_DATA,
731 							reg_addr, data));
732 		}
733 	}
734 
735 	return 0;
736 }
737 
738 #ifndef EXCLUDE_SWITCH_DEBUG
739 u32 rl_version = 1;		/* 0 - old RL machine */
740 struct hws_tip_config_func_db config_func_info[HWS_MAX_DEVICE_NUM];
741 u32 start_xsb_offset = 0;
742 u8 is_rl_old = 0;
743 u8 is_freq_old = 0;
744 u8 is_dfs_disabled = 0;
745 u32 default_centrlization_value = 0x12;
746 u32 vref = 0x4;
747 u32 activate_select_before_run_alg = 1, activate_deselect_after_run_alg = 1,
748 	rl_test = 0, reset_read_fifo = 0;
749 int debug_acc = 0;
750 u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
751 u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
752 u8 cs_mask_reg[] = {
753 	0, 4, 8, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
754 };
755 
756 u32 xsb_test_table[][8] = {
757 	{0x00000000, 0x11111111, 0x22222222, 0x33333333, 0x44444444, 0x55555555,
758 	 0x66666666, 0x77777777},
759 	{0x88888888, 0x99999999, 0xaaaaaaaa, 0xbbbbbbbb, 0xcccccccc, 0xdddddddd,
760 	 0xeeeeeeee, 0xffffffff},
761 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
762 	 0x00000000, 0xffffffff},
763 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
764 	 0x00000000, 0xffffffff},
765 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
766 	 0x00000000, 0xffffffff},
767 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
768 	 0x00000000, 0xffffffff},
769 	{0x00000000, 0x00000000, 0xffffffff, 0xffffffff, 0x00000000, 0x00000000,
770 	 0xffffffff, 0xffffffff},
771 	{0x00000000, 0x00000000, 0x00000000, 0xffffffff, 0x00000000, 0x00000000,
772 	 0x00000000, 0x00000000},
773 	{0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, 0xffffffff,
774 	 0xffffffff, 0xffffffff}
775 };
776 
777 static int ddr3_tip_access_atr(u32 dev_num, u32 flag_id, u32 value, u32 **ptr);
778 
779 int ddr3_tip_print_adll(void)
780 {
781 	u32 bus_cnt = 0, if_id, data_p1, data_p2, ui_data3, dev_num = 0;
782 	struct hws_topology_map *tm = ddr3_get_topology_map();
783 
784 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
785 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
786 		for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
787 		     bus_cnt++) {
788 			VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
789 			CHECK_STATUS(ddr3_tip_bus_read
790 				     (dev_num, if_id,
791 				      ACCESS_TYPE_UNICAST, bus_cnt,
792 				      DDR_PHY_DATA, 0x1, &data_p1));
793 			CHECK_STATUS(ddr3_tip_bus_read
794 				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
795 				      bus_cnt, DDR_PHY_DATA, 0x2, &data_p2));
796 			CHECK_STATUS(ddr3_tip_bus_read
797 				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
798 				      bus_cnt, DDR_PHY_DATA, 0x3, &ui_data3));
799 			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
800 					  (" IF %d bus_cnt %d  phy_reg_1_data 0x%x phy_reg_2_data 0x%x phy_reg_3_data 0x%x\n",
801 					   if_id, bus_cnt, data_p1, data_p2,
802 					   ui_data3));
803 			}
804 	}
805 
806 	return MV_OK;
807 }
808 
809 /*
810  * Set attribute value
811  */
812 int ddr3_tip_set_atr(u32 dev_num, u32 flag_id, u32 value)
813 {
814 	int ret;
815 	u32 *ptr_flag = NULL;
816 
817 	ret = ddr3_tip_access_atr(dev_num, flag_id, value, &ptr_flag);
818 	if (ptr_flag != NULL) {
819 		printf("ddr3_tip_set_atr Flag ID 0x%x value is set to 0x%x (was 0x%x)\n",
820 		       flag_id, value, *ptr_flag);
821 		*ptr_flag = value;
822 	} else {
823 		printf("ddr3_tip_set_atr Flag ID 0x%x value is set to 0x%x\n",
824 		       flag_id, value);
825 	}
826 
827 	return ret;
828 }
829 
830 /*
831  * Access attribute
832  */
833 static int ddr3_tip_access_atr(u32 dev_num, u32 flag_id, u32 value, u32 **ptr)
834 {
835 	u32 tmp_val = 0, if_id = 0, pup_id = 0;
836 	struct hws_topology_map *tm = ddr3_get_topology_map();
837 
838 	dev_num = dev_num;
839 	*ptr = NULL;
840 
841 	switch (flag_id) {
842 	case 0:
843 		*ptr = (u32 *)&(tm->if_act_mask);
844 		break;
845 
846 	case 0x1:
847 		*ptr = (u32 *)&mask_tune_func;
848 		break;
849 
850 	case 0x2:
851 		*ptr = (u32 *)&low_freq;
852 		break;
853 
854 	case 0x3:
855 		*ptr = (u32 *)&medium_freq;
856 		break;
857 
858 	case 0x4:
859 		*ptr = (u32 *)&generic_init_controller;
860 		break;
861 
862 	case 0x5:
863 		*ptr = (u32 *)&rl_version;
864 		break;
865 
866 	case 0x8:
867 		*ptr = (u32 *)&start_xsb_offset;
868 		break;
869 
870 	case 0x20:
871 		*ptr = (u32 *)&is_rl_old;
872 		break;
873 
874 	case 0x21:
875 		*ptr = (u32 *)&is_freq_old;
876 		break;
877 
878 	case 0x23:
879 		*ptr = (u32 *)&is_dfs_disabled;
880 		break;
881 
882 	case 0x24:
883 		*ptr = (u32 *)&is_pll_before_init;
884 		break;
885 
886 	case 0x25:
887 		*ptr = (u32 *)&is_adll_calib_before_init;
888 		break;
889 #ifdef STATIC_ALGO_SUPPORT
890 	case 0x26:
891 		*ptr = (u32 *)&(silicon_delay[0]);
892 		break;
893 
894 	case 0x27:
895 		*ptr = (u32 *)&wl_debug_delay;
896 		break;
897 #endif
898 	case 0x28:
899 		*ptr = (u32 *)&is_tune_result;
900 		break;
901 
902 	case 0x29:
903 		*ptr = (u32 *)&is_validate_window_per_if;
904 		break;
905 
906 	case 0x2a:
907 		*ptr = (u32 *)&is_validate_window_per_pup;
908 		break;
909 
910 	case 0x30:
911 		*ptr = (u32 *)&sweep_cnt;
912 		break;
913 
914 	case 0x31:
915 		*ptr = (u32 *)&is_bist_reset_bit;
916 		break;
917 
918 	case 0x32:
919 		*ptr = (u32 *)&is_dfs_in_init;
920 		break;
921 
922 	case 0x33:
923 		*ptr = (u32 *)&p_finger;
924 		break;
925 
926 	case 0x34:
927 		*ptr = (u32 *)&n_finger;
928 		break;
929 
930 	case 0x35:
931 		*ptr = (u32 *)&init_freq;
932 		break;
933 
934 	case 0x36:
935 		*ptr = (u32 *)&(freq_val[DDR_FREQ_LOW_FREQ]);
936 		break;
937 
938 	case 0x37:
939 		*ptr = (u32 *)&start_pattern;
940 		break;
941 
942 	case 0x38:
943 		*ptr = (u32 *)&end_pattern;
944 		break;
945 
946 	case 0x39:
947 		*ptr = (u32 *)&phy_reg0_val;
948 		break;
949 
950 	case 0x4a:
951 		*ptr = (u32 *)&phy_reg1_val;
952 		break;
953 
954 	case 0x4b:
955 		*ptr = (u32 *)&phy_reg2_val;
956 		break;
957 
958 	case 0x4c:
959 		*ptr = (u32 *)&phy_reg3_val;
960 		break;
961 
962 	case 0x4e:
963 		*ptr = (u32 *)&sweep_pattern;
964 		break;
965 
966 	case 0x50:
967 		*ptr = (u32 *)&is_rzq6;
968 		break;
969 
970 	case 0x51:
971 		*ptr = (u32 *)&znri_data_phy_val;
972 		break;
973 
974 	case 0x52:
975 		*ptr = (u32 *)&zpri_data_phy_val;
976 		break;
977 
978 	case 0x53:
979 		*ptr = (u32 *)&finger_test;
980 		break;
981 
982 	case 0x54:
983 		*ptr = (u32 *)&n_finger_start;
984 		break;
985 
986 	case 0x55:
987 		*ptr = (u32 *)&n_finger_end;
988 		break;
989 
990 	case 0x56:
991 		*ptr = (u32 *)&p_finger_start;
992 		break;
993 
994 	case 0x57:
995 		*ptr = (u32 *)&p_finger_end;
996 		break;
997 
998 	case 0x58:
999 		*ptr = (u32 *)&p_finger_step;
1000 		break;
1001 
1002 	case 0x59:
1003 		*ptr = (u32 *)&n_finger_step;
1004 		break;
1005 
1006 	case 0x5a:
1007 		*ptr = (u32 *)&znri_ctrl_phy_val;
1008 		break;
1009 
1010 	case 0x5b:
1011 		*ptr = (u32 *)&zpri_ctrl_phy_val;
1012 		break;
1013 
1014 	case 0x5c:
1015 		*ptr = (u32 *)&is_reg_dump;
1016 		break;
1017 
1018 	case 0x5d:
1019 		*ptr = (u32 *)&vref;
1020 		break;
1021 
1022 	case 0x5e:
1023 		*ptr = (u32 *)&mode2_t;
1024 		break;
1025 
1026 	case 0x5f:
1027 		*ptr = (u32 *)&xsb_validate_type;
1028 		break;
1029 
1030 	case 0x60:
1031 		*ptr = (u32 *)&xsb_validation_base_address;
1032 		break;
1033 
1034 	case 0x67:
1035 		*ptr = (u32 *)&activate_select_before_run_alg;
1036 		break;
1037 
1038 	case 0x68:
1039 		*ptr = (u32 *)&activate_deselect_after_run_alg;
1040 		break;
1041 
1042 	case 0x69:
1043 		*ptr = (u32 *)&odt_additional;
1044 		break;
1045 
1046 	case 0x70:
1047 		*ptr = (u32 *)&debug_mode;
1048 		break;
1049 
1050 	case 0x71:
1051 		*ptr = (u32 *)&pbs_pattern;
1052 		break;
1053 
1054 	case 0x72:
1055 		*ptr = (u32 *)&delay_enable;
1056 		break;
1057 
1058 	case 0x73:
1059 		*ptr = (u32 *)&ck_delay;
1060 		break;
1061 
1062 	case 0x74:
1063 		*ptr = (u32 *)&ck_delay_16;
1064 		break;
1065 
1066 	case 0x75:
1067 		*ptr = (u32 *)&ca_delay;
1068 		break;
1069 
1070 	case 0x100:
1071 		*ptr = (u32 *)&debug_dunit;
1072 		break;
1073 
1074 	case 0x101:
1075 		debug_acc = (int)value;
1076 		break;
1077 
1078 	case 0x102:
1079 		debug_training = (u8)value;
1080 		break;
1081 
1082 	case 0x103:
1083 		debug_training_bist = (u8)value;
1084 		break;
1085 
1086 	case 0x104:
1087 		debug_centralization = (u8)value;
1088 		break;
1089 
1090 	case 0x105:
1091 		debug_training_ip = (u8)value;
1092 		break;
1093 
1094 	case 0x106:
1095 		debug_leveling = (u8)value;
1096 		break;
1097 
1098 	case 0x107:
1099 		debug_pbs = (u8)value;
1100 		break;
1101 
1102 	case 0x108:
1103 		debug_training_static = (u8)value;
1104 		break;
1105 
1106 	case 0x109:
1107 		debug_training_access = (u8)value;
1108 		break;
1109 
1110 	case 0x112:
1111 		*ptr = &start_pattern;
1112 		break;
1113 
1114 	case 0x113:
1115 		*ptr = &end_pattern;
1116 		break;
1117 
1118 	default:
1119 		if ((flag_id >= 0x200) && (flag_id < 0x210)) {
1120 			if_id = flag_id - 0x200;
1121 			*ptr = (u32 *)&(tm->interface_params
1122 					[if_id].memory_freq);
1123 		} else if ((flag_id >= 0x210) && (flag_id < 0x220)) {
1124 			if_id = flag_id - 0x210;
1125 			*ptr = (u32 *)&(tm->interface_params
1126 					[if_id].speed_bin_index);
1127 		} else if ((flag_id >= 0x220) && (flag_id < 0x230)) {
1128 			if_id = flag_id - 0x220;
1129 			*ptr = (u32 *)&(tm->interface_params
1130 					[if_id].bus_width);
1131 		} else if ((flag_id >= 0x230) && (flag_id < 0x240)) {
1132 			if_id = flag_id - 0x230;
1133 			*ptr = (u32 *)&(tm->interface_params
1134 					[if_id].memory_size);
1135 		} else if ((flag_id >= 0x240) && (flag_id < 0x250)) {
1136 			if_id = flag_id - 0x240;
1137 			*ptr = (u32 *)&(tm->interface_params
1138 					[if_id].cas_l);
1139 		} else if ((flag_id >= 0x250) && (flag_id < 0x260)) {
1140 			if_id = flag_id - 0x250;
1141 			*ptr = (u32 *)&(tm->interface_params
1142 					[if_id].cas_wl);
1143 		} else if ((flag_id >= 0x270) && (flag_id < 0x2cf)) {
1144 			if_id = (flag_id - 0x270) / MAX_BUS_NUM;
1145 			pup_id = (flag_id - 0x270) % MAX_BUS_NUM;
1146 			*ptr = (u32 *)&(tm->interface_params[if_id].
1147 					as_bus_params[pup_id].is_ck_swap);
1148 		} else if ((flag_id >= 0x2d0) && (flag_id < 0x32f)) {
1149 			if_id = (flag_id - 0x2d0) / MAX_BUS_NUM;
1150 			pup_id = (flag_id - 0x2d0) % MAX_BUS_NUM;
1151 			*ptr = (u32 *)&(tm->interface_params[if_id].
1152 					as_bus_params[pup_id].is_dqs_swap);
1153 		} else if ((flag_id >= 0x330) && (flag_id < 0x38f)) {
1154 			if_id = (flag_id - 0x330) / MAX_BUS_NUM;
1155 			pup_id = (flag_id - 0x330) % MAX_BUS_NUM;
1156 			*ptr = (u32 *)&(tm->interface_params[if_id].
1157 					as_bus_params[pup_id].cs_bitmask);
1158 		} else if ((flag_id >= 0x390) && (flag_id < 0x3ef)) {
1159 			if_id = (flag_id - 0x390) / MAX_BUS_NUM;
1160 			pup_id = (flag_id - 0x390) % MAX_BUS_NUM;
1161 			*ptr = (u32 *)&(tm->interface_params
1162 					[if_id].as_bus_params
1163 					[pup_id].mirror_enable_bitmask);
		} else if ((flag_id >= 0x500) && (flag_id <= 0x50f)) {
			tmp_val = flag_id - 0x500;
			*ptr = (u32 *)&(clamp_tbl[tmp_val]);
1167 		} else {
1168 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1169 					  ("flag_id out of boundary %d\n",
1170 					   flag_id));
1171 			return MV_BAD_PARAM;
1172 		}
1173 	}
1174 
1175 	return MV_OK;
1176 }
1177 
1178 #ifndef EXCLUDE_SWITCH_DEBUG
1179 /*
1180  * Print ADLL
1181  */
1182 int print_adll(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
1183 {
1184 	u32 i, j;
1185 	struct hws_topology_map *tm = ddr3_get_topology_map();
1186 
1187 	dev_num = dev_num;
1188 
1189 	for (j = 0; j < tm->num_of_bus_per_interface; j++) {
1190 		VALIDATE_ACTIVE(tm->bus_act_mask, j);
1191 		for (i = 0; i < MAX_INTERFACE_NUM; i++) {
1192 			printf("%d ,",
1193 			       adll[i * tm->num_of_bus_per_interface + j]);
1194 		}
1195 	}
1196 	printf("\n");
1197 
1198 	return MV_OK;
1199 }
1200 #endif
1201 
/* byte_index - only byte 0, 1, 2, or 3; 0xff - test all bytes */
1203 static u32 ddr3_tip_compare(u32 if_id, u32 *p_src, u32 *p_dst,
1204 			    u32 byte_index)
1205 {
1206 	u32 burst_cnt = 0, addr_offset, i_id;
1207 	int b_is_fail = 0;
1208 
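	/*
	 * Build a byte-lane mask: 0xff compares all bytes, otherwise only
	 * the selected byte of each 32-bit word is compared.
	 */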
1209 	addr_offset =
1210 		(byte_index ==
1211 		 0xff) ? (u32) 0xffffffff : (u32) (0xff << (byte_index * 8));
1212 	for (burst_cnt = 0; burst_cnt < EXT_ACCESS_BURST_LENGTH; burst_cnt++) {
1213 		if ((p_src[burst_cnt] & addr_offset) !=
1214 		    (p_dst[burst_cnt] & addr_offset))
1215 			b_is_fail = 1;
1216 	}
1217 
1218 	if (b_is_fail == 1) {
1219 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1220 				  ("IF %d exp: ", if_id));
1221 		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
1222 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1223 					  ("0x%8x ", p_src[i_id]));
1224 		}
1225 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1226 				  ("\n_i_f %d rcv: ", if_id));
1227 		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
1228 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1229 					  ("(0x%8x ", p_dst[i_id]));
1230 		}
1231 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("\n "));
1232 	}
1233 
1234 	return b_is_fail;
1235 }
1236 
1237 /* test_type = 0-tx , 1-rx */
1238 int ddr3_tip_sweep_test(u32 dev_num, u32 test_type,
1239 			u32 mem_addr, u32 is_modify_adll,
1240 			u32 start_if, u32 end_if, u32 startpup, u32 endpup)
1241 {
1242 	u32 bus_cnt = 0, adll_val = 0, if_id, ui_prev_adll, ui_mask_bit,
1243 		end_adll, start_adll;
1244 	u32 reg_addr = 0;
1245 	struct hws_topology_map *tm = ddr3_get_topology_map();
1246 
1247 	mem_addr = mem_addr;
1248 
1249 	if (test_type == 0) {
1250 		reg_addr = 1;
1251 		ui_mask_bit = 0x3f;
1252 		start_adll = 0;
1253 		end_adll = ui_mask_bit;
1254 	} else {
1255 		reg_addr = 3;
1256 		ui_mask_bit = 0x1f;
1257 		start_adll = 0;
1258 		end_adll = ui_mask_bit;
1259 	}
1260 
1261 	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
1262 			  ("==============================\n"));
1263 	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
1264 			  ("Test type %d (0-tx, 1-rx)\n", test_type));
1265 
1266 	for (if_id = start_if; if_id <= end_if; if_id++) {
1267 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1268 		for (bus_cnt = startpup; bus_cnt < endpup; bus_cnt++) {
1269 			CHECK_STATUS(ddr3_tip_bus_read
1270 				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
1271 				      bus_cnt, DDR_PHY_DATA, reg_addr,
1272 				      &ui_prev_adll));
1273 
1274 			for (adll_val = start_adll; adll_val <= end_adll;
1275 			     adll_val++) {
1276 				if (is_modify_adll == 1) {
1277 					CHECK_STATUS(ddr3_tip_bus_read_modify_write
1278 						     (dev_num,
1279 						      ACCESS_TYPE_UNICAST,
1280 						      if_id, bus_cnt,
1281 						      DDR_PHY_DATA, reg_addr,
1282 						      adll_val, ui_mask_bit));
1283 				}
1284 			}
1285 			if (is_modify_adll == 1) {
1286 				CHECK_STATUS(ddr3_tip_bus_write
1287 					     (dev_num, ACCESS_TYPE_UNICAST,
1288 					      if_id, ACCESS_TYPE_UNICAST,
1289 					      bus_cnt, DDR_PHY_DATA, reg_addr,
1290 					      ui_prev_adll));
1291 			}
1292 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("\n"));
1293 		}
1294 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("\n"));
1295 	}
1296 
1297 	return MV_OK;
1298 }
1299 
1300 #ifndef EXCLUDE_SWITCH_DEBUG
1301 /*
1302  * Sweep validation
1303  */
1304 int ddr3_tip_run_sweep_test(int dev_num, u32 repeat_num, u32 direction,
1305 			    u32 mode)
1306 {
1307 	u32 pup = 0, start_pup = 0, end_pup = 0;
1308 	u32 adll = 0;
1309 	u32 res[MAX_INTERFACE_NUM] = { 0 };
1310 	int if_id = 0;
1311 	u32 adll_value = 0;
1312 	int reg = (direction == 0) ? WRITE_CENTRALIZATION_PHY_REG :
1313 		READ_CENTRALIZATION_PHY_REG;
1314 	enum hws_access_type pup_access;
1315 	u32 cs;
1316 	u32 max_cs = hws_ddr3_tip_max_cs_get();
1317 	struct hws_topology_map *tm = ddr3_get_topology_map();
1318 
1319 	repeat_num = repeat_num;
1320 
1321 	if (mode == 1) {
1322 		/* per pup */
1323 		start_pup = 0;
1324 		end_pup = tm->num_of_bus_per_interface - 1;
1325 		pup_access = ACCESS_TYPE_UNICAST;
1326 	} else {
1327 		start_pup = 0;
1328 		end_pup = 0;
1329 		pup_access = ACCESS_TYPE_MULTICAST;
1330 	}
1331 
1332 	for (cs = 0; cs < max_cs; cs++) {
1333 		for (adll = 0; adll < ADLL_LENGTH; adll++) {
1334 			for (if_id = 0;
1335 			     if_id <= MAX_INTERFACE_NUM - 1;
1336 			     if_id++) {
1337 				VALIDATE_ACTIVE
1338 					(tm->if_act_mask,
1339 					 if_id);
1340 				for (pup = start_pup; pup <= end_pup; pup++) {
1341 					ctrl_sweepres[adll][if_id][pup] =
1342 						0;
1343 				}
1344 			}
1345 		}
1346 
1347 		for (adll = 0; adll < (MAX_INTERFACE_NUM * MAX_BUS_NUM); adll++)
1348 			ctrl_adll[adll] = 0;
		/* Save the DQS values (after the algorithm run) */
1350 		read_adll_value(ctrl_adll,
1351 				(reg + (cs * CS_REGISTER_ADDR_OFFSET)),
1352 				MASK_ALL_BITS);
1353 
1354 		/*
1355 		 * Sweep ADLL  from 0:31 on all I/F on all Pup and perform
1356 		 * BIST on each stage.
1357 		 */
1358 		for (pup = start_pup; pup <= end_pup; pup++) {
1359 			for (adll = 0; adll < ADLL_LENGTH; adll++) {
1360 				adll_value =
1361 					(direction == 0) ? (adll * 2) : adll;
1362 				CHECK_STATUS(ddr3_tip_bus_write
1363 					     (dev_num, ACCESS_TYPE_MULTICAST, 0,
1364 					      pup_access, pup, DDR_PHY_DATA,
1365 					      reg + CS_REG_VALUE(cs),
1366 					      adll_value));
1367 				hws_ddr3_run_bist(dev_num, sweep_pattern, res,
1368 						  cs);
1369 				/* ddr3_tip_reset_fifo_ptr(dev_num); */
1370 				for (if_id = 0;
1371 				     if_id <= MAX_INTERFACE_NUM - 1;
1372 				     if_id++) {
1373 					VALIDATE_ACTIVE
1374 						(tm->if_act_mask,
1375 						 if_id);
1376 					ctrl_sweepres[adll][if_id][pup]
1377 						= res[if_id];
1378 					if (mode == 1) {
1379 						CHECK_STATUS
1380 							(ddr3_tip_bus_write
1381 							 (dev_num,
1382 							  ACCESS_TYPE_UNICAST,
1383 							  if_id,
1384 							  ACCESS_TYPE_UNICAST,
1385 							  pup,
1386 							  DDR_PHY_DATA,
1387 							  reg + CS_REG_VALUE(cs),
							  ctrl_adll[if_id *
								    tm->num_of_bus_per_interface
								    + pup]));
1392 					}
1393 				}
1394 			}
1395 		}
1396 		printf("Final, CS %d,%s, Sweep, Result, Adll,", cs,
1397 		       ((direction == 0) ? "TX" : "RX"));
1398 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1399 			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1400 			if (mode == 1) {
1401 				for (pup = start_pup; pup <= end_pup; pup++) {
1402 					VALIDATE_ACTIVE(tm->bus_act_mask, pup);
1403 					printf("I/F%d-PHY%d , ", if_id, pup);
1404 				}
1405 			} else {
1406 				printf("I/F%d , ", if_id);
1407 			}
1408 		}
1409 		printf("\n");
1410 
1411 		for (adll = 0; adll < ADLL_LENGTH; adll++) {
1412 			adll_value = (direction == 0) ? (adll * 2) : adll;
1413 			printf("Final,%s, Sweep, Result, %d ,",
1414 			       ((direction == 0) ? "TX" : "RX"), adll_value);
1415 
1416 			for (if_id = 0;
1417 			     if_id <= MAX_INTERFACE_NUM - 1;
1418 			     if_id++) {
1419 				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1420 				for (pup = start_pup; pup <= end_pup; pup++) {
1421 					printf("%d , ",
1422 					       ctrl_sweepres[adll][if_id]
1423 					       [pup]);
1424 				}
1425 			}
1426 			printf("\n");
1427 		}
1428 
1429 		/*
1430 		 * Write back to the phy the Rx DQS value, we store in
1431 		 * the beginning.
1432 		 */
1433 		write_adll_value(ctrl_adll,
1434 				 (reg + cs * CS_REGISTER_ADDR_OFFSET));
1435 		/* print adll results */
1436 		read_adll_value(ctrl_adll, (reg + cs * CS_REGISTER_ADDR_OFFSET),
1437 				MASK_ALL_BITS);
1438 		printf("%s, DQS, ADLL,,,", (direction == 0) ? "Tx" : "Rx");
1439 		print_adll(dev_num, ctrl_adll);
1440 	}
1441 	ddr3_tip_reset_fifo_ptr(dev_num);
1442 
1443 	return 0;
1444 }
1445 
1446 void print_topology(struct hws_topology_map *topology_db)
1447 {
1448 	u32 ui, uj;
1449 
1450 	printf("\tinterface_mask: 0x%x\n", topology_db->if_act_mask);
1451 	printf("\tNum Bus:  %d\n", topology_db->num_of_bus_per_interface);
1452 	printf("\tbus_act_mask: 0x%x\n", topology_db->bus_act_mask);
1453 
1454 	for (ui = 0; ui < MAX_INTERFACE_NUM; ui++) {
1455 		VALIDATE_ACTIVE(topology_db->if_act_mask, ui);
1456 		printf("\n\tInterface ID: %d\n", ui);
1457 		printf("\t\tDDR Frequency: %s\n",
1458 		       convert_freq(topology_db->
1459 				    interface_params[ui].memory_freq));
1460 		printf("\t\tSpeed_bin: %d\n",
1461 		       topology_db->interface_params[ui].speed_bin_index);
1462 		printf("\t\tBus_width: %d\n",
1463 		       (4 << topology_db->interface_params[ui].bus_width));
1464 		printf("\t\tMem_size: %s\n",
1465 		       convert_mem_size(topology_db->
1466 					interface_params[ui].memory_size));
1467 		printf("\t\tCAS-WL: %d\n",
1468 		       topology_db->interface_params[ui].cas_wl);
1469 		printf("\t\tCAS-L: %d\n",
1470 		       topology_db->interface_params[ui].cas_l);
1471 		printf("\t\tTemperature: %d\n",
1472 		       topology_db->interface_params[ui].interface_temp);
1473 		printf("\n");
1474 		for (uj = 0; uj < 4; uj++) {
1475 			printf("\t\tBus %d parameters- CS Mask: 0x%x\t", uj,
1476 			       topology_db->interface_params[ui].
1477 			       as_bus_params[uj].cs_bitmask);
1478 			printf("Mirror: 0x%x\t",
1479 			       topology_db->interface_params[ui].
1480 			       as_bus_params[uj].mirror_enable_bitmask);
1481 			printf("DQS Swap is %s \t",
1482 			       (topology_db->
1483 				interface_params[ui].as_bus_params[uj].
1484 				is_dqs_swap == 1) ? "enabled" : "disabled");
1485 			printf("Ck Swap:%s\t",
1486 			       (topology_db->
1487 				interface_params[ui].as_bus_params[uj].
1488 				is_ck_swap == 1) ? "enabled" : "disabled");
1489 			printf("\n");
1490 		}
1491 	}
1492 }
1493 #endif
1494 
1495 /*
1496  * Execute XSB Test transaction (rd/wr/both)
1497  */
1498 int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
1499 		 u32 read_type, u32 burst_length)
1500 {
1501 	u32 seq = 0, if_id = 0, addr, cnt;
1502 	int ret = MV_OK, ret_tmp;
1503 	u32 data_read[MAX_INTERFACE_NUM];
1504 	struct hws_topology_map *tm = ddr3_get_topology_map();
1505 
1506 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1507 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1508 		addr = mem_addr;
1509 		for (cnt = 0; cnt <= burst_length; cnt++) {
1510 			seq = (seq + 1) % 8;
1511 			if (write_type != 0) {
1512 				CHECK_STATUS(ddr3_tip_ext_write
1513 					     (dev_num, if_id, addr, 1,
1514 					      xsb_test_table[seq]));
1515 			}
1516 			if (read_type != 0) {
1517 				CHECK_STATUS(ddr3_tip_ext_read
1518 					     (dev_num, if_id, addr, 1,
1519 					      data_read));
1520 			}
1521 			if ((read_type != 0) && (write_type != 0)) {
1522 				ret_tmp =
1523 					ddr3_tip_compare(if_id,
1524 							 xsb_test_table[seq],
1525 							 data_read,
1526 							 0xff);
1527 				addr += (EXT_ACCESS_BURST_LENGTH * 4);
1528 				ret = (ret != MV_OK) ? ret : ret_tmp;
1529 			}
1530 		}
1531 	}
1532 
1533 	return ret;
1534 }
1535 
#else /* EXCLUDE_SWITCH_DEBUG */
1537 
1538 u32 rl_version = 1;		/* 0 - old RL machine */
1539 u32 vref = 0x4;
1540 u32 start_xsb_offset = 0;
1541 u8 cs_mask_reg[] = {
1542 	0, 4, 8, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
1543 };
1544 
1545 int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
1546 		 u32 read_type, u32 burst_length)
1547 {
1548 	return MV_OK;
1549 }
1550 
1551 #endif
1552