xref: /openbmc/u-boot/drivers/ddr/marvell/a38x/ddr3_debug.c (revision 04ab29ab257598b0e33785c075c9163ea090e6b7)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) Marvell International Ltd. and its affiliates
4  */
5 
6 #include <common.h>
7 #include <i2c.h>
8 #include <spl.h>
9 #include <asm/io.h>
10 #include <asm/arch/cpu.h>
11 #include <asm/arch/soc.h>
12 
13 #include "ddr3_init.h"
14 
15 u8 is_reg_dump = 0;
16 u8 debug_pbs = DEBUG_LEVEL_ERROR;
17 
18 /*
19  * API to change flags outside of the lib
20  */
21 #ifndef SILENT_LIB
22 /* Debug flags for other Training modules */
23 u8 debug_training_static = DEBUG_LEVEL_ERROR;
24 u8 debug_training = DEBUG_LEVEL_ERROR;
25 u8 debug_leveling = DEBUG_LEVEL_ERROR;
26 u8 debug_centralization = DEBUG_LEVEL_ERROR;
27 u8 debug_training_ip = DEBUG_LEVEL_ERROR;
28 u8 debug_training_bist = DEBUG_LEVEL_ERROR;
29 u8 debug_training_hw_alg = DEBUG_LEVEL_ERROR;
30 u8 debug_training_access = DEBUG_LEVEL_ERROR;
31 u8 debug_training_a38x = DEBUG_LEVEL_ERROR;
32 
33 void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
34 {
35 	switch (block) {
36 	case DEBUG_BLOCK_STATIC:
37 		debug_training_static = level;
38 		break;
39 	case DEBUG_BLOCK_TRAINING_MAIN:
40 		debug_training = level;
41 		break;
42 	case DEBUG_BLOCK_LEVELING:
43 		debug_leveling = level;
44 		break;
45 	case DEBUG_BLOCK_CENTRALIZATION:
46 		debug_centralization = level;
47 		break;
48 	case DEBUG_BLOCK_PBS:
49 		debug_pbs = level;
50 		break;
51 	case DEBUG_BLOCK_ALG:
52 		debug_training_hw_alg = level;
53 		break;
54 	case DEBUG_BLOCK_DEVICE:
55 		debug_training_a38x = level;
56 		break;
57 	case DEBUG_BLOCK_ACCESS:
58 		debug_training_access = level;
59 		break;
60 	case DEBUG_STAGES_REG_DUMP:
61 		if (level == DEBUG_LEVEL_TRACE)
62 			is_reg_dump = 1;
63 		else
64 			is_reg_dump = 0;
65 		break;
66 	case DEBUG_BLOCK_ALL:
67 	default:
68 		debug_training_static = level;
69 		debug_training = level;
70 		debug_leveling = level;
71 		debug_centralization = level;
72 		debug_pbs = level;
73 		debug_training_hw_alg = level;
74 		debug_training_access = level;
75 		debug_training_a38x = level;
76 	}
77 }
78 #else
79 void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
80 {
81 	return;
82 }
83 #endif
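/*
 * Illustrative usage (not part of this file): a caller outside the lib
 * might raise verbosity everywhere and then silence a single block:
 *
 *	ddr3_hws_set_log_level(DEBUG_BLOCK_ALL, DEBUG_LEVEL_TRACE);
 *	ddr3_hws_set_log_level(DEBUG_BLOCK_PBS, DEBUG_LEVEL_ERROR);
 *
 * Passing DEBUG_STAGES_REG_DUMP with DEBUG_LEVEL_TRACE sets the
 * is_reg_dump flag instead of a per-block verbosity level.
 */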
84 
85 struct hws_tip_config_func_db config_func_info[HWS_MAX_DEVICE_NUM];
86 u8 is_default_centralization = 0;
87 u8 is_tune_result = 0;
88 u8 is_validate_window_per_if = 0;
89 u8 is_validate_window_per_pup = 0;
90 u8 sweep_cnt = 1;
91 u32 is_bist_reset_bit = 1;
92 static struct hws_xsb_info xsb_info[HWS_MAX_DEVICE_NUM];
93 
94 /*
95  * Dump Dunit & Phy registers
96  */
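/*
 * Output layout: one row per register address; the Dunit section prints
 * one value column per active interface, and the PHY section prints,
 * per active interface, one value per active bus for the data PHY
 * registers followed by one per active bus for the control PHY
 * registers.
 */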
97 int ddr3_tip_reg_dump(u32 dev_num)
98 {
99 	u32 if_id, reg_addr, data_value, bus_id;
100 	u32 read_data[MAX_INTERFACE_NUM];
101 	struct hws_topology_map *tm = ddr3_get_topology_map();
102 
103 	printf("-- dunit registers --\n");
104 	for (reg_addr = 0x1400; reg_addr < 0x19f0; reg_addr += 4) {
105 		printf("0x%x ", reg_addr);
106 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
107 			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
108 			CHECK_STATUS(ddr3_tip_if_read
109 				     (dev_num, ACCESS_TYPE_UNICAST,
110 				      if_id, reg_addr, read_data,
111 				      MASK_ALL_BITS));
112 			printf("0x%x ", read_data[if_id]);
113 		}
114 		printf("\n");
115 	}
116 
117 	printf("-- Phy registers --\n");
118 	for (reg_addr = 0; reg_addr <= 0xff; reg_addr++) {
119 		printf("0x%x ", reg_addr);
120 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
121 			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
122 			for (bus_id = 0;
123 			     bus_id < tm->num_of_bus_per_interface;
124 			     bus_id++) {
125 				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
126 				CHECK_STATUS(ddr3_tip_bus_read
127 					     (dev_num, if_id,
128 					      ACCESS_TYPE_UNICAST, bus_id,
129 					      DDR_PHY_DATA, reg_addr,
130 					      &data_value));
131 				printf("0x%x ", data_value);
132 			}
133 			for (bus_id = 0;
134 			     bus_id < tm->num_of_bus_per_interface;
135 			     bus_id++) {
136 				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
137 				CHECK_STATUS(ddr3_tip_bus_read
138 					     (dev_num, if_id,
139 					      ACCESS_TYPE_UNICAST, bus_id,
140 					      DDR_PHY_CONTROL, reg_addr,
141 					      &data_value));
142 				printf("0x%x ", data_value);
143 			}
144 		}
145 		printf("\n");
146 	}
147 
148 	return MV_OK;
149 }
150 
151 /*
152  * Register access func registration
153  */
154 int ddr3_tip_init_config_func(u32 dev_num,
155 			      struct hws_tip_config_func_db *config_func)
156 {
157 	if (config_func == NULL)
158 		return MV_BAD_PARAM;
159 
160 	memcpy(&config_func_info[dev_num], config_func,
161 	       sizeof(struct hws_tip_config_func_db));
162 
163 	return MV_OK;
164 }
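/*
 * Illustrative registration sketch (the callback names are hypothetical;
 * only the struct type and this function come from the lib):
 *
 *	struct hws_tip_config_func_db funcs = { 0 };
 *
 *	funcs.tip_get_device_info_func = my_get_device_info;
 *	funcs.tip_get_temperature = my_get_temperature;
 *	ddr3_tip_init_config_func(0, &funcs);
 *
 * The table is copied per device number, so the caller's structure does
 * not have to remain valid after the call.
 */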
165 
166 /*
167  * Get training result info pointer
168  */
169 enum hws_result *ddr3_tip_get_result_ptr(u32 stage)
170 {
171 	return training_result[stage];
172 }
173 
174 /*
175  * Device info read
176  */
177 int ddr3_tip_get_device_info(u32 dev_num, struct ddr3_device_info *info_ptr)
178 {
179 	if (config_func_info[dev_num].tip_get_device_info_func != NULL) {
180 		return config_func_info[dev_num].
181 			tip_get_device_info_func((u8) dev_num, info_ptr);
182 	}
183 
184 	return MV_FAIL;
185 }
186 
187 #ifndef EXCLUDE_SWITCH_DEBUG
188 /*
189  * Convert freq to character string
190  */
191 static char *convert_freq(enum hws_ddr_freq freq)
192 {
193 	switch (freq) {
194 	case DDR_FREQ_LOW_FREQ:
195 		return "DDR_FREQ_LOW_FREQ";
196 	case DDR_FREQ_400:
197 		return "400";
198 
199 	case DDR_FREQ_533:
200 		return "533";
201 	case DDR_FREQ_667:
202 		return "667";
203 
204 	case DDR_FREQ_800:
205 		return "800";
206 
207 	case DDR_FREQ_933:
208 		return "933";
209 
210 	case DDR_FREQ_1066:
211 		return "1066";
212 	case DDR_FREQ_311:
213 		return "311";
214 
215 	case DDR_FREQ_333:
216 		return "333";
217 
218 	case DDR_FREQ_467:
219 		return "467";
220 
221 	case DDR_FREQ_850:
222 		return "850";
223 
224 	case DDR_FREQ_900:
225 		return "900";
226 
227 	case DDR_FREQ_360:
228 		return "DDR_FREQ_360";
229 
230 	case DDR_FREQ_1000:
231 		return "DDR_FREQ_1000";
232 	default:
233 		return "Unknown Frequency";
234 	}
235 }
236 
237 /*
238  * Convert device ID to character string
239  */
240 static char *convert_dev_id(u32 dev_id)
241 {
242 	switch (dev_id) {
243 	case 0x6800:
244 		return "A38xx";
245 	case 0x6900:
246 		return "A39XX";
247 	case 0xf400:
248 		return "AC3";
249 	case 0xfc00:
250 		return "BC2";
251 
252 	default:
253 		return "Unknown Device";
254 	}
255 }
256 
257 /*
258  * Convert memory size to character string
259  */
260 static char *convert_mem_size(u32 mem_size)
261 {
262 	switch (mem_size) {
263 	case 0:
264 		return "512 MB";
265 	case 1:
266 		return "1 GB";
267 	case 2:
268 		return "2 GB";
269 	case 3:
270 		return "4 GB";
271 	case 4:
272 		return "8 GB";
273 
274 	default:
275 		return "wrong mem size";
276 	}
277 }
278 
279 int print_device_info(u8 dev_num)
280 {
281 	struct ddr3_device_info info_ptr;
282 	struct hws_topology_map *tm = ddr3_get_topology_map();
283 
284 	CHECK_STATUS(ddr3_tip_get_device_info(dev_num, &info_ptr));
285 	printf("=== DDR setup START===\n");
286 	printf("\tDevice ID: %s\n", convert_dev_id(info_ptr.device_id));
287 	printf("\tDDR3  CK delay: %d\n", info_ptr.ck_delay);
288 	print_topology(tm);
289 	printf("=== DDR setup END===\n");
290 
291 	return MV_OK;
292 }
293 
294 void hws_ddr3_tip_sweep_test(int enable)
295 {
296 	if (enable) {
297 		is_validate_window_per_if = 1;
298 		is_validate_window_per_pup = 1;
299 		debug_training = DEBUG_LEVEL_TRACE;
300 	} else {
301 		is_validate_window_per_if = 0;
302 		is_validate_window_per_pup = 0;
303 	}
304 }
305 #endif
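/*
 * Note: hws_ddr3_tip_sweep_test() only latches the request; the sweep
 * itself (and the register dump that follows it) runs from
 * ddr3_tip_print_log() once is_validate_window_per_if or
 * is_validate_window_per_pup is set.
 */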
306 
307 char *ddr3_tip_convert_tune_result(enum hws_result tune_result)
308 {
309 	switch (tune_result) {
310 	case TEST_FAILED:
311 		return "FAILED";
312 	case TEST_SUCCESS:
313 		return "PASS";
314 	case NO_TEST_DONE:
315 		return "NOT COMPLETED";
316 	default:
317 		return "Un-KNOWN";
318 	}
319 }
320 
321 /*
322  * Print log info
323  */
324 int ddr3_tip_print_log(u32 dev_num, u32 mem_addr)
325 {
326 	u32 if_id = 0;
327 	struct hws_topology_map *tm = ddr3_get_topology_map();
328 
329 #ifndef EXCLUDE_SWITCH_DEBUG
330 	if ((is_validate_window_per_if != 0) ||
331 	    (is_validate_window_per_pup != 0)) {
332 		u32 is_pup_log = 0;
333 		enum hws_ddr_freq freq;
334 
335 		freq = tm->interface_params[first_active_if].memory_freq;
336 
337 		is_pup_log = (is_validate_window_per_pup != 0) ? 1 : 0;
338 		printf("===VALIDATE WINDOW LOG START===\n");
339 		printf("DDR Frequency: %s   ======\n", convert_freq(freq));
340 		/* print sweep windows */
341 		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
342 		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
343 		ddr3_tip_print_all_pbs_result(dev_num);
344 		ddr3_tip_print_wl_supp_result(dev_num);
345 		printf("===VALIDATE WINDOW LOG END ===\n");
346 		CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
347 		ddr3_tip_reg_dump(dev_num);
348 	}
349 #endif
350 
351 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
352 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
353 
354 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
355 				  ("IF %d Status:\n", if_id));
356 
357 		if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
358 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
359 					  ("\tInit Controller: %s\n",
360 					   ddr3_tip_convert_tune_result
361 					   (training_result[INIT_CONTROLLER]
362 					    [if_id])));
363 		}
364 		if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
365 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
366 					  ("\tLow freq Config: %s\n",
367 					   ddr3_tip_convert_tune_result
368 					   (training_result[SET_LOW_FREQ]
369 					    [if_id])));
370 		}
371 		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
372 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
373 					  ("\tLoad Pattern: %s\n",
374 					   ddr3_tip_convert_tune_result
375 					   (training_result[LOAD_PATTERN]
376 					    [if_id])));
377 		}
378 		if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
379 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
380 					  ("\tMedium freq Config: %s\n",
381 					   ddr3_tip_convert_tune_result
382 					   (training_result[SET_MEDIUM_FREQ]
383 					    [if_id])));
384 		}
385 		if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
386 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
387 					  ("\tWL: %s\n",
388 					   ddr3_tip_convert_tune_result
389 					   (training_result[WRITE_LEVELING]
390 					    [if_id])));
391 		}
392 		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
393 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
394 					  ("\tLoad Pattern: %s\n",
395 					   ddr3_tip_convert_tune_result
396 					   (training_result[LOAD_PATTERN_2]
397 					    [if_id])));
398 		}
399 		if (mask_tune_func & READ_LEVELING_MASK_BIT) {
400 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
401 					  ("\tRL: %s\n",
402 					   ddr3_tip_convert_tune_result
403 					   (training_result[READ_LEVELING]
404 					    [if_id])));
405 		}
406 		if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
407 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
408 					  ("\tWL Supp: %s\n",
409 					   ddr3_tip_convert_tune_result
410 					   (training_result[WRITE_LEVELING_SUPP]
411 					    [if_id])));
412 		}
413 		if (mask_tune_func & PBS_RX_MASK_BIT) {
414 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
415 					  ("\tPBS RX: %s\n",
416 					   ddr3_tip_convert_tune_result
417 					   (training_result[PBS_RX]
418 					    [if_id])));
419 		}
420 		if (mask_tune_func & PBS_TX_MASK_BIT) {
421 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
422 					  ("\tPBS TX: %s\n",
423 					   ddr3_tip_convert_tune_result
424 					   (training_result[PBS_TX]
425 					    [if_id])));
426 		}
427 		if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
428 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
429 					  ("\tTarget freq Config: %s\n",
430 					   ddr3_tip_convert_tune_result
431 					   (training_result[SET_TARGET_FREQ]
432 					    [if_id])));
433 		}
434 		if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
435 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
436 					  ("\tWL TF: %s\n",
437 					   ddr3_tip_convert_tune_result
438 					   (training_result[WRITE_LEVELING_TF]
439 					    [if_id])));
440 		}
441 		if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
442 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
443 					  ("\tRL TF: %s\n",
444 					   ddr3_tip_convert_tune_result
445 					   (training_result[READ_LEVELING_TF]
446 					    [if_id])));
447 		}
448 		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
449 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
450 					  ("\tWL TF Supp: %s\n",
451 					   ddr3_tip_convert_tune_result
452 					   (training_result
453 					    [WRITE_LEVELING_SUPP_TF]
454 					    [if_id])));
455 		}
456 		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
457 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
458 					  ("\tCentr RX: %s\n",
459 					   ddr3_tip_convert_tune_result
460 					   (training_result[CENTRALIZATION_RX]
461 					    [if_id])));
462 		}
463 		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
464 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
465 					  ("\tVREF_CALIBRATION: %s\n",
466 					   ddr3_tip_convert_tune_result
467 					   (training_result[VREF_CALIBRATION]
468 					    [if_id])));
469 		}
470 		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
471 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
472 					  ("\tCentr TX: %s\n",
473 					   ddr3_tip_convert_tune_result
474 					   (training_result[CENTRALIZATION_TX]
475 					    [if_id])));
476 		}
477 	}
478 
479 	return MV_OK;
480 }
481 
482 /*
483  * Print stability log info
484  */
485 int ddr3_tip_print_stability_log(u32 dev_num)
486 {
487 	u8 if_id = 0, csindex = 0, bus_id = 0, idx = 0;
488 	u32 reg_data;
489 	u32 read_data[MAX_INTERFACE_NUM];
490 	u32 max_cs = hws_ddr3_tip_max_cs_get();
491 	struct hws_topology_map *tm = ddr3_get_topology_map();
492 
493 	/* Title print */
494 	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
495 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
496 		printf("Title: I/F# , Tj, Calibration_n0, Calibration_p0, Calibration_n1, Calibration_p1, Calibration_n2, Calibration_p2,");
497 		for (csindex = 0; csindex < max_cs; csindex++) {
498 			printf("CS%d , ", csindex);
499 			printf("\n");
500 			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
501 			printf("VWTx, VWRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, Cen_tx, Cen_rx, Vref, DQVref,");
502 			printf("\t\t");
503 			for (idx = 0; idx < 11; idx++)
504 				printf("PBSTx-Pad%d,", idx);
505 			printf("\t\t");
506 			for (idx = 0; idx < 11; idx++)
507 				printf("PBSRx-Pad%d,", idx);
508 		}
509 	}
510 	printf("\n");
511 
512 	/* Data print */
513 	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
514 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
515 
516 		printf("Data: %d,%d,", if_id,
517 		       (config_func_info[dev_num].tip_get_temperature != NULL)
518 		       ? (config_func_info[dev_num].
519 			  tip_get_temperature(dev_num)) : (0));
520 
521 		CHECK_STATUS(ddr3_tip_if_read
522 			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8,
523 			      read_data, MASK_ALL_BITS));
524 		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
525 		       ((read_data[if_id] & 0xfc00) >> 10));
526 		CHECK_STATUS(ddr3_tip_if_read
527 			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8,
528 			      read_data, MASK_ALL_BITS));
529 		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
530 		       ((read_data[if_id] & 0xfc00) >> 10));
531 		CHECK_STATUS(ddr3_tip_if_read
532 			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
533 			      read_data, MASK_ALL_BITS));
534 		printf("%d,%d,", ((read_data[if_id] & 0x3f0000) >> 16),
535 		       ((read_data[if_id] & 0xfc00000) >> 22));
536 
537 		for (csindex = 0; csindex < max_cs; csindex++) {
538 			printf("CS%d , ", csindex);
539 			for (bus_id = 0; bus_id < MAX_BUS_NUM; bus_id++) {
540 				printf("\n");
541 				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
542 				ddr3_tip_bus_read(dev_num, if_id,
543 						  ACCESS_TYPE_UNICAST,
544 						  bus_id, DDR_PHY_DATA,
545 						  RESULT_DB_PHY_REG_ADDR +
546 						  csindex, &reg_data);
547 				printf("%d,%d,", (reg_data & 0x1f),
548 				       ((reg_data & 0x3e0) >> 5));
549 				/* WL */
550 				ddr3_tip_bus_read(dev_num, if_id,
551 						  ACCESS_TYPE_UNICAST,
552 						  bus_id, DDR_PHY_DATA,
553 						  WL_PHY_REG +
554 						  csindex * 4, &reg_data);
555 				printf("%d,%d,%d,",
556 				       (reg_data & 0x1f) +
557 				       ((reg_data & 0x1c0) >> 6) * 32,
558 				       (reg_data & 0x1f),
559 				       (reg_data & 0x1c0) >> 6);
560 				/* RL */
561 				CHECK_STATUS(ddr3_tip_if_read
562 					     (dev_num, ACCESS_TYPE_UNICAST,
563 					      if_id,
564 					      READ_DATA_SAMPLE_DELAY,
565 					      read_data, MASK_ALL_BITS));
566 				read_data[if_id] =
567 					(read_data[if_id] &
568 					 (0xf << (4 * csindex))) >>
569 					(4 * csindex);
570 				ddr3_tip_bus_read(dev_num, if_id,
571 						  ACCESS_TYPE_UNICAST, bus_id,
572 						  DDR_PHY_DATA,
573 						  RL_PHY_REG + csindex * 4,
574 						  &reg_data);
575 				printf("%d,%d,%d,%d,",
576 				       (reg_data & 0x1f) +
577 				       ((reg_data & 0x1c0) >> 6) * 32 +
578 				       read_data[if_id] * 64,
579 				       (reg_data & 0x1f),
580 				       ((reg_data & 0x1c0) >> 6),
581 				       read_data[if_id]);
582 				/* Centralization */
583 				ddr3_tip_bus_read(dev_num, if_id,
584 						  ACCESS_TYPE_UNICAST, bus_id,
585 						  DDR_PHY_DATA,
586 						  WRITE_CENTRALIZATION_PHY_REG
587 						  + csindex * 4, &reg_data);
588 				printf("%d,", (reg_data & 0x3f));
589 				ddr3_tip_bus_read(dev_num, if_id,
590 						  ACCESS_TYPE_UNICAST, bus_id,
591 						  DDR_PHY_DATA,
592 						  READ_CENTRALIZATION_PHY_REG
593 						  + csindex * 4, &reg_data);
594 				printf("%d,", (reg_data & 0x1f));
595 				/* Vref */
596 				ddr3_tip_bus_read(dev_num, if_id,
597 						  ACCESS_TYPE_UNICAST, bus_id,
598 						  DDR_PHY_DATA,
599 						  PAD_CONFIG_PHY_REG,
600 						  &reg_data);
601 				printf("%d,", (reg_data & 0x7));
602 				/* DQVref */
603 				/* Need to add the Read Function from device */
604 				printf("%d,", 0);
605 				printf("\t\t");
606 				for (idx = 0; idx < 11; idx++) {
607 					ddr3_tip_bus_read(dev_num, if_id,
608 							  ACCESS_TYPE_UNICAST,
609 							  bus_id, DDR_PHY_DATA,
610 							  0xd0 +
611 							  12 * csindex +
612 							  idx, &reg_data);
613 					printf("%d,", (reg_data & 0x3f));
614 				}
615 				printf("\t\t");
616 				for (idx = 0; idx < 11; idx++) {
617 					ddr3_tip_bus_read(dev_num, if_id,
618 							  ACCESS_TYPE_UNICAST,
619 							  bus_id, DDR_PHY_DATA,
620 							  0x10 +
621 							  16 * csindex +
622 							  idx, &reg_data);
623 					printf("%d,", (reg_data & 0x3f));
624 				}
625 				printf("\t\t");
626 				for (idx = 0; idx < 11; idx++) {
627 					ddr3_tip_bus_read(dev_num, if_id,
628 							  ACCESS_TYPE_UNICAST,
629 							  bus_id, DDR_PHY_DATA,
630 							  0x50 +
631 							  16 * csindex +
632 							  idx, &reg_data);
633 					printf("%d,", (reg_data & 0x3f));
634 				}
635 			}
636 		}
637 	}
638 	printf("\n");
639 
640 	return MV_OK;
641 }
642 
643 /*
644  * Register XSB information
645  */
646 int ddr3_tip_register_xsb_info(u32 dev_num, struct hws_xsb_info *xsb_info_table)
647 {
648 	memcpy(&xsb_info[dev_num], xsb_info_table, sizeof(struct hws_xsb_info));
649 	return MV_OK;
650 }
651 
652 /*
653  * Read ADLL Value
654  */
655 int read_adll_value(u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
656 		    int reg_addr, u32 mask)
657 {
658 	u32 data_value;
659 	u32 if_id = 0, bus_id = 0;
660 	u32 dev_num = 0;
661 	struct hws_topology_map *tm = ddr3_get_topology_map();
662 
663 	/*
664 	 * multi CS support - reg_addr is calculated in the calling function
665 	 * with CS offset
666 	 */
667 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
668 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
669 		for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
670 		     bus_id++) {
671 			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
672 			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
673 						       ACCESS_TYPE_UNICAST,
674 						       bus_id,
675 						       DDR_PHY_DATA, reg_addr,
676 						       &data_value));
677 			pup_values[if_id *
678 				   tm->num_of_bus_per_interface + bus_id] =
679 				data_value & mask;
680 		}
681 	}
682 
683 	return 0;
684 }
685 
686 /*
687  * Write ADLL Value
688  */
689 int write_adll_value(u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
690 		     int reg_addr)
691 {
692 	u32 if_id = 0, bus_id = 0;
693 	u32 dev_num = 0, data;
694 	struct hws_topology_map *tm = ddr3_get_topology_map();
695 
696 	/*
697 	 * multi CS support - reg_addr is calculated in the calling function
698 	 * with CS offset
699 	 */
700 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
701 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
702 		for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
703 		     bus_id++) {
704 			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
705 			data = pup_values[if_id *
706 					  tm->num_of_bus_per_interface +
707 					  bus_id];
708 			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
709 							ACCESS_TYPE_UNICAST,
710 							if_id,
711 							ACCESS_TYPE_UNICAST,
712 							bus_id, DDR_PHY_DATA,
713 							reg_addr, data));
714 		}
715 	}
716 
717 	return 0;
718 }
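/*
 * Typical save/modify/restore pattern built on the two helpers above
 * (a minimal sketch; reg_addr stands for the target PHY data register
 * and the +0x10 shift is an arbitrary example value):
 *
 *	u32 saved[MAX_INTERFACE_NUM * MAX_BUS_NUM];
 *	u32 shifted[MAX_INTERFACE_NUM * MAX_BUS_NUM];
 *	u32 i;
 *
 *	read_adll_value(saved, reg_addr, MASK_ALL_BITS);
 *	for (i = 0; i < MAX_INTERFACE_NUM * MAX_BUS_NUM; i++)
 *		shifted[i] = saved[i] + 0x10;
 *	write_adll_value(shifted, reg_addr);
 *	(run the measurement, e.g. BIST, here, then restore)
 *	write_adll_value(saved, reg_addr);
 *
 * ddr3_tip_run_sweep_test() below uses the same idea against the
 * centralization registers, with the per-CS offset added to reg_addr.
 */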
719 
720 #ifndef EXCLUDE_SWITCH_DEBUG
721 u32 rl_version = 1;		/* 0 - old RL machine */
723 u32 start_xsb_offset = 0;
724 u8 is_rl_old = 0;
725 u8 is_freq_old = 0;
726 u8 is_dfs_disabled = 0;
727 u32 default_centrlization_value = 0x12;
728 u32 vref = 0x4;
729 u32 activate_select_before_run_alg = 1, activate_deselect_after_run_alg = 1,
730 	rl_test = 0, reset_read_fifo = 0;
731 int debug_acc = 0;
732 u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
733 u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
734 u8 cs_mask_reg[] = {
735 	0, 4, 8, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
736 };
737 
738 u32 xsb_test_table[][8] = {
739 	{0x00000000, 0x11111111, 0x22222222, 0x33333333, 0x44444444, 0x55555555,
740 	 0x66666666, 0x77777777},
741 	{0x88888888, 0x99999999, 0xaaaaaaaa, 0xbbbbbbbb, 0xcccccccc, 0xdddddddd,
742 	 0xeeeeeeee, 0xffffffff},
743 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
744 	 0x00000000, 0xffffffff},
745 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
746 	 0x00000000, 0xffffffff},
747 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
748 	 0x00000000, 0xffffffff},
749 	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
750 	 0x00000000, 0xffffffff},
751 	{0x00000000, 0x00000000, 0xffffffff, 0xffffffff, 0x00000000, 0x00000000,
752 	 0xffffffff, 0xffffffff},
753 	{0x00000000, 0x00000000, 0x00000000, 0xffffffff, 0x00000000, 0x00000000,
754 	 0x00000000, 0x00000000},
755 	{0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, 0xffffffff,
756 	 0xffffffff, 0xffffffff}
757 };
758 
759 static int ddr3_tip_access_atr(u32 dev_num, u32 flag_id, u32 value, u32 **ptr);
760 
761 int ddr3_tip_print_adll(void)
762 {
763 	u32 bus_cnt = 0, if_id, data_p1, data_p2, ui_data3, dev_num = 0;
764 	struct hws_topology_map *tm = ddr3_get_topology_map();
765 
766 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
767 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
768 		for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
769 		     bus_cnt++) {
770 			VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
771 			CHECK_STATUS(ddr3_tip_bus_read
772 				     (dev_num, if_id,
773 				      ACCESS_TYPE_UNICAST, bus_cnt,
774 				      DDR_PHY_DATA, 0x1, &data_p1));
775 			CHECK_STATUS(ddr3_tip_bus_read
776 				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
777 				      bus_cnt, DDR_PHY_DATA, 0x2, &data_p2));
778 			CHECK_STATUS(ddr3_tip_bus_read
779 				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
780 				      bus_cnt, DDR_PHY_DATA, 0x3, &ui_data3));
781 			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
782 					  (" IF %d bus_cnt %d  phy_reg_1_data 0x%x phy_reg_2_data 0x%x phy_reg_3_data 0x%x\n",
783 					   if_id, bus_cnt, data_p1, data_p2,
784 					   ui_data3));
785 		}
786 	}
787 
788 	return MV_OK;
789 }
790 
791 /*
792  * Set attribute value
793  */
794 int ddr3_tip_set_atr(u32 dev_num, u32 flag_id, u32 value)
795 {
796 	int ret;
797 	u32 *ptr_flag = NULL;
798 
799 	ret = ddr3_tip_access_atr(dev_num, flag_id, value, &ptr_flag);
800 	if (ptr_flag != NULL) {
801 		printf("ddr3_tip_set_atr Flag ID 0x%x value is set to 0x%x (was 0x%x)\n",
802 		       flag_id, value, *ptr_flag);
803 		*ptr_flag = value;
804 	} else {
805 		printf("ddr3_tip_set_atr Flag ID 0x%x value is set to 0x%x\n",
806 		       flag_id, value);
807 	}
808 
809 	return ret;
810 }
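/*
 * Illustrative attribute writes (flag IDs taken from the table in
 * ddr3_tip_access_atr() below; device number 0 and the values are
 * arbitrary examples):
 *
 *	ddr3_tip_set_atr(0, 0x5d, 0x3);			writes vref
 *	ddr3_tip_set_atr(0, 0x102, DEBUG_LEVEL_TRACE);	sets debug_training
 */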
811 
812 /*
813  * Access attribute
814  */
815 static int ddr3_tip_access_atr(u32 dev_num, u32 flag_id, u32 value, u32 **ptr)
816 {
817 	u32 tmp_val = 0, if_id = 0, pup_id = 0;
818 	struct hws_topology_map *tm = ddr3_get_topology_map();
819 
820 	*ptr = NULL;
821 
822 	switch (flag_id) {
823 	case 0:
824 		*ptr = (u32 *)&(tm->if_act_mask);
825 		break;
826 
827 	case 0x1:
828 		*ptr = (u32 *)&mask_tune_func;
829 		break;
830 
831 	case 0x2:
832 		*ptr = (u32 *)&low_freq;
833 		break;
834 
835 	case 0x3:
836 		*ptr = (u32 *)&medium_freq;
837 		break;
838 
839 	case 0x4:
840 		*ptr = (u32 *)&generic_init_controller;
841 		break;
842 
843 	case 0x5:
844 		*ptr = (u32 *)&rl_version;
845 		break;
846 
847 	case 0x8:
848 		*ptr = (u32 *)&start_xsb_offset;
849 		break;
850 
851 	case 0x20:
852 		*ptr = (u32 *)&is_rl_old;
853 		break;
854 
855 	case 0x21:
856 		*ptr = (u32 *)&is_freq_old;
857 		break;
858 
859 	case 0x23:
860 		*ptr = (u32 *)&is_dfs_disabled;
861 		break;
862 
863 	case 0x24:
864 		*ptr = (u32 *)&is_pll_before_init;
865 		break;
866 
867 	case 0x25:
868 		*ptr = (u32 *)&is_adll_calib_before_init;
869 		break;
870 #ifdef STATIC_ALGO_SUPPORT
871 	case 0x26:
872 		*ptr = (u32 *)&(silicon_delay[0]);
873 		break;
874 
875 	case 0x27:
876 		*ptr = (u32 *)&wl_debug_delay;
877 		break;
878 #endif
879 	case 0x28:
880 		*ptr = (u32 *)&is_tune_result;
881 		break;
882 
883 	case 0x29:
884 		*ptr = (u32 *)&is_validate_window_per_if;
885 		break;
886 
887 	case 0x2a:
888 		*ptr = (u32 *)&is_validate_window_per_pup;
889 		break;
890 
891 	case 0x30:
892 		*ptr = (u32 *)&sweep_cnt;
893 		break;
894 
895 	case 0x31:
896 		*ptr = (u32 *)&is_bist_reset_bit;
897 		break;
898 
899 	case 0x32:
900 		*ptr = (u32 *)&is_dfs_in_init;
901 		break;
902 
903 	case 0x33:
904 		*ptr = (u32 *)&p_finger;
905 		break;
906 
907 	case 0x34:
908 		*ptr = (u32 *)&n_finger;
909 		break;
910 
911 	case 0x35:
912 		*ptr = (u32 *)&init_freq;
913 		break;
914 
915 	case 0x36:
916 		*ptr = (u32 *)&(freq_val[DDR_FREQ_LOW_FREQ]);
917 		break;
918 
919 	case 0x37:
920 		*ptr = (u32 *)&start_pattern;
921 		break;
922 
923 	case 0x38:
924 		*ptr = (u32 *)&end_pattern;
925 		break;
926 
927 	case 0x39:
928 		*ptr = (u32 *)&phy_reg0_val;
929 		break;
930 
931 	case 0x4a:
932 		*ptr = (u32 *)&phy_reg1_val;
933 		break;
934 
935 	case 0x4b:
936 		*ptr = (u32 *)&phy_reg2_val;
937 		break;
938 
939 	case 0x4c:
940 		*ptr = (u32 *)&phy_reg3_val;
941 		break;
942 
943 	case 0x4e:
944 		*ptr = (u32 *)&sweep_pattern;
945 		break;
946 
947 	case 0x50:
948 		*ptr = (u32 *)&is_rzq6;
949 		break;
950 
951 	case 0x51:
952 		*ptr = (u32 *)&znri_data_phy_val;
953 		break;
954 
955 	case 0x52:
956 		*ptr = (u32 *)&zpri_data_phy_val;
957 		break;
958 
959 	case 0x53:
960 		*ptr = (u32 *)&finger_test;
961 		break;
962 
963 	case 0x54:
964 		*ptr = (u32 *)&n_finger_start;
965 		break;
966 
967 	case 0x55:
968 		*ptr = (u32 *)&n_finger_end;
969 		break;
970 
971 	case 0x56:
972 		*ptr = (u32 *)&p_finger_start;
973 		break;
974 
975 	case 0x57:
976 		*ptr = (u32 *)&p_finger_end;
977 		break;
978 
979 	case 0x58:
980 		*ptr = (u32 *)&p_finger_step;
981 		break;
982 
983 	case 0x59:
984 		*ptr = (u32 *)&n_finger_step;
985 		break;
986 
987 	case 0x5a:
988 		*ptr = (u32 *)&znri_ctrl_phy_val;
989 		break;
990 
991 	case 0x5b:
992 		*ptr = (u32 *)&zpri_ctrl_phy_val;
993 		break;
994 
995 	case 0x5c:
996 		*ptr = (u32 *)&is_reg_dump;
997 		break;
998 
999 	case 0x5d:
1000 		*ptr = (u32 *)&vref;
1001 		break;
1002 
1003 	case 0x5e:
1004 		*ptr = (u32 *)&mode2_t;
1005 		break;
1006 
1007 	case 0x5f:
1008 		*ptr = (u32 *)&xsb_validate_type;
1009 		break;
1010 
1011 	case 0x60:
1012 		*ptr = (u32 *)&xsb_validation_base_address;
1013 		break;
1014 
1015 	case 0x67:
1016 		*ptr = (u32 *)&activate_select_before_run_alg;
1017 		break;
1018 
1019 	case 0x68:
1020 		*ptr = (u32 *)&activate_deselect_after_run_alg;
1021 		break;
1022 
1023 	case 0x69:
1024 		*ptr = (u32 *)&odt_additional;
1025 		break;
1026 
1027 	case 0x70:
1028 		*ptr = (u32 *)&debug_mode;
1029 		break;
1030 
1031 	case 0x71:
1032 		*ptr = (u32 *)&pbs_pattern;
1033 		break;
1034 
1035 	case 0x72:
1036 		*ptr = (u32 *)&delay_enable;
1037 		break;
1038 
1039 	case 0x73:
1040 		*ptr = (u32 *)&ck_delay;
1041 		break;
1042 
1043 	case 0x74:
1044 		*ptr = (u32 *)&ck_delay_16;
1045 		break;
1046 
1047 	case 0x75:
1048 		*ptr = (u32 *)&ca_delay;
1049 		break;
1050 
1051 	case 0x100:
1052 		*ptr = (u32 *)&debug_dunit;
1053 		break;
1054 
1055 	case 0x101:
1056 		debug_acc = (int)value;
1057 		break;
1058 
1059 	case 0x102:
1060 		debug_training = (u8)value;
1061 		break;
1062 
1063 	case 0x103:
1064 		debug_training_bist = (u8)value;
1065 		break;
1066 
1067 	case 0x104:
1068 		debug_centralization = (u8)value;
1069 		break;
1070 
1071 	case 0x105:
1072 		debug_training_ip = (u8)value;
1073 		break;
1074 
1075 	case 0x106:
1076 		debug_leveling = (u8)value;
1077 		break;
1078 
1079 	case 0x107:
1080 		debug_pbs = (u8)value;
1081 		break;
1082 
1083 	case 0x108:
1084 		debug_training_static = (u8)value;
1085 		break;
1086 
1087 	case 0x109:
1088 		debug_training_access = (u8)value;
1089 		break;
1090 
1091 	case 0x112:
1092 		*ptr = &start_pattern;
1093 		break;
1094 
1095 	case 0x113:
1096 		*ptr = &end_pattern;
1097 		break;
1098 
1099 	default:
1100 		if ((flag_id >= 0x200) && (flag_id < 0x210)) {
1101 			if_id = flag_id - 0x200;
1102 			*ptr = (u32 *)&(tm->interface_params
1103 					[if_id].memory_freq);
1104 		} else if ((flag_id >= 0x210) && (flag_id < 0x220)) {
1105 			if_id = flag_id - 0x210;
1106 			*ptr = (u32 *)&(tm->interface_params
1107 					[if_id].speed_bin_index);
1108 		} else if ((flag_id >= 0x220) && (flag_id < 0x230)) {
1109 			if_id = flag_id - 0x220;
1110 			*ptr = (u32 *)&(tm->interface_params
1111 					[if_id].bus_width);
1112 		} else if ((flag_id >= 0x230) && (flag_id < 0x240)) {
1113 			if_id = flag_id - 0x230;
1114 			*ptr = (u32 *)&(tm->interface_params
1115 					[if_id].memory_size);
1116 		} else if ((flag_id >= 0x240) && (flag_id < 0x250)) {
1117 			if_id = flag_id - 0x240;
1118 			*ptr = (u32 *)&(tm->interface_params
1119 					[if_id].cas_l);
1120 		} else if ((flag_id >= 0x250) && (flag_id < 0x260)) {
1121 			if_id = flag_id - 0x250;
1122 			*ptr = (u32 *)&(tm->interface_params
1123 					[if_id].cas_wl);
1124 		} else if ((flag_id >= 0x270) && (flag_id < 0x2cf)) {
1125 			if_id = (flag_id - 0x270) / MAX_BUS_NUM;
1126 			pup_id = (flag_id - 0x270) % MAX_BUS_NUM;
1127 			*ptr = (u32 *)&(tm->interface_params[if_id].
1128 					as_bus_params[pup_id].is_ck_swap);
1129 		} else if ((flag_id >= 0x2d0) && (flag_id < 0x32f)) {
1130 			if_id = (flag_id - 0x2d0) / MAX_BUS_NUM;
1131 			pup_id = (flag_id - 0x2d0) % MAX_BUS_NUM;
1132 			*ptr = (u32 *)&(tm->interface_params[if_id].
1133 					as_bus_params[pup_id].is_dqs_swap);
1134 		} else if ((flag_id >= 0x330) && (flag_id < 0x38f)) {
1135 			if_id = (flag_id - 0x330) / MAX_BUS_NUM;
1136 			pup_id = (flag_id - 0x330) % MAX_BUS_NUM;
1137 			*ptr = (u32 *)&(tm->interface_params[if_id].
1138 					as_bus_params[pup_id].cs_bitmask);
1139 		} else if ((flag_id >= 0x390) && (flag_id < 0x3ef)) {
1140 			if_id = (flag_id - 0x390) / MAX_BUS_NUM;
1141 			pup_id = (flag_id - 0x390) % MAX_BUS_NUM;
1142 			*ptr = (u32 *)&(tm->interface_params
1143 					[if_id].as_bus_params
1144 					[pup_id].mirror_enable_bitmask);
1145 		} else if ((flag_id >= 0x500) && (flag_id <= 0x50f)) {
1146 			tmp_val = flag_id - 0x500;
1147 			*ptr = (u32 *)&(clamp_tbl[tmp_val]);
1148 		} else {
1149 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1150 					  ("flag_id out of boundary %d\n",
1151 					   flag_id));
1152 			return MV_BAD_PARAM;
1153 		}
1154 	}
1155 
1156 	return MV_OK;
1157 }
1158 
1159 #ifndef EXCLUDE_SWITCH_DEBUG
1160 /*
1161  * Print ADLL
1162  */
1163 int print_adll(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
1164 {
1165 	u32 i, j;
1166 	struct hws_topology_map *tm = ddr3_get_topology_map();
1167 
1168 	for (j = 0; j < tm->num_of_bus_per_interface; j++) {
1169 		VALIDATE_ACTIVE(tm->bus_act_mask, j);
1170 		for (i = 0; i < MAX_INTERFACE_NUM; i++) {
1171 			printf("%d ,",
1172 			       adll[i * tm->num_of_bus_per_interface + j]);
1173 		}
1174 	}
1175 	printf("\n");
1176 
1177 	return MV_OK;
1178 }
1179 #endif
1180 
1181 /* byte_index - only byte 0, 1, 2, or 3; 0xff - test all bytes */
1182 static u32 ddr3_tip_compare(u32 if_id, u32 *p_src, u32 *p_dst,
1183 			    u32 byte_index)
1184 {
1185 	u32 burst_cnt = 0, addr_offset, i_id;
1186 	int b_is_fail = 0;
1187 
1188 	addr_offset =
1189 		(byte_index ==
1190 		 0xff) ? (u32) 0xffffffff : (u32) (0xff << (byte_index * 8));
1191 	for (burst_cnt = 0; burst_cnt < EXT_ACCESS_BURST_LENGTH; burst_cnt++) {
1192 		if ((p_src[burst_cnt] & addr_offset) !=
1193 		    (p_dst[burst_cnt] & addr_offset))
1194 			b_is_fail = 1;
1195 	}
1196 
1197 	if (b_is_fail == 1) {
1198 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1199 				  ("IF %d exp: ", if_id));
1200 		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
1201 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1202 					  ("0x%8x ", p_src[i_id]));
1203 		}
1204 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1205 				  ("\n_i_f %d rcv: ", if_id));
1206 		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
1207 			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1208 					  ("(0x%8x ", p_dst[i_id]));
1209 		}
1210 		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("\n "));
1211 	}
1212 
1213 	return b_is_fail;
1214 }
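/*
 * Worked example of the byte_index masking above: byte_index == 2 gives
 * an addr_offset mask of 0x00ff0000, so only bits 23:16 of each word in
 * the burst are compared; byte_index == 0xff compares all 32 bits.
 */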
1215 
1216 /* test_type = 0-tx , 1-rx */
1217 int ddr3_tip_sweep_test(u32 dev_num, u32 test_type,
1218 			u32 mem_addr, u32 is_modify_adll,
1219 			u32 start_if, u32 end_if, u32 startpup, u32 endpup)
1220 {
1221 	u32 bus_cnt = 0, adll_val = 0, if_id, ui_prev_adll, ui_mask_bit,
1222 		end_adll, start_adll;
1223 	u32 reg_addr = 0;
1224 	struct hws_topology_map *tm = ddr3_get_topology_map();
1225 
1226 	if (test_type == 0) {
1227 		reg_addr = 1;
1228 		ui_mask_bit = 0x3f;
1229 		start_adll = 0;
1230 		end_adll = ui_mask_bit;
1231 	} else {
1232 		reg_addr = 3;
1233 		ui_mask_bit = 0x1f;
1234 		start_adll = 0;
1235 		end_adll = ui_mask_bit;
1236 	}
1237 
1238 	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
1239 			  ("==============================\n"));
1240 	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
1241 			  ("Test type %d (0-tx, 1-rx)\n", test_type));
1242 
1243 	for (if_id = start_if; if_id <= end_if; if_id++) {
1244 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1245 		for (bus_cnt = startpup; bus_cnt < endpup; bus_cnt++) {
1246 			CHECK_STATUS(ddr3_tip_bus_read
1247 				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
1248 				      bus_cnt, DDR_PHY_DATA, reg_addr,
1249 				      &ui_prev_adll));
1250 
1251 			for (adll_val = start_adll; adll_val <= end_adll;
1252 			     adll_val++) {
1253 				if (is_modify_adll == 1) {
1254 					CHECK_STATUS(ddr3_tip_bus_read_modify_write
1255 						     (dev_num,
1256 						      ACCESS_TYPE_UNICAST,
1257 						      if_id, bus_cnt,
1258 						      DDR_PHY_DATA, reg_addr,
1259 						      adll_val, ui_mask_bit));
1260 				}
1261 			}
1262 			if (is_modify_adll == 1) {
1263 				CHECK_STATUS(ddr3_tip_bus_write
1264 					     (dev_num, ACCESS_TYPE_UNICAST,
1265 					      if_id, ACCESS_TYPE_UNICAST,
1266 					      bus_cnt, DDR_PHY_DATA, reg_addr,
1267 					      ui_prev_adll));
1268 			}
1269 			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("\n"));
1270 		}
1271 		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("\n"));
1272 	}
1273 
1274 	return MV_OK;
1275 }
1276 
1277 #ifndef EXCLUDE_SWITCH_DEBUG
1278 /*
1279  * Sweep validation
1280  */
1281 int ddr3_tip_run_sweep_test(int dev_num, u32 repeat_num, u32 direction,
1282 			    u32 mode)
1283 {
1284 	u32 pup = 0, start_pup = 0, end_pup = 0;
1285 	u32 adll = 0;
1286 	u32 res[MAX_INTERFACE_NUM] = { 0 };
1287 	int if_id = 0;
1288 	u32 adll_value = 0;
1289 	int reg = (direction == 0) ? WRITE_CENTRALIZATION_PHY_REG :
1290 		READ_CENTRALIZATION_PHY_REG;
1291 	enum hws_access_type pup_access;
1292 	u32 cs;
1293 	u32 max_cs = hws_ddr3_tip_max_cs_get();
1294 	struct hws_topology_map *tm = ddr3_get_topology_map();
1295 
1296 	if (mode == 1) {
1297 		/* per pup */
1298 		start_pup = 0;
1299 		end_pup = tm->num_of_bus_per_interface - 1;
1300 		pup_access = ACCESS_TYPE_UNICAST;
1301 	} else {
1302 		start_pup = 0;
1303 		end_pup = 0;
1304 		pup_access = ACCESS_TYPE_MULTICAST;
1305 	}
1306 
1307 	for (cs = 0; cs < max_cs; cs++) {
1308 		for (adll = 0; adll < ADLL_LENGTH; adll++) {
1309 			for (if_id = 0;
1310 			     if_id <= MAX_INTERFACE_NUM - 1;
1311 			     if_id++) {
1312 				VALIDATE_ACTIVE
1313 					(tm->if_act_mask,
1314 					 if_id);
1315 				for (pup = start_pup; pup <= end_pup; pup++) {
1316 					ctrl_sweepres[adll][if_id][pup] =
1317 						0;
1318 				}
1319 			}
1320 		}
1321 
1322 		for (adll = 0; adll < (MAX_INTERFACE_NUM * MAX_BUS_NUM); adll++)
1323 			ctrl_adll[adll] = 0;
1324 		/* Save DQS value (after the algorithm run) */
1325 		read_adll_value(ctrl_adll,
1326 				(reg + (cs * CS_REGISTER_ADDR_OFFSET)),
1327 				MASK_ALL_BITS);
1328 
1329 		/*
1330 		 * Sweep the ADLL from 0 to 31 on all I/Fs and all pups and
1331 		 * perform BIST at each stage.
1332 		 */
1333 		for (pup = start_pup; pup <= end_pup; pup++) {
1334 			for (adll = 0; adll < ADLL_LENGTH; adll++) {
1335 				adll_value =
1336 					(direction == 0) ? (adll * 2) : adll;
1337 				CHECK_STATUS(ddr3_tip_bus_write
1338 					     (dev_num, ACCESS_TYPE_MULTICAST, 0,
1339 					      pup_access, pup, DDR_PHY_DATA,
1340 					      reg + CS_REG_VALUE(cs),
1341 					      adll_value));
1342 				hws_ddr3_run_bist(dev_num, sweep_pattern, res,
1343 						  cs);
1344 				/* ddr3_tip_reset_fifo_ptr(dev_num); */
1345 				for (if_id = 0;
1346 				     if_id <= MAX_INTERFACE_NUM - 1;
1347 				     if_id++) {
1348 					VALIDATE_ACTIVE
1349 						(tm->if_act_mask,
1350 						 if_id);
1351 					ctrl_sweepres[adll][if_id][pup]
1352 						= res[if_id];
1353 					if (mode == 1) {
1354 						CHECK_STATUS
1355 							(ddr3_tip_bus_write
1356 							 (dev_num,
1357 							  ACCESS_TYPE_UNICAST,
1358 							  if_id,
1359 							  ACCESS_TYPE_UNICAST,
1360 							  pup,
1361 							  DDR_PHY_DATA,
1362 							  reg + CS_REG_VALUE(cs),
1363 							  ctrl_adll[if_id *
1364 								    cs *
1365 								    tm->num_of_bus_per_interface
1366 								    + pup]));
1367 					}
1368 				}
1369 			}
1370 		}
1371 		printf("Final, CS %d,%s, Sweep, Result, Adll,", cs,
1372 		       ((direction == 0) ? "TX" : "RX"));
1373 		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1374 			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1375 			if (mode == 1) {
1376 				for (pup = start_pup; pup <= end_pup; pup++) {
1377 					VALIDATE_ACTIVE(tm->bus_act_mask, pup);
1378 					printf("I/F%d-PHY%d , ", if_id, pup);
1379 				}
1380 			} else {
1381 				printf("I/F%d , ", if_id);
1382 			}
1383 		}
1384 		printf("\n");
1385 
1386 		for (adll = 0; adll < ADLL_LENGTH; adll++) {
1387 			adll_value = (direction == 0) ? (adll * 2) : adll;
1388 			printf("Final,%s, Sweep, Result, %d ,",
1389 			       ((direction == 0) ? "TX" : "RX"), adll_value);
1390 
1391 			for (if_id = 0;
1392 			     if_id <= MAX_INTERFACE_NUM - 1;
1393 			     if_id++) {
1394 				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1395 				for (pup = start_pup; pup <= end_pup; pup++) {
1396 					printf("%d , ",
1397 					       ctrl_sweepres[adll][if_id]
1398 					       [pup]);
1399 				}
1400 			}
1401 			printf("\n");
1402 		}
1403 
1404 		/*
1405 		 * Write back to the PHY the Rx DQS value we stored at
1406 		 * the beginning.
1407 		 */
1408 		write_adll_value(ctrl_adll,
1409 				 (reg + cs * CS_REGISTER_ADDR_OFFSET));
1410 		/* print adll results */
1411 		read_adll_value(ctrl_adll, (reg + cs * CS_REGISTER_ADDR_OFFSET),
1412 				MASK_ALL_BITS);
1413 		printf("%s, DQS, ADLL,,,", (direction == 0) ? "Tx" : "Rx");
1414 		print_adll(dev_num, ctrl_adll);
1415 	}
1416 	ddr3_tip_reset_fifo_ptr(dev_num);
1417 
1418 	return 0;
1419 }
1420 
1421 void print_topology(struct hws_topology_map *topology_db)
1422 {
1423 	u32 ui, uj;
1424 
1425 	printf("\tinterface_mask: 0x%x\n", topology_db->if_act_mask);
1426 	printf("\tNum Bus:  %d\n", topology_db->num_of_bus_per_interface);
1427 	printf("\tbus_act_mask: 0x%x\n", topology_db->bus_act_mask);
1428 
1429 	for (ui = 0; ui < MAX_INTERFACE_NUM; ui++) {
1430 		VALIDATE_ACTIVE(topology_db->if_act_mask, ui);
1431 		printf("\n\tInterface ID: %d\n", ui);
1432 		printf("\t\tDDR Frequency: %s\n",
1433 		       convert_freq(topology_db->
1434 				    interface_params[ui].memory_freq));
1435 		printf("\t\tSpeed_bin: %d\n",
1436 		       topology_db->interface_params[ui].speed_bin_index);
1437 		printf("\t\tBus_width: %d\n",
1438 		       (4 << topology_db->interface_params[ui].bus_width));
1439 		printf("\t\tMem_size: %s\n",
1440 		       convert_mem_size(topology_db->
1441 					interface_params[ui].memory_size));
1442 		printf("\t\tCAS-WL: %d\n",
1443 		       topology_db->interface_params[ui].cas_wl);
1444 		printf("\t\tCAS-L: %d\n",
1445 		       topology_db->interface_params[ui].cas_l);
1446 		printf("\t\tTemperature: %d\n",
1447 		       topology_db->interface_params[ui].interface_temp);
1448 		printf("\n");
1449 		for (uj = 0; uj < 4; uj++) {
1450 			printf("\t\tBus %d parameters- CS Mask: 0x%x\t", uj,
1451 			       topology_db->interface_params[ui].
1452 			       as_bus_params[uj].cs_bitmask);
1453 			printf("Mirror: 0x%x\t",
1454 			       topology_db->interface_params[ui].
1455 			       as_bus_params[uj].mirror_enable_bitmask);
1456 			printf("DQS Swap is %s \t",
1457 			       (topology_db->
1458 				interface_params[ui].as_bus_params[uj].
1459 				is_dqs_swap == 1) ? "enabled" : "disabled");
1460 			printf("Ck Swap:%s\t",
1461 			       (topology_db->
1462 				interface_params[ui].as_bus_params[uj].
1463 				is_ck_swap == 1) ? "enabled" : "disabled");
1464 			printf("\n");
1465 		}
1466 	}
1467 }
1468 #endif
1469 
1470 /*
1471  * Execute XSB Test transaction (rd/wr/both)
1472  */
1473 int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
1474 		 u32 read_type, u32 burst_length)
1475 {
1476 	u32 seq = 0, if_id = 0, addr, cnt;
1477 	int ret = MV_OK, ret_tmp;
1478 	u32 data_read[MAX_INTERFACE_NUM];
1479 	struct hws_topology_map *tm = ddr3_get_topology_map();
1480 
1481 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1482 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1483 		addr = mem_addr;
1484 		for (cnt = 0; cnt <= burst_length; cnt++) {
1485 			seq = (seq + 1) % 8;
1486 			if (write_type != 0) {
1487 				CHECK_STATUS(ddr3_tip_ext_write
1488 					     (dev_num, if_id, addr, 1,
1489 					      xsb_test_table[seq]));
1490 			}
1491 			if (read_type != 0) {
1492 				CHECK_STATUS(ddr3_tip_ext_read
1493 					     (dev_num, if_id, addr, 1,
1494 					      data_read));
1495 			}
1496 			if ((read_type != 0) && (write_type != 0)) {
1497 				ret_tmp =
1498 					ddr3_tip_compare(if_id,
1499 							 xsb_test_table[seq],
1500 							 data_read,
1501 							 0xff);
1502 				addr += (EXT_ACCESS_BURST_LENGTH * 4);
1503 				ret = (ret != MV_OK) ? ret : ret_tmp;
1504 			}
1505 		}
1506 	}
1507 
1508 	return ret;
1509 }
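/*
 * Illustrative call (address and burst count are arbitrary example
 * values):
 *
 *	run_xsb_test(0, 0x100000, 1, 1, 100);
 *
 * writes the test patterns, reads them back and compares the two; with
 * write_type or read_type set to 0 the corresponding transaction and
 * the comparison are skipped.
 */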
1510 
1511 #else /*EXCLUDE_SWITCH_DEBUG */
1512 
1513 u32 rl_version = 1;		/* 0 - old RL machine */
1514 u32 vref = 0x4;
1515 u32 start_xsb_offset = 0;
1516 u8 cs_mask_reg[] = {
1517 	0, 4, 8, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
1518 };
1519 
1520 int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
1521 		 u32 read_type, u32 burst_length)
1522 {
1523 	return MV_OK;
1524 }
1525 
1526 #endif
1527