xref: /openbmc/u-boot/drivers/ddr/marvell/a38x/ddr3_training_ip_engine.c (revision 04ab29ab257598b0e33785c075c9163ea090e6b7)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) Marvell International Ltd. and its affiliates
4  */
5 
6 #include <common.h>
7 #include <spl.h>
8 #include <asm/io.h>
9 #include <asm/arch/cpu.h>
10 #include <asm/arch/soc.h>
11 
12 #include "ddr3_init.h"
13 
/* Alternating-bit data patterns used by the training engine */
#define PATTERN_1	0x55555555
#define PATTERN_2	0xaaaaaaaa

/*
 * A search window [e1, e2] is considered valid when it is wider than
 * 33 taps and starts below tap 67 (tap values are ADLL units).
 */
#define VALIDATE_TRAINING_LIMIT(e1, e2)			\
	((((e2) - (e1) + 1) > 33) && ((e1) < 67))

/* Backup of per-bit PHY register values, indexed [interface][bus][bit] */
u32 phy_reg_bk[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];

/*
 * Flat result buffer for all training searches; one slice per search
 * direction, addressed through ddr3_tip_get_buf_ptr().
 */
u32 training_res[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS *
		 HWS_SEARCH_DIR_LIMIT];
24 
/*
 * Per-DQ result/mask register map: one entry per data bit,
 * ordered as pup N bits 0..7 for pups 0..4 (40 entries total).
 * Index = pup * 8 + bit.
 */
u16 mask_results_dq_reg_map[] = {
	RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
	RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
	RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
	RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
	RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
	RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
	RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
	RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
	RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
	RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
	RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
	RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
	RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
	RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
	RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
	RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
};
47 
/* Per-byte (per-pup) result/mask register map; index = pup number */
u16 mask_results_pup_reg_map[] = {
	RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
	RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_3_REG,
	RESULT_CONTROL_BYTE_PUP_4_REG
};
53 
/*
 * Per-DQ result register map for the "pup3 is ECC" configuration.
 * Entries 24-31 (the pup3 slot) and entries 32-39 (the pup4 slot) both
 * point at the PUP_4 result registers — presumably because in this ECC
 * layout the byte that would occupy pup3 is routed through pup4's
 * result registers. NOTE(review): confirm the intentional duplication
 * against the topology/ECC documentation.
 */
u16 mask_results_dq_reg_map_pup3_ecc[] = {
	RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
	RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
	RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
	RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
	RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
	RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
	RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
	RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
	RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
	RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
	RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
	RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
};
76 
/*
 * Per-byte result register map for the "pup3 is ECC" configuration;
 * entries 3 and 4 both reference the PUP_4 register, mirroring the
 * pup3->pup4 mapping of the per-DQ ECC table above.
 */
u16 mask_results_pup_reg_map_pup3_ecc[] = {
	RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
	RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_4_REG,
	RESULT_CONTROL_BYTE_PUP_4_REG
};
82 
/* ODPG pattern descriptors for a 16-bit wide interface */
struct pattern_info pattern_table_16[] = {
	/*
	 * num tx phases, tx burst, delay between, rx pattern,
	 * start_address, pattern_len
	 */
	{1, 1, 2, 1, 0x0080, 2},	/* PATTERN_PBS1 */
	{1, 1, 2, 1, 0x00c0, 2},	/* PATTERN_PBS2 */
	{1, 1, 2, 1, 0x0100, 2},	/* PATTERN_RL */
	{0xf, 0x7, 2, 0x7, 0x0140, 16},	/* PATTERN_STATIC_PBS */
	{0xf, 0x7, 2, 0x7, 0x0190, 16},	/* PATTERN_KILLER_DQ0 */
	{0xf, 0x7, 2, 0x7, 0x01d0, 16},	/* PATTERN_KILLER_DQ1 */
	{0xf, 0x7, 2, 0x7, 0x0210, 16},	/* PATTERN_KILLER_DQ2 */
	{0xf, 0x7, 2, 0x7, 0x0250, 16},	/* PATTERN_KILLER_DQ3 */
	{0xf, 0x7, 2, 0x7, 0x0290, 16},	/* PATTERN_KILLER_DQ4 */
	{0xf, 0x7, 2, 0x7, 0x02d0, 16},	/* PATTERN_KILLER_DQ5 */
	{0xf, 0x7, 2, 0x7, 0x0310, 16},	/* PATTERN_KILLER_DQ6 */
	{0xf, 0x7, 2, 0x7, 0x0350, 16},	/* PATTERN_KILLER_DQ7 */
	{1, 1, 2, 1, 0x0380, 2},	/* PATTERN_PBS3 */
	{1, 1, 2, 1, 0x0000, 2},	/* PATTERN_RL2 */
	{1, 1, 2, 1, 0x0040, 2},	/* PATTERN_TEST */
	{0xf, 0x7, 2, 0x7, 0x03c0, 16},	/* PATTERN_FULL_SSO_1T */
	{0xf, 0x7, 2, 0x7, 0x0400, 16},	/* PATTERN_FULL_SSO_2T */
	{0xf, 0x7, 2, 0x7, 0x0440, 16},	/* PATTERN_FULL_SSO_3T */
	{0xf, 0x7, 2, 0x7, 0x0480, 16},	/* PATTERN_FULL_SSO_4T */
	{0xf, 0x7, 2, 0x7, 0x04c0, 16}	/* PATTERN_VREF */
	/* Note: actual start_address is <<3 of defined address */
};
110 
/* ODPG pattern descriptors for a 32-bit wide interface */
struct pattern_info pattern_table_32[] = {
	/*
	 * num tx phases, tx burst, delay between, rx pattern,
	 * start_address, pattern_len
	 */
	{3, 3, 2, 3, 0x0080, 4},	/* PATTERN_PBS1 */
	{3, 3, 2, 3, 0x00c0, 4},	/* PATTERN_PBS2 */
	{3, 3, 2, 3, 0x0100, 4},	/* PATTERN_RL */
	{0x1f, 0xf, 2, 0xf, 0x0140, 32},	/* PATTERN_STATIC_PBS */
	{0x1f, 0xf, 2, 0xf, 0x0190, 32},	/* PATTERN_KILLER_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x01d0, 32},	/* PATTERN_KILLER_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x0210, 32},	/* PATTERN_KILLER_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0250, 32},	/* PATTERN_KILLER_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0290, 32},	/* PATTERN_KILLER_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x02d0, 32},	/* PATTERN_KILLER_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0310, 32},	/* PATTERN_KILLER_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x0350, 32},	/* PATTERN_KILLER_DQ7 */
	{3, 3, 2, 3, 0x0380, 4},	/* PATTERN_PBS3 */
	{3, 3, 2, 3, 0x0000, 4},	/* PATTERN_RL2 */
	{3, 3, 2, 3, 0x0040, 4},	/* PATTERN_TEST */
	{0x1f, 0xf, 2, 0xf, 0x03c0, 32},	/* PATTERN_FULL_SSO_1T */
	{0x1f, 0xf, 2, 0xf, 0x0400, 32},	/* PATTERN_FULL_SSO_2T */
	{0x1f, 0xf, 2, 0xf, 0x0440, 32},	/* PATTERN_FULL_SSO_3T */
	{0x1f, 0xf, 2, 0xf, 0x0480, 32},	/* PATTERN_FULL_SSO_4T */
	{0x1f, 0xf, 2, 0xf, 0x04c0, 32}	/* PATTERN_VREF */
	/* Note: actual start_address is <<3 of defined address */
};
138 
/*
 * Global snapshot of the most recent training-call parameters.
 * None of these are read within this file — presumably kept for
 * debug dumps elsewhere; verify before removing.
 * NOTE(review): several identifiers look misspelled
 * ("traintrain_cs_type", "traine_search_dir", "train_if_acess");
 * kept as-is since they have external linkage and may be referenced
 * from other translation units.
 */
u32 train_dev_num;
enum hws_ddr_cs traintrain_cs_type;
u32 train_pup_num;
enum hws_training_result train_result_type;
enum hws_control_element train_control_element;
enum hws_search_dir traine_search_dir;
enum hws_dir train_direction;
u32 train_if_select;
u32 train_init_value;
u32 train_number_iterations;
enum hws_pattern train_pattern;
enum hws_edge_compare train_edge_compare;
u32 train_cs_num;
u32 train_if_acess, train_if_id, train_pup_access;
/* polling budget for the "training done" status loops */
u32 max_polling_for_done = 1000000;
154 
155 u32 *ddr3_tip_get_buf_ptr(u32 dev_num, enum hws_search_dir search,
156 			  enum hws_training_result result_type,
157 			  u32 interface_num)
158 {
159 	u32 *buf_ptr = NULL;
160 
161 	buf_ptr = &training_res
162 		[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS * search +
163 		 interface_num * MAX_BUS_NUM * BUS_WIDTH_IN_BITS];
164 
165 	return buf_ptr;
166 }
167 
168 /*
169  * IP Training search
170  * Note: for one edge search only from fail to pass, else jitter can
 * be entered into solution.
172  */
/*
 * Run one hardware training search on the ODPG engine.
 *
 * Sequence: select CS, load the pattern into the ODPG, configure the
 * ODPG for the requested direction/edge-compare/search-direction,
 * program the PHY register the search will sweep, unmask the result
 * registers relevant to result_type, trigger training, then poll each
 * active interface for completion and record per-interface status in
 * train_status[].
 *
 * Returns MV_OK on completion (individual interfaces may still report
 * HWS_TRAINING_IP_STATUS_TIMEOUT/FAIL via train_status), or
 * MV_BAD_PARAM when train_status is NULL.
 */
int ddr3_tip_ip_training(u32 dev_num, enum hws_access_type access_type,
			 u32 interface_num,
			 enum hws_access_type pup_access_type,
			 u32 pup_num, enum hws_training_result result_type,
			 enum hws_control_element control_element,
			 enum hws_search_dir search_dir, enum hws_dir direction,
			 u32 interface_mask, u32 init_value, u32 num_iter,
			 enum hws_pattern pattern,
			 enum hws_edge_compare edge_comp,
			 enum hws_ddr_cs cs_type, u32 cs_num,
			 enum hws_training_ip_stat *train_status)
{
	u32 mask_dq_num_of_regs, mask_pup_num_of_regs, index_cnt, poll_cnt,
		reg_data, pup_id;
	u32 tx_burst_size;
	u32 delay_between_burst;
	u32 rd_mode;
	u32 read_data[MAX_INTERFACE_NUM];
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * Parameter sanity: out-of-range pup/interface numbers are only
	 * logged, not rejected; only a NULL train_status aborts.
	 */
	if (pup_num >= tm->num_of_bus_per_interface) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}
	if (interface_num >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n",
					  interface_num));
	}
	if (train_status == NULL) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("error param 4\n"));
		return MV_BAD_PARAM;
	}

	/* load pattern */
	if (cs_type == CS_SINGLE) {
		/* All CSs to CS0     */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      CS_ENABLE_REG, 1 << 3, 1 << 3));
		/* All CSs to CS0     */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      ODPG_DATA_CONTROL_REG,
			      (0x3 | (effective_cs << 26)), 0xc000003));
	} else {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      CS_ENABLE_REG, 0, 1 << 3));
		/*  CS select */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      ODPG_DATA_CONTROL_REG, 0x3 | cs_num << 26,
			      0x3 | 3 << 26));
	}

	/* load pattern to ODPG */
	ddr3_tip_load_pattern_to_odpg(dev_num, access_type, interface_num,
				      pattern,
				      pattern_table[pattern].start_addr);
	/* tx burst / inter-burst delay / read-mode only matter for writes */
	tx_burst_size =	(direction == OPER_WRITE) ?
		pattern_table[pattern].tx_burst_size : 0;
	delay_between_burst = (direction == OPER_WRITE) ? 2 : 0;
	rd_mode = (direction == OPER_WRITE) ? 1 : 0;
	CHECK_STATUS(ddr3_tip_configure_odpg
		     (dev_num, access_type, interface_num, direction,
		      pattern_table[pattern].num_of_phases_tx, tx_burst_size,
		      pattern_table[pattern].num_of_phases_rx,
		      delay_between_burst, rd_mode, effective_cs, STRESS_NONE,
		      DURATION_SINGLE));
	reg_data = (direction == OPER_READ) ? 0 : (0x3 << 30);
	reg_data |= (direction == OPER_READ) ? 0x60 : 0xfa;
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num,
		      ODPG_WRITE_READ_MODE_ENABLE_REG, reg_data,
		      MASK_ALL_BITS));
	/* bits 6/7 encode the requested edge-compare behavior */
	reg_data = (edge_comp == EDGE_PF || edge_comp == EDGE_FP) ? 0 : 1 << 6;
	reg_data |= (edge_comp == EDGE_PF || edge_comp == EDGE_PFP) ?
		(1 << 7) : 0;

	/* change from Pass to Fail will lock the result */
	if (pup_access_type == ACCESS_TYPE_MULTICAST)
		reg_data |= 0xe << 14;
	else
		reg_data |= pup_num << 14;

	if (edge_comp == EDGE_FP) {
		/* don't search for readl edge change, only the state */
		reg_data |= (0 << 20);
	} else if (edge_comp == EDGE_FPF) {
		reg_data |= (0 << 20);
	} else {
		reg_data |= (3 << 20);
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num,
		      ODPG_TRAINING_CONTROL_REG,
		      reg_data | (0x7 << 8) | (0x7 << 11),
		      (0x3 | (0x3 << 2) | (0x3 << 6) | (1 << 5) | (0x7 << 8) |
		       (0x7 << 11) | (0xf << 14) | (0x3 << 18) | (3 << 20))));
	/* bit 8 selects high-to-low search; [24:9] is the start value */
	reg_data = (search_dir == HWS_LOW2HIGH) ? 0 : (1 << 8);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num, ODPG_OBJ1_OPCODE_REG,
		      1 | reg_data | init_value << 9 | (1 << 25) | (1 << 26),
		      0xff | (1 << 8) | (0xffff << 9) | (1 << 25) | (1 << 26)));

	/*
	 * Write2_dunit(0x10b4, Number_iteration , [15:0])
	 * Max number of iterations
	 */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
				       ODPG_OBJ1_ITER_CNT_REG, num_iter,
				       0xffff));
	/* select which PHY register the search sweeps */
	if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
	    direction == OPER_READ) {
		/*
		 * Write2_dunit(0x10c0, 0x5f , [7:0])
		 * MC PBS Reg Address at DDR PHY
		 */
		reg_data = 0x5f +
			effective_cs * CALIBRATED_OBJECTS_REG_ADDR_OFFSET;
	} else if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
		   direction == OPER_WRITE) {
		reg_data = 0x1f +
			effective_cs * CALIBRATED_OBJECTS_REG_ADDR_OFFSET;
	} else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
		   direction == OPER_WRITE) {
		/*
		 * LOOP         0x00000001 + 4*n:
		 * where n (0-3) represents M_CS number
		 */
		/*
		 * Write2_dunit(0x10c0, 0x1 , [7:0])
		 * ADLL WR Reg Address at DDR PHY
		 */
		reg_data = 1 + effective_cs * CS_REGISTER_ADDR_OFFSET;
	} else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
		   direction == OPER_READ) {
		/* ADLL RD Reg Address at DDR PHY */
		reg_data = 3 + effective_cs * CS_REGISTER_ADDR_OFFSET;
	} else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
		   direction == OPER_WRITE) {
		/* TBD not defined in 0.5.0 requirement  */
	} else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
		   direction == OPER_READ) {
		/* TBD not defined in 0.5.0 requirement */
	}
	/*
	 * NOTE(review): for the DQS_SKEW branches reg_data is left at the
	 * value computed for the iteration-count write above — presumably
	 * harmless only because those paths are unimplemented; confirm.
	 */

	reg_data |= (0x6 << 28);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num, CALIB_OBJ_PRFA_REG,
		      reg_data | (init_value << 8),
		      0xff | (0xffff << 8) | (0xf << 24) | (u32) (0xf << 28)));

	mask_dq_num_of_regs = tm->num_of_bus_per_interface * BUS_WIDTH_IN_BITS;
	mask_pup_num_of_regs = tm->num_of_bus_per_interface;

	if (result_type == RESULT_PER_BIT) {
		/* unmask every per-DQ result register ... */
		for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_dq_reg_map[index_cnt], 0,
				      1 << 24));
		}

		/* Mask disabled buses */
		for (pup_id = 0; pup_id < tm->num_of_bus_per_interface;
		     pup_id++) {
			if (IS_ACTIVE(tm->bus_act_mask, pup_id) == 1)
				continue;

			/*
			 * NOTE(review): the start index below is always
			 * greater than the end bound for every pup_id, so
			 * this inner loop never executes and inactive
			 * buses are never actually re-masked here —
			 * confirm the intended bounds (likely
			 * [pup_id * 8, (pup_id + 1) * 8)) upstream.
			 */
			for (index_cnt = (mask_dq_num_of_regs - pup_id * 8);
			     index_cnt <
				     (mask_dq_num_of_regs - (pup_id + 1) * 8);
			     index_cnt++) {
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, access_type,
					      interface_num,
					      mask_results_dq_reg_map
					      [index_cnt], (1 << 24), 1 << 24));
			}
		}

		/* ... and mask every per-byte result register */
		for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_pup_reg_map[index_cnt],
				      (1 << 24), 1 << 24));
		}
	} else if (result_type == RESULT_PER_BYTE) {
		/* write to adll */
		for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_pup_reg_map[index_cnt], 0,
				      1 << 24));
		}
		for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_dq_reg_map[index_cnt],
				      (1 << 24), (1 << 24)));
		}
	}

	/* Start Training Trigger */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
				       ODPG_TRAINING_TRIGGER_REG, 1, 1));
	/* wait for all RFU tests to finish (or timeout) */
	/* WA for 16 bit mode, more investigation needed */
	mdelay(1);

	/* Training "Done ?" */
	for (index_cnt = 0; index_cnt < MAX_INTERFACE_NUM; index_cnt++) {
		if (IS_ACTIVE(tm->if_act_mask, index_cnt) == 0)
			continue;

		if (interface_mask & (1 << index_cnt)) {
			/* need to check results for this Dunit */
			for (poll_cnt = 0; poll_cnt < max_polling_for_done;
			     poll_cnt++) {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      index_cnt,
					      ODPG_TRAINING_STATUS_REG,
					      &reg_data, MASK_ALL_BITS));
				if ((reg_data & 0x2) != 0) {
					/*done */
					train_status[index_cnt] =
						HWS_TRAINING_IP_STATUS_SUCCESS;
					break;
				}
			}

			if (poll_cnt == max_polling_for_done) {
				train_status[index_cnt] =
					HWS_TRAINING_IP_STATUS_TIMEOUT;
			}
		}
		/* Be sure that ODPG done */
		CHECK_STATUS(is_odpg_access_done(dev_num, index_cnt));
	}

	/* Write ODPG done in Dunit */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_STATUS_DONE_REG, 0, 0x1));

	/* wait for all Dunit tests to finish (or timeout) */
	/* Training "Done ?" */
	/* Training "Pass ?" */
	for (index_cnt = 0; index_cnt < MAX_INTERFACE_NUM; index_cnt++) {
		if (IS_ACTIVE(tm->if_act_mask, index_cnt) == 0)
			continue;

		if (interface_mask & (1 << index_cnt)) {
			/* need to check results for this Dunit */
			for (poll_cnt = 0; poll_cnt < max_polling_for_done;
			     poll_cnt++) {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      index_cnt,
					      ODPG_TRAINING_TRIGGER_REG,
					      read_data, MASK_ALL_BITS));
				reg_data = read_data[index_cnt];
				if ((reg_data & 0x2) != 0) {
					/* done */
					/* bit 2 set means the test failed */
					if ((reg_data & 0x4) == 0) {
						train_status[index_cnt] =
							HWS_TRAINING_IP_STATUS_SUCCESS;
					} else {
						train_status[index_cnt] =
							HWS_TRAINING_IP_STATUS_FAIL;
					}
					break;
				}
			}

			if (poll_cnt == max_polling_for_done) {
				train_status[index_cnt] =
					HWS_TRAINING_IP_STATUS_TIMEOUT;
			}
		}
	}

	/* restore ODPG data control to default */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));

	return MV_OK;
}
473 
474 /*
475  * Load expected Pattern to ODPG
476  */
477 int ddr3_tip_load_pattern_to_odpg(u32 dev_num, enum hws_access_type access_type,
478 				  u32 if_id, enum hws_pattern pattern,
479 				  u32 load_addr)
480 {
481 	u32 pattern_length_cnt = 0;
482 	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
483 
484 	for (pattern_length_cnt = 0;
485 	     pattern_length_cnt < pattern_table[pattern].pattern_len;
486 	     pattern_length_cnt++) {
487 		CHECK_STATUS(ddr3_tip_if_write
488 			     (dev_num, access_type, if_id,
489 			      ODPG_PATTERN_DATA_LOW_REG,
490 			      pattern_table_get_word(dev_num, pattern,
491 						     (u8) (pattern_length_cnt *
492 							   2)), MASK_ALL_BITS));
493 		CHECK_STATUS(ddr3_tip_if_write
494 			     (dev_num, access_type, if_id,
495 			      ODPG_PATTERN_DATA_HI_REG,
496 			      pattern_table_get_word(dev_num, pattern,
497 						     (u8) (pattern_length_cnt *
498 							   2 + 1)),
499 			      MASK_ALL_BITS));
500 		CHECK_STATUS(ddr3_tip_if_write
501 			     (dev_num, access_type, if_id,
502 			      ODPG_PATTERN_ADDR_REG, pattern_length_cnt,
503 			      MASK_ALL_BITS));
504 	}
505 
506 	CHECK_STATUS(ddr3_tip_if_write
507 		     (dev_num, access_type, if_id,
508 		      ODPG_PATTERN_ADDR_OFFSET_REG, load_addr, MASK_ALL_BITS));
509 
510 	return MV_OK;
511 }
512 
513 /*
514  * Configure ODPG
515  */
516 int ddr3_tip_configure_odpg(u32 dev_num, enum hws_access_type access_type,
517 			    u32 if_id, enum hws_dir direction, u32 tx_phases,
518 			    u32 tx_burst_size, u32 rx_phases,
519 			    u32 delay_between_burst, u32 rd_mode, u32 cs_num,
520 			    u32 addr_stress_jump, u32 single_pattern)
521 {
522 	u32 data_value = 0;
523 	int ret;
524 
525 	data_value = ((single_pattern << 2) | (tx_phases << 5) |
526 		      (tx_burst_size << 11) | (delay_between_burst << 15) |
527 		      (rx_phases << 21) | (rd_mode << 25) | (cs_num << 26) |
528 		      (addr_stress_jump << 29));
529 	ret = ddr3_tip_if_write(dev_num, access_type, if_id,
530 				ODPG_DATA_CONTROL_REG, data_value, 0xaffffffc);
531 	if (ret != MV_OK)
532 		return ret;
533 
534 	return MV_OK;
535 }
536 
537 int ddr3_tip_process_result(u32 *ar_result, enum hws_edge e_edge,
538 			    enum hws_edge_search e_edge_search,
539 			    u32 *edge_result)
540 {
541 	u32 i, res;
542 	int tap_val, max_val = -10000, min_val = 10000;
543 	int lock_success = 1;
544 
545 	for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
546 		res = GET_LOCK_RESULT(ar_result[i]);
547 		if (res == 0) {
548 			lock_success = 0;
549 			break;
550 		}
551 		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
552 					 ("lock failed for bit %d\n", i));
553 	}
554 
555 	if (lock_success == 1) {
556 		for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
557 			tap_val = GET_TAP_RESULT(ar_result[i], e_edge);
558 			if (tap_val > max_val)
559 				max_val = tap_val;
560 			if (tap_val < min_val)
561 				min_val = tap_val;
562 			if (e_edge_search == TRAINING_EDGE_MAX)
563 				*edge_result = (u32) max_val;
564 			else
565 				*edge_result = (u32) min_val;
566 
567 			DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
568 						 ("i %d ar_result[i] 0x%x tap_val %d max_val %d min_val %d Edge_result %d\n",
569 						  i, ar_result[i], tap_val,
570 						  max_val, min_val,
571 						  *edge_result));
572 		}
573 	} else {
574 		return MV_FAIL;
575 	}
576 
577 	return MV_OK;
578 }
579 
580 /*
581  * Read training search result
582  */
/*
 * Read training search result
 *
 * For every selected pup (unicast: pup_num only; multicast: all pups)
 * reads the per-bit or per-byte result registers of the given
 * interface, optionally adds a constant tap offset (cons_tap), and
 * stores the values into this interface's slice of the global result
 * buffer. With is_read_from_db == 1 no hardware is read; *load_res is
 * instead pointed at the already-stored results.
 *
 * Returns MV_OK, or MV_FAIL when load_res is required but NULL or the
 * result buffer cannot be resolved.
 */
int ddr3_tip_read_training_result(u32 dev_num, u32 if_id,
				  enum hws_access_type pup_access_type,
				  u32 pup_num, u32 bit_num,
				  enum hws_search_dir search,
				  enum hws_dir direction,
				  enum hws_training_result result_type,
				  enum hws_training_load_op operation,
				  u32 cs_num_type, u32 **load_res,
				  int is_read_from_db, u8 cons_tap,
				  int is_check_result_validity)
{
	u32 reg_offset, pup_cnt, start_pup, end_pup, start_reg, end_reg;
	u32 *interface_train_res = NULL;
	u16 *reg_addr = NULL;
	u32 read_data[MAX_INTERFACE_NUM];
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * Agreed assumption: all CS mask contain same number of bits,
	 * i.e. in multi CS, the number of CS per memory is the same for
	 * all pups
	 */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, CS_ENABLE_REG,
		      (cs_num_type == 0) ? 1 << 3 : 0, (1 << 3)));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id,
		      ODPG_DATA_CONTROL_REG, (cs_num_type << 26), (3 << 26)));
	DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
				 ("Read_from_d_b %d cs_type %d oper %d result_type %d direction %d search %d pup_num %d if_id %d pup_access_type %d\n",
				  is_read_from_db, cs_num_type, operation,
				  result_type, direction, search, pup_num,
				  if_id, pup_access_type));

	/* database mode requires a destination pointer for the results */
	if ((load_res == NULL) && (is_read_from_db == 1)) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("ddr3_tip_read_training_result load_res = NULL"));
		return MV_FAIL;
	}
	/* range violations are logged only, not rejected */
	if (pup_num >= tm->num_of_bus_per_interface) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}
	if (if_id >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n", if_id));
	}
	/* choose the register table matching the result granularity */
	if (result_type == RESULT_PER_BIT)
		reg_addr = mask_results_dq_reg_map;
	else
		reg_addr = mask_results_pup_reg_map;
	if (pup_access_type == ACCESS_TYPE_UNICAST) {
		start_pup = pup_num;
		end_pup = pup_num;
	} else {		/*pup_access_type == ACCESS_TYPE_MULTICAST) */

		start_pup = 0;
		end_pup = tm->num_of_bus_per_interface - 1;
	}

	for (pup_cnt = start_pup; pup_cnt <= end_pup; pup_cnt++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup_cnt);
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("if_id %d start_pup %d end_pup %d pup_cnt %d\n",
			 if_id, start_pup, end_pup, pup_cnt));
		/* compute the register-index window for this pup */
		if (result_type == RESULT_PER_BIT) {
			if (bit_num == ALL_BITS_PER_PUP) {
				start_reg = pup_cnt * BUS_WIDTH_IN_BITS;
				end_reg = (pup_cnt + 1) * BUS_WIDTH_IN_BITS - 1;
			} else {
				start_reg =
					pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
				end_reg = pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
			}
		} else {
			start_reg = pup_cnt;
			end_reg = pup_cnt;
		}

		interface_train_res =
			ddr3_tip_get_buf_ptr(dev_num, search, result_type,
					     if_id);
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("start_reg %d end_reg %d interface %p\n",
			 start_reg, end_reg, interface_train_res));
		if (interface_train_res == NULL) {
			DEBUG_TRAINING_IP_ENGINE(
				DEBUG_LEVEL_ERROR,
				("interface_train_res is NULL\n"));
			return MV_FAIL;
		}

		for (reg_offset = start_reg; reg_offset <= end_reg;
		     reg_offset++) {
			if (operation == TRAINING_LOAD_OPERATION_UNLOAD) {
				if (is_read_from_db == 0) {
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      reg_addr[reg_offset],
						      read_data,
						      MASK_ALL_BITS));
					if (is_check_result_validity == 1) {
						/*
						 * bit 25 clear = result not
						 * locked; substitute a
						 * locked-flag + center value
						 */
						if ((read_data[if_id] &
						     0x02000000) == 0) {
							interface_train_res
								[reg_offset] =
								0x02000000 +
								64 + cons_tap;
						} else {
							interface_train_res
								[reg_offset] =
								read_data
								[if_id] +
								cons_tap;
						}
					} else {
						interface_train_res[reg_offset]
							= read_data[if_id] +
							cons_tap;
					}
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("reg_offset %d value 0x%x addr %p\n",
						  reg_offset,
						  interface_train_res
						  [reg_offset],
						  &interface_train_res
						  [reg_offset]));
				} else {
					/* hand back stored results */
					*load_res =
						&interface_train_res[start_reg];
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("*load_res %p\n", *load_res));
				}
			} else {
				DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
							 ("not supported\n"));
			}
		}
	}

	return MV_OK;
}
733 
734 /*
735  * Load all pattern to memory using ODPG
736  */
737 int ddr3_tip_load_all_pattern_to_mem(u32 dev_num)
738 {
739 	u32 pattern = 0, if_id;
740 	struct hws_topology_map *tm = ddr3_get_topology_map();
741 
742 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
743 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
744 		training_result[training_stage][if_id] = TEST_SUCCESS;
745 	}
746 
747 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
748 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
749 		/* enable single cs */
750 		CHECK_STATUS(ddr3_tip_if_write
751 			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
752 			      CS_ENABLE_REG, (1 << 3), (1 << 3)));
753 	}
754 
755 	for (pattern = 0; pattern < PATTERN_LIMIT; pattern++)
756 		ddr3_tip_load_pattern_to_mem(dev_num, pattern);
757 
758 	return MV_OK;
759 }
760 
761 /*
762  * Wait till ODPG access is ready
763  */
764 int is_odpg_access_done(u32 dev_num, u32 if_id)
765 {
766 	u32 poll_cnt = 0, data_value;
767 	u32 read_data[MAX_INTERFACE_NUM];
768 
769 	for (poll_cnt = 0; poll_cnt < MAX_POLLING_ITERATIONS; poll_cnt++) {
770 		CHECK_STATUS(ddr3_tip_if_read
771 			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
772 			      ODPG_BIST_DONE, read_data, MASK_ALL_BITS));
773 		data_value = read_data[if_id];
774 		if (((data_value >> ODPG_BIST_DONE_BIT_OFFS) & 0x1) ==
775 		    ODPG_BIST_DONE_BIT_VALUE) {
776 				data_value = data_value & 0xfffffffe;
777 				CHECK_STATUS(ddr3_tip_if_write
778 					     (dev_num, ACCESS_TYPE_UNICAST,
779 					      if_id, ODPG_BIST_DONE, data_value,
780 					      MASK_ALL_BITS));
781 				break;
782 			}
783 	}
784 
785 	if (poll_cnt >= MAX_POLLING_ITERATIONS) {
786 		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
787 					 ("Bist Activate: poll failure 2\n"));
788 		return MV_FAIL;
789 	}
790 
791 	return MV_OK;
792 }
793 
794 /*
795  * Load specific pattern to memory using ODPG
796  */
/*
 * Load specific pattern to memory using ODPG
 *
 * Configures the ODPG for a CS0 write of the given pattern, loads the
 * pattern words into the ODPG, enables ODT, triggers the BIST write,
 * waits for completion on all active interfaces, then restores the
 * ODPG and ODT to their default state.
 */
int ddr3_tip_load_pattern_to_mem(u32 dev_num, enum hws_pattern pattern)
{
	u32 reg_data, if_id;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/* load pattern to memory */
	/*
	 * Write Tx mode, CS0, phases, Tx burst size, delay between burst,
	 * rx pattern phases
	 */
	reg_data =
		0x1 | (pattern_table[pattern].num_of_phases_tx << 5) |
		(pattern_table[pattern].tx_burst_size << 11) |
		(pattern_table[pattern].delay_between_bursts << 15) |
		(pattern_table[pattern].num_of_phases_rx << 21) | (0x1 << 25) |
		(effective_cs << 26);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, reg_data, MASK_ALL_BITS));
	/* ODPG Write enable from BIST */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, (0x1 | (effective_cs << 26)),
		      0xc000003));
	/* disable error injection */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_WRITE_DATA_ERROR_REG, 0, 0x1));
	/* load pattern to ODPG */
	ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
				      PARAM_NOT_CARE, pattern,
				      pattern_table[pattern].start_addr);

	/* enable ODT on every active interface (register 0x1498) */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		if (IS_ACTIVE(tm->if_act_mask, if_id) == 0)
			continue;

		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1498,
			      0x3, 0xf));
	}

	/* kick off the ODPG/BIST write to DRAM */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_ENABLE_REG, 0x1 << ODPG_ENABLE_OFFS,
		      (0x1 << ODPG_ENABLE_OFFS)));

	mdelay(1);

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		CHECK_STATUS(is_odpg_access_done(dev_num, if_id));
	}

	/* Disable ODPG and stop write to memory */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, (0x1 << 30), (u32) (0x3 << 30)));

	/* return to default */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));

	/* Disable odt0 for CS0 training - need to adjust for multy CS */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x1498,
		      0x0, 0xf));

	/* temporary added */
	mdelay(1);

	return MV_OK;
}
872 
873 /*
874  * Load specific pattern to memory using CPU
875  */
876 int ddr3_tip_load_pattern_to_mem_by_cpu(u32 dev_num, enum hws_pattern pattern,
877 					u32 offset)
878 {
879 	/* eranba - TBD */
880 	return MV_OK;
881 }
882 
883 /*
884  * Training search routine
885  */
886 int ddr3_tip_ip_training_wrapper_int(u32 dev_num,
887 				     enum hws_access_type access_type,
888 				     u32 if_id,
889 				     enum hws_access_type pup_access_type,
890 				     u32 pup_num, u32 bit_num,
891 				     enum hws_training_result result_type,
892 				     enum hws_control_element control_element,
893 				     enum hws_search_dir search_dir,
894 				     enum hws_dir direction,
895 				     u32 interface_mask, u32 init_value_l2h,
896 				     u32 init_value_h2l, u32 num_iter,
897 				     enum hws_pattern pattern,
898 				     enum hws_edge_compare edge_comp,
899 				     enum hws_ddr_cs train_cs_type, u32 cs_num,
900 				     enum hws_training_ip_stat *train_status)
901 {
902 	u32 interface_num = 0, start_if, end_if, init_value_used;
903 	enum hws_search_dir search_dir_id, start_search, end_search;
904 	enum hws_edge_compare edge_comp_used;
905 	u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
906 	struct hws_topology_map *tm = ddr3_get_topology_map();
907 
908 	if (train_status == NULL) {
909 		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
910 					 ("train_status is NULL\n"));
911 		return MV_FAIL;
912 	}
913 
914 	if ((train_cs_type > CS_NON_SINGLE) ||
915 	    (edge_comp >= EDGE_PFP) ||
916 	    (pattern >= PATTERN_LIMIT) ||
917 	    (direction > OPER_WRITE_AND_READ) ||
918 	    (search_dir > HWS_HIGH2LOW) ||
919 	    (control_element > HWS_CONTROL_ELEMENT_DQS_SKEW) ||
920 	    (result_type > RESULT_PER_BYTE) ||
921 	    (pup_num >= tm->num_of_bus_per_interface) ||
922 	    (pup_access_type > ACCESS_TYPE_MULTICAST) ||
923 	    (if_id > 11) || (access_type > ACCESS_TYPE_MULTICAST)) {
924 		DEBUG_TRAINING_IP_ENGINE(
925 			DEBUG_LEVEL_ERROR,
926 			("wrong parameter train_cs_type %d edge_comp %d pattern %d direction %d search_dir %d control_element %d result_type %d pup_num %d pup_access_type %d if_id %d access_type %d\n",
927 			 train_cs_type, edge_comp, pattern, direction,
928 			 search_dir, control_element, result_type, pup_num,
929 			 pup_access_type, if_id, access_type));
930 		return MV_FAIL;
931 	}
932 
933 	if (edge_comp == EDGE_FPF) {
934 		start_search = HWS_LOW2HIGH;
935 		end_search = HWS_HIGH2LOW;
936 		edge_comp_used = EDGE_FP;
937 	} else {
938 		start_search = search_dir;
939 		end_search = search_dir;
940 		edge_comp_used = edge_comp;
941 	}
942 
943 	for (search_dir_id = start_search; search_dir_id <= end_search;
944 	     search_dir_id++) {
945 		init_value_used = (search_dir_id == HWS_LOW2HIGH) ?
946 			init_value_l2h : init_value_h2l;
947 		DEBUG_TRAINING_IP_ENGINE(
948 			DEBUG_LEVEL_TRACE,
949 			("dev_num %d, access_type %d, if_id %d, pup_access_type %d,pup_num %d, result_type %d, control_element %d search_dir_id %d, direction %d, interface_mask %d,init_value_used %d, num_iter %d, pattern %d, edge_comp_used %d, train_cs_type %d, cs_num %d\n",
950 			 dev_num, access_type, if_id, pup_access_type, pup_num,
951 			 result_type, control_element, search_dir_id,
952 			 direction, interface_mask, init_value_used, num_iter,
953 			 pattern, edge_comp_used, train_cs_type, cs_num));
954 
955 		ddr3_tip_ip_training(dev_num, access_type, if_id,
956 				     pup_access_type, pup_num, result_type,
957 				     control_element, search_dir_id, direction,
958 				     interface_mask, init_value_used, num_iter,
959 				     pattern, edge_comp_used, train_cs_type,
960 				     cs_num, train_status);
961 		if (access_type == ACCESS_TYPE_MULTICAST) {
962 			start_if = 0;
963 			end_if = MAX_INTERFACE_NUM - 1;
964 		} else {
965 			start_if = if_id;
966 			end_if = if_id;
967 		}
968 
969 		for (interface_num = start_if; interface_num <= end_if;
970 		     interface_num++) {
971 			VALIDATE_ACTIVE(tm->if_act_mask, interface_num);
972 			cs_num = 0;
973 			CHECK_STATUS(ddr3_tip_read_training_result
974 				     (dev_num, interface_num, pup_access_type,
975 				      pup_num, bit_num, search_dir_id,
976 				      direction, result_type,
977 				      TRAINING_LOAD_OPERATION_UNLOAD,
978 				      train_cs_type, NULL, 0, cons_tap,
979 				      0));
980 		}
981 	}
982 
983 	return MV_OK;
984 }
985 
986 /*
987  * Training search & read result routine
988  */
989 int ddr3_tip_ip_training_wrapper(u32 dev_num, enum hws_access_type access_type,
990 				 u32 if_id,
991 				 enum hws_access_type pup_access_type,
992 				 u32 pup_num,
993 				 enum hws_training_result result_type,
994 				 enum hws_control_element control_element,
995 				 enum hws_search_dir search_dir,
996 				 enum hws_dir direction, u32 interface_mask,
997 				 u32 init_value_l2h, u32 init_value_h2l,
998 				 u32 num_iter, enum hws_pattern pattern,
999 				 enum hws_edge_compare edge_comp,
1000 				 enum hws_ddr_cs train_cs_type, u32 cs_num,
1001 				 enum hws_training_ip_stat *train_status)
1002 {
1003 	u8 e1, e2;
1004 	u32 interface_cnt, bit_id, start_if, end_if, bit_end = 0;
1005 	u32 *result[HWS_SEARCH_DIR_LIMIT] = { 0 };
1006 	u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
1007 	u8 bit_bit_mask[MAX_BUS_NUM] = { 0 }, bit_bit_mask_active = 0;
1008 	u8 pup_id;
1009 	struct hws_topology_map *tm = ddr3_get_topology_map();
1010 
1011 	if (pup_num >= tm->num_of_bus_per_interface) {
1012 		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
1013 					 ("pup_num %d not valid\n", pup_num));
1014 	}
1015 
1016 	if (if_id >= MAX_INTERFACE_NUM) {
1017 		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
1018 					 ("if_id %d not valid\n", if_id));
1019 	}
1020 
1021 	CHECK_STATUS(ddr3_tip_ip_training_wrapper_int
1022 		     (dev_num, access_type, if_id, pup_access_type, pup_num,
1023 		      ALL_BITS_PER_PUP, result_type, control_element,
1024 		      search_dir, direction, interface_mask, init_value_l2h,
1025 		      init_value_h2l, num_iter, pattern, edge_comp,
1026 		      train_cs_type, cs_num, train_status));
1027 
1028 	if (access_type == ACCESS_TYPE_MULTICAST) {
1029 		start_if = 0;
1030 		end_if = MAX_INTERFACE_NUM - 1;
1031 	} else {
1032 		start_if = if_id;
1033 		end_if = if_id;
1034 	}
1035 
1036 	for (interface_cnt = start_if; interface_cnt <= end_if;
1037 	     interface_cnt++) {
1038 		VALIDATE_ACTIVE(tm->if_act_mask, interface_cnt);
1039 		for (pup_id = 0;
1040 		     pup_id <= (tm->num_of_bus_per_interface - 1); pup_id++) {
1041 			VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);
1042 			if (result_type == RESULT_PER_BIT)
1043 				bit_end = BUS_WIDTH_IN_BITS - 1;
1044 			else
1045 				bit_end = 0;
1046 
1047 			bit_bit_mask[pup_id] = 0;
1048 			for (bit_id = 0; bit_id <= bit_end; bit_id++) {
1049 				enum hws_search_dir search_dir_id;
1050 				for (search_dir_id = HWS_LOW2HIGH;
1051 				     search_dir_id <= HWS_HIGH2LOW;
1052 				     search_dir_id++) {
1053 					CHECK_STATUS
1054 						(ddr3_tip_read_training_result
1055 						 (dev_num, interface_cnt,
1056 						  ACCESS_TYPE_UNICAST, pup_id,
1057 						  bit_id, search_dir_id,
1058 						  direction, result_type,
1059 						  TRAINING_LOAD_OPERATION_UNLOAD,
1060 						  CS_SINGLE,
1061 						  &result[search_dir_id],
1062 						  1, 0, 0));
1063 				}
1064 				e1 = GET_TAP_RESULT(result[HWS_LOW2HIGH][0],
1065 						    EDGE_1);
1066 				e2 = GET_TAP_RESULT(result[HWS_HIGH2LOW][0],
1067 						    EDGE_1);
1068 				DEBUG_TRAINING_IP_ENGINE(
1069 					DEBUG_LEVEL_INFO,
1070 					("wrapper if_id %d pup_id %d bit %d l2h 0x%x (e1 0x%x) h2l 0x%x (e2 0x%x)\n",
1071 					 interface_cnt, pup_id, bit_id,
1072 					 result[HWS_LOW2HIGH][0], e1,
1073 					 result[HWS_HIGH2LOW][0], e2));
1074 				/* TBD validate is valid only for tx */
1075 				if (VALIDATE_TRAINING_LIMIT(e1, e2) == 1 &&
1076 				    GET_LOCK_RESULT(result[HWS_LOW2HIGH][0]) &&
1077 				    GET_LOCK_RESULT(result[HWS_LOW2HIGH][0])) {
1078 					/* Mark problem bits */
1079 					bit_bit_mask[pup_id] |= 1 << bit_id;
1080 					bit_bit_mask_active = 1;
1081 				}
1082 			}	/* For all bits */
1083 		}		/* For all PUPs */
1084 
1085 		/* Fix problem bits */
1086 		if (bit_bit_mask_active != 0) {
1087 			u32 *l2h_if_train_res = NULL;
1088 			u32 *h2l_if_train_res = NULL;
1089 			l2h_if_train_res =
1090 				ddr3_tip_get_buf_ptr(dev_num, HWS_LOW2HIGH,
1091 						     result_type,
1092 						     interface_cnt);
1093 			h2l_if_train_res =
1094 				ddr3_tip_get_buf_ptr(dev_num, HWS_HIGH2LOW,
1095 						     result_type,
1096 						     interface_cnt);
1097 
1098 			ddr3_tip_ip_training(dev_num, ACCESS_TYPE_UNICAST,
1099 					     interface_cnt,
1100 					     ACCESS_TYPE_MULTICAST,
1101 					     PARAM_NOT_CARE, result_type,
1102 					     control_element, HWS_LOW2HIGH,
1103 					     direction, interface_mask,
1104 					     num_iter / 2, num_iter / 2,
1105 					     pattern, EDGE_FP, train_cs_type,
1106 					     cs_num, train_status);
1107 
1108 			for (pup_id = 0;
1109 			     pup_id <= (tm->num_of_bus_per_interface - 1);
1110 			     pup_id++) {
1111 				VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);
1112 
1113 				if (bit_bit_mask[pup_id] == 0)
1114 					continue;
1115 
1116 				for (bit_id = 0; bit_id <= bit_end; bit_id++) {
1117 					if ((bit_bit_mask[pup_id] &
1118 					     (1 << bit_id)) == 0)
1119 						continue;
1120 					CHECK_STATUS
1121 						(ddr3_tip_read_training_result
1122 						 (dev_num, interface_cnt,
1123 						  ACCESS_TYPE_UNICAST, pup_id,
1124 						  bit_id, HWS_LOW2HIGH,
1125 						  direction,
1126 						  result_type,
1127 						  TRAINING_LOAD_OPERATION_UNLOAD,
1128 						  CS_SINGLE, &l2h_if_train_res,
1129 						  0, 0, 1));
1130 				}
1131 			}
1132 
1133 			ddr3_tip_ip_training(dev_num, ACCESS_TYPE_UNICAST,
1134 					     interface_cnt,
1135 					     ACCESS_TYPE_MULTICAST,
1136 					     PARAM_NOT_CARE, result_type,
1137 					     control_element, HWS_HIGH2LOW,
1138 					     direction, interface_mask,
1139 					     num_iter / 2, num_iter / 2,
1140 					     pattern, EDGE_FP, train_cs_type,
1141 					     cs_num, train_status);
1142 
1143 			for (pup_id = 0;
1144 			     pup_id <= (tm->num_of_bus_per_interface - 1);
1145 			     pup_id++) {
1146 				VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);
1147 
1148 				if (bit_bit_mask[pup_id] == 0)
1149 					continue;
1150 
1151 				for (bit_id = 0; bit_id <= bit_end; bit_id++) {
1152 					if ((bit_bit_mask[pup_id] &
1153 					     (1 << bit_id)) == 0)
1154 						continue;
1155 					CHECK_STATUS
1156 						(ddr3_tip_read_training_result
1157 						 (dev_num, interface_cnt,
1158 						  ACCESS_TYPE_UNICAST, pup_id,
1159 						  bit_id, HWS_HIGH2LOW, direction,
1160 						  result_type,
1161 						  TRAINING_LOAD_OPERATION_UNLOAD,
1162 						  CS_SINGLE, &h2l_if_train_res,
1163 						  0, cons_tap, 1));
1164 				}
1165 			}
1166 		}		/* if bit_bit_mask_active */
1167 	}			/* For all Interfacess */
1168 
1169 	return MV_OK;
1170 }
1171 
1172 /*
1173  * Load phy values
1174  */
1175 int ddr3_tip_load_phy_values(int b_load)
1176 {
1177 	u32 bus_cnt = 0, if_id, dev_num = 0;
1178 	struct hws_topology_map *tm = ddr3_get_topology_map();
1179 
1180 	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1181 		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1182 		for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
1183 		     bus_cnt++) {
1184 			VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
1185 			if (b_load == 1) {
1186 				CHECK_STATUS(ddr3_tip_bus_read
1187 					     (dev_num, if_id,
1188 					      ACCESS_TYPE_UNICAST, bus_cnt,
1189 					      DDR_PHY_DATA,
1190 					      WRITE_CENTRALIZATION_PHY_REG +
1191 					      (effective_cs *
1192 					       CS_REGISTER_ADDR_OFFSET),
1193 					      &phy_reg_bk[if_id][bus_cnt]
1194 					      [0]));
1195 				CHECK_STATUS(ddr3_tip_bus_read
1196 					     (dev_num, if_id,
1197 					      ACCESS_TYPE_UNICAST, bus_cnt,
1198 					      DDR_PHY_DATA,
1199 					      RL_PHY_REG +
1200 					      (effective_cs *
1201 					       CS_REGISTER_ADDR_OFFSET),
1202 					      &phy_reg_bk[if_id][bus_cnt]
1203 					      [1]));
1204 				CHECK_STATUS(ddr3_tip_bus_read
1205 					     (dev_num, if_id,
1206 					      ACCESS_TYPE_UNICAST, bus_cnt,
1207 					      DDR_PHY_DATA,
1208 					      READ_CENTRALIZATION_PHY_REG +
1209 					      (effective_cs *
1210 					       CS_REGISTER_ADDR_OFFSET),
1211 					      &phy_reg_bk[if_id][bus_cnt]
1212 					      [2]));
1213 			} else {
1214 				CHECK_STATUS(ddr3_tip_bus_write
1215 					     (dev_num, ACCESS_TYPE_UNICAST,
1216 					      if_id, ACCESS_TYPE_UNICAST,
1217 					      bus_cnt, DDR_PHY_DATA,
1218 					      WRITE_CENTRALIZATION_PHY_REG +
1219 					      (effective_cs *
1220 					       CS_REGISTER_ADDR_OFFSET),
1221 					      phy_reg_bk[if_id][bus_cnt]
1222 					      [0]));
1223 				CHECK_STATUS(ddr3_tip_bus_write
1224 					     (dev_num, ACCESS_TYPE_UNICAST,
1225 					      if_id, ACCESS_TYPE_UNICAST,
1226 					      bus_cnt, DDR_PHY_DATA,
1227 					      RL_PHY_REG +
1228 					      (effective_cs *
1229 					       CS_REGISTER_ADDR_OFFSET),
1230 					      phy_reg_bk[if_id][bus_cnt]
1231 					      [1]));
1232 				CHECK_STATUS(ddr3_tip_bus_write
1233 					     (dev_num, ACCESS_TYPE_UNICAST,
1234 					      if_id, ACCESS_TYPE_UNICAST,
1235 					      bus_cnt, DDR_PHY_DATA,
1236 					      READ_CENTRALIZATION_PHY_REG +
1237 					      (effective_cs *
1238 					       CS_REGISTER_ADDR_OFFSET),
1239 					      phy_reg_bk[if_id][bus_cnt]
1240 					      [2]));
1241 			}
1242 		}
1243 	}
1244 
1245 	return MV_OK;
1246 }
1247 
1248 int ddr3_tip_training_ip_test(u32 dev_num, enum hws_training_result result_type,
1249 			      enum hws_search_dir search_dir,
1250 			      enum hws_dir direction,
1251 			      enum hws_edge_compare edge,
1252 			      u32 init_val1, u32 init_val2,
1253 			      u32 num_of_iterations,
1254 			      u32 start_pattern, u32 end_pattern)
1255 {
1256 	u32 pattern, if_id, pup_id;
1257 	enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
1258 	u32 *res = NULL;
1259 	u32 search_state = 0;
1260 	struct hws_topology_map *tm = ddr3_get_topology_map();
1261 
1262 	ddr3_tip_load_phy_values(1);
1263 
1264 	for (pattern = start_pattern; pattern <= end_pattern; pattern++) {
1265 		for (search_state = 0; search_state < HWS_SEARCH_DIR_LIMIT;
1266 		     search_state++) {
1267 			ddr3_tip_ip_training_wrapper(dev_num,
1268 						     ACCESS_TYPE_MULTICAST, 0,
1269 						     ACCESS_TYPE_MULTICAST, 0,
1270 						     result_type,
1271 						     HWS_CONTROL_ELEMENT_ADLL,
1272 						     search_dir, direction,
1273 						     0xfff, init_val1,
1274 						     init_val2,
1275 						     num_of_iterations, pattern,
1276 						     edge, CS_SINGLE,
1277 						     PARAM_NOT_CARE,
1278 						     train_status);
1279 
1280 			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
1281 			     if_id++) {
1282 				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1283 				for (pup_id = 0; pup_id <
1284 					     tm->num_of_bus_per_interface;
1285 				     pup_id++) {
1286 					VALIDATE_ACTIVE(tm->bus_act_mask,
1287 							pup_id);
1288 					CHECK_STATUS
1289 						(ddr3_tip_read_training_result
1290 						 (dev_num, if_id,
1291 						  ACCESS_TYPE_UNICAST, pup_id,
1292 						  ALL_BITS_PER_PUP,
1293 						  search_state,
1294 						  direction, result_type,
1295 						  TRAINING_LOAD_OPERATION_UNLOAD,
1296 						  CS_SINGLE, &res, 1, 0,
1297 						  0));
1298 					if (result_type == RESULT_PER_BYTE) {
1299 						DEBUG_TRAINING_IP_ENGINE
1300 							(DEBUG_LEVEL_INFO,
1301 							 ("search_state %d if_id %d pup_id %d 0x%x\n",
1302 							  search_state, if_id,
1303 							  pup_id, res[0]));
1304 					} else {
1305 						DEBUG_TRAINING_IP_ENGINE
1306 							(DEBUG_LEVEL_INFO,
1307 							 ("search_state %d if_id %d pup_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
1308 							  search_state, if_id,
1309 							  pup_id, res[0],
1310 							  res[1], res[2],
1311 							  res[3], res[4],
1312 							  res[5], res[6],
1313 							  res[7]));
1314 					}
1315 				}
1316 			}	/* interface */
1317 		}		/* search */
1318 	}			/* pattern */
1319 
1320 	ddr3_tip_load_phy_values(0);
1321 
1322 	return MV_OK;
1323 }
1324 
1325 struct pattern_info *ddr3_tip_get_pattern_table()
1326 {
1327 	struct hws_topology_map *tm = ddr3_get_topology_map();
1328 
1329 	if (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) == 0)
1330 		return pattern_table_32;
1331 	else
1332 		return pattern_table_16;
1333 }
1334 
1335 u16 *ddr3_tip_get_mask_results_dq_reg()
1336 {
1337 	struct hws_topology_map *tm = ddr3_get_topology_map();
1338 
1339 	if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
1340 		return mask_results_dq_reg_map_pup3_ecc;
1341 	else
1342 		return mask_results_dq_reg_map;
1343 }
1344 
1345 u16 *ddr3_tip_get_mask_results_pup_reg_map()
1346 {
1347 	struct hws_topology_map *tm = ddr3_get_topology_map();
1348 
1349 	if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
1350 		return mask_results_pup_reg_map_pup3_ecc;
1351 	else
1352 		return mask_results_pup_reg_map;
1353 }
1354