// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include "ddr3_init.h"
#include "mv_ddr_regs.h"

#define VREF_INITIAL_STEP	3
#define VREF_SECOND_STEP	1
#define VREF_MAX_INDEX		7
#define MAX_VALUE		(1024 - 1)
#define MIN_VALUE		(-MAX_VALUE)
#define GET_RD_SAMPLE_DELAY(data, cs)	((data >> rd_sample_mask[cs]) & 0xf)

u32 ca_delay;
int ddr3_tip_centr_skip_min_win_check = 0;
u8 current_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u8 last_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u16 current_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u16 last_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u8 lim_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u8 interface_state[MAX_INTERFACE_NUM];
u8 vref_window_size[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 vref_window_size_th = 12;

static u8 pup_st[MAX_BUS_NUM][MAX_INTERFACE_NUM];

static u32 rd_sample_mask[] = {
	0,
	8,
	16,
	24
};
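
/*
 * Note: despite its name, rd_sample_mask[] holds the bit offset (shift) of
 * each chip-select's read-sample-delay field inside RD_DATA_SMPL_DLYS_REG;
 * GET_RD_SAMPLE_DELAY() shifts by that offset and keeps the low 4 bits.
 * Illustrative example (hypothetical register value): for data = 0x0d0b0907
 * the extracted delays are 0x7, 0x9, 0xb and 0xd for CS0..CS3.
 */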

#define VREF_STEP_1		0
#define VREF_STEP_2		1
#define VREF_CONVERGE		2
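
/*
 * Per-pup Vref search states (tracked in pup_st[][] above):
 *   VREF_STEP_1   - coarse upward search in steps of VREF_INITIAL_STEP
 *   VREF_STEP_2   - fine backward search in steps of VREF_SECOND_STEP
 *   VREF_CONVERGE - search finished for this pup
 */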

/*
 * ODT additional timing
 */
int ddr3_tip_write_additional_odt_setting(u32 dev_num, u32 if_id)
{
	u32 cs_num = 0, max_read_sample = 0, min_read_sample = 0x1f;
	u32 data_read[MAX_INTERFACE_NUM] = { 0 };
	u32 read_sample[MAX_CS_NUM];
	u32 val;
	u32 pup_index;
	int max_phase = MIN_VALUE, current_phase;
	enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	unsigned int max_cs = mv_ddr_cs_num_get();

	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       DUNIT_ODT_CTRL_REG,
				       0 << 8, 0x3 << 8));
	CHECK_STATUS(ddr3_tip_if_read(dev_num, access_type, if_id,
				      RD_DATA_SMPL_DLYS_REG,
				      data_read, MASK_ALL_BITS));
	val = data_read[if_id];

	for (cs_num = 0; cs_num < max_cs; cs_num++) {
		read_sample[cs_num] = GET_RD_SAMPLE_DELAY(val, cs_num);

		/* find maximum of read_samples */
		if (read_sample[cs_num] >= max_read_sample) {
			if (read_sample[cs_num] == max_read_sample)
				max_phase = MIN_VALUE;
			else
				max_read_sample = read_sample[cs_num];

			for (pup_index = 0;
			     pup_index < octets_per_if_num;
			     pup_index++) {
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, pup_index,
					      DDR_PHY_DATA,
					      RL_PHY_REG(cs_num),
					      &val));

				current_phase = ((int)val & 0xe0) >> 6;
				if (current_phase >= max_phase)
					max_phase = current_phase;
			}
		}

		/* find minimum */
		if (read_sample[cs_num] < min_read_sample)
			min_read_sample = read_sample[cs_num];
	}

	min_read_sample = min_read_sample - 1;
	max_read_sample = max_read_sample + 4 + (max_phase + 1) / 2 + 1;
	if (min_read_sample >= 0xf)
		min_read_sample = 0xf;
	if (max_read_sample >= 0x1f)
		max_read_sample = 0x1f;
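
	/*
	 * Illustrative arithmetic (hypothetical delays): if the per-CS read
	 * sample delays are 13 and 15 and the largest read-leveling phase is
	 * 3, then min_read_sample = 13 - 1 = 12 and
	 * max_read_sample = 15 + 4 + (3 + 1) / 2 + 1 = 22. The writes below
	 * place (min_read_sample - 1) in bits [15:12] and max_read_sample in
	 * bits [20:16] of DDR_ODT_TIMING_LOW_REG, presumably the ODT
	 * assertion window around the read burst.
	 */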

	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       DDR_ODT_TIMING_LOW_REG,
				       ((min_read_sample - 1) << 12),
				       0xf << 12));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       DDR_ODT_TIMING_LOW_REG,
				       (max_read_sample << 16),
				       0x1f << 16));

	return MV_OK;
}

int get_valid_win_rx(u32 dev_num, u32 if_id, u8 res[4])
{
	u32 reg_pup = RESULT_PHY_REG;
	u32 reg_data;
	u32 cs_num;
	int i;

	cs_num = 0;

	/* TBD */
	reg_pup += cs_num;

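	/*
	 * Read the per-octet RX result from RESULT_PHY_REG of each of the
	 * four data PHYs (the octet count is hard-coded here); the 5-bit
	 * field at RESULT_PHY_RX_OFFS appears to hold the measured RX valid
	 * window size.
	 */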
	for (i = 0; i < 4; i++) {
		CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
					       ACCESS_TYPE_UNICAST, i,
					       DDR_PHY_DATA, reg_pup,
					       &reg_data));
		res[i] = (reg_data >> RESULT_PHY_RX_OFFS) & 0x1f;
	}

	return 0;
}

/*
 * This algorithm deals with the vertical optimum, from the voltage point of
 * view, of the sample signal.
 * The voltage sample point can improve the eye/window size of the bit and
 * the pup.
 * The problem is that it is tuned the same for all DQs, so there is no
 * PBS-like code here; it is more like centralization.
 * But because we don't have training SM support, we do a somewhat smarter
 * search to save time.
 */
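/*
 * Rough flow of the search below (sketch derived from the code):
 *  1. Run special RX and RX centralization with the dedicated VREF pattern.
 *  2. For every non-converged pup, measure the RX valid window, averaged
 *     over three centralization runs and scaled by 1000.
 *  3. VREF_STEP_1: while the window does not shrink by more than 200 units
 *     (about 0.2 tap), raise the Vref index by VREF_INITIAL_STEP up to
 *     VREF_MAX_INDEX.
 *  4. Once it shrinks, switch to VREF_STEP_2 and walk back down by
 *     VREF_SECOND_STEP.
 *  5. When the window shrinks again, or the search limit (lim_vref) is
 *     reached, program the best Vref found and mark the pup VREF_CONVERGE.
 *  6. The outer loop runs at most 10 iterations.
 */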
int ddr3_tip_vref(u32 dev_num)
{
	/*
	 * The Vref register has a non-linear order. Need to check what it
	 * will be in future projects.
	 */
	u32 vref_map[8] = {
		1, 2, 3, 4, 5, 6, 7, 0
	};
	/* State and parameter definitions */
	u32 initial_step = VREF_INITIAL_STEP;
	/* may need to be assigned a negative value (TBD) */
	u32 second_step = VREF_SECOND_STEP;
	u32 algo_run_flag = 0, cur_vref = 0;
	u32 while_count = 0;
	u32 pup = 0, if_id = 0, num_pup = 0, rep = 0;
	u32 val = 0;
	u32 reg_addr = 0xa8;
	u32 copy_start_pattern, copy_end_pattern;
	enum hws_result *flow_result = ddr3_tip_get_result_ptr(training_stage);
	u8 res[4];
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	CHECK_STATUS(ddr3_tip_special_rx(dev_num));

	/* save start/end pattern */
	copy_start_pattern = start_pattern;
	copy_end_pattern = end_pattern;

	/* set vref as centralization pattern */
	start_pattern = PATTERN_VREF;
	end_pattern = PATTERN_VREF;

	/* init params */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup < octets_per_if_num; pup++) {
			current_vref[pup][if_id] = 0;
			last_vref[pup][if_id] = 0;
			lim_vref[pup][if_id] = 0;
			current_valid_window[pup][if_id] = 0;
			last_valid_window[pup][if_id] = 0;
			if (vref_window_size[if_id][pup] > vref_window_size_th) {
				pup_st[pup][if_id] = VREF_CONVERGE;
				DEBUG_TRAINING_HW_ALG(
					DEBUG_LEVEL_INFO,
					("VREF config, IF[ %d ]pup[ %d ] - Vref tune not required (%d)\n",
					 if_id, pup, __LINE__));
			} else {
				pup_st[pup][if_id] = VREF_STEP_1;
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, &val));
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      pup, DDR_PHY_DATA, reg_addr,
					      (val & (~0xf)) | vref_map[0]));
				DEBUG_TRAINING_HW_ALG(
					DEBUG_LEVEL_INFO,
					("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
					 if_id, pup,
					 (val & (~0xf)) | vref_map[0],
					 __LINE__));
			}
		}
		interface_state[if_id] = 0;
	}

	/* TODO: Set number of active interfaces */
	num_pup = octets_per_if_num * MAX_INTERFACE_NUM;

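	/*
	 * algo_run_flag counts pups that reached VREF_CONVERGE; num_pup is
	 * the pup count over all MAX_INTERFACE_NUM interfaces (including
	 * inactive ones), so in practice the while_count < 10 cap is what
	 * bounds the loop below.
	 */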
	while ((algo_run_flag <= num_pup) && (while_count < 10)) {
		while_count++;
		for (rep = 1; rep < 4; rep++) {
			ddr3_tip_centr_skip_min_win_check = 1;
			ddr3_tip_centralization_rx(dev_num);
			ddr3_tip_centr_skip_min_win_check = 0;

			/* read valid window results only for non-converged pups */
			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				if (interface_state[if_id] != 4) {
					get_valid_win_rx(dev_num, if_id, res);
					for (pup = 0; pup < octets_per_if_num; pup++) {
						VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
						if (pup_st[pup][if_id] == VREF_CONVERGE)
							continue;

						current_valid_window[pup][if_id] =
							(current_valid_window[pup][if_id] *
							 (rep - 1) + 1000 * res[pup]) / rep;
					}
				}
			}
		}
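
		/*
		 * current_valid_window[][] now holds the RX window averaged
		 * over the three centralization runs above, scaled by 1000
		 * (e.g. a stable 25-tap window yields 25000).
		 */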

		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			DEBUG_TRAINING_HW_ALG(
				DEBUG_LEVEL_TRACE,
				("current_valid_window: IF[ %d ] - ", if_id));

			for (pup = 0; pup < octets_per_if_num; pup++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
						      ("%d ",
						       current_valid_window[pup][if_id]));
			}
			DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE, ("\n"));
		}

		/* compare results and respond as a function of state */
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			for (pup = 0; pup < octets_per_if_num; pup++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
						      ("I/F[ %d ], pup[ %d ] STATE #%d (%d)\n",
						       if_id, pup,
						       pup_st[pup][if_id], __LINE__));

				if (pup_st[pup][if_id] == VREF_CONVERGE)
					continue;

				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
						      ("I/F[ %d ], pup[ %d ] CHECK progress - Current %d Last %d, limit VREF %d (%d)\n",
						       if_id, pup,
						       current_valid_window[pup][if_id],
						       last_valid_window[pup][if_id],
						       lim_vref[pup][if_id], __LINE__));

				/*
				 * Allow some margin for the solution
				 * resolution of +/- 1 ADLL tap.
				 */
				if (current_valid_window[pup][if_id] + 200 >=
				    (last_valid_window[pup][if_id])) {
					if (pup_st[pup][if_id] == VREF_STEP_1) {
						/*
						 * We stay in the same state
						 * and step; just update the
						 * window size (take the max)
						 * and the Vref.
						 */
						if (current_vref[pup][if_id] == VREF_MAX_INDEX) {
							/*
							 * If we stepped to the
							 * end without converging
							 * to some particular
							 * better Vref value,
							 * mark the pup as
							 * converged and step
							 * back to the nominal
							 * Vref.
							 */
							pup_st[pup][if_id] = VREF_CONVERGE;
							algo_run_flag++;
							interface_state[if_id]++;
							DEBUG_TRAINING_HW_ALG
								(DEBUG_LEVEL_TRACE,
								 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
								  if_id, pup,
								  current_vref[pup][if_id],
								  __LINE__));
						} else {
							/* continue to update the Vref index */
							current_vref[pup][if_id] =
								((current_vref[pup][if_id] +
								  initial_step) > VREF_MAX_INDEX) ?
								VREF_MAX_INDEX :
								(current_vref[pup][if_id] +
								 initial_step);
							if (current_vref[pup][if_id] ==
							    VREF_MAX_INDEX)
								pup_st[pup][if_id] = VREF_STEP_2;
							lim_vref[pup][if_id] =
								last_vref[pup][if_id] =
								current_vref[pup][if_id];
						}

						last_valid_window[pup][if_id] =
							GET_MAX(current_valid_window[pup][if_id],
								last_valid_window[pup][if_id]);

						/* update the Vref for the next stage */
						cur_vref = current_vref[pup][if_id];
						CHECK_STATUS(ddr3_tip_bus_read
							     (dev_num, if_id,
							      ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      &val));
						CHECK_STATUS(ddr3_tip_bus_write
							     (dev_num, ACCESS_TYPE_UNICAST,
							      if_id, ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      (val & (~0xf)) |
							      vref_map[cur_vref]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[cur_vref],
							  __LINE__));
					} else if (pup_st[pup][if_id] == VREF_STEP_2) {
						/*
						 * We keep searching back with
						 * the same step size.
						 */
						last_valid_window[pup][if_id] =
							GET_MAX(current_valid_window[pup][if_id],
								last_valid_window[pup][if_id]);
						last_vref[pup][if_id] =
							current_vref[pup][if_id];

						/* we finished the whole search space */
						if ((current_vref[pup][if_id] -
						     second_step) == lim_vref[pup][if_id]) {
							/*
							 * If we stepped to the
							 * end without converging
							 * to some particular
							 * better Vref value,
							 * mark the pup as
							 * converged and step
							 * back to the nominal
							 * Vref.
							 */
							pup_st[pup][if_id] = VREF_CONVERGE;
							algo_run_flag++;

							interface_state[if_id]++;

							current_vref[pup][if_id] =
								(current_vref[pup][if_id] -
								 second_step);

							DEBUG_TRAINING_HW_ALG
								(DEBUG_LEVEL_TRACE,
								 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
								  if_id, pup,
								  current_vref[pup][if_id],
								  __LINE__));
						} else if (current_vref[pup][if_id] ==
							   lim_vref[pup][if_id]) {
							/* we finished the whole search space */
							/*
							 * If we stepped to the
							 * end without converging
							 * to some particular
							 * better Vref value,
							 * mark the pup as
							 * converged and step
							 * back to the nominal
							 * Vref.
							 */
							pup_st[pup][if_id] = VREF_CONVERGE;

							algo_run_flag++;
							interface_state[if_id]++;
							DEBUG_TRAINING_HW_ALG
								(DEBUG_LEVEL_TRACE,
								 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
								  if_id, pup,
								  current_vref[pup][if_id],
								  __LINE__));
						} else {
							current_vref[pup][if_id] =
								current_vref[pup][if_id] -
								second_step;
						}

						/* update the Vref for the next stage */
						cur_vref = current_vref[pup][if_id];
						CHECK_STATUS(ddr3_tip_bus_read
							     (dev_num, if_id,
							      ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      &val));
						CHECK_STATUS(ddr3_tip_bus_write
							     (dev_num, ACCESS_TYPE_UNICAST,
							      if_id, ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      (val & (~0xf)) |
							      vref_map[cur_vref]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[cur_vref],
							  __LINE__));
					}
				} else {
					/* we change state and change step */
					if (pup_st[pup][if_id] == VREF_STEP_1) {
						pup_st[pup][if_id] = VREF_STEP_2;
						lim_vref[pup][if_id] =
							current_vref[pup][if_id] -
							initial_step;
						last_valid_window[pup][if_id] =
							current_valid_window[pup][if_id];
						last_vref[pup][if_id] =
							current_vref[pup][if_id];
						current_vref[pup][if_id] =
							last_vref[pup][if_id] -
							second_step;

						/* update the Vref for the next stage */
						CHECK_STATUS(ddr3_tip_bus_read
							     (dev_num, if_id,
							      ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      &val));
						CHECK_STATUS(ddr3_tip_bus_write
							     (dev_num, ACCESS_TYPE_UNICAST,
							      if_id, ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      (val & (~0xf)) |
							      vref_map[current_vref[pup][if_id]]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[current_vref[pup][if_id]],
							  __LINE__));

					} else if (pup_st[pup][if_id] == VREF_STEP_2) {
						/*
						 * The last search hit the max
						 * point; set that value and
						 * exit.
						 */
						CHECK_STATUS(ddr3_tip_bus_read
							     (dev_num, if_id,
							      ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      &val));
						CHECK_STATUS(ddr3_tip_bus_write
							     (dev_num, ACCESS_TYPE_UNICAST,
							      if_id, ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      (val & (~0xf)) |
							      vref_map[last_vref[pup][if_id]]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[last_vref[pup][if_id]],
							  __LINE__));
						pup_st[pup][if_id] = VREF_CONVERGE;
						algo_run_flag++;
						interface_state[if_id]++;
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
							  if_id, pup,
							  current_vref[pup][if_id],
							  __LINE__));
					}
				}
			}
		}
	}

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup < octets_per_if_num; pup++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id,
				      ACCESS_TYPE_UNICAST, pup,
				      DDR_PHY_DATA, reg_addr, &val));
			DEBUG_TRAINING_HW_ALG(
				DEBUG_LEVEL_INFO,
				("FINAL values: I/F[ %d ], pup[ %d ] - Vref = %X (%d)\n",
				 if_id, pup, val, __LINE__));
		}
	}

	flow_result[if_id] = TEST_SUCCESS;

	/* restore start/end pattern */
	start_pattern = copy_start_pattern;
	end_pattern = copy_end_pattern;

	return 0;
}

/*
 * CK/CA Delay
 */
int ddr3_tip_cmd_addr_init_delay(u32 dev_num, u32 adll_tap)
{
	u32 if_id = 0;
	u32 ck_num_adll_tap = 0, ca_num_adll_tap = 0, data = 0;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * ck_delay delays the clock signal only
	 * (to overcome timing issues between CK and the command/address
	 * signals).
	 */
	/*
	 * ca_delay delays the entire command & address signals
	 * (including the clock signal, to overcome a DGL error on the clock
	 * versus the DQS).
	 */

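	/*
	 * Illustrative example (hypothetical numbers): with adll_tap = 49 ps,
	 * ck_delay = 160 ps and ca_delay = 0, ck_num_adll_tap = 160 / 49 = 3
	 * and ca_num_adll_tap = 0, so data = 0x3. The write below places the
	 * CK taps in bits [5:0] and the CA taps in bits [15:10] of PHY
	 * control register 0x0 for all pups of the interface.
	 */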
	/* Calc ADLL Tap */
	if (ck_delay == PARAM_UNDEFINED)
		DEBUG_TRAINING_HW_ALG(
			DEBUG_LEVEL_ERROR,
			("ERROR: ck_delay is not initialized!\n"));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		/* Calc delay in ps into ADLL taps */
		ck_num_adll_tap = ck_delay / adll_tap;
		ca_num_adll_tap = ca_delay / adll_tap;

		data = (ck_num_adll_tap & 0x3f) +
		       ((ca_num_adll_tap & 0x3f) << 10);

		/*
		 * Set the CK/CA ADLL tap numbers for all pups of this
		 * interface.
		 */
		DEBUG_TRAINING_HW_ALG(
			DEBUG_LEVEL_TRACE,
			("ck_num_adll_tap %d ca_num_adll_tap %d adll_tap %d\n",
			 ck_num_adll_tap, ca_num_adll_tap, adll_tap));

		CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST,
						if_id, ACCESS_TYPE_MULTICAST,
						PARAM_NOT_CARE, DDR_PHY_CONTROL,
						0x0, data));
	}

	return MV_OK;
}