1 /* 2 * Copyright (C) Marvell International Ltd. and its affiliates 3 * 4 * SPDX-License-Identifier: GPL-2.0 5 */ 6 7 #include <common.h> 8 #include <spl.h> 9 #include <asm/io.h> 10 #include <asm/arch/cpu.h> 11 #include <asm/arch/soc.h> 12 13 #include "ddr3_init.h" 14 15 #define VREF_INITIAL_STEP 3 16 #define VREF_SECOND_STEP 1 17 #define VREF_MAX_INDEX 7 18 #define MAX_VALUE (1024 - 1) 19 #define MIN_VALUE (-MAX_VALUE) 20 #define GET_RD_SAMPLE_DELAY(data, cs) ((data >> rd_sample_mask[cs]) & 0x1f) 21 22 u32 ck_delay = (u32)-1, ck_delay_16 = (u32)-1; 23 u32 ca_delay; 24 int ddr3_tip_centr_skip_min_win_check = 0; 25 u8 current_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM]; 26 u8 last_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM]; 27 u16 current_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM]; 28 u16 last_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM]; 29 u8 lim_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM]; 30 u8 interface_state[MAX_INTERFACE_NUM]; 31 u8 vref_window_size[MAX_INTERFACE_NUM][MAX_BUS_NUM]; 32 u8 vref_window_size_th = 12; 33 34 static u8 pup_st[MAX_BUS_NUM][MAX_INTERFACE_NUM]; 35 36 static u32 rd_sample_mask[] = { 37 0, 38 8, 39 16, 40 24 41 }; 42 43 #define VREF_STEP_1 0 44 #define VREF_STEP_2 1 45 #define VREF_CONVERGE 2 46 47 /* 48 * ODT additional timing 49 */ 50 int ddr3_tip_write_additional_odt_setting(u32 dev_num, u32 if_id) 51 { 52 u32 cs_num = 0, max_cs = 0, max_read_sample = 0, min_read_sample = 0x1f; 53 u32 data_read[MAX_INTERFACE_NUM] = { 0 }; 54 u32 read_sample[MAX_CS_NUM]; 55 u32 val; 56 u32 pup_index; 57 int max_phase = MIN_VALUE, current_phase; 58 enum hws_access_type access_type = ACCESS_TYPE_UNICAST; 59 struct hws_topology_map *tm = ddr3_get_topology_map(); 60 61 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 62 DUNIT_ODT_CONTROL_REG, 63 0 << 8, 0x3 << 8)); 64 CHECK_STATUS(ddr3_tip_if_read(dev_num, access_type, if_id, 65 READ_DATA_SAMPLE_DELAY, 66 data_read, MASK_ALL_BITS)); 67 val = data_read[if_id]; 68 69 max_cs = hws_ddr3_tip_max_cs_get(); 70 71 for 
(cs_num = 0; cs_num < max_cs; cs_num++) { 72 read_sample[cs_num] = GET_RD_SAMPLE_DELAY(val, cs_num); 73 74 /* find maximum of read_samples */ 75 if (read_sample[cs_num] >= max_read_sample) { 76 if (read_sample[cs_num] == max_read_sample) { 77 /* search for max phase */; 78 } else { 79 max_read_sample = read_sample[cs_num]; 80 max_phase = MIN_VALUE; 81 } 82 83 for (pup_index = 0; 84 pup_index < tm->num_of_bus_per_interface; 85 pup_index++) { 86 CHECK_STATUS(ddr3_tip_bus_read 87 (dev_num, if_id, 88 ACCESS_TYPE_UNICAST, pup_index, 89 DDR_PHY_DATA, 90 RL_PHY_REG + CS_REG_VALUE(cs_num), 91 &val)); 92 93 current_phase = ((int)val & 0xe0) >> 6; 94 if (current_phase >= max_phase) 95 max_phase = current_phase; 96 } 97 } 98 99 /* find minimum */ 100 if (read_sample[cs_num] < min_read_sample) 101 min_read_sample = read_sample[cs_num]; 102 } 103 104 if (min_read_sample <= tm->interface_params[if_id].cas_l) { 105 min_read_sample = (int)tm->interface_params[if_id].cas_l; 106 } 107 108 min_read_sample = min_read_sample - 1; 109 max_read_sample = max_read_sample + 4 + (max_phase + 1) / 2 + 1; 110 if (max_read_sample >= 0x1f) 111 max_read_sample = 0x1f; 112 113 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 114 ODT_TIMING_LOW, 115 ((min_read_sample - 1) << 12), 116 0xf << 12)); 117 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 118 ODT_TIMING_LOW, 119 (max_read_sample << 16), 120 0x1f << 16)); 121 122 return MV_OK; 123 } 124 125 int get_valid_win_rx(u32 dev_num, u32 if_id, u8 res[4]) 126 { 127 u32 reg_pup = RESULT_DB_PHY_REG_ADDR; 128 u32 reg_data; 129 u32 cs_num; 130 int i; 131 132 cs_num = 0; 133 134 /* TBD */ 135 reg_pup += cs_num; 136 137 for (i = 0; i < 4; i++) { 138 CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id, 139 ACCESS_TYPE_UNICAST, i, 140 DDR_PHY_DATA, reg_pup, 141 ®_data)); 142 res[i] = (reg_data >> RESULT_DB_PHY_REG_RX_OFFSET) & 0x1f; 143 } 144 145 return 0; 146 } 147 148 /* 149 * This algorithm deals with the vertical optimum from Voltage 
 * point of view
 * of the sample signal.
 * Voltage sample point can improve the Eye / window size of the bit and the
 * pup.
 * The problem is that it is tune for all DQ the same so there isn't any
 * PBS like code.
 * It is more like centralization.
 * But because we don't have The training SM support we do it a bit more
 * smart search to save time.
 */
int ddr3_tip_vref(u32 dev_num)
{
	/*
	 * The Vref register have non linear order. Need to check what will be
	 * in future projects.
	 */
	u32 vref_map[8] = {
		1, 2, 3, 4, 5, 6, 7, 0
	};
	/* State and parameter definitions */
	u32 initial_step = VREF_INITIAL_STEP;
	/* need to be assign with minus ????? */
	u32 second_step = VREF_SECOND_STEP;
	/* algo_run_flag counts pups that reached VREF_CONVERGE */
	u32 algo_run_flag = 0, currrent_vref = 0;
	u32 while_count = 0;
	u32 pup = 0, if_id = 0, num_pup = 0, rep = 0;
	u32 val = 0;
	/* PHY data register holding the Vref field in its low nibble */
	u32 reg_addr = 0xa8;
	u32 copy_start_pattern, copy_end_pattern;
	enum hws_result *flow_result = ddr3_tip_get_result_ptr(training_stage);
	u8 res[4];
	struct hws_topology_map *tm = ddr3_get_topology_map();

	CHECK_STATUS(ddr3_tip_special_rx(dev_num));

	/* save start/end pattern */
	copy_start_pattern = start_pattern;
	copy_end_pattern = end_pattern;

	/* set vref as centralization pattern */
	start_pattern = PATTERN_VREF;
	end_pattern = PATTERN_VREF;

	/*
	 * Init params: reset the per-pup search state; pups whose window
	 * already exceeds the threshold skip the search entirely, the
	 * rest start at vref_map[0] in state VREF_STEP_1.
	 */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0;
		     pup < tm->num_of_bus_per_interface; pup++) {
			current_vref[pup][if_id] = 0;
			last_vref[pup][if_id] = 0;
			lim_vref[pup][if_id] = 0;
			current_valid_window[pup][if_id] = 0;
			last_valid_window[pup][if_id] = 0;
			if (vref_window_size[if_id][pup] >
			    vref_window_size_th) {
				/* window already good enough - no tuning */
				pup_st[pup][if_id] = VREF_CONVERGE;
				DEBUG_TRAINING_HW_ALG(
					DEBUG_LEVEL_INFO,
					("VREF config, IF[ %d ]pup[ %d ] - Vref tune not requered (%d)\n",
					 if_id, pup, __LINE__));
			} else {
				pup_st[pup][if_id] = VREF_STEP_1;
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, &val));
				/* program initial Vref into the low nibble */
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      pup, DDR_PHY_DATA, reg_addr,
					      (val & (~0xf)) | vref_map[0]));
				DEBUG_TRAINING_HW_ALG(
					DEBUG_LEVEL_INFO,
					("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
					 if_id, pup,
					 (val & (~0xf)) | vref_map[0],
					 __LINE__));
			}
		}
		interface_state[if_id] = 0;
	}

	/* TODO: Set number of active interfaces */
	num_pup = tm->num_of_bus_per_interface * MAX_INTERFACE_NUM;

	/*
	 * Main search loop: run RX centralization, average the measured
	 * windows, then advance each pup's state machine.  Bounded to 10
	 * iterations.  NOTE(review): the condition uses bitwise '&' instead
	 * of '&&'; both operands are 0/1 so the result is the same here.
	 */
	while ((algo_run_flag <= num_pup) & (while_count < 10)) {
		while_count++;
		/* 3 repetitions, running-averaged below (rep = 1..3) */
		for (rep = 1; rep < 4; rep++) {
			ddr3_tip_centr_skip_min_win_check = 1;
			ddr3_tip_centralization_rx(dev_num);
			ddr3_tip_centr_skip_min_win_check = 0;

			/* Read Valid window results only for non converge pups */
			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				/*
				 * NOTE(review): '4' looks like a hard-coded
				 * "all pups converged" count - confirm it
				 * matches num_of_bus_per_interface.
				 */
				if (interface_state[if_id] != 4) {
					get_valid_win_rx(dev_num, if_id, res);
					for (pup = 0;
					     pup < tm->num_of_bus_per_interface;
					     pup++) {
						VALIDATE_ACTIVE
							(tm->bus_act_mask, pup);
						if (pup_st[pup]
						    [if_id] ==
						    VREF_CONVERGE)
							continue;

						/* running average, scaled x1000 */
						current_valid_window[pup]
							[if_id] =
							(current_valid_window[pup]
							 [if_id] * (rep - 1) +
							 1000 * res[pup]) / rep;
					}
				}
			}
		}

		/* Trace the averaged windows per interface */
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			DEBUG_TRAINING_HW_ALG(
				DEBUG_LEVEL_TRACE,
				("current_valid_window: IF[ %d ] - ", if_id));

			for (pup = 0;
			     pup < tm->num_of_bus_per_interface; pup++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, pup);
				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
						      ("%d ",
						       current_valid_window
						       [pup][if_id]));
			}
			DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE, ("\n"));
		}

		/* Compare results and respond as function of state */
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			for (pup = 0;
			     pup < tm->num_of_bus_per_interface; pup++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, pup);
				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
						      ("I/F[ %d ], pup[ %d ] STATE #%d (%d)\n",
						       if_id, pup,
						       pup_st[pup]
						       [if_id], __LINE__));

				if (pup_st[pup][if_id] == VREF_CONVERGE)
					continue;

				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
						      ("I/F[ %d ], pup[ %d ] CHECK progress - Current %d Last %d, limit VREF %d (%d)\n",
						       if_id, pup,
						       current_valid_window[pup]
						       [if_id],
						       last_valid_window[pup]
						       [if_id], lim_vref[pup]
						       [if_id], __LINE__));

				/*
				 * Window did not shrink (within 200, i.e.
				 * +/- 1 ADLL tap of solution resolution):
				 * keep stepping in the current direction.
				 */
				if (current_valid_window[pup][if_id] + 200 >=
				    (last_valid_window[pup][if_id])) {
					if (pup_st[pup][if_id] == VREF_STEP_1) {
						/*
						 * We stay in the same state and
						 * step just update the window
						 * size (take the max) and Vref
						 */
						if (current_vref[pup]
						    [if_id] == VREF_MAX_INDEX) {
							/*
							 * If we step to the end
							 * and didn't converge
							 * to some particular
							 * better Vref value
							 * define the pup as
							 * converge and step
							 * back to nominal
							 * Vref.
							 */
							pup_st[pup]
								[if_id] =
								VREF_CONVERGE;
							algo_run_flag++;
							interface_state
								[if_id]++;
							DEBUG_TRAINING_HW_ALG
								(DEBUG_LEVEL_TRACE,
								 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
								  if_id, pup,
								  current_vref[pup]
								  [if_id],
								  __LINE__));
						} else {
							/* continue to update the Vref index */
							current_vref[pup]
								[if_id] =
								((current_vref[pup]
								  [if_id] +
								  initial_step) >
								 VREF_MAX_INDEX) ?
								VREF_MAX_INDEX
								: (current_vref[pup]
								   [if_id] +
								   initial_step);
							if (current_vref[pup]
							    [if_id] ==
							    VREF_MAX_INDEX) {
								pup_st[pup]
									[if_id]
									=
									VREF_STEP_2;
							}
							lim_vref[pup]
								[if_id] =
								last_vref[pup]
								[if_id] =
								current_vref[pup]
								[if_id];
						}

						last_valid_window[pup]
							[if_id] =
							GET_MAX(current_valid_window
								[pup][if_id],
								last_valid_window
								[pup]
								[if_id]);

						/* update the Vref for next stage */
						currrent_vref =
							current_vref[pup]
							[if_id];
						CHECK_STATUS
							(ddr3_tip_bus_read
							 (dev_num, if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  &val));
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  (val & (~0xf)) |
							  vref_map[currrent_vref]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[currrent_vref],
							  __LINE__));
					} else if (pup_st[pup][if_id]
						   == VREF_STEP_2) {
						/*
						 * We keep on search back with
						 * the same step size.
						 */
						last_valid_window[pup]
							[if_id] =
							GET_MAX(current_valid_window
								[pup][if_id],
								last_valid_window
								[pup]
								[if_id]);
						last_vref[pup][if_id] =
							current_vref[pup]
							[if_id];

						/* we finish all search space */
						if ((current_vref[pup]
						     [if_id] - second_step) == lim_vref[pup][if_id]) {
							/*
							 * If we step to the end
							 * and didn't converge
							 * to some particular
							 * better Vref value
							 * define the pup as
							 * converge and step
							 * back to nominal
							 * Vref.
							 */
							pup_st[pup]
								[if_id] =
								VREF_CONVERGE;
							algo_run_flag++;

							interface_state
								[if_id]++;

							current_vref[pup]
								[if_id] =
								(current_vref[pup]
								 [if_id] -
								 second_step);

							DEBUG_TRAINING_HW_ALG
								(DEBUG_LEVEL_TRACE,
								 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
								  if_id, pup,
								  current_vref[pup]
								  [if_id],
								  __LINE__));
						} else
							/* we finish all search space */
							if (current_vref[pup]
							    [if_id] ==
							    lim_vref[pup]
							    [if_id]) {
								/*
								 * If we step to the end
								 * and didn't converge
								 * to some particular
								 * better Vref value
								 * define the pup as
								 * converge and step
								 * back to nominal
								 * Vref.
								 */
								pup_st[pup]
									[if_id] =
									VREF_CONVERGE;

								algo_run_flag++;
								interface_state
									[if_id]++;
								DEBUG_TRAINING_HW_ALG
									(DEBUG_LEVEL_TRACE,
									 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
									  if_id, pup,
									  current_vref[pup]
									  [if_id],
									  __LINE__));
							} else {
								/* step back and keep searching */
								current_vref[pup]
									[if_id] =
									current_vref[pup]
									[if_id] -
									second_step;
							}

						/* Update the Vref for next stage */
						currrent_vref =
							current_vref[pup]
							[if_id];
						CHECK_STATUS
							(ddr3_tip_bus_read
							 (dev_num, if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  &val));
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  (val & (~0xf)) |
							  vref_map[currrent_vref]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[currrent_vref],
							  __LINE__));
					}
				} else {
					/* Window shrank: we change state and change step */
					if (pup_st[pup][if_id] == VREF_STEP_1) {
						pup_st[pup][if_id] =
							VREF_STEP_2;
						/* search backwards, bounded by where STEP_1 started */
						lim_vref[pup][if_id] =
							current_vref[pup]
							[if_id] - initial_step;
						last_valid_window[pup]
							[if_id] =
							current_valid_window[pup]
							[if_id];
						last_vref[pup][if_id] =
							current_vref[pup]
							[if_id];
						current_vref[pup][if_id] =
							last_vref[pup][if_id] -
							second_step;

						/* Update the Vref for next stage */
						CHECK_STATUS
							(ddr3_tip_bus_read
							 (dev_num, if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  &val));
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  (val & (~0xf)) |
							  vref_map[current_vref[pup]
								   [if_id]]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[current_vref[pup]
								   [if_id]],
							  __LINE__));

					} else if (pup_st[pup][if_id] == VREF_STEP_2) {
						/*
						 * The last search was the max
						 * point set value and exit
						 */
						CHECK_STATUS
							(ddr3_tip_bus_read
							 (dev_num, if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  &val));
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  (val & (~0xf)) |
							  vref_map[last_vref[pup]
								   [if_id]]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[last_vref[pup]
								   [if_id]],
							  __LINE__));
						pup_st[pup][if_id] =
							VREF_CONVERGE;
						algo_run_flag++;
						interface_state[if_id]++;
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
							  if_id, pup,
							  current_vref[pup]
							  [if_id], __LINE__));
					}
				}
			}
		}
	}

	/* Dump the final per-pup Vref values */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0;
		     pup < tm->num_of_bus_per_interface; pup++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, pup);
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id,
				      ACCESS_TYPE_UNICAST, pup,
				      DDR_PHY_DATA, reg_addr, &val));
			DEBUG_TRAINING_HW_ALG(
				DEBUG_LEVEL_INFO,
				("FINAL values: I/F[ %d ], pup[ %d ] - Vref = %X (%d)\n",
				 if_id, pup, val, __LINE__));
		}
	}

	/*
	 * NOTE(review): if_id equals MAX_INTERFACE_NUM here (loop above ran
	 * to completion), so this writes one past the last interface slot —
	 * verify flow_result's dimension / intended index.
	 */
	flow_result[if_id] = TEST_SUCCESS;

	/* restore start/end pattern */
	start_pattern = copy_start_pattern;
	end_pattern = copy_end_pattern;

	return 0;
}

/*
 * CK/CA Delay
 *
 * Convert the global ck_delay/ck_delay_16 and ca_delay values (ps) into
 * ADLL taps of size @adll_tap and program them into the CK/CA ADLLs of
 * every active interface of device @dev_num.  Returns MV_OK.
 */
int ddr3_tip_cmd_addr_init_delay(u32 dev_num, u32 adll_tap)
{
	u32 if_id = 0;
	u32 ck_num_adll_tap = 0, ca_num_adll_tap = 0, data = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * ck_delay / ck_delay_16 delay the clock signal only
	 * (to overcome timing issues between CK & command/address signals).
	 */
	/*
	 * ca_delay delays the entire command & address bus
	 * (including the clock signal, to overcome DGL error on the clock
	 * versus the DQS).
	 */

	/*
	 * Calc ADLL Tap.  (u32)-1 marks "not set"; the comparison relies on
	 * the implicit conversion of -1 to UINT_MAX.  Only a warning is
	 * printed - the loop below still runs with the bogus values.
	 */
	if ((ck_delay == -1) || (ck_delay_16 == -1)) {
		DEBUG_TRAINING_HW_ALG(
			DEBUG_LEVEL_ERROR,
			("ERROR: One of ck_delay values not initialized!!!\n"));
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		/* Calc delay ps in ADLL tap */
		if (tm->interface_params[if_id].bus_width ==
		    BUS_WIDTH_16)
			ck_num_adll_tap = ck_delay_16 / adll_tap;
		else
			ck_num_adll_tap = ck_delay / adll_tap;

		ca_num_adll_tap = ca_delay / adll_tap;
		/* CK taps in bits [5:0], CA taps in bits [15:10] */
		data = (ck_num_adll_tap & 0x3f) +
			((ca_num_adll_tap & 0x3f) << 10);

		/*
		 * Set the ADLL number to the CK ADLL for Interfaces for
		 * all Pup
		 */
		DEBUG_TRAINING_HW_ALG(
			DEBUG_LEVEL_TRACE,
			("ck_num_adll_tap %d ca_num_adll_tap %d adll_tap %d\n",
			 ck_num_adll_tap, ca_num_adll_tap, adll_tap));

		CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST,
						if_id, ACCESS_TYPE_MULTICAST,
						PARAM_NOT_CARE, DDR_PHY_CONTROL,
						0x0, data));
	}

	return MV_OK;
}