// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include <common.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

#define VREF_INITIAL_STEP	3
#define VREF_SECOND_STEP	1
#define VREF_MAX_INDEX		7
#define MAX_VALUE		(1024 - 1)
#define MIN_VALUE		(-MAX_VALUE)
#define GET_RD_SAMPLE_DELAY(data, cs)	((data >> rd_sample_mask[cs]) & 0x1f)

u32 ck_delay = (u32)-1, ck_delay_16 = (u32)-1;
u32 ca_delay;
int ddr3_tip_centr_skip_min_win_check = 0;
u8 current_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u8 last_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u16 current_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u16 last_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u8 lim_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u8 interface_state[MAX_INTERFACE_NUM];
u8 vref_window_size[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 vref_window_size_th = 12;

static u8 pup_st[MAX_BUS_NUM][MAX_INTERFACE_NUM];

static u32 rd_sample_mask[] = {
	0,
	8,
	16,
	24
};

/* Per-pup Vref search states */
#define VREF_STEP_1	0
#define VREF_STEP_2	1
#define VREF_CONVERGE	2

/*
 * ODT additional timing
 */
int ddr3_tip_write_additional_odt_setting(u32 dev_num, u32 if_id)
{
	u32 cs_num = 0, max_cs = 0, max_read_sample = 0, min_read_sample = 0x1f;
	u32 data_read[MAX_INTERFACE_NUM] = { 0 };
	u32 read_sample[MAX_CS_NUM];
	u32 val;
	u32 pup_index;
	int max_phase = MIN_VALUE, current_phase;
	enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       DUNIT_ODT_CONTROL_REG,
				       0 << 8, 0x3 << 8));
	CHECK_STATUS(ddr3_tip_if_read(dev_num, access_type, if_id,
				      READ_DATA_SAMPLE_DELAY,
				      data_read, MASK_ALL_BITS));
	val = data_read[if_id];

	max_cs = hws_ddr3_tip_max_cs_get();

	for (cs_num = 0; cs_num < max_cs; cs_num++) {
		read_sample[cs_num] = GET_RD_SAMPLE_DELAY(val, cs_num);

		/* find the maximum of the read samples */
		if (read_sample[cs_num] >= max_read_sample) {
			if (read_sample[cs_num] == max_read_sample) {
				/* same sample value - only search for the max phase */
			} else {
				max_read_sample = read_sample[cs_num];
				max_phase = MIN_VALUE;
			}

			for (pup_index = 0;
			     pup_index < tm->num_of_bus_per_interface;
			     pup_index++) {
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, pup_index,
					      DDR_PHY_DATA,
					      RL_PHY_REG + CS_REG_VALUE(cs_num),
					      &val));

				current_phase = ((int)val & 0xe0) >> 6;
				if (current_phase >= max_phase)
					max_phase = current_phase;
			}
		}

		/* find the minimum */
		if (read_sample[cs_num] < min_read_sample)
			min_read_sample = read_sample[cs_num];
	}

	if (min_read_sample <= tm->interface_params[if_id].cas_l)
		min_read_sample = (int)tm->interface_params[if_id].cas_l;

	min_read_sample = min_read_sample - 1;
	max_read_sample = max_read_sample + 4 + (max_phase + 1) / 2 + 1;
	if (max_read_sample >= 0x1f)
		max_read_sample = 0x1f;

	/*
	 * Program the ODT timing window: the lower bound goes into
	 * ODT_TIMING_LOW bits [15:12], the upper bound into bits [20:16].
	 */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       ODT_TIMING_LOW,
				       ((min_read_sample - 1) << 12),
				       0xf << 12));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       ODT_TIMING_LOW,
				       (max_read_sample << 16),
				       0x1f << 16));

	return MV_OK;
}

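/*
 * get_valid_win_rx() - read back the RX valid window per pup (byte lane)
 *
 * Reads the per-pup result register (RESULT_DB_PHY_REG_ADDR) for each of
 * the four pups of the interface and extracts the 5-bit RX window field
 * into res[]. Note that only CS 0 is handled here (see the TBD below).
 */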
int get_valid_win_rx(u32 dev_num, u32 if_id, u8 res[4])
{
	u32 reg_pup = RESULT_DB_PHY_REG_ADDR;
	u32 reg_data;
	u32 cs_num;
	int i;

	cs_num = 0;

	/* TBD */
	reg_pup += cs_num;

	for (i = 0; i < 4; i++) {
		CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
					       ACCESS_TYPE_UNICAST, i,
					       DDR_PHY_DATA, reg_pup,
					       &reg_data));
		res[i] = (reg_data >> RESULT_DB_PHY_REG_RX_OFFSET) & 0x1f;
	}

	return 0;
}

/*
 * This algorithm searches for the vertical optimum of the sampling point,
 * i.e. the best Vref for the receivers.
 * Moving the voltage sample point can improve the eye/window size of the
 * bit and of the pup.
 * The drawback is that Vref is tuned the same for all DQs of a pup, so
 * there is no per-bit (PBS-like) adjustment; it behaves more like
 * centralization.
 * Since there is no training state-machine support for this stage, a
 * slightly smarter search is used to save time.
 */
int ddr3_tip_vref(u32 dev_num)
{
	/*
	 * The Vref register has a non-linear order. Need to check what it
	 * will be in future projects.
	 */
	u32 vref_map[8] = {
		1, 2, 3, 4, 5, 6, 7, 0
	};
	/* State and parameter definitions */
	u32 initial_step = VREF_INITIAL_STEP;
	/* TODO: should this step be negative? */
	u32 second_step = VREF_SECOND_STEP;
	u32 algo_run_flag = 0, cur_vref_idx = 0;
	u32 while_count = 0;
	u32 pup = 0, if_id = 0, num_pup = 0, rep = 0;
	u32 val = 0;
	u32 reg_addr = 0xa8;
	u32 copy_start_pattern, copy_end_pattern;
	enum hws_result *flow_result = ddr3_tip_get_result_ptr(training_stage);
	u8 res[4];
	struct hws_topology_map *tm = ddr3_get_topology_map();

	CHECK_STATUS(ddr3_tip_special_rx(dev_num));

	/* save start/end pattern */
	copy_start_pattern = start_pattern;
	copy_end_pattern = end_pattern;

	/* set vref as centralization pattern */
	start_pattern = PATTERN_VREF;
	end_pattern = PATTERN_VREF;

	/* init params */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0;
		     pup < tm->num_of_bus_per_interface; pup++) {
			current_vref[pup][if_id] = 0;
			last_vref[pup][if_id] = 0;
			lim_vref[pup][if_id] = 0;
			current_valid_window[pup][if_id] = 0;
			last_valid_window[pup][if_id] = 0;
			if (vref_window_size[if_id][pup] >
			    vref_window_size_th) {
				/* window is already wide enough */
				pup_st[pup][if_id] = VREF_CONVERGE;
				DEBUG_TRAINING_HW_ALG(
					DEBUG_LEVEL_INFO,
					("VREF config, IF[ %d ]pup[ %d ] - Vref tune not required (%d)\n",
					 if_id, pup, __LINE__));
			} else {
				pup_st[pup][if_id] = VREF_STEP_1;
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, &val));
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      pup, DDR_PHY_DATA, reg_addr,
					      (val & (~0xf)) | vref_map[0]));
				DEBUG_TRAINING_HW_ALG(
					DEBUG_LEVEL_INFO,
					("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
					 if_id, pup,
					 (val & (~0xf)) | vref_map[0],
					 __LINE__));
			}
		}
		interface_state[if_id] = 0;
	}

	/* TODO: Set number of active interfaces */
	num_pup = tm->num_of_bus_per_interface * MAX_INTERFACE_NUM;

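	/*
	 * Search loop overview (derived from the state handling below):
	 *  - each non-converged pup starts in VREF_STEP_1 and walks the Vref
	 *    index up in steps of VREF_INITIAL_STEP while the RX window does
	 *    not shrink;
	 *  - once the window degrades (or the index hits VREF_MAX_INDEX) the
	 *    pup moves to VREF_STEP_2 and walks back down in steps of
	 *    VREF_SECOND_STEP towards the stored limit;
	 *  - when the fine search is exhausted or the window degrades again,
	 *    the pup is marked VREF_CONVERGE and the chosen Vref is left
	 *    programmed.
	 * Window sizes are averaged over three RX centralization runs in
	 * units of 1/1000 of the raw window count; the "+ 200" comparison
	 * below therefore acts as ~0.2 of a count of hysteresis. The outer
	 * loop is bounded to 10 iterations.
	 */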
	while ((algo_run_flag <= num_pup) && (while_count < 10)) {
		while_count++;
		for (rep = 1; rep < 4; rep++) {
			ddr3_tip_centr_skip_min_win_check = 1;
			ddr3_tip_centralization_rx(dev_num);
			ddr3_tip_centr_skip_min_win_check = 0;

			/* Read valid window results only for non-converged pups */
			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				if (interface_state[if_id] == 4)
					continue;
				get_valid_win_rx(dev_num, if_id, res);
				for (pup = 0;
				     pup < tm->num_of_bus_per_interface;
				     pup++) {
					VALIDATE_ACTIVE(tm->bus_act_mask, pup);
					if (pup_st[pup][if_id] == VREF_CONVERGE)
						continue;

					/* running average over the repetitions */
					current_valid_window[pup][if_id] =
						(current_valid_window[pup][if_id] *
						 (rep - 1) + 1000 * res[pup]) / rep;
				}
			}
		}

		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			DEBUG_TRAINING_HW_ALG(
				DEBUG_LEVEL_TRACE,
				("current_valid_window: IF[ %d ] - ", if_id));

			for (pup = 0;
			     pup < tm->num_of_bus_per_interface; pup++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, pup);
				DEBUG_TRAINING_HW_ALG(
					DEBUG_LEVEL_TRACE,
					("%d ",
					 current_valid_window[pup][if_id]));
			}
			DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE, ("\n"));
		}

		/* Compare results and respond as a function of state */
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			for (pup = 0;
			     pup < tm->num_of_bus_per_interface; pup++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, pup);
				DEBUG_TRAINING_HW_ALG(
					DEBUG_LEVEL_TRACE,
					("I/F[ %d ], pup[ %d ] STATE #%d (%d)\n",
					 if_id, pup, pup_st[pup][if_id],
					 __LINE__));

				if (pup_st[pup][if_id] == VREF_CONVERGE)
					continue;

				DEBUG_TRAINING_HW_ALG(
					DEBUG_LEVEL_TRACE,
					("I/F[ %d ], pup[ %d ] CHECK progress - Current %d Last %d, limit VREF %d (%d)\n",
					 if_id, pup,
					 current_valid_window[pup][if_id],
					 last_valid_window[pup][if_id],
					 lim_vref[pup][if_id], __LINE__));

				/*
				 * Allow some slack for the +/- 1 tap
				 * resolution of the ADLL solution.
				 */
				if (current_valid_window[pup][if_id] + 200 >=
				    (last_valid_window[pup][if_id])) {
					if (pup_st[pup][if_id] == VREF_STEP_1) {
						/*
						 * Stay in the same state and
						 * step: just update the window
						 * size (take the max) and the
						 * Vref.
						 */
						if (current_vref[pup][if_id] ==
						    VREF_MAX_INDEX) {
							/*
							 * We stepped to the end
							 * without converging to
							 * a particular better
							 * Vref value: mark the
							 * pup as converged and
							 * step back to the
							 * nominal Vref.
							 */
							pup_st[pup][if_id] =
								VREF_CONVERGE;
							algo_run_flag++;
							interface_state[if_id]++;
							DEBUG_TRAINING_HW_ALG(
								DEBUG_LEVEL_TRACE,
								("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
								 if_id, pup,
								 current_vref[pup][if_id],
								 __LINE__));
						} else {
							/* continue to update the Vref index */
							current_vref[pup][if_id] =
								((current_vref[pup][if_id] +
								  initial_step) > VREF_MAX_INDEX) ?
								VREF_MAX_INDEX :
								(current_vref[pup][if_id] +
								 initial_step);
							if (current_vref[pup][if_id] ==
							    VREF_MAX_INDEX)
								pup_st[pup][if_id] =
									VREF_STEP_2;
							lim_vref[pup][if_id] =
								last_vref[pup][if_id] =
								current_vref[pup][if_id];
						}

						last_valid_window[pup][if_id] =
							GET_MAX(current_valid_window[pup][if_id],
								last_valid_window[pup][if_id]);

						/* update the Vref for the next stage */
						cur_vref_idx =
							current_vref[pup][if_id];
						CHECK_STATUS(ddr3_tip_bus_read
							     (dev_num, if_id,
							      ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      &val));
						CHECK_STATUS(ddr3_tip_bus_write
							     (dev_num,
							      ACCESS_TYPE_UNICAST,
							      if_id,
							      ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      (val & (~0xf)) |
							      vref_map[cur_vref_idx]));
						DEBUG_TRAINING_HW_ALG(
							DEBUG_LEVEL_TRACE,
							("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							 if_id, pup,
							 (val & (~0xf)) |
							 vref_map[cur_vref_idx],
							 __LINE__));
					} else if (pup_st[pup][if_id] ==
						   VREF_STEP_2) {
						/*
						 * Keep on searching back with
						 * the same step size.
						 */
						last_valid_window[pup][if_id] =
							GET_MAX(current_valid_window[pup][if_id],
								last_valid_window[pup][if_id]);
						last_vref[pup][if_id] =
							current_vref[pup][if_id];

						/* we finished the whole search space */
						if ((current_vref[pup][if_id] -
						     second_step) ==
						    lim_vref[pup][if_id]) {
							/*
							 * We stepped to the end
							 * without converging to
							 * a particular better
							 * Vref value: mark the
							 * pup as converged and
							 * step back to the
							 * nominal Vref.
							 */
							pup_st[pup][if_id] =
								VREF_CONVERGE;
							algo_run_flag++;
							interface_state[if_id]++;
							current_vref[pup][if_id] =
								(current_vref[pup][if_id] -
								 second_step);
							DEBUG_TRAINING_HW_ALG(
								DEBUG_LEVEL_TRACE,
								("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
								 if_id, pup,
								 current_vref[pup][if_id],
								 __LINE__));
						} else if (current_vref[pup][if_id] ==
							   lim_vref[pup][if_id]) {
							/* we finished the whole search space */
							pup_st[pup][if_id] =
								VREF_CONVERGE;
							algo_run_flag++;
							interface_state[if_id]++;
							DEBUG_TRAINING_HW_ALG(
								DEBUG_LEVEL_TRACE,
								("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
								 if_id, pup,
								 current_vref[pup][if_id],
								 __LINE__));
						} else {
							current_vref[pup][if_id] =
								current_vref[pup][if_id] -
								second_step;
						}

						/* Update the Vref for the next stage */
						cur_vref_idx =
							current_vref[pup][if_id];
						CHECK_STATUS(ddr3_tip_bus_read
							     (dev_num, if_id,
							      ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      &val));
						CHECK_STATUS(ddr3_tip_bus_write
							     (dev_num,
							      ACCESS_TYPE_UNICAST,
							      if_id,
							      ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      (val & (~0xf)) |
							      vref_map[cur_vref_idx]));
						DEBUG_TRAINING_HW_ALG(
							DEBUG_LEVEL_TRACE,
							("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							 if_id, pup,
							 (val & (~0xf)) |
							 vref_map[cur_vref_idx],
							 __LINE__));
					}
				} else {
					/* the window degraded: change state and step */
					if (pup_st[pup][if_id] == VREF_STEP_1) {
						pup_st[pup][if_id] =
							VREF_STEP_2;
						lim_vref[pup][if_id] =
							current_vref[pup][if_id] -
							initial_step;
						last_valid_window[pup][if_id] =
							current_valid_window[pup][if_id];
						last_vref[pup][if_id] =
							current_vref[pup][if_id];
						current_vref[pup][if_id] =
							last_vref[pup][if_id] -
							second_step;

						/* Update the Vref for the next stage */
						CHECK_STATUS(ddr3_tip_bus_read
							     (dev_num, if_id,
							      ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      &val));
						CHECK_STATUS(ddr3_tip_bus_write
							     (dev_num,
							      ACCESS_TYPE_UNICAST,
							      if_id,
							      ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      (val & (~0xf)) |
							      vref_map[current_vref[pup][if_id]]));
						DEBUG_TRAINING_HW_ALG(
							DEBUG_LEVEL_TRACE,
							("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							 if_id, pup,
							 (val & (~0xf)) |
							 vref_map[current_vref[pup][if_id]],
							 __LINE__));

					} else if (pup_st[pup][if_id] ==
						   VREF_STEP_2) {
						/*
						 * The last search hit the
						 * maximum point: set that
						 * value and exit.
						 */
						CHECK_STATUS(ddr3_tip_bus_read
							     (dev_num, if_id,
							      ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      &val));
						CHECK_STATUS(ddr3_tip_bus_write
							     (dev_num,
							      ACCESS_TYPE_UNICAST,
							      if_id,
							      ACCESS_TYPE_UNICAST, pup,
							      DDR_PHY_DATA, reg_addr,
							      (val & (~0xf)) |
							      vref_map[last_vref[pup][if_id]]));
						DEBUG_TRAINING_HW_ALG(
							DEBUG_LEVEL_TRACE,
							("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							 if_id, pup,
							 (val & (~0xf)) |
							 vref_map[last_vref[pup][if_id]],
							 __LINE__));
						pup_st[pup][if_id] =
							VREF_CONVERGE;
						algo_run_flag++;
						interface_state[if_id]++;
						DEBUG_TRAINING_HW_ALG(
							DEBUG_LEVEL_TRACE,
							("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
							 if_id, pup,
							 current_vref[pup][if_id],
							 __LINE__));
					}
				}
			}
		}
	}

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0;
		     pup < tm->num_of_bus_per_interface; pup++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, pup);
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id,
				      ACCESS_TYPE_UNICAST, pup,
				      DDR_PHY_DATA, reg_addr, &val));
			DEBUG_TRAINING_HW_ALG(
				DEBUG_LEVEL_INFO,
				("FINAL values: I/F[ %d ], pup[ %d ] - Vref = %X (%d)\n",
				 if_id, pup, val, __LINE__));
		}
	}

	/* mark the stage as passed for all active interfaces */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		flow_result[if_id] = TEST_SUCCESS;
	}

	/* restore start/end pattern */
	start_pattern = copy_start_pattern;
	end_pattern = copy_end_pattern;

	return 0;
}

/*
 * CK/CA Delay
 */
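/*
 * Illustrative example only (the numbers below are assumptions, not taken
 * from any datasheet): with adll_tap = 40 ps, ck_delay = 160 ps and
 * ca_delay = 120 ps, a non-16-bit interface gets ck_num_adll_tap = 160 / 40
 * = 4 and ca_num_adll_tap = 120 / 40 = 3, so data = 4 + (3 << 10) = 0xc04.
 * The CK tap count occupies bits [5:0] and the CA tap count bits [15:10] of
 * the value written to register 0x0 of the DDR_PHY_CONTROL space.
 */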
int ddr3_tip_cmd_addr_init_delay(u32 dev_num, u32 adll_tap)
{
	u32 if_id = 0;
	u32 ck_num_adll_tap = 0, ca_num_adll_tap = 0, data = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * ck_delay / ck_delay_16 delay the clock signal only
	 * (to overcome timing issues between CK and the command/address
	 * signals).
	 */
	/*
	 * ca_delay delays the entire command & address bus, including the
	 * clock signal (to overcome DGL errors on the clock versus the DQS).
	 */

	/* Calc ADLL Tap */
	if ((ck_delay == -1) || (ck_delay_16 == -1)) {
		DEBUG_TRAINING_HW_ALG(
			DEBUG_LEVEL_ERROR,
			("ERROR: One of the ck_delay values is not initialized!\n"));
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		/* Convert the delay in ps to ADLL taps */
		if (tm->interface_params[if_id].bus_width == BUS_WIDTH_16)
			ck_num_adll_tap = ck_delay_16 / adll_tap;
		else
			ck_num_adll_tap = ck_delay / adll_tap;

		ca_num_adll_tap = ca_delay / adll_tap;
		data = (ck_num_adll_tap & 0x3f) +
			((ca_num_adll_tap & 0x3f) << 10);

		/*
		 * Set the ADLL number to the CK ADLL for all pups of the
		 * interface.
		 */
		DEBUG_TRAINING_HW_ALG(
			DEBUG_LEVEL_TRACE,
			("ck_num_adll_tap %d ca_num_adll_tap %d adll_tap %d\n",
			 ck_num_adll_tap, ca_num_adll_tap, adll_tap));

		CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST,
						if_id, ACCESS_TYPE_MULTICAST,
						PARAM_NOT_CARE, DDR_PHY_CONTROL,
						0x0, data));
	}

	return MV_OK;
}