// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include "ddr3_init.h"

#define VREF_INITIAL_STEP	3
#define VREF_SECOND_STEP	1
#define VREF_MAX_INDEX		7
#define MAX_VALUE		(1024 - 1)
#define MIN_VALUE		(-MAX_VALUE)
#define GET_RD_SAMPLE_DELAY(data, cs)	((data >> rd_sample_mask[cs]) & 0xf)

u32 ca_delay;
int ddr3_tip_centr_skip_min_win_check = 0;
u8 current_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u8 last_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u16 current_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u16 last_valid_window[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u8 lim_vref[MAX_BUS_NUM][MAX_INTERFACE_NUM];
u8 interface_state[MAX_INTERFACE_NUM];
u8 vref_window_size[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 vref_window_size_th = 12;

static u8 pup_st[MAX_BUS_NUM][MAX_INTERFACE_NUM];

static u32 rd_sample_mask[] = {
	0,
	8,
	16,
	24
};

#define VREF_STEP_1	0
#define VREF_STEP_2	1
#define VREF_CONVERGE	2

/*
 * ODT additional timing
 */
int ddr3_tip_write_additional_odt_setting(u32 dev_num, u32 if_id)
{
	u32 cs_num = 0, max_read_sample = 0, min_read_sample = 0x1f;
	u32 data_read[MAX_INTERFACE_NUM] = { 0 };
	u32 read_sample[MAX_CS_NUM];
	u32 val;
	u32 pup_index;
	int max_phase = MIN_VALUE, current_phase;
	enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);

	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       DUNIT_ODT_CTRL_REG,
				       0 << 8, 0x3 << 8));
	CHECK_STATUS(ddr3_tip_if_read(dev_num, access_type, if_id,
				      RD_DATA_SMPL_DLYS_REG,
				      data_read, MASK_ALL_BITS));
	val = data_read[if_id];

	for (cs_num = 0; cs_num < MAX_CS_NUM; cs_num++) {
		read_sample[cs_num] = GET_RD_SAMPLE_DELAY(val, cs_num);

		/* find maximum of read_samples */
		if (read_sample[cs_num] >= max_read_sample) {
			if (read_sample[cs_num] == max_read_sample)
				max_phase = MIN_VALUE;
			else
				max_read_sample = read_sample[cs_num];

			for (pup_index = 0;
			     pup_index < octets_per_if_num;
			     pup_index++) {
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, pup_index,
					      DDR_PHY_DATA,
					      RL_PHY_REG(cs_num),
					      &val));

				current_phase = ((int)val & 0xe0) >> 6;
				if (current_phase >= max_phase)
					max_phase = current_phase;
			}
		}

		/* find minimum */
		if (read_sample[cs_num] < min_read_sample)
			min_read_sample = read_sample[cs_num];
	}

	min_read_sample = min_read_sample - 1;
	max_read_sample = max_read_sample + 4 + (max_phase + 1) / 2 + 1;
	if (min_read_sample >= 0xf)
		min_read_sample = 0xf;
	if (max_read_sample >= 0x1f)
		max_read_sample = 0x1f;

	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       DDR_ODT_TIMING_LOW_REG,
				       ((min_read_sample - 1) << 12),
				       0xf << 12));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
				       DDR_ODT_TIMING_LOW_REG,
				       (max_read_sample << 16),
				       0x1f << 16));

	return MV_OK;
}

int get_valid_win_rx(u32 dev_num, u32 if_id, u8 res[4])
{
	u32 reg_pup = RESULT_PHY_REG;
	u32 reg_data;
	u32 cs_num;
	int i;

	cs_num = 0;

	/* TBD */
	reg_pup += cs_num;

	for (i = 0; i < 4; i++) {
		CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
					       ACCESS_TYPE_UNICAST, i,
					       DDR_PHY_DATA, reg_pup,
					       &reg_data));
		res[i] = (reg_data >> RESULT_PHY_RX_OFFS) & 0x1f;
	}

	return 0;
}
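
/*
 * Illustrative sketch only, kept compiled out and not part of the training
 * flow: it shows how the per-octet RX valid-window values returned by
 * get_valid_win_rx() above might be read back and averaged over several
 * repetitions, the same way ddr3_tip_vref() below accumulates them.
 * The interface id 0, the repetition count and the printf() reporting are
 * assumptions made for this example only.
 */
#if 0
static void example_average_rx_windows(u32 dev_num)
{
	u16 avg[4] = { 0 };	/* running average per octet, scaled by 1000 */
	u8 res[4];
	u32 rep, i;

	for (rep = 1; rep < 4; rep++) {
		if (get_valid_win_rx(dev_num, 0 /* if_id, assumed */, res) != 0)
			return;

		/* same running-average formula used by ddr3_tip_vref() */
		for (i = 0; i < 4; i++)
			avg[i] = (avg[i] * (rep - 1) + 1000 * res[i]) / rep;
	}

	for (i = 0; i < 4; i++)
		printf("octet %u: avg rx valid window = %u\n", i, (u32)avg[i]);
}
#endif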

/*
 * This algorithm searches for the vertical (voltage) optimum of the sample
 * signal: moving the voltage sample point can improve the eye/window size of
 * the bit and the pup.
 * The limitation is that Vref is tuned the same way for all DQs, so there is
 * no per-bit (PBS-like) adjustment; it is closer to a centralization step.
 * Since there is no training state-machine support for this, a slightly
 * smarter search is used to save time.
 */
int ddr3_tip_vref(u32 dev_num)
{
	/*
	 * The Vref register values have a non-linear order. Need to check
	 * what this will be in future projects.
	 */
	u32 vref_map[8] = {
		1, 2, 3, 4, 5, 6, 7, 0
	};
	/* State and parameter definitions */
	u32 initial_step = VREF_INITIAL_STEP;
	/* should this be assigned a negative value? */
	u32 second_step = VREF_SECOND_STEP;
	u32 algo_run_flag = 0, currrent_vref = 0;
	u32 while_count = 0;
	u32 pup = 0, if_id = 0, num_pup = 0, rep = 0;
	u32 val = 0;
	u32 reg_addr = 0xa8;
	u32 copy_start_pattern, copy_end_pattern;
	enum hws_result *flow_result = ddr3_tip_get_result_ptr(training_stage);
	u8 res[4];
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	CHECK_STATUS(ddr3_tip_special_rx(dev_num));

	/* save start/end pattern */
	copy_start_pattern = start_pattern;
	copy_end_pattern = end_pattern;

	/* set vref as centralization pattern */
	start_pattern = PATTERN_VREF;
	end_pattern = PATTERN_VREF;

	/* init params */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup < octets_per_if_num; pup++) {
			current_vref[pup][if_id] = 0;
			last_vref[pup][if_id] = 0;
			lim_vref[pup][if_id] = 0;
			current_valid_window[pup][if_id] = 0;
			last_valid_window[pup][if_id] = 0;
			if (vref_window_size[if_id][pup] >
			    vref_window_size_th) {
				pup_st[pup][if_id] = VREF_CONVERGE;
				DEBUG_TRAINING_HW_ALG(
					DEBUG_LEVEL_INFO,
					("VREF config, IF[ %d ]pup[ %d ] - Vref tune not required (%d)\n",
					 if_id, pup, __LINE__));
			} else {
				pup_st[pup][if_id] = VREF_STEP_1;
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, &val));
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      pup, DDR_PHY_DATA, reg_addr,
					      (val & (~0xf)) | vref_map[0]));
				DEBUG_TRAINING_HW_ALG(
					DEBUG_LEVEL_INFO,
					("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
					 if_id, pup,
					 (val & (~0xf)) | vref_map[0],
					 __LINE__));
			}
		}
		interface_state[if_id] = 0;
	}

	/* TODO: Set number of active interfaces */
	num_pup = octets_per_if_num * MAX_INTERFACE_NUM;

	while ((algo_run_flag <= num_pup) && (while_count < 10)) {
		while_count++;
		for (rep = 1; rep < 4; rep++) {
			ddr3_tip_centr_skip_min_win_check = 1;
			ddr3_tip_centralization_rx(dev_num);
			ddr3_tip_centr_skip_min_win_check = 0;

			/* read valid-window results only for non-converged pups */
			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				if (interface_state[if_id] != 4) {
					get_valid_win_rx(dev_num, if_id, res);
					for (pup = 0;
					     pup < octets_per_if_num;
					     pup++) {
						VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
						if (pup_st[pup][if_id] ==
						    VREF_CONVERGE)
							continue;

						current_valid_window[pup][if_id] =
							(current_valid_window[pup][if_id] *
							 (rep - 1) + 1000 * res[pup]) / rep;
					}
				}
			}
		}

		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			DEBUG_TRAINING_HW_ALG(
				DEBUG_LEVEL_TRACE,
				("current_valid_window: IF[ %d ] - ", if_id));

			for (pup = 0; pup < octets_per_if_num; pup++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
						      ("%d ",
						       current_valid_window[pup][if_id]));
			}
			DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE, ("\n"));
		}

		/* compare the results and respond as a function of state */
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			for (pup = 0; pup < octets_per_if_num; pup++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
						      ("I/F[ %d ], pup[ %d ] STATE #%d (%d)\n",
						       if_id, pup,
						       pup_st[pup][if_id], __LINE__));

				if (pup_st[pup][if_id] == VREF_CONVERGE)
					continue;

				DEBUG_TRAINING_HW_ALG(DEBUG_LEVEL_TRACE,
						      ("I/F[ %d ], pup[ %d ] CHECK progress - Current %d Last %d, limit VREF %d (%d)\n",
						       if_id, pup,
						       current_valid_window[pup][if_id],
						       last_valid_window[pup][if_id],
						       lim_vref[pup][if_id], __LINE__));

				/*
				 * The -1 is for solution resolution of
				 * +/- 1 ADLL tap
				 */
				if (current_valid_window[pup][if_id] + 200 >=
				    (last_valid_window[pup][if_id])) {
					if (pup_st[pup][if_id] == VREF_STEP_1) {
						/*
						 * Stay in the same state and
						 * step; just update the window
						 * size (take the max) and Vref
						 */
						if (current_vref[pup][if_id] ==
						    VREF_MAX_INDEX) {
							/*
							 * If we stepped to the
							 * end and didn't
							 * converge to some
							 * particular better
							 * Vref value, define
							 * the pup as converged
							 * and step back to the
							 * nominal Vref.
							 */
							pup_st[pup][if_id] =
								VREF_CONVERGE;
							algo_run_flag++;
							interface_state[if_id]++;
							DEBUG_TRAINING_HW_ALG
								(DEBUG_LEVEL_TRACE,
								 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
								  if_id, pup,
								  current_vref[pup][if_id],
								  __LINE__));
						} else {
							/* continue to update the Vref index */
							current_vref[pup][if_id] =
								((current_vref[pup][if_id] +
								  initial_step) > VREF_MAX_INDEX) ?
								VREF_MAX_INDEX :
								(current_vref[pup][if_id] +
								 initial_step);
							if (current_vref[pup][if_id] ==
							    VREF_MAX_INDEX) {
								pup_st[pup][if_id] =
									VREF_STEP_2;
							}
							lim_vref[pup][if_id] =
								last_vref[pup][if_id] =
								current_vref[pup][if_id];
						}

						last_valid_window[pup][if_id] =
							GET_MAX(current_valid_window[pup][if_id],
								last_valid_window[pup][if_id]);

						/* update the Vref for the next stage */
						currrent_vref =
							current_vref[pup][if_id];
						CHECK_STATUS
							(ddr3_tip_bus_read
							 (dev_num, if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  &val));
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  (val & (~0xf)) |
							  vref_map[currrent_vref]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[currrent_vref],
							  __LINE__));
					} else if (pup_st[pup][if_id] ==
						   VREF_STEP_2) {
						/*
						 * Keep searching back with the
						 * same step size.
						 */
						last_valid_window[pup][if_id] =
							GET_MAX(current_valid_window[pup][if_id],
								last_valid_window[pup][if_id]);
						last_vref[pup][if_id] =
							current_vref[pup][if_id];

						/* the whole search space is done */
						if ((current_vref[pup][if_id] -
						     second_step) == lim_vref[pup][if_id]) {
							/*
							 * If we stepped to the
							 * end and didn't
							 * converge to some
							 * particular better
							 * Vref value, define
							 * the pup as converged
							 * and step back to the
							 * nominal Vref.
							 */
							pup_st[pup][if_id] =
								VREF_CONVERGE;
							algo_run_flag++;

							interface_state[if_id]++;

							current_vref[pup][if_id] =
								(current_vref[pup][if_id] -
								 second_step);

							DEBUG_TRAINING_HW_ALG
								(DEBUG_LEVEL_TRACE,
								 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
								  if_id, pup,
								  current_vref[pup][if_id],
								  __LINE__));
						} else if (current_vref[pup][if_id] ==
							   lim_vref[pup][if_id]) {
							/* the whole search space is done */
							/*
							 * If we stepped to the
							 * end and didn't
							 * converge to some
							 * particular better
							 * Vref value, define
							 * the pup as converged
							 * and step back to the
							 * nominal Vref.
							 */
							pup_st[pup][if_id] =
								VREF_CONVERGE;

							algo_run_flag++;
							interface_state[if_id]++;
							DEBUG_TRAINING_HW_ALG
								(DEBUG_LEVEL_TRACE,
								 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
								  if_id, pup,
								  current_vref[pup][if_id],
								  __LINE__));
						} else {
							current_vref[pup][if_id] =
								current_vref[pup][if_id] -
								second_step;
						}

						/* update the Vref for the next stage */
						currrent_vref =
							current_vref[pup][if_id];
						CHECK_STATUS
							(ddr3_tip_bus_read
							 (dev_num, if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  &val));
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  (val & (~0xf)) |
							  vref_map[currrent_vref]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[currrent_vref],
							  __LINE__));
					}
				} else {
					/* change state and change step size */
					if (pup_st[pup][if_id] == VREF_STEP_1) {
						pup_st[pup][if_id] =
							VREF_STEP_2;
						lim_vref[pup][if_id] =
							current_vref[pup][if_id] -
							initial_step;
						last_valid_window[pup][if_id] =
							current_valid_window[pup][if_id];
						last_vref[pup][if_id] =
							current_vref[pup][if_id];
						current_vref[pup][if_id] =
							last_vref[pup][if_id] -
							second_step;

						/* update the Vref for the next stage */
						CHECK_STATUS
							(ddr3_tip_bus_read
							 (dev_num, if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  &val));
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  (val & (~0xf)) |
							  vref_map[current_vref[pup][if_id]]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[current_vref[pup][if_id]],
							  __LINE__));

					} else if (pup_st[pup][if_id] == VREF_STEP_2) {
						/*
						 * The last search found the max
						 * point; set the value and exit.
						 */
						CHECK_STATUS
							(ddr3_tip_bus_read
							 (dev_num, if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  &val));
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST, pup,
							  DDR_PHY_DATA, reg_addr,
							  (val & (~0xf)) |
							  vref_map[last_vref[pup][if_id]]));
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("VREF config, IF[ %d ]pup[ %d ] - Vref = %X (%d)\n",
							  if_id, pup,
							  (val & (~0xf)) |
							  vref_map[last_vref[pup][if_id]],
							  __LINE__));
						pup_st[pup][if_id] =
							VREF_CONVERGE;
						algo_run_flag++;
						interface_state[if_id]++;
						DEBUG_TRAINING_HW_ALG
							(DEBUG_LEVEL_TRACE,
							 ("I/F[ %d ], pup[ %d ] VREF_CONVERGE - Vref = %X (%d)\n",
							  if_id, pup,
							  current_vref[pup][if_id],
							  __LINE__));
					}
				}
			}
		}
	}

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup < octets_per_if_num; pup++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id,
				      ACCESS_TYPE_UNICAST, pup,
				      DDR_PHY_DATA, reg_addr, &val));
			DEBUG_TRAINING_HW_ALG(
				DEBUG_LEVEL_INFO,
				("FINAL values: I/F[ %d ], pup[ %d ] - Vref = %X (%d)\n",
				 if_id, pup, val, __LINE__));
		}
	}

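	/* report this stage as passed in the training results */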
	flow_result[if_id] = TEST_SUCCESS;

	/* restore start/end pattern */
	start_pattern = copy_start_pattern;
	end_pattern = copy_end_pattern;

	return 0;
}

/*
 * CK/CA Delay
 */
int ddr3_tip_cmd_addr_init_delay(u32 dev_num, u32 adll_tap)
{
	u32 if_id = 0;
	u32 ck_num_adll_tap = 0, ca_num_adll_tap = 0, data = 0;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * ck_delay delays the clock signal only (to overcome timing issues
	 * between CK and the command/address signals).
	 */
	/*
	 * ca_delay delays the entire command & address bus (including the
	 * clock signal, to overcome DGL error on the clock versus the DQS).
	 */

	/* Calc ADLL Tap */
	if (ck_delay == PARAM_UNDEFINED)
		DEBUG_TRAINING_HW_ALG(
			DEBUG_LEVEL_ERROR,
			("ERROR: ck_delay is not initialized!\n"));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		/* convert the delay in ps to ADLL taps */
		ck_num_adll_tap = ck_delay / adll_tap;
		ca_num_adll_tap = ca_delay / adll_tap;

		data = (ck_num_adll_tap & 0x3f) +
		       ((ca_num_adll_tap & 0x3f) << 10);

		/*
		 * Set the ADLL number to the CK ADLL for the interface,
		 * for all pups
		 */
		DEBUG_TRAINING_HW_ALG(
			DEBUG_LEVEL_TRACE,
			("ck_num_adll_tap %d ca_num_adll_tap %d adll_tap %d\n",
			 ck_num_adll_tap, ca_num_adll_tap, adll_tap));

		CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST,
						if_id, ACCESS_TYPE_MULTICAST,
						PARAM_NOT_CARE, DDR_PHY_CONTROL,
						0x0, data));
	}

	return MV_OK;
}
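
/*
 * Worked example for the CK/CA delay conversion above, kept compiled out for
 * illustration only.  The ADLL tap period (78 ps) and the ck/ca delay values
 * used here are assumed numbers for the example, not taken from any
 * particular board.
 */
#if 0
static u32 example_ck_ca_delay_word(void)
{
	u32 adll_tap = 78;	/* assumed ADLL tap size, in ps */
	u32 ck_delay_ps = 160;	/* assumed CK-only delay, in ps */
	u32 ca_delay_ps = 0;	/* assumed command/address delay, in ps */
	u32 ck_num_adll_tap = ck_delay_ps / adll_tap;	/* 160 / 78 = 2 taps */
	u32 ca_num_adll_tap = ca_delay_ps / adll_tap;	/* 0 / 78 = 0 taps */

	/*
	 * Same packing as ddr3_tip_cmd_addr_init_delay():
	 * CK taps in bits [5:0], CA taps in bits [15:10].
	 */
	return (ck_num_adll_tap & 0x3f) + ((ca_num_adll_tap & 0x3f) << 10);
}
#endif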