1 // SPDX-License-Identifier: GPL-2.0 2 /* 3 * Copyright (C) Marvell International Ltd. and its affiliates 4 */ 5 6 #include "ddr3_init.h" 7 #include "mv_ddr_training_db.h" 8 #include "ddr_training_ip_db.h" 9 #include "mv_ddr_regs.h" 10 11 #define WL_ITERATION_NUM 10 12 13 static u32 pup_mask_table[] = { 14 0x000000ff, 15 0x0000ff00, 16 0x00ff0000, 17 0xff000000 18 }; 19 20 static struct write_supp_result wr_supp_res[MAX_INTERFACE_NUM][MAX_BUS_NUM]; 21 22 static int ddr3_tip_dynamic_write_leveling_seq(u32 dev_num); 23 static int ddr3_tip_dynamic_read_leveling_seq(u32 dev_num); 24 static int ddr3_tip_dynamic_per_bit_read_leveling_seq(u32 dev_num); 25 static int ddr3_tip_wl_supp_align_phase_shift(u32 dev_num, u32 if_id, 26 u32 bus_id); 27 static int ddr3_tip_xsb_compare_test(u32 dev_num, u32 if_id, u32 bus_id, 28 u32 edge_offset); 29 30 enum { 31 PASS, 32 FAIL 33 }; 34 /***************************************************************************** 35 Dynamic read leveling 36 ******************************************************************************/ 37 int ddr3_tip_dynamic_read_leveling(u32 dev_num, u32 freq) 38 { 39 u32 data, mask; 40 unsigned int max_cs = mv_ddr_cs_num_get(); 41 u32 bus_num, if_id, cl_val; 42 enum mv_ddr_speed_bin speed_bin_index; 43 /* save current CS value */ 44 u32 cs_enable_reg_val[MAX_INTERFACE_NUM] = { 0 }; 45 int is_any_pup_fail = 0; 46 u32 data_read[MAX_INTERFACE_NUM + 1] = { 0 }; 47 u8 rl_values[MAX_CS_NUM][MAX_BUS_NUM][MAX_INTERFACE_NUM]; 48 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table(); 49 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map(); 50 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 51 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 52 53 for (effective_cs = 0; effective_cs < MAX_CS_NUM; effective_cs++) 54 for (bus_num = 0; bus_num < MAX_BUS_NUM; bus_num++) 55 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) 56 rl_values[effective_cs][bus_num][if_id] = 0; 57 58 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 59 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 60 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 61 training_result[training_stage][if_id] = TEST_SUCCESS; 62 63 /* save current cs enable reg val */ 64 CHECK_STATUS(ddr3_tip_if_read 65 (dev_num, ACCESS_TYPE_UNICAST, if_id, 66 DUAL_DUNIT_CFG_REG, cs_enable_reg_val, 67 MASK_ALL_BITS)); 68 /* enable single cs */ 69 CHECK_STATUS(ddr3_tip_if_write 70 (dev_num, ACCESS_TYPE_UNICAST, if_id, 71 DUAL_DUNIT_CFG_REG, (1 << 3), (1 << 3))); 72 } 73 74 ddr3_tip_reset_fifo_ptr(dev_num); 75 76 /* 77 * Phase 1: Load pattern (using ODPG) 78 * 79 * enter Read Leveling mode 80 * only 27 bits are masked 81 * assuming non multi-CS configuration 82 * write to CS = 0 for the non multi CS configuration, note 83 * that the results shall be read back to the required CS !!! 84 */ 85 86 /* BUS count is 0 shifted 26 */ 87 CHECK_STATUS(ddr3_tip_if_write 88 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 89 ODPG_DATA_CTRL_REG, 0x3, 0x3)); 90 CHECK_STATUS(ddr3_tip_configure_odpg 91 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0, 92 pattern_table[PATTERN_RL].num_of_phases_tx, 0, 93 pattern_table[PATTERN_RL].num_of_phases_rx, 0, 0, 94 effective_cs, STRESS_NONE, DURATION_SINGLE)); 95 96 /* load pattern to ODPG */ 97 ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST, 98 PARAM_NOT_CARE, PATTERN_RL, 99 pattern_table[PATTERN_RL]. 
100 start_addr); 101 102 /* 103 * Phase 2: ODPG to Read Leveling mode 104 */ 105 106 /* General Training Opcode register */ 107 CHECK_STATUS(ddr3_tip_if_write 108 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 109 ODPG_WR_RD_MODE_ENA_REG, 0, 110 MASK_ALL_BITS)); 111 112 CHECK_STATUS(ddr3_tip_if_write 113 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 114 GENERAL_TRAINING_OPCODE_REG, 115 (0x301b01 | effective_cs << 2), 0x3c3fef)); 116 117 /* Object1 opcode register 0 & 1 */ 118 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 119 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 120 speed_bin_index = 121 tm->interface_params[if_id].speed_bin_index; 122 cl_val = mv_ddr_cl_val_get(speed_bin_index, freq); 123 data = (cl_val << 17) | (0x3 << 25); 124 mask = (0xff << 9) | (0x1f << 17) | (0x3 << 25); 125 CHECK_STATUS(ddr3_tip_if_write 126 (dev_num, ACCESS_TYPE_UNICAST, if_id, 127 OPCODE_REG0_REG(1), data, mask)); 128 } 129 130 /* Set iteration count to max value */ 131 CHECK_STATUS(ddr3_tip_if_write 132 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 133 OPCODE_REG1_REG(1), 0xd00, 0xd00)); 134 135 /* 136 * Phase 2: Mask config 137 */ 138 139 ddr3_tip_dynamic_read_leveling_seq(dev_num); 140 141 /* 142 * Phase 3: Read Leveling execution 143 */ 144 145 /* temporary jira dunit=14751 */ 146 CHECK_STATUS(ddr3_tip_if_write 147 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 148 TRAINING_DBG_1_REG, 0, (u32)(1 << 31))); 149 /* configure phy reset value */ 150 CHECK_STATUS(ddr3_tip_if_write 151 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 152 TRAINING_DBG_3_REG, (0x7f << 24), 153 (u32)(0xff << 24))); 154 /* data pup rd reset enable */ 155 CHECK_STATUS(ddr3_tip_if_write 156 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 157 SDRAM_CFG_REG, 0, (1 << 30))); 158 /* data pup rd reset disable */ 159 CHECK_STATUS(ddr3_tip_if_write 160 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 161 SDRAM_CFG_REG, (1 << 30), (1 << 30))); 162 /* training SW override & training RL mode */ 163 CHECK_STATUS(ddr3_tip_if_write 164 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 165 TRAINING_SW_2_REG, 0x1, 0x9)); 166 /* training enable */ 167 CHECK_STATUS(ddr3_tip_if_write 168 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 169 TRAINING_REG, (1 << 24) | (1 << 20), 170 (1 << 24) | (1 << 20))); 171 CHECK_STATUS(ddr3_tip_if_write 172 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 173 TRAINING_REG, (u32)(1 << 31), (u32)(1 << 31))); 174 175 /* trigger training */ 176 mv_ddr_training_enable(); 177 178 /* check for training done */ 179 if (mv_ddr_is_training_done(MAX_POLLING_ITERATIONS, &data) != MV_OK) { 180 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("training done failed\n")); 181 return MV_FAIL; 182 } 183 /* check for training pass */ 184 if (data != PASS) 185 DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("training result failed\n")); 186 187 /* disable odpg; switch back to functional mode */ 188 mv_ddr_odpg_disable(); 189 190 if (mv_ddr_is_odpg_done(MAX_POLLING_ITERATIONS) != MV_OK) { 191 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("odpg disable failed\n")); 192 return MV_FAIL; 193 } 194 195 ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 196 ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS); 197 198 /* double loop on bus, pup */ 199 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 200 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 201 /* check training done */ 202 is_any_pup_fail = 0; 203 for (bus_num = 0; 204 bus_num < octets_per_if_num; 205 bus_num++) { 206 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num); 207 if (ddr3_tip_if_polling 208 
(dev_num, ACCESS_TYPE_UNICAST, 209 if_id, (1 << 25), (1 << 25), 210 mask_results_pup_reg_map[bus_num], 211 MAX_POLLING_ITERATIONS) != MV_OK) { 212 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, 213 ("\n_r_l: DDR3 poll failed(2) for IF %d CS %d bus %d", 214 if_id, effective_cs, bus_num)); 215 is_any_pup_fail = 1; 216 } else { 217 /* read result per pup */ 218 CHECK_STATUS(ddr3_tip_if_read 219 (dev_num, 220 ACCESS_TYPE_UNICAST, 221 if_id, 222 mask_results_pup_reg_map 223 [bus_num], data_read, 224 0xff)); 225 rl_values[effective_cs][bus_num] 226 [if_id] = (u8)data_read[if_id]; 227 } 228 } 229 230 if (is_any_pup_fail == 1) { 231 training_result[training_stage][if_id] = 232 TEST_FAILED; 233 if (debug_mode == 0) 234 return MV_FAIL; 235 } 236 } 237 238 DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("RL exit read leveling\n")); 239 240 /* 241 * Phase 3: Exit Read Leveling 242 */ 243 244 CHECK_STATUS(ddr3_tip_if_write 245 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 246 TRAINING_SW_2_REG, (1 << 3), (1 << 3))); 247 CHECK_STATUS(ddr3_tip_if_write 248 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 249 TRAINING_SW_1_REG, (1 << 16), (1 << 16))); 250 /* set ODPG to functional */ 251 CHECK_STATUS(ddr3_tip_if_write 252 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 253 ODPG_DATA_CTRL_REG, 0x0, MASK_ALL_BITS)); 254 255 /* 256 * Copy the result from the effective CS search to the 257 * real Functional CS 258 */ 259 /*ddr3_tip_write_cs_result(dev_num, RL_PHY_REG(0); */ 260 CHECK_STATUS(ddr3_tip_if_write 261 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 262 ODPG_DATA_CTRL_REG, 0x0, MASK_ALL_BITS)); 263 } 264 265 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 266 /* double loop on bus, pup */ 267 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 268 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 269 for (bus_num = 0; 270 bus_num < octets_per_if_num; 271 bus_num++) { 272 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num); 273 /* read result per pup from arry */ 274 data = rl_values[effective_cs][bus_num][if_id]; 275 data = (data & 0x1f) | 276 (((data & 0xe0) >> 5) << 6); 277 ddr3_tip_bus_write(dev_num, 278 ACCESS_TYPE_UNICAST, 279 if_id, 280 ACCESS_TYPE_UNICAST, 281 bus_num, DDR_PHY_DATA, 282 RL_PHY_REG(effective_cs), 283 data); 284 } 285 } 286 } 287 /* Set to 0 after each loop to avoid illegal value may be used */ 288 effective_cs = 0; 289 290 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 291 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 292 /* restore cs enable value */ 293 CHECK_STATUS(ddr3_tip_if_write 294 (dev_num, ACCESS_TYPE_UNICAST, if_id, 295 DUAL_DUNIT_CFG_REG, cs_enable_reg_val[if_id], 296 MASK_ALL_BITS)); 297 if (odt_config != 0) { 298 CHECK_STATUS(ddr3_tip_write_additional_odt_setting 299 (dev_num, if_id)); 300 } 301 } 302 303 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 304 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 305 if (training_result[training_stage][if_id] == TEST_FAILED) 306 return MV_FAIL; 307 } 308 309 return MV_OK; 310 } 311 312 /* 313 * Legacy Dynamic write leveling 314 */ 315 int ddr3_tip_legacy_dynamic_write_leveling(u32 dev_num) 316 { 317 u32 c_cs, if_id, cs_mask = 0; 318 unsigned int max_cs = mv_ddr_cs_num_get(); 319 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 320 321 /* 322 * In TRAINIUNG reg (0x15b0) write 0x80000008 | cs_mask: 323 * Trn_start 324 * cs_mask = 0x1 <<20 Trn_CS0 - CS0 is included in the DDR3 training 325 * cs_mask = 0x1 <<21 Trn_CS1 - CS1 is included in the DDR3 training 326 * cs_mask = 0x1 <<22 Trn_CS2 - CS2 is included 
in the DDR3 training 327 * cs_mask = 0x1 <<23 Trn_CS3 - CS3 is included in the DDR3 training 328 * Trn_auto_seq = write leveling 329 */ 330 for (c_cs = 0; c_cs < max_cs; c_cs++) 331 cs_mask = cs_mask | 1 << (20 + c_cs); 332 333 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) { 334 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 335 CHECK_STATUS(ddr3_tip_if_write 336 (dev_num, ACCESS_TYPE_MULTICAST, 0, 337 TRAINING_REG, (0x80000008 | cs_mask), 338 0xffffffff)); 339 mdelay(20); 340 if (ddr3_tip_if_polling 341 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 342 (u32)0x80000000, TRAINING_REG, 343 MAX_POLLING_ITERATIONS) != MV_OK) { 344 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, 345 ("polling failed for Old WL result\n")); 346 return MV_FAIL; 347 } 348 } 349 350 return MV_OK; 351 } 352 353 /* 354 * Legacy Dynamic read leveling 355 */ 356 int ddr3_tip_legacy_dynamic_read_leveling(u32 dev_num) 357 { 358 u32 c_cs, if_id, cs_mask = 0; 359 unsigned int max_cs = mv_ddr_cs_num_get(); 360 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 361 362 /* 363 * In TRAINIUNG reg (0x15b0) write 0x80000040 | cs_mask: 364 * Trn_start 365 * cs_mask = 0x1 <<20 Trn_CS0 - CS0 is included in the DDR3 training 366 * cs_mask = 0x1 <<21 Trn_CS1 - CS1 is included in the DDR3 training 367 * cs_mask = 0x1 <<22 Trn_CS2 - CS2 is included in the DDR3 training 368 * cs_mask = 0x1 <<23 Trn_CS3 - CS3 is included in the DDR3 training 369 * Trn_auto_seq = Read Leveling using training pattern 370 */ 371 for (c_cs = 0; c_cs < max_cs; c_cs++) 372 cs_mask = cs_mask | 1 << (20 + c_cs); 373 374 CHECK_STATUS(ddr3_tip_if_write 375 (dev_num, ACCESS_TYPE_MULTICAST, 0, TRAINING_REG, 376 (0x80000040 | cs_mask), 0xffffffff)); 377 mdelay(100); 378 379 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) { 380 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 381 if (ddr3_tip_if_polling 382 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 383 (u32)0x80000000, TRAINING_REG, 384 MAX_POLLING_ITERATIONS) != MV_OK) { 385 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, 386 ("polling failed for Old RL result\n")); 387 return MV_FAIL; 388 } 389 } 390 391 return MV_OK; 392 } 393 394 /* 395 * Dynamic per bit read leveling 396 */ 397 int ddr3_tip_dynamic_per_bit_read_leveling(u32 dev_num, u32 freq) 398 { 399 u32 data, mask; 400 u32 bus_num, if_id, cl_val, bit_num; 401 u32 curr_numb, curr_min_delay; 402 int adll_array[3] = { 0, -0xa, 0x14 }; 403 u32 phyreg3_arr[MAX_INTERFACE_NUM][MAX_BUS_NUM]; 404 enum mv_ddr_speed_bin speed_bin_index; 405 int is_any_pup_fail = 0; 406 int break_loop = 0; 407 u32 cs_enable_reg_val[MAX_INTERFACE_NUM]; /* save current CS value */ 408 u32 data_read[MAX_INTERFACE_NUM]; 409 int per_bit_rl_pup_status[MAX_INTERFACE_NUM][MAX_BUS_NUM]; 410 u32 data2_write[MAX_INTERFACE_NUM][MAX_BUS_NUM]; 411 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table(); 412 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg(); 413 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 414 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 415 416 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) { 417 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 418 for (bus_num = 0; 419 bus_num <= octets_per_if_num; bus_num++) { 420 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num); 421 per_bit_rl_pup_status[if_id][bus_num] = 0; 422 data2_write[if_id][bus_num] = 0; 423 /* read current value of phy register 0x3 */ 424 CHECK_STATUS(ddr3_tip_bus_read 425 (dev_num, if_id, ACCESS_TYPE_UNICAST, 426 bus_num, DDR_PHY_DATA, 427 CRX_PHY_REG(0), 428 
&phyreg3_arr[if_id][bus_num])); 429 } 430 } 431 432 /* NEW RL machine */ 433 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 434 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 435 training_result[training_stage][if_id] = TEST_SUCCESS; 436 437 /* save current cs enable reg val */ 438 CHECK_STATUS(ddr3_tip_if_read 439 (dev_num, ACCESS_TYPE_UNICAST, if_id, 440 DUAL_DUNIT_CFG_REG, &cs_enable_reg_val[if_id], 441 MASK_ALL_BITS)); 442 /* enable single cs */ 443 CHECK_STATUS(ddr3_tip_if_write 444 (dev_num, ACCESS_TYPE_UNICAST, if_id, 445 DUAL_DUNIT_CFG_REG, (1 << 3), (1 << 3))); 446 } 447 448 ddr3_tip_reset_fifo_ptr(dev_num); 449 for (curr_numb = 0; curr_numb < 3; curr_numb++) { 450 /* 451 * Phase 1: Load pattern (using ODPG) 452 * 453 * enter Read Leveling mode 454 * only 27 bits are masked 455 * assuming non multi-CS configuration 456 * write to CS = 0 for the non multi CS configuration, note that 457 * the results shall be read back to the required CS !!! 458 */ 459 460 /* BUS count is 0 shifted 26 */ 461 CHECK_STATUS(ddr3_tip_if_write 462 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 463 ODPG_DATA_CTRL_REG, 0x3, 0x3)); 464 CHECK_STATUS(ddr3_tip_configure_odpg 465 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0, 466 pattern_table[PATTERN_TEST].num_of_phases_tx, 0, 467 pattern_table[PATTERN_TEST].num_of_phases_rx, 0, 468 0, 0, STRESS_NONE, DURATION_SINGLE)); 469 470 /* load pattern to ODPG */ 471 ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST, 472 PARAM_NOT_CARE, PATTERN_TEST, 473 pattern_table[PATTERN_TEST]. 474 start_addr); 475 476 /* 477 * Phase 2: ODPG to Read Leveling mode 478 */ 479 480 /* General Training Opcode register */ 481 CHECK_STATUS(ddr3_tip_if_write 482 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 483 ODPG_WR_RD_MODE_ENA_REG, 0, 484 MASK_ALL_BITS)); 485 CHECK_STATUS(ddr3_tip_if_write 486 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 487 GENERAL_TRAINING_OPCODE_REG, 0x301b01, 0x3c3fef)); 488 489 /* Object1 opcode register 0 & 1 */ 490 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 491 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 492 speed_bin_index = 493 tm->interface_params[if_id].speed_bin_index; 494 cl_val = mv_ddr_cl_val_get(speed_bin_index, freq); 495 data = (cl_val << 17) | (0x3 << 25); 496 mask = (0xff << 9) | (0x1f << 17) | (0x3 << 25); 497 CHECK_STATUS(ddr3_tip_if_write 498 (dev_num, ACCESS_TYPE_UNICAST, if_id, 499 OPCODE_REG0_REG(1), data, mask)); 500 } 501 502 /* Set iteration count to max value */ 503 CHECK_STATUS(ddr3_tip_if_write 504 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 505 OPCODE_REG1_REG(1), 0xd00, 0xd00)); 506 507 /* 508 * Phase 2: Mask config 509 */ 510 511 ddr3_tip_dynamic_per_bit_read_leveling_seq(dev_num); 512 513 /* 514 * Phase 3: Read Leveling execution 515 */ 516 517 /* temporary jira dunit=14751 */ 518 CHECK_STATUS(ddr3_tip_if_write 519 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 520 TRAINING_DBG_1_REG, 0, (u32)(1 << 31))); 521 /* configure phy reset value */ 522 CHECK_STATUS(ddr3_tip_if_write 523 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 524 TRAINING_DBG_3_REG, (0x7f << 24), 525 (u32)(0xff << 24))); 526 /* data pup rd reset enable */ 527 CHECK_STATUS(ddr3_tip_if_write 528 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 529 SDRAM_CFG_REG, 0, (1 << 30))); 530 /* data pup rd reset disable */ 531 CHECK_STATUS(ddr3_tip_if_write 532 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 533 SDRAM_CFG_REG, (1 << 30), (1 << 30))); 534 /* training SW override & training RL mode */ 535 
CHECK_STATUS(ddr3_tip_if_write 536 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 537 TRAINING_SW_2_REG, 0x1, 0x9)); 538 /* training enable */ 539 CHECK_STATUS(ddr3_tip_if_write 540 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 541 TRAINING_REG, (1 << 24) | (1 << 20), 542 (1 << 24) | (1 << 20))); 543 CHECK_STATUS(ddr3_tip_if_write 544 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 545 TRAINING_REG, (u32)(1 << 31), (u32)(1 << 31))); 546 547 /* trigger training */ 548 mv_ddr_training_enable(); 549 550 /* check for training done */ 551 if (mv_ddr_is_training_done(MAX_POLLING_ITERATIONS, &data) != MV_OK) { 552 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("training done failed\n")); 553 return MV_FAIL; 554 } 555 /* check for training pass */ 556 if (data != PASS) 557 DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("training result failed\n")); 558 559 /* disable odpg; switch back to functional mode */ 560 mv_ddr_odpg_disable(); 561 562 if (mv_ddr_is_odpg_done(MAX_POLLING_ITERATIONS) != MV_OK) { 563 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("odpg disable failed\n")); 564 return MV_FAIL; 565 } 566 567 ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 568 ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS); 569 570 /* double loop on bus, pup */ 571 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 572 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 573 /* check training done */ 574 for (bus_num = 0; 575 bus_num < octets_per_if_num; 576 bus_num++) { 577 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num); 578 579 if (per_bit_rl_pup_status[if_id][bus_num] 580 == 0) { 581 curr_min_delay = 0; 582 for (bit_num = 0; bit_num < 8; 583 bit_num++) { 584 if (ddr3_tip_if_polling 585 (dev_num, 586 ACCESS_TYPE_UNICAST, 587 if_id, (1 << 25), 588 (1 << 25), 589 mask_results_dq_reg_map 590 [bus_num * 8 + bit_num], 591 MAX_POLLING_ITERATIONS) != 592 MV_OK) { 593 DEBUG_LEVELING 594 (DEBUG_LEVEL_ERROR, 595 ("\n_r_l: DDR3 poll failed(2) for bus %d bit %d\n", 596 bus_num, 597 bit_num)); 598 } else { 599 /* read result per pup */ 600 CHECK_STATUS 601 (ddr3_tip_if_read 602 (dev_num, 603 ACCESS_TYPE_UNICAST, 604 if_id, 605 mask_results_dq_reg_map 606 [bus_num * 8 + 607 bit_num], 608 data_read, 609 MASK_ALL_BITS)); 610 data = 611 (data_read 612 [if_id] & 613 0x1f) | 614 ((data_read 615 [if_id] & 616 0xe0) << 1); 617 if (curr_min_delay == 0) 618 curr_min_delay = 619 data; 620 else if (data < 621 curr_min_delay) 622 curr_min_delay = 623 data; 624 if (data > data2_write[if_id][bus_num]) 625 data2_write 626 [if_id] 627 [bus_num] = 628 data; 629 } 630 } 631 632 if (data2_write[if_id][bus_num] <= 633 (curr_min_delay + 634 MAX_DQ_READ_LEVELING_DELAY)) { 635 per_bit_rl_pup_status[if_id] 636 [bus_num] = 1; 637 } 638 } 639 } 640 } 641 642 /* check if there is need to search new phyreg3 value */ 643 if (curr_numb < 2) { 644 /* if there is DLL that is not checked yet */ 645 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; 646 if_id++) { 647 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 648 for (bus_num = 0; 649 bus_num < octets_per_if_num; 650 bus_num++) { 651 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, 652 bus_num); 653 if (per_bit_rl_pup_status[if_id] 654 [bus_num] != 1) { 655 /* go to next ADLL value */ 656 CHECK_STATUS 657 (ddr3_tip_bus_write 658 (dev_num, 659 ACCESS_TYPE_UNICAST, 660 if_id, 661 ACCESS_TYPE_UNICAST, 662 bus_num, DDR_PHY_DATA, 663 CRX_PHY_REG(0), 664 (phyreg3_arr[if_id] 665 [bus_num] + 666 adll_array[curr_numb]))); 667 break_loop = 1; 668 break; 669 } 670 } 671 if (break_loop) 672 break; 673 } 674 } /* if (curr_numb < 2) */ 675 if (!break_loop) 676 
break; 677 } /* for ( curr_numb = 0; curr_numb <3; curr_numb++) */ 678 679 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 680 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 681 for (bus_num = 0; bus_num < octets_per_if_num; 682 bus_num++) { 683 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num); 684 if (per_bit_rl_pup_status[if_id][bus_num] == 1) 685 ddr3_tip_bus_write(dev_num, 686 ACCESS_TYPE_UNICAST, 687 if_id, 688 ACCESS_TYPE_UNICAST, 689 bus_num, DDR_PHY_DATA, 690 RL_PHY_REG(effective_cs), 691 data2_write[if_id] 692 [bus_num]); 693 else 694 is_any_pup_fail = 1; 695 } 696 697 /* TBD flow does not support multi CS */ 698 /* 699 * cs_bitmask = tm->interface_params[if_id]. 700 * as_bus_params[bus_num].cs_bitmask; 701 */ 702 /* divide by 4 is used for retrieving the CS number */ 703 /* 704 * TBD BC2 - what is the PHY address for other 705 * CS ddr3_tip_write_cs_result() ??? 706 */ 707 /* 708 * find what should be written to PHY 709 * - max delay that is less than threshold 710 */ 711 if (is_any_pup_fail == 1) { 712 training_result[training_stage][if_id] = TEST_FAILED; 713 if (debug_mode == 0) 714 return MV_FAIL; 715 } 716 } 717 DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("RL exit read leveling\n")); 718 719 /* 720 * Phase 3: Exit Read Leveling 721 */ 722 723 CHECK_STATUS(ddr3_tip_if_write 724 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 725 TRAINING_SW_2_REG, (1 << 3), (1 << 3))); 726 CHECK_STATUS(ddr3_tip_if_write 727 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 728 TRAINING_SW_1_REG, (1 << 16), (1 << 16))); 729 /* set ODPG to functional */ 730 CHECK_STATUS(ddr3_tip_if_write 731 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 732 ODPG_DATA_CTRL_REG, 0x0, MASK_ALL_BITS)); 733 /* 734 * Copy the result from the effective CS search to the real 735 * Functional CS 736 */ 737 ddr3_tip_write_cs_result(dev_num, RL_PHY_REG(0)); 738 CHECK_STATUS(ddr3_tip_if_write 739 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 740 ODPG_DATA_CTRL_REG, 0x0, MASK_ALL_BITS)); 741 742 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 743 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 744 /* restore cs enable value */ 745 CHECK_STATUS(ddr3_tip_if_write 746 (dev_num, ACCESS_TYPE_UNICAST, if_id, 747 DUAL_DUNIT_CFG_REG, cs_enable_reg_val[if_id], 748 MASK_ALL_BITS)); 749 if (odt_config != 0) { 750 CHECK_STATUS(ddr3_tip_write_additional_odt_setting 751 (dev_num, if_id)); 752 } 753 } 754 755 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 756 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 757 if (training_result[training_stage][if_id] == TEST_FAILED) 758 return MV_FAIL; 759 } 760 761 return MV_OK; 762 } 763 764 int ddr3_tip_calc_cs_mask(u32 dev_num, u32 if_id, u32 effective_cs, 765 u32 *cs_mask) 766 { 767 u32 all_bus_cs = 0, same_bus_cs; 768 u32 bus_cnt; 769 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 770 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 771 772 *cs_mask = same_bus_cs = CS_BIT_MASK; 773 774 /* 775 * In some of the devices (such as BC2), the CS is per pup and there 776 * for mixed mode is valid on like other devices where CS configuration 777 * is per interface. 778 * In order to know that, we do 'Or' and 'And' operation between all 779 * CS (of the pups). 
780 * If they are they are not the same then it's mixed mode so all CS 781 * should be configured (when configuring the MRS) 782 */ 783 for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) { 784 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt); 785 786 all_bus_cs |= tm->interface_params[if_id]. 787 as_bus_params[bus_cnt].cs_bitmask; 788 same_bus_cs &= tm->interface_params[if_id]. 789 as_bus_params[bus_cnt].cs_bitmask; 790 791 /* cs enable is active low */ 792 *cs_mask &= ~tm->interface_params[if_id]. 793 as_bus_params[bus_cnt].cs_bitmask; 794 } 795 796 if (all_bus_cs == same_bus_cs) 797 *cs_mask = (*cs_mask | (~(1 << effective_cs))) & CS_BIT_MASK; 798 799 return MV_OK; 800 } 801 802 /* 803 * Dynamic write leveling 804 */ 805 int ddr3_tip_dynamic_write_leveling(u32 dev_num, int phase_remove) 806 { 807 u32 reg_data = 0, temp = 0, iter, if_id, bus_cnt; 808 u32 cs_enable_reg_val[MAX_INTERFACE_NUM] = { 0 }; 809 u32 cs_mask[MAX_INTERFACE_NUM]; 810 u32 read_data_sample_delay_vals[MAX_INTERFACE_NUM] = { 0 }; 811 u32 read_data_ready_delay_vals[MAX_INTERFACE_NUM] = { 0 }; 812 /* 0 for failure */ 813 u32 res_values[MAX_INTERFACE_NUM * MAX_BUS_NUM] = { 0 }; 814 u32 test_res = 0; /* 0 - success for all pup */ 815 u32 data_read[MAX_INTERFACE_NUM]; 816 u8 wl_values[MAX_CS_NUM][MAX_BUS_NUM][MAX_INTERFACE_NUM]; 817 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map(); 818 u32 cs_mask0[MAX_INTERFACE_NUM] = { 0 }; 819 unsigned int max_cs = mv_ddr_cs_num_get(); 820 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 821 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 822 823 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 824 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 825 826 training_result[training_stage][if_id] = TEST_SUCCESS; 827 828 /* save Read Data Sample Delay */ 829 CHECK_STATUS(ddr3_tip_if_read 830 (dev_num, ACCESS_TYPE_UNICAST, if_id, 831 RD_DATA_SMPL_DLYS_REG, 832 read_data_sample_delay_vals, MASK_ALL_BITS)); 833 /* save Read Data Ready Delay */ 834 CHECK_STATUS(ddr3_tip_if_read 835 (dev_num, ACCESS_TYPE_UNICAST, if_id, 836 RD_DATA_RDY_DLYS_REG, read_data_ready_delay_vals, 837 MASK_ALL_BITS)); 838 /* save current cs reg val */ 839 CHECK_STATUS(ddr3_tip_if_read 840 (dev_num, ACCESS_TYPE_UNICAST, if_id, 841 DUAL_DUNIT_CFG_REG, cs_enable_reg_val, MASK_ALL_BITS)); 842 } 843 844 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) < MV_TIP_REV_3) { 845 /* Enable multi-CS */ 846 CHECK_STATUS(ddr3_tip_if_write 847 (dev_num, ACCESS_TYPE_UNICAST, if_id, 848 DUAL_DUNIT_CFG_REG, 0, (1 << 3))); 849 } 850 851 /* 852 * Phase 1: DRAM 2 Write Leveling mode 853 */ 854 855 /*Assert 10 refresh commands to DRAM to all CS */ 856 for (iter = 0; iter < WL_ITERATION_NUM; iter++) { 857 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 858 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 859 CHECK_STATUS(ddr3_tip_if_write 860 (dev_num, ACCESS_TYPE_UNICAST, 861 if_id, SDRAM_OP_REG, 862 (u32)((~(0xf) << 8) | 0x2), 0xf1f)); 863 } 864 } 865 /* check controller back to normal */ 866 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 867 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 868 if (ddr3_tip_if_polling 869 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1f, 870 SDRAM_OP_REG, MAX_POLLING_ITERATIONS) != MV_OK) { 871 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, 872 ("WL: DDR3 poll failed(3)")); 873 } 874 } 875 876 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 877 /*enable write leveling to all cs - Q off , WL n */ 878 /* calculate 
interface cs mask */ 879 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MR_CMD1, 880 0x1000, 0x1080)); 881 882 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 883 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 884 /* cs enable is active low */ 885 ddr3_tip_calc_cs_mask(dev_num, if_id, effective_cs, 886 &cs_mask[if_id]); 887 } 888 889 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) { 890 /* Enable Output buffer to relevant CS - Q on , WL on */ 891 CHECK_STATUS(ddr3_tip_write_mrs_cmd 892 (dev_num, cs_mask, MR_CMD1, 0x80, 0x1080)); 893 894 /*enable odt for relevant CS */ 895 CHECK_STATUS(ddr3_tip_if_write 896 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 897 0x1498, (0x3 << (effective_cs * 2)), 0xf)); 898 } else { 899 /* FIXME: should be the same as _CPU case */ 900 CHECK_STATUS(ddr3_tip_write_mrs_cmd 901 (dev_num, cs_mask, MR_CMD1, 0xc0, 0x12c4)); 902 } 903 904 /* 905 * Phase 2: Set training IP to write leveling mode 906 */ 907 908 CHECK_STATUS(ddr3_tip_dynamic_write_leveling_seq(dev_num)); 909 910 /* phase 3: trigger training */ 911 mv_ddr_training_enable(); 912 913 /* check for training done */ 914 if (mv_ddr_is_training_done(MAX_POLLING_ITERATIONS, data_read) != MV_OK) { 915 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("training done failed\n")); 916 } else { /* check for training pass */ 917 reg_data = data_read[0]; 918 #if defined(CONFIG_ARMADA_38X) /* JIRA #1498 for 16 bit with ECC */ 919 if (tm->bus_act_mask == 0xb) /* set to data to 0 to skip the check */ 920 reg_data = 0; 921 #endif 922 if (reg_data != PASS) 923 DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("training result failed\n")); 924 925 /* check for training completion per bus */ 926 for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) { 927 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt); 928 /* training status */ 929 ddr3_tip_if_read(0, ACCESS_TYPE_UNICAST, 0, 930 mask_results_pup_reg_map[bus_cnt], 931 data_read, MASK_ALL_BITS); 932 reg_data = data_read[0]; 933 DEBUG_LEVELING(DEBUG_LEVEL_TRACE, ("WL: IF %d BUS %d reg 0x%x\n", 934 0, bus_cnt, reg_data)); 935 if ((reg_data & (1 << 25)) == 0) 936 res_values[bus_cnt] = 1; 937 ddr3_tip_if_read(0, ACCESS_TYPE_UNICAST, 0, 938 mask_results_pup_reg_map[bus_cnt], 939 data_read, 0xff); 940 /* 941 * Save the read value that should be 942 * write to PHY register 943 */ 944 wl_values[effective_cs][bus_cnt][0] = (u8)data_read[0]; 945 } 946 } 947 948 /* 949 * Phase 3.5: Validate result 950 */ 951 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) { 952 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 953 for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) { 954 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt); 955 /* 956 * Read result control register according to subphy 957 * "16" below is for a half-phase 958 */ 959 reg_data = wl_values[effective_cs][bus_cnt][if_id] + 16; 960 /* 961 * Write to WL register: ADLL [4:0], Phase [8:6], 962 * Centralization ADLL [15:10] + 0x10 963 */ 964 reg_data = (reg_data & 0x1f) | 965 (((reg_data & 0xe0) >> 5) << 6) | 966 (((reg_data & 0x1f) + phy_reg1_val) << 10); 967 /* Search with WL CS0 subphy reg */ 968 ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, 969 ACCESS_TYPE_UNICAST, bus_cnt, 970 DDR_PHY_DATA, WL_PHY_REG(0), reg_data); 971 /* 972 * Check for change in data read from DRAM. 
973 * If changed, fix the result 974 */ 975 CHECK_STATUS(ddr3_tip_if_read 976 (dev_num, 977 ACCESS_TYPE_UNICAST, 978 if_id, 979 TRAINING_WL_REG, 980 data_read, MASK_ALL_BITS)); 981 if (((data_read[if_id] & (1 << (bus_cnt + 20))) >> 982 (bus_cnt + 20)) == 0) { 983 DEBUG_LEVELING( 984 DEBUG_LEVEL_ERROR, 985 ("WLValues was changed from 0x%X", 986 wl_values[effective_cs] 987 [bus_cnt][if_id])); 988 wl_values[effective_cs] 989 [bus_cnt][if_id] += 32; 990 DEBUG_LEVELING( 991 DEBUG_LEVEL_ERROR, 992 ("to 0x%X", 993 wl_values[effective_cs] 994 [bus_cnt][if_id])); 995 } 996 } 997 } 998 999 /* 1000 * Phase 4: Exit write leveling mode 1001 */ 1002 1003 /* disable DQs toggling */ 1004 CHECK_STATUS(ddr3_tip_if_write 1005 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1006 WL_DQS_PATTERN_REG, 0x0, 0x1)); 1007 1008 /* Update MRS 1 (WL off) */ 1009 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) { 1010 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MR_CMD1, 1011 0x1000, 0x1080)); 1012 } else { 1013 /* FIXME: should be same as _CPU case */ 1014 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MR_CMD1, 1015 0x1000, 0x12c4)); 1016 } 1017 1018 /* Update MRS 1 (return to functional mode - Q on , WL off) */ 1019 CHECK_STATUS(ddr3_tip_write_mrs_cmd 1020 (dev_num, cs_mask0, MR_CMD1, 0x0, 0x1080)); 1021 1022 /* set phy to normal mode */ 1023 CHECK_STATUS(ddr3_tip_if_write 1024 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1025 TRAINING_SW_2_REG, 0x5, 0x7)); 1026 1027 /* exit sw override mode */ 1028 CHECK_STATUS(ddr3_tip_if_write 1029 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1030 TRAINING_SW_2_REG, 0x4, 0x7)); 1031 } 1032 1033 /* 1034 * Phase 5: Load WL values to each PHY 1035 */ 1036 1037 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 1038 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1039 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1040 test_res = 0; 1041 for (bus_cnt = 0; 1042 bus_cnt < octets_per_if_num; 1043 bus_cnt++) { 1044 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt); 1045 /* check if result == pass */ 1046 if (res_values 1047 [(if_id * 1048 octets_per_if_num) + 1049 bus_cnt] == 0) { 1050 /* 1051 * read result control register 1052 * according to pup 1053 */ 1054 reg_data = 1055 wl_values[effective_cs][bus_cnt] 1056 [if_id]; 1057 /* 1058 * Write into write leveling register 1059 * ([4:0] ADLL, [8:6] Phase, [15:10] 1060 * (centralization) ADLL + 0x10) 1061 */ 1062 reg_data = 1063 (reg_data & 0x1f) | 1064 (((reg_data & 0xe0) >> 5) << 6) | 1065 (((reg_data & 0x1f) + 1066 phy_reg1_val) << 10); 1067 /* 1068 * in case phase remove should be executed 1069 * need to remove more than one phase. 
1070 * this will take place only in low frequency, 1071 * where there could be more than one phase between sub-phys 1072 */ 1073 if (phase_remove == 1) { 1074 temp = (reg_data >> WR_LVL_PH_SEL_OFFS) & WR_LVL_PH_SEL_PHASE1; 1075 reg_data &= ~(WR_LVL_PH_SEL_MASK << WR_LVL_PH_SEL_OFFS); 1076 reg_data |= (temp << WR_LVL_PH_SEL_OFFS); 1077 } 1078 1079 ddr3_tip_bus_write( 1080 dev_num, 1081 ACCESS_TYPE_UNICAST, 1082 if_id, 1083 ACCESS_TYPE_UNICAST, 1084 bus_cnt, 1085 DDR_PHY_DATA, 1086 WL_PHY_REG(effective_cs), 1087 reg_data); 1088 } else { 1089 test_res = 1; 1090 /* 1091 * read result control register 1092 * according to pup 1093 */ 1094 CHECK_STATUS(ddr3_tip_if_read 1095 (dev_num, 1096 ACCESS_TYPE_UNICAST, 1097 if_id, 1098 mask_results_pup_reg_map 1099 [bus_cnt], data_read, 1100 0xff)); 1101 reg_data = data_read[if_id]; 1102 DEBUG_LEVELING( 1103 DEBUG_LEVEL_ERROR, 1104 ("WL: IF %d BUS %d failed, reg 0x%x\n", 1105 if_id, bus_cnt, reg_data)); 1106 } 1107 } 1108 1109 if (test_res != 0) { 1110 training_result[training_stage][if_id] = 1111 TEST_FAILED; 1112 } 1113 } 1114 } 1115 /* Set to 0 after each loop to avoid illegal value may be used */ 1116 effective_cs = 0; 1117 1118 /* 1119 * Copy the result from the effective CS search to the real 1120 * Functional CS 1121 */ 1122 /* ddr3_tip_write_cs_result(dev_num, WL_PHY_REG(0); */ 1123 /* restore saved values */ 1124 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1125 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1126 /* restore Read Data Sample Delay */ 1127 CHECK_STATUS(ddr3_tip_if_write 1128 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1129 RD_DATA_SMPL_DLYS_REG, 1130 read_data_sample_delay_vals[if_id], 1131 MASK_ALL_BITS)); 1132 1133 /* restore Read Data Ready Delay */ 1134 CHECK_STATUS(ddr3_tip_if_write 1135 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1136 RD_DATA_RDY_DLYS_REG, 1137 read_data_ready_delay_vals[if_id], 1138 MASK_ALL_BITS)); 1139 1140 /* enable multi cs */ 1141 CHECK_STATUS(ddr3_tip_if_write 1142 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1143 DUAL_DUNIT_CFG_REG, cs_enable_reg_val[if_id], 1144 MASK_ALL_BITS)); 1145 } 1146 1147 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) { 1148 /* Disable modt0 for CS0 training - need to adjust for multi-CS 1149 * in case of ddr4 set 0xf else 0 1150 */ 1151 if (odt_config != 0) { 1152 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1153 SDRAM_ODT_CTRL_HIGH_REG, 0x0, 0xf)); 1154 } 1155 else { 1156 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1157 SDRAM_ODT_CTRL_HIGH_REG, 0xf, 0xf)); 1158 } 1159 1160 } 1161 1162 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1163 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1164 if (training_result[training_stage][if_id] == TEST_FAILED) 1165 return MV_FAIL; 1166 } 1167 1168 return MV_OK; 1169 } 1170 1171 /* 1172 * Dynamic write leveling supplementary 1173 */ 1174 int ddr3_tip_dynamic_write_leveling_supp(u32 dev_num) 1175 { 1176 int adll_offset; 1177 u32 if_id, bus_id, data, data_tmp; 1178 int is_if_fail = 0; 1179 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 1180 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1181 1182 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1183 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1184 is_if_fail = 0; 1185 1186 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) { 1187 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id); 1188 wr_supp_res[if_id][bus_id].is_pup_fail = 1; 1189 
CHECK_STATUS(ddr3_tip_bus_read 1190 (dev_num, if_id, ACCESS_TYPE_UNICAST, 1191 bus_id, DDR_PHY_DATA, 1192 CTX_PHY_REG(effective_cs), 1193 &data)); 1194 DEBUG_LEVELING( 1195 DEBUG_LEVEL_TRACE, 1196 ("WL Supp: adll_offset=0 data delay = %d\n", 1197 data)); 1198 if (ddr3_tip_wl_supp_align_phase_shift 1199 (dev_num, if_id, bus_id) == MV_OK) { 1200 DEBUG_LEVELING( 1201 DEBUG_LEVEL_TRACE, 1202 ("WL Supp: IF %d bus_id %d adll_offset=0 Success !\n", 1203 if_id, bus_id)); 1204 continue; 1205 } 1206 1207 /* change adll */ 1208 adll_offset = 5; 1209 CHECK_STATUS(ddr3_tip_bus_write 1210 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1211 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA, 1212 CTX_PHY_REG(effective_cs), 1213 data + adll_offset)); 1214 CHECK_STATUS(ddr3_tip_bus_read 1215 (dev_num, if_id, ACCESS_TYPE_UNICAST, 1216 bus_id, DDR_PHY_DATA, 1217 CTX_PHY_REG(effective_cs), 1218 &data_tmp)); 1219 DEBUG_LEVELING( 1220 DEBUG_LEVEL_TRACE, 1221 ("WL Supp: adll_offset= %d data delay = %d\n", 1222 adll_offset, data_tmp)); 1223 1224 if (ddr3_tip_wl_supp_align_phase_shift 1225 (dev_num, if_id, bus_id) == MV_OK) { 1226 DEBUG_LEVELING( 1227 DEBUG_LEVEL_TRACE, 1228 ("WL Supp: IF %d bus_id %d adll_offset= %d Success !\n", 1229 if_id, bus_id, adll_offset)); 1230 continue; 1231 } 1232 1233 /* change adll */ 1234 adll_offset = -5; 1235 CHECK_STATUS(ddr3_tip_bus_write 1236 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1237 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA, 1238 CTX_PHY_REG(effective_cs), 1239 data + adll_offset)); 1240 CHECK_STATUS(ddr3_tip_bus_read 1241 (dev_num, if_id, ACCESS_TYPE_UNICAST, 1242 bus_id, DDR_PHY_DATA, 1243 CTX_PHY_REG(effective_cs), 1244 &data_tmp)); 1245 DEBUG_LEVELING( 1246 DEBUG_LEVEL_TRACE, 1247 ("WL Supp: adll_offset= %d data delay = %d\n", 1248 adll_offset, data_tmp)); 1249 if (ddr3_tip_wl_supp_align_phase_shift 1250 (dev_num, if_id, bus_id) == MV_OK) { 1251 DEBUG_LEVELING( 1252 DEBUG_LEVEL_TRACE, 1253 ("WL Supp: IF %d bus_id %d adll_offset= %d Success !\n", 1254 if_id, bus_id, adll_offset)); 1255 continue; 1256 } else { 1257 DEBUG_LEVELING( 1258 DEBUG_LEVEL_ERROR, 1259 ("WL Supp: IF %d bus_id %d Failed !\n", 1260 if_id, bus_id)); 1261 is_if_fail = 1; 1262 } 1263 } 1264 1265 if (is_if_fail == 1) { 1266 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, 1267 ("WL Supp: CS# %d: IF %d failed\n", 1268 effective_cs, if_id)); 1269 training_result[training_stage][if_id] = TEST_FAILED; 1270 } else { 1271 training_result[training_stage][if_id] = TEST_SUCCESS; 1272 } 1273 } 1274 1275 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1276 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1277 if (training_result[training_stage][if_id] == TEST_FAILED) 1278 return MV_FAIL; 1279 } 1280 1281 return MV_OK; 1282 } 1283 1284 /* 1285 * Phase Shift 1286 */ 1287 static int ddr3_tip_wl_supp_align_phase_shift(u32 dev_num, u32 if_id, 1288 u32 bus_id) 1289 { 1290 u32 original_phase; 1291 u32 data, write_data; 1292 1293 wr_supp_res[if_id][bus_id].stage = PHASE_SHIFT; 1294 if (ddr3_tip_xsb_compare_test 1295 (dev_num, if_id, bus_id, 0) == MV_OK) 1296 return MV_OK; 1297 1298 /* Read current phase */ 1299 CHECK_STATUS(ddr3_tip_bus_read 1300 (dev_num, if_id, ACCESS_TYPE_UNICAST, bus_id, 1301 DDR_PHY_DATA, WL_PHY_REG(effective_cs), &data)); 1302 original_phase = (data >> 6) & 0x7; 1303 1304 /* Set phase (0x0[6-8]) -2 */ 1305 if (original_phase >= 1) { 1306 if (original_phase == 1) 1307 write_data = data & ~0x1df; 1308 else 1309 write_data = (data & ~0x1c0) | 1310 ((original_phase - 2) << 6); 1311 ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, 
if_id, 1312 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA, 1313 WL_PHY_REG(effective_cs), write_data); 1314 if (ddr3_tip_xsb_compare_test 1315 (dev_num, if_id, bus_id, -2) == MV_OK) 1316 return MV_OK; 1317 } 1318 1319 /* Set phase (0x0[6-8]) +2 */ 1320 if (original_phase <= 5) { 1321 write_data = (data & ~0x1c0) | 1322 ((original_phase + 2) << 6); 1323 ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, 1324 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA, 1325 WL_PHY_REG(effective_cs), write_data); 1326 if (ddr3_tip_xsb_compare_test 1327 (dev_num, if_id, bus_id, 2) == MV_OK) 1328 return MV_OK; 1329 } 1330 1331 /* Set phase (0x0[6-8]) +4 */ 1332 if (original_phase <= 3) { 1333 write_data = (data & ~0x1c0) | 1334 ((original_phase + 4) << 6); 1335 ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, 1336 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA, 1337 WL_PHY_REG(effective_cs), write_data); 1338 if (ddr3_tip_xsb_compare_test 1339 (dev_num, if_id, bus_id, 4) == MV_OK) 1340 return MV_OK; 1341 } 1342 1343 /* Set phase (0x0[6-8]) +6 */ 1344 if (original_phase <= 1) { 1345 write_data = (data & ~0x1c0) | 1346 ((original_phase + 6) << 6); 1347 ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, 1348 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA, 1349 WL_PHY_REG(effective_cs), write_data); 1350 if (ddr3_tip_xsb_compare_test 1351 (dev_num, if_id, bus_id, 6) == MV_OK) 1352 return MV_OK; 1353 } 1354 1355 /* Write original WL result back */ 1356 ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, 1357 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA, 1358 WL_PHY_REG(effective_cs), data); 1359 wr_supp_res[if_id][bus_id].is_pup_fail = 1; 1360 1361 return MV_FAIL; 1362 } 1363 1364 /* 1365 * Compare Test 1366 */ 1367 static int ddr3_tip_xsb_compare_test(u32 dev_num, u32 if_id, u32 bus_id, 1368 u32 edge_offset) 1369 { 1370 u32 num_of_succ_byte_compare, word_in_pattern; 1371 u32 word_offset, i, num_of_word_mult; 1372 u32 read_pattern[TEST_PATTERN_LENGTH * 2]; 1373 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table(); 1374 u32 pattern_test_pattern_table[8]; 1375 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1376 1377 /* 3 below for INTERFACE_BUS_MASK_16BIT */ 1378 num_of_word_mult = (tm->bus_act_mask == 3) ? 
1 : 2; 1379 1380 for (i = 0; i < 8; i++) { 1381 pattern_test_pattern_table[i] = 1382 pattern_table_get_word(dev_num, PATTERN_TEST, (u8)i); 1383 } 1384 1385 /* External write, read and compare */ 1386 CHECK_STATUS(ddr3_tip_load_pattern_to_mem(dev_num, PATTERN_TEST)); 1387 1388 CHECK_STATUS(ddr3_tip_reset_fifo_ptr(dev_num)); 1389 1390 CHECK_STATUS(ddr3_tip_ext_read 1391 (dev_num, if_id, 1392 ((pattern_table[PATTERN_TEST].start_addr << 3) + 1393 ((SDRAM_CS_SIZE + 1) * effective_cs)), 1, read_pattern)); 1394 1395 DEBUG_LEVELING( 1396 DEBUG_LEVEL_TRACE, 1397 ("XSB-compt CS#%d: IF %d bus_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n", 1398 effective_cs, if_id, bus_id, 1399 read_pattern[0], read_pattern[1], 1400 read_pattern[2], read_pattern[3], 1401 read_pattern[4], read_pattern[5], 1402 read_pattern[6], read_pattern[7])); 1403 1404 /* compare byte per pup */ 1405 num_of_succ_byte_compare = 0; 1406 for (word_in_pattern = start_xsb_offset; 1407 word_in_pattern < (TEST_PATTERN_LENGTH * num_of_word_mult); 1408 word_in_pattern++) { 1409 word_offset = word_in_pattern; 1410 if ((word_offset > (TEST_PATTERN_LENGTH * 2 - 1))) 1411 continue; 1412 1413 if ((read_pattern[word_in_pattern] & pup_mask_table[bus_id]) == 1414 (pattern_test_pattern_table[word_offset] & 1415 pup_mask_table[bus_id])) 1416 num_of_succ_byte_compare++; 1417 } 1418 1419 if ((TEST_PATTERN_LENGTH * num_of_word_mult - start_xsb_offset) == 1420 num_of_succ_byte_compare) { 1421 wr_supp_res[if_id][bus_id].stage = edge_offset; 1422 DEBUG_LEVELING(DEBUG_LEVEL_TRACE, 1423 ("supplementary: shift to %d for if %d pup %d success\n", 1424 edge_offset, if_id, bus_id)); 1425 wr_supp_res[if_id][bus_id].is_pup_fail = 0; 1426 1427 return MV_OK; 1428 } else { 1429 DEBUG_LEVELING( 1430 DEBUG_LEVEL_TRACE, 1431 ("XSB-compt CS#%d: IF %d bus_id %d num_of_succ_byte_compare %d - Fail!\n", 1432 effective_cs, if_id, bus_id, num_of_succ_byte_compare)); 1433 1434 DEBUG_LEVELING( 1435 DEBUG_LEVEL_TRACE, 1436 ("XSB-compt: expected 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n", 1437 pattern_test_pattern_table[0], 1438 pattern_test_pattern_table[1], 1439 pattern_test_pattern_table[2], 1440 pattern_test_pattern_table[3], 1441 pattern_test_pattern_table[4], 1442 pattern_test_pattern_table[5], 1443 pattern_test_pattern_table[6], 1444 pattern_test_pattern_table[7])); 1445 DEBUG_LEVELING( 1446 DEBUG_LEVEL_TRACE, 1447 ("XSB-compt: recieved 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n", 1448 read_pattern[0], read_pattern[1], 1449 read_pattern[2], read_pattern[3], 1450 read_pattern[4], read_pattern[5], 1451 read_pattern[6], read_pattern[7])); 1452 1453 return MV_FAIL; 1454 } 1455 } 1456 1457 /* 1458 * Dynamic write leveling sequence 1459 */ 1460 static int ddr3_tip_dynamic_write_leveling_seq(u32 dev_num) 1461 { 1462 u32 bus_id, dq_id; 1463 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map(); 1464 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg(); 1465 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 1466 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1467 1468 CHECK_STATUS(ddr3_tip_if_write 1469 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1470 TRAINING_SW_2_REG, 0x1, 0x5)); 1471 CHECK_STATUS(ddr3_tip_if_write 1472 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1473 TRAINING_WL_REG, 0x50, 0xff)); 1474 CHECK_STATUS(ddr3_tip_if_write 1475 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1476 TRAINING_WL_REG, 0x5c, 0xff)); 1477 CHECK_STATUS(ddr3_tip_if_write 1478 (dev_num, 
ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1479 GENERAL_TRAINING_OPCODE_REG, 0x381b82, 0x3c3faf)); 1480 CHECK_STATUS(ddr3_tip_if_write 1481 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1482 OPCODE_REG0_REG(1), (0x3 << 25), (0x3ffff << 9))); 1483 CHECK_STATUS(ddr3_tip_if_write 1484 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1485 OPCODE_REG1_REG(1), 0x80, 0xffff)); 1486 CHECK_STATUS(ddr3_tip_if_write 1487 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1488 WL_DONE_CNTR_REF_REG, 0x14, 0xff)); 1489 CHECK_STATUS(ddr3_tip_if_write 1490 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1491 TRAINING_WL_REG, 0xff5c, 0xffff)); 1492 1493 /* mask PBS */ 1494 for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) { 1495 CHECK_STATUS(ddr3_tip_if_write 1496 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1497 mask_results_dq_reg_map[dq_id], 0x1 << 24, 1498 0x1 << 24)); 1499 } 1500 1501 /* Mask all results */ 1502 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) { 1503 CHECK_STATUS(ddr3_tip_if_write 1504 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1505 mask_results_pup_reg_map[bus_id], 0x1 << 24, 1506 0x1 << 24)); 1507 } 1508 1509 /* Unmask only wanted */ 1510 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) { 1511 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id); 1512 CHECK_STATUS(ddr3_tip_if_write 1513 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1514 mask_results_pup_reg_map[bus_id], 0, 0x1 << 24)); 1515 } 1516 1517 CHECK_STATUS(ddr3_tip_if_write 1518 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1519 WL_DQS_PATTERN_REG, 0x1, 0x1)); 1520 1521 return MV_OK; 1522 } 1523 1524 /* 1525 * Dynamic read leveling sequence 1526 */ 1527 static int ddr3_tip_dynamic_read_leveling_seq(u32 dev_num) 1528 { 1529 u32 bus_id, dq_id; 1530 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map(); 1531 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg(); 1532 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 1533 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1534 1535 /* mask PBS */ 1536 for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) { 1537 CHECK_STATUS(ddr3_tip_if_write 1538 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1539 mask_results_dq_reg_map[dq_id], 0x1 << 24, 1540 0x1 << 24)); 1541 } 1542 1543 /* Mask all results */ 1544 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) { 1545 CHECK_STATUS(ddr3_tip_if_write 1546 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1547 mask_results_pup_reg_map[bus_id], 0x1 << 24, 1548 0x1 << 24)); 1549 } 1550 1551 /* Unmask only wanted */ 1552 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) { 1553 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id); 1554 CHECK_STATUS(ddr3_tip_if_write 1555 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1556 mask_results_pup_reg_map[bus_id], 0, 0x1 << 24)); 1557 } 1558 1559 return MV_OK; 1560 } 1561 1562 /* 1563 * Dynamic read leveling sequence 1564 */ 1565 static int ddr3_tip_dynamic_per_bit_read_leveling_seq(u32 dev_num) 1566 { 1567 u32 bus_id, dq_id; 1568 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map(); 1569 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg(); 1570 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 1571 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1572 1573 /* mask PBS */ 1574 for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) { 1575 CHECK_STATUS(ddr3_tip_if_write 1576 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1577 
mask_results_dq_reg_map[dq_id], 0x1 << 24, 1578 0x1 << 24)); 1579 } 1580 1581 /* Mask all results */ 1582 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) { 1583 CHECK_STATUS(ddr3_tip_if_write 1584 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1585 mask_results_pup_reg_map[bus_id], 0x1 << 24, 1586 0x1 << 24)); 1587 } 1588 1589 /* Unmask only wanted */ 1590 for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) { 1591 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, dq_id / 8); 1592 CHECK_STATUS(ddr3_tip_if_write 1593 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1594 mask_results_dq_reg_map[dq_id], 0x0 << 24, 1595 0x1 << 24)); 1596 } 1597 1598 return MV_OK; 1599 } 1600 1601 /* 1602 * Print write leveling supplementary results 1603 */ 1604 int ddr3_tip_print_wl_supp_result(u32 dev_num) 1605 { 1606 u32 bus_id = 0, if_id = 0; 1607 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 1608 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1609 1610 DEBUG_LEVELING(DEBUG_LEVEL_INFO, 1611 ("I/F0 PUP0 Result[0 - success, 1-fail] ...\n")); 1612 1613 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1614 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1615 for (bus_id = 0; bus_id < octets_per_if_num; 1616 bus_id++) { 1617 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id); 1618 DEBUG_LEVELING(DEBUG_LEVEL_INFO, 1619 ("%d ,", wr_supp_res[if_id] 1620 [bus_id].is_pup_fail)); 1621 } 1622 } 1623 DEBUG_LEVELING( 1624 DEBUG_LEVEL_INFO, 1625 ("I/F0 PUP0 Stage[0-phase_shift, 1-clock_shift, 2-align_shift] ...\n")); 1626 1627 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1628 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1629 for (bus_id = 0; bus_id < octets_per_if_num; 1630 bus_id++) { 1631 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id); 1632 DEBUG_LEVELING(DEBUG_LEVEL_INFO, 1633 ("%d ,", wr_supp_res[if_id] 1634 [bus_id].stage)); 1635 } 1636 } 1637 1638 return MV_OK; 1639 } 1640 1641 #define RD_FIFO_PTR_LOW_STAT_INDIR_ADDR 0x9a 1642 #define RD_FIFO_PTR_HIGH_STAT_INDIR_ADDR 0x9b 1643 /* position of falling dqs edge in fifo; walking 1 */ 1644 #define RD_FIFO_DQS_FALL_EDGE_POS_0 0x1 1645 #define RD_FIFO_DQS_FALL_EDGE_POS_1 0x2 1646 #define RD_FIFO_DQS_FALL_EDGE_POS_2 0x4 1647 #define RD_FIFO_DQS_FALL_EDGE_POS_3 0x8 1648 #define RD_FIFO_DQS_FALL_EDGE_POS_4 0x10 /* lock */ 1649 /* position of rising dqs edge in fifo; walking 0 */ 1650 #define RD_FIFO_DQS_RISE_EDGE_POS_0 0x1fff 1651 #define RD_FIFO_DQS_RISE_EDGE_POS_1 0x3ffe 1652 #define RD_FIFO_DQS_RISE_EDGE_POS_2 0x3ffd 1653 #define RD_FIFO_DQS_RISE_EDGE_POS_3 0x3ffb 1654 #define RD_FIFO_DQS_RISE_EDGE_POS_4 0x3ff7 /* lock */ 1655 #define TEST_ADDR 0x8 1656 #define TAPS_PER_UI 32 1657 #define UI_PER_RD_SAMPLE 4 1658 #define TAPS_PER_RD_SAMPLE ((UI_PER_RD_SAMPLE) * (TAPS_PER_UI)) 1659 #define MAX_RD_SAMPLES 32 1660 #define MAX_RL_VALUE ((MAX_RD_SAMPLES) * (TAPS_PER_RD_SAMPLE)) 1661 #define RD_FIFO_DLY 8 1662 #define STEP_SIZE 64 1663 #define RL_JITTER_WIDTH_LMT 20 1664 #define ADLL_TAPS_IN_CYCLE 64 1665 1666 enum rl_dqs_burst_state { 1667 RL_AHEAD = 0, 1668 RL_INSIDE, 1669 RL_BEHIND 1670 }; 1671 1672 1673 int mv_ddr_rl_dqs_burst(u32 dev_num, u32 if_id, u32 freq) 1674 { 1675 enum rl_dqs_burst_state rl_state[MAX_CS_NUM][MAX_BUS_NUM][MAX_INTERFACE_NUM] = { { {0} } }; 1676 enum hws_ddr_phy subphy_type = DDR_PHY_DATA; 1677 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1678 int cl_val = tm->interface_params[0].cas_l; 1679 int rl_adll_val, rl_phase_val, sdr_cycle_incr, rd_sample, rd_ready; 1680 int final_rd_sample, 
	    final_rd_ready;
	int i, subphy_id, step;
	int pass_lock_num = 0;
	int init_pass_lock_num;
	int phase_delta;
	int min_phase, max_phase;
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 rl_values[MAX_CS_NUM][MAX_BUS_NUM][MAX_INTERFACE_NUM] = { { {0} } };
	u32 rl_min_values[MAX_CS_NUM][MAX_BUS_NUM][MAX_INTERFACE_NUM] = { { {0} } };
	u32 rl_max_values[MAX_CS_NUM][MAX_BUS_NUM][MAX_INTERFACE_NUM] = { { {0} } };
	u32 rl_val, rl_min_val[MAX_CS_NUM], rl_max_val[MAX_CS_NUM];
	u32 reg_val_low, reg_val_high;
	u32 reg_val, reg_mask;
	uintptr_t test_addr = TEST_ADDR;

	/* initialization */
	if (mv_ddr_is_ecc_ena()) {
		ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST, if_id, TRAINING_SW_2_REG,
				 &reg_val, MASK_ALL_BITS);
		reg_mask = (TRAINING_ECC_MUX_MASK << TRAINING_ECC_MUX_OFFS) |
			   (TRAINING_SW_OVRD_MASK << TRAINING_SW_OVRD_OFFS);
		reg_val &= ~reg_mask;
		reg_val |= (TRAINING_ECC_MUX_DIS << TRAINING_ECC_MUX_OFFS) |
			   (TRAINING_SW_OVRD_ENA << TRAINING_SW_OVRD_OFFS);
		ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, TRAINING_SW_2_REG,
				  reg_val, MASK_ALL_BITS);
		ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST, if_id, TRAINING_REG,
				 &reg_val, MASK_ALL_BITS);
		reg_mask = (TRN_START_MASK << TRN_START_OFFS);
		reg_val &= ~reg_mask;
		reg_val |= TRN_START_ENA << TRN_START_OFFS;
		ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, TRAINING_REG,
				  reg_val, MASK_ALL_BITS);
	}

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++)
		for (subphy_id = 0; subphy_id < MAX_BUS_NUM; subphy_id++)
			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++)
				if (IS_BUS_ACTIVE(tm->bus_act_mask, subphy_id) == 0)
					pass_lock_num++; /* increment on inactive subphys */

	init_pass_lock_num = pass_lock_num / max_cs;
	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			training_result[training_stage][if_id] = TEST_SUCCESS;
		}
	}

	/* search for dqs edges per subphy */
	if_id = 0;
	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		pass_lock_num = init_pass_lock_num;
		ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ODPG_DATA_CTRL_REG,
				  effective_cs << ODPG_DATA_CS_OFFS,
				  ODPG_DATA_CS_MASK << ODPG_DATA_CS_OFFS);
		rl_min_val[effective_cs] = MAX_RL_VALUE;
		rl_max_val[effective_cs] = 0;
		step = STEP_SIZE;
		for (i = 0; i < MAX_RL_VALUE; i += step) {
			rl_val = 0;
			sdr_cycle_incr = i / TAPS_PER_RD_SAMPLE; /* sdr cycle increment */
			rd_sample = cl_val + 2 * sdr_cycle_incr;
			/* fifo out to in delay in search is constant */
			rd_ready = rd_sample + RD_FIFO_DLY;

			ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, RD_DATA_SMPL_DLYS_REG,
					  rd_sample << RD_SMPL_DLY_CS_OFFS(effective_cs),
					  RD_SMPL_DLY_CS_MASK << RD_SMPL_DLY_CS_OFFS(effective_cs));
			ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, RD_DATA_RDY_DLYS_REG,
					  rd_ready << RD_RDY_DLY_CS_OFFS(effective_cs),
					  RD_RDY_DLY_CS_MASK << RD_RDY_DLY_CS_OFFS(effective_cs));

			/* one sdr (single data rate) cycle incremented on every four phases of ddr clock */
			sdr_cycle_incr = i % TAPS_PER_RD_SAMPLE;
			rl_adll_val = sdr_cycle_incr % MAX_RD_SAMPLES;
			rl_phase_val = sdr_cycle_incr / MAX_RD_SAMPLES;
			rl_val = ((rl_adll_val & RL_REF_DLY_MASK) << RL_REF_DLY_OFFS) |
				 ((rl_phase_val & RL_PH_SEL_MASK) << RL_PH_SEL_OFFS);

			/* write to all subphys (even to not connected or locked) */
			ddr3_tip_bus_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
					   ACCESS_TYPE_MULTICAST, 0, DDR_PHY_DATA,
					   RL_PHY_REG(effective_cs), rl_val);

			/* reset read fifo assertion */
			ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, if_id, SDRAM_CFG_REG,
					  DATA_PUP_RD_RESET_ENA << DATA_PUP_RD_RESET_OFFS,
					  DATA_PUP_RD_RESET_MASK << DATA_PUP_RD_RESET_OFFS);

			/* reset read fifo deassertion */
			ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, if_id, SDRAM_CFG_REG,
					  DATA_PUP_RD_RESET_DIS << DATA_PUP_RD_RESET_OFFS,
					  DATA_PUP_RD_RESET_MASK << DATA_PUP_RD_RESET_OFFS);

			/* perform one read burst */
			if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask))
				readq(test_addr);
			else
				readl(test_addr);

			/* progress read ptr; decide on rl state per byte */
			for (subphy_id = 0; subphy_id < MAX_BUS_NUM; subphy_id++) {
				if (rl_state[effective_cs][subphy_id][if_id] == RL_BEHIND)
					continue; /* skip locked subphys */
				ddr3_tip_bus_read(dev_num, if_id, ACCESS_TYPE_UNICAST, subphy_id,
						  DDR_PHY_DATA, RD_FIFO_PTR_LOW_STAT_INDIR_ADDR,
						  &reg_val_low);
				ddr3_tip_bus_read(dev_num, if_id, ACCESS_TYPE_UNICAST, subphy_id,
						  DDR_PHY_DATA, RD_FIFO_PTR_HIGH_STAT_INDIR_ADDR,
						  &reg_val_high);
				DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
					       ("%s: cs %d, step %d, subphy %d, state %d, low 0x%04x, high 0x%04x; move to ",
						__func__, effective_cs, i, subphy_id,
						rl_state[effective_cs][subphy_id][if_id],
						reg_val_low, reg_val_high));

				switch (rl_state[effective_cs][subphy_id][if_id]) {
				case RL_AHEAD:
					/* improve search resolution getting closer to the window */
					if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_4 &&
					    reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_4) {
						rl_state[effective_cs][subphy_id][if_id] = RL_INSIDE;
						rl_values[effective_cs][subphy_id][if_id] = i;
						rl_min_values[effective_cs][subphy_id][if_id] = i;
						DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
							       ("new state %d\n",
								rl_state[effective_cs][subphy_id][if_id]));
					} else if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_3 &&
						   reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_3) {
						step = (step < 2) ? step : 2;
					} else if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_2 &&
						   reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_2) {
						step = (step < 16) ? step : 16;
					} else if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_1 &&
						   reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_1) {
						step = (step < 32) ? step : 32;
					} else if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_0 &&
						   reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_0) {
						step = (step < 64) ? step : 64;
					} else {
						/* otherwise, step is unchanged */
					}
					break;
				case RL_INSIDE:
					if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_4 &&
					    reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_4) {
						rl_max_values[effective_cs][subphy_id][if_id] = i;
						if ((rl_max_values[effective_cs][subphy_id][if_id] -
						     rl_min_values[effective_cs][subphy_id][if_id]) >
						    ADLL_TAPS_IN_CYCLE) {
							rl_state[effective_cs][subphy_id][if_id] = RL_BEHIND;
							rl_values[effective_cs][subphy_id][if_id] =
								(i + rl_values[effective_cs][subphy_id][if_id]) / 2;
							pass_lock_num++;
							DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
								       ("new lock %d\n", pass_lock_num));
							if (rl_min_val[effective_cs] >
							    rl_values[effective_cs][subphy_id][if_id])
								rl_min_val[effective_cs] =
									rl_values[effective_cs][subphy_id][if_id];
							if (rl_max_val[effective_cs] <
							    rl_values[effective_cs][subphy_id][if_id])
								rl_max_val[effective_cs] =
									rl_values[effective_cs][subphy_id][if_id];
							step = 2;
						}
					}
					if (reg_val_low != RD_FIFO_DQS_FALL_EDGE_POS_4 ||
					    reg_val_high != RD_FIFO_DQS_RISE_EDGE_POS_4) {
						if ((i - rl_values[effective_cs][subphy_id][if_id]) <
						    RL_JITTER_WIDTH_LMT) {
							/* inside the jitter; not valid segment */
							rl_state[effective_cs][subphy_id][if_id] = RL_AHEAD;
							DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
								       ("new state %d; jitter on mask\n",
									rl_state[effective_cs][subphy_id][if_id]));
						} else { /* finished valid segment */
							rl_state[effective_cs][subphy_id][if_id] = RL_BEHIND;
							rl_values[effective_cs][subphy_id][if_id] =
								(i + rl_values[effective_cs][subphy_id][if_id]) / 2;
							DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
								       ("new state %d, solution %d\n",
									rl_state[effective_cs][subphy_id][if_id],
									rl_values[effective_cs][subphy_id][if_id]));
							pass_lock_num++;
							DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
								       ("new lock %d\n", pass_lock_num));
							if (rl_min_val[effective_cs] >
							    rl_values[effective_cs][subphy_id][if_id])
								rl_min_val[effective_cs] =
									rl_values[effective_cs][subphy_id][if_id];
							if (rl_max_val[effective_cs] <
							    rl_values[effective_cs][subphy_id][if_id])
								rl_max_val[effective_cs] =
									rl_values[effective_cs][subphy_id][if_id];
							step = 2;
						}
					}
					break;
				case RL_BEHIND: /* do nothing */
					break;
				}
				DEBUG_LEVELING(DEBUG_LEVEL_TRACE, ("\n"));
			}
			DEBUG_LEVELING(DEBUG_LEVEL_TRACE, ("pass_lock_num %d\n", pass_lock_num));
			/* exit condition */
			if (pass_lock_num == MAX_BUS_NUM)
				break;
		} /* for-loop on i */

		if (pass_lock_num != MAX_BUS_NUM) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("%s: cs %d, pass_lock_num %d, max_bus_num %d, init_pass_lock_num %d\n",
					__func__, effective_cs, pass_lock_num, MAX_BUS_NUM, init_pass_lock_num));
			for (subphy_id = 0; subphy_id < MAX_BUS_NUM; subphy_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, subphy_id);
				DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
					       ("%s: subphy %d %s\n",
						__func__, subphy_id,
						(rl_state[effective_cs][subphy_id][if_id] == RL_BEHIND) ?
						"locked" : "not locked"));
			}
		}
	} /* for-loop on effective_cs */

	/* post-processing read leveling results */
	if_id = 0;
	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		phase_delta = 0;
		i = rl_min_val[effective_cs];
		sdr_cycle_incr = i / TAPS_PER_RD_SAMPLE; /* sdr cycle increment */
		rd_sample = cl_val + 2 * sdr_cycle_incr;
		rd_ready = rd_sample + RD_FIFO_DLY;
		min_phase = (rl_min_val[effective_cs] - (sdr_cycle_incr * TAPS_PER_RD_SAMPLE)) % MAX_RD_SAMPLES;
		max_phase = (rl_max_val[effective_cs] - (sdr_cycle_incr * TAPS_PER_RD_SAMPLE)) % MAX_RD_SAMPLES;
		final_rd_sample = rd_sample;
		final_rd_ready = rd_ready;

		ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, RD_DATA_SMPL_DLYS_REG,
				  rd_sample << RD_SMPL_DLY_CS_OFFS(effective_cs),
				  RD_SMPL_DLY_CS_MASK << RD_SMPL_DLY_CS_OFFS(effective_cs));
		ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, RD_DATA_RDY_DLYS_REG,
				  rd_ready << RD_RDY_DLY_CS_OFFS(effective_cs),
				  RD_RDY_DLY_CS_MASK << RD_RDY_DLY_CS_OFFS(effective_cs));
		DEBUG_LEVELING(DEBUG_LEVEL_INFO,
			       ("%s: cs %d, min phase %d, max phase %d, read sample %d\n",
				__func__, effective_cs, min_phase, max_phase, rd_sample));

		for (subphy_id = 0; subphy_id < MAX_BUS_NUM; subphy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, subphy_id);
			/* reduce sdr cycle per cs; extract rl adll and phase values */
			i = rl_values[effective_cs][subphy_id][if_id] -
			    (sdr_cycle_incr * TAPS_PER_RD_SAMPLE);
			rl_adll_val = i % MAX_RD_SAMPLES;
			rl_phase_val = i / MAX_RD_SAMPLES;
			rl_phase_val -= phase_delta;
			DEBUG_LEVELING(DEBUG_LEVEL_INFO,
				       ("%s: final results: cs %d, subphy %d, read sample %d read ready %d, rl_phase_val %d, rl_adll_val %d\n",
					__func__, effective_cs, subphy_id, final_rd_sample,
					final_rd_ready, rl_phase_val, rl_adll_val));

			rl_val = ((rl_adll_val & RL_REF_DLY_MASK) << RL_REF_DLY_OFFS) |
				 ((rl_phase_val & RL_PH_SEL_MASK) << RL_PH_SEL_OFFS);
			ddr3_tip_bus_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
					   ACCESS_TYPE_UNICAST, subphy_id, subphy_type,
					   RL_PHY_REG(effective_cs), rl_val);
		}
	} /* for-loop on effective cs */

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		if (odt_config != 0)
			CHECK_STATUS(ddr3_tip_write_additional_odt_setting(dev_num, if_id));
	}

	/* reset read fifo assertion */
	ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, if_id, SDRAM_CFG_REG,
			  DATA_PUP_RD_RESET_ENA << DATA_PUP_RD_RESET_OFFS,
			  DATA_PUP_RD_RESET_MASK << DATA_PUP_RD_RESET_OFFS);

	/* reset read fifo deassertion */
	ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, if_id, SDRAM_CFG_REG,
			  DATA_PUP_RD_RESET_DIS << DATA_PUP_RD_RESET_OFFS,
			  DATA_PUP_RD_RESET_MASK << DATA_PUP_RD_RESET_OFFS);

	return MV_OK;
}
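/*
 * Illustrative sketch only (hypothetical helper, not part of the driver and
 * not called by the function above): the value written to RL_PHY_REG() packs
 * the read-leveling ADLL tap count and the DQS phase select into a single
 * PHY register word, using the same masks and offsets from mv_ddr_regs.h as
 * the inline expressions above:
 *
 *	static inline u32 mv_ddr_rl_val_pack(u32 adll_val, u32 phase_val)
 *	{
 *		return ((adll_val & RL_REF_DLY_MASK) << RL_REF_DLY_OFFS) |
 *		       ((phase_val & RL_PH_SEL_MASK) << RL_PH_SEL_OFFS);
 *	}
 */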