1 // SPDX-License-Identifier: GPL-2.0 2 /* 3 * Copyright (C) Marvell International Ltd. and its affiliates 4 */ 5 6 #include "ddr3_init.h" 7 8 #define WL_ITERATION_NUM 10 9 10 static u32 pup_mask_table[] = { 11 0x000000ff, 12 0x0000ff00, 13 0x00ff0000, 14 0xff000000 15 }; 16 17 static struct write_supp_result wr_supp_res[MAX_INTERFACE_NUM][MAX_BUS_NUM]; 18 19 static int ddr3_tip_dynamic_write_leveling_seq(u32 dev_num); 20 static int ddr3_tip_dynamic_read_leveling_seq(u32 dev_num); 21 static int ddr3_tip_dynamic_per_bit_read_leveling_seq(u32 dev_num); 22 static int ddr3_tip_wl_supp_align_phase_shift(u32 dev_num, u32 if_id, 23 u32 bus_id); 24 static int ddr3_tip_xsb_compare_test(u32 dev_num, u32 if_id, u32 bus_id, 25 u32 edge_offset); 26 27 u32 ddr3_tip_max_cs_get(u32 dev_num) 28 { 29 u32 c_cs, if_id, bus_id; 30 static u32 max_cs; 31 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 32 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 33 34 if (!max_cs) { 35 CHECK_STATUS(ddr3_tip_get_first_active_if((u8)dev_num, 36 tm->if_act_mask, 37 &if_id)); 38 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) { 39 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id); 40 break; 41 } 42 43 for (c_cs = 0; c_cs < NUM_OF_CS; c_cs++) { 44 VALIDATE_ACTIVE(tm-> 45 interface_params[if_id].as_bus_params[bus_id]. 46 cs_bitmask, c_cs); 47 max_cs++; 48 } 49 } 50 51 return max_cs; 52 } 53 54 enum { 55 PASS, 56 FAIL 57 }; 58 /***************************************************************************** 59 Dynamic read leveling 60 ******************************************************************************/ 61 int ddr3_tip_dynamic_read_leveling(u32 dev_num, u32 freq) 62 { 63 u32 data, mask; 64 u32 max_cs = ddr3_tip_max_cs_get(dev_num); 65 u32 bus_num, if_id, cl_val; 66 enum hws_speed_bin speed_bin_index; 67 /* save current CS value */ 68 u32 cs_enable_reg_val[MAX_INTERFACE_NUM] = { 0 }; 69 int is_any_pup_fail = 0; 70 u32 data_read[MAX_INTERFACE_NUM + 1] = { 0 }; 71 u8 rl_values[NUM_OF_CS][MAX_BUS_NUM][MAX_INTERFACE_NUM]; 72 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table(); 73 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map(); 74 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 75 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 76 77 for (effective_cs = 0; effective_cs < NUM_OF_CS; effective_cs++) 78 for (bus_num = 0; bus_num < MAX_BUS_NUM; bus_num++) 79 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) 80 rl_values[effective_cs][bus_num][if_id] = 0; 81 82 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 83 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 84 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 85 training_result[training_stage][if_id] = TEST_SUCCESS; 86 87 /* save current cs enable reg val */ 88 CHECK_STATUS(ddr3_tip_if_read 89 (dev_num, ACCESS_TYPE_UNICAST, if_id, 90 DUAL_DUNIT_CFG_REG, cs_enable_reg_val, 91 MASK_ALL_BITS)); 92 /* enable single cs */ 93 CHECK_STATUS(ddr3_tip_if_write 94 (dev_num, ACCESS_TYPE_UNICAST, if_id, 95 DUAL_DUNIT_CFG_REG, (1 << 3), (1 << 3))); 96 } 97 98 ddr3_tip_reset_fifo_ptr(dev_num); 99 100 /* 101 * Phase 1: Load pattern (using ODPG) 102 * 103 * enter Read Leveling mode 104 * only 27 bits are masked 105 * assuming non multi-CS configuration 106 * write to CS = 0 for the non multi CS configuration, note 107 * that the results shall be read back to the required CS !!! 
108 */ 109 110 /* BUS count is 0 shifted 26 */ 111 CHECK_STATUS(ddr3_tip_if_write 112 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 113 ODPG_DATA_CTRL_REG, 0x3, 0x3)); 114 CHECK_STATUS(ddr3_tip_configure_odpg 115 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0, 116 pattern_table[PATTERN_RL].num_of_phases_tx, 0, 117 pattern_table[PATTERN_RL].num_of_phases_rx, 0, 0, 118 effective_cs, STRESS_NONE, DURATION_SINGLE)); 119 120 /* load pattern to ODPG */ 121 ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST, 122 PARAM_NOT_CARE, PATTERN_RL, 123 pattern_table[PATTERN_RL]. 124 start_addr); 125 126 /* 127 * Phase 2: ODPG to Read Leveling mode 128 */ 129 130 /* General Training Opcode register */ 131 CHECK_STATUS(ddr3_tip_if_write 132 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 133 ODPG_WR_RD_MODE_ENA_REG, 0, 134 MASK_ALL_BITS)); 135 136 CHECK_STATUS(ddr3_tip_if_write 137 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 138 GENERAL_TRAINING_OPCODE_REG, 139 (0x301b01 | effective_cs << 2), 0x3c3fef)); 140 141 /* Object1 opcode register 0 & 1 */ 142 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 143 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 144 speed_bin_index = 145 tm->interface_params[if_id].speed_bin_index; 146 cl_val = 147 cas_latency_table[speed_bin_index].cl_val[freq]; 148 data = (cl_val << 17) | (0x3 << 25); 149 mask = (0xff << 9) | (0x1f << 17) | (0x3 << 25); 150 CHECK_STATUS(ddr3_tip_if_write 151 (dev_num, ACCESS_TYPE_UNICAST, if_id, 152 OPCODE_REG0_REG(1), data, mask)); 153 } 154 155 /* Set iteration count to max value */ 156 CHECK_STATUS(ddr3_tip_if_write 157 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 158 OPCODE_REG1_REG(1), 0xd00, 0xd00)); 159 160 /* 161 * Phase 2: Mask config 162 */ 163 164 ddr3_tip_dynamic_read_leveling_seq(dev_num); 165 166 /* 167 * Phase 3: Read Leveling execution 168 */ 169 170 /* temporary jira dunit=14751 */ 171 CHECK_STATUS(ddr3_tip_if_write 172 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 173 TRAINING_DBG_1_REG, 0, (u32)(1 << 31))); 174 /* configure phy reset value */ 175 CHECK_STATUS(ddr3_tip_if_write 176 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 177 TRAINING_DBG_3_REG, (0x7f << 24), 178 (u32)(0xff << 24))); 179 /* data pup rd reset enable */ 180 CHECK_STATUS(ddr3_tip_if_write 181 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 182 SDRAM_CFG_REG, 0, (1 << 30))); 183 /* data pup rd reset disable */ 184 CHECK_STATUS(ddr3_tip_if_write 185 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 186 SDRAM_CFG_REG, (1 << 30), (1 << 30))); 187 /* training SW override & training RL mode */ 188 CHECK_STATUS(ddr3_tip_if_write 189 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 190 TRAINING_SW_2_REG, 0x1, 0x9)); 191 /* training enable */ 192 CHECK_STATUS(ddr3_tip_if_write 193 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 194 TRAINING_REG, (1 << 24) | (1 << 20), 195 (1 << 24) | (1 << 20))); 196 CHECK_STATUS(ddr3_tip_if_write 197 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 198 TRAINING_REG, (u32)(1 << 31), (u32)(1 << 31))); 199 200 /* trigger training */ 201 mv_ddr_training_enable(); 202 203 /* check for training done */ 204 if (mv_ddr_is_training_done(MAX_POLLING_ITERATIONS, &data) != MV_OK) { 205 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("training done failed\n")); 206 return MV_FAIL; 207 } 208 /* check for training pass */ 209 if (data != PASS) 210 DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("training result failed\n")); 211 212 /* disable odpg; switch back to functional mode */ 213 mv_ddr_odpg_disable(); 214 215 if 
(mv_ddr_is_odpg_done(MAX_POLLING_ITERATIONS) != MV_OK) { 216 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("odpg disable failed\n")); 217 return MV_FAIL; 218 } 219 220 ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 221 ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS); 222 223 /* double loop on bus, pup */ 224 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 225 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 226 /* check training done */ 227 is_any_pup_fail = 0; 228 for (bus_num = 0; 229 bus_num < octets_per_if_num; 230 bus_num++) { 231 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num); 232 if (ddr3_tip_if_polling 233 (dev_num, ACCESS_TYPE_UNICAST, 234 if_id, (1 << 25), (1 << 25), 235 mask_results_pup_reg_map[bus_num], 236 MAX_POLLING_ITERATIONS) != MV_OK) { 237 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, 238 ("\n_r_l: DDR3 poll failed(2) for IF %d CS %d bus %d", 239 if_id, effective_cs, bus_num)); 240 is_any_pup_fail = 1; 241 } else { 242 /* read result per pup */ 243 CHECK_STATUS(ddr3_tip_if_read 244 (dev_num, 245 ACCESS_TYPE_UNICAST, 246 if_id, 247 mask_results_pup_reg_map 248 [bus_num], data_read, 249 0xff)); 250 rl_values[effective_cs][bus_num] 251 [if_id] = (u8)data_read[if_id]; 252 } 253 } 254 255 if (is_any_pup_fail == 1) { 256 training_result[training_stage][if_id] = 257 TEST_FAILED; 258 if (debug_mode == 0) 259 return MV_FAIL; 260 } 261 } 262 263 DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("RL exit read leveling\n")); 264 265 /* 266 * Phase 3: Exit Read Leveling 267 */ 268 269 CHECK_STATUS(ddr3_tip_if_write 270 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 271 TRAINING_SW_2_REG, (1 << 3), (1 << 3))); 272 CHECK_STATUS(ddr3_tip_if_write 273 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 274 TRAINING_SW_1_REG, (1 << 16), (1 << 16))); 275 /* set ODPG to functional */ 276 CHECK_STATUS(ddr3_tip_if_write 277 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 278 ODPG_DATA_CTRL_REG, 0x0, MASK_ALL_BITS)); 279 280 /* 281 * Copy the result from the effective CS search to the 282 * real Functional CS 283 */ 284 /*ddr3_tip_write_cs_result(dev_num, RL_PHY_REG(0); */ 285 CHECK_STATUS(ddr3_tip_if_write 286 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 287 ODPG_DATA_CTRL_REG, 0x0, MASK_ALL_BITS)); 288 } 289 290 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 291 /* double loop on bus, pup */ 292 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 293 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 294 for (bus_num = 0; 295 bus_num < octets_per_if_num; 296 bus_num++) { 297 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num); 298 /* read result per pup from arry */ 299 data = rl_values[effective_cs][bus_num][if_id]; 300 data = (data & 0x1f) | 301 (((data & 0xe0) >> 5) << 6); 302 ddr3_tip_bus_write(dev_num, 303 ACCESS_TYPE_UNICAST, 304 if_id, 305 ACCESS_TYPE_UNICAST, 306 bus_num, DDR_PHY_DATA, 307 RL_PHY_REG(effective_cs), 308 data); 309 } 310 } 311 } 312 /* Set to 0 after each loop to avoid illegal value may be used */ 313 effective_cs = 0; 314 315 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 316 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 317 /* restore cs enable value */ 318 CHECK_STATUS(ddr3_tip_if_write 319 (dev_num, ACCESS_TYPE_UNICAST, if_id, 320 DUAL_DUNIT_CFG_REG, cs_enable_reg_val[if_id], 321 MASK_ALL_BITS)); 322 if (odt_config != 0) { 323 CHECK_STATUS(ddr3_tip_write_additional_odt_setting 324 (dev_num, if_id)); 325 } 326 } 327 328 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 329 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 330 if 
(training_result[training_stage][if_id] == TEST_FAILED) 331 return MV_FAIL; 332 } 333 334 return MV_OK; 335 } 336 337 /* 338 * Legacy Dynamic write leveling 339 */ 340 int ddr3_tip_legacy_dynamic_write_leveling(u32 dev_num) 341 { 342 u32 c_cs, if_id, cs_mask = 0; 343 u32 max_cs = ddr3_tip_max_cs_get(dev_num); 344 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 345 346 /* 347 * In TRAINIUNG reg (0x15b0) write 0x80000008 | cs_mask: 348 * Trn_start 349 * cs_mask = 0x1 <<20 Trn_CS0 - CS0 is included in the DDR3 training 350 * cs_mask = 0x1 <<21 Trn_CS1 - CS1 is included in the DDR3 training 351 * cs_mask = 0x1 <<22 Trn_CS2 - CS2 is included in the DDR3 training 352 * cs_mask = 0x1 <<23 Trn_CS3 - CS3 is included in the DDR3 training 353 * Trn_auto_seq = write leveling 354 */ 355 for (c_cs = 0; c_cs < max_cs; c_cs++) 356 cs_mask = cs_mask | 1 << (20 + c_cs); 357 358 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) { 359 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 360 CHECK_STATUS(ddr3_tip_if_write 361 (dev_num, ACCESS_TYPE_MULTICAST, 0, 362 TRAINING_REG, (0x80000008 | cs_mask), 363 0xffffffff)); 364 mdelay(20); 365 if (ddr3_tip_if_polling 366 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 367 (u32)0x80000000, TRAINING_REG, 368 MAX_POLLING_ITERATIONS) != MV_OK) { 369 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, 370 ("polling failed for Old WL result\n")); 371 return MV_FAIL; 372 } 373 } 374 375 return MV_OK; 376 } 377 378 /* 379 * Legacy Dynamic read leveling 380 */ 381 int ddr3_tip_legacy_dynamic_read_leveling(u32 dev_num) 382 { 383 u32 c_cs, if_id, cs_mask = 0; 384 u32 max_cs = ddr3_tip_max_cs_get(dev_num); 385 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 386 387 /* 388 * In TRAINIUNG reg (0x15b0) write 0x80000040 | cs_mask: 389 * Trn_start 390 * cs_mask = 0x1 <<20 Trn_CS0 - CS0 is included in the DDR3 training 391 * cs_mask = 0x1 <<21 Trn_CS1 - CS1 is included in the DDR3 training 392 * cs_mask = 0x1 <<22 Trn_CS2 - CS2 is included in the DDR3 training 393 * cs_mask = 0x1 <<23 Trn_CS3 - CS3 is included in the DDR3 training 394 * Trn_auto_seq = Read Leveling using training pattern 395 */ 396 for (c_cs = 0; c_cs < max_cs; c_cs++) 397 cs_mask = cs_mask | 1 << (20 + c_cs); 398 399 CHECK_STATUS(ddr3_tip_if_write 400 (dev_num, ACCESS_TYPE_MULTICAST, 0, TRAINING_REG, 401 (0x80000040 | cs_mask), 0xffffffff)); 402 mdelay(100); 403 404 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) { 405 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 406 if (ddr3_tip_if_polling 407 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 408 (u32)0x80000000, TRAINING_REG, 409 MAX_POLLING_ITERATIONS) != MV_OK) { 410 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, 411 ("polling failed for Old RL result\n")); 412 return MV_FAIL; 413 } 414 } 415 416 return MV_OK; 417 } 418 419 /* 420 * Dynamic per bit read leveling 421 */ 422 int ddr3_tip_dynamic_per_bit_read_leveling(u32 dev_num, u32 freq) 423 { 424 u32 data, mask; 425 u32 bus_num, if_id, cl_val, bit_num; 426 u32 curr_numb, curr_min_delay; 427 int adll_array[3] = { 0, -0xa, 0x14 }; 428 u32 phyreg3_arr[MAX_INTERFACE_NUM][MAX_BUS_NUM]; 429 enum hws_speed_bin speed_bin_index; 430 int is_any_pup_fail = 0; 431 int break_loop = 0; 432 u32 cs_enable_reg_val[MAX_INTERFACE_NUM]; /* save current CS value */ 433 u32 data_read[MAX_INTERFACE_NUM]; 434 int per_bit_rl_pup_status[MAX_INTERFACE_NUM][MAX_BUS_NUM]; 435 u32 data2_write[MAX_INTERFACE_NUM][MAX_BUS_NUM]; 436 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table(); 437 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg(); 
438 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 439 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 440 441 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) { 442 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 443 for (bus_num = 0; 444 bus_num <= octets_per_if_num; bus_num++) { 445 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num); 446 per_bit_rl_pup_status[if_id][bus_num] = 0; 447 data2_write[if_id][bus_num] = 0; 448 /* read current value of phy register 0x3 */ 449 CHECK_STATUS(ddr3_tip_bus_read 450 (dev_num, if_id, ACCESS_TYPE_UNICAST, 451 bus_num, DDR_PHY_DATA, 452 CRX_PHY_REG(0), 453 &phyreg3_arr[if_id][bus_num])); 454 } 455 } 456 457 /* NEW RL machine */ 458 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 459 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 460 training_result[training_stage][if_id] = TEST_SUCCESS; 461 462 /* save current cs enable reg val */ 463 CHECK_STATUS(ddr3_tip_if_read 464 (dev_num, ACCESS_TYPE_UNICAST, if_id, 465 DUAL_DUNIT_CFG_REG, &cs_enable_reg_val[if_id], 466 MASK_ALL_BITS)); 467 /* enable single cs */ 468 CHECK_STATUS(ddr3_tip_if_write 469 (dev_num, ACCESS_TYPE_UNICAST, if_id, 470 DUAL_DUNIT_CFG_REG, (1 << 3), (1 << 3))); 471 } 472 473 ddr3_tip_reset_fifo_ptr(dev_num); 474 for (curr_numb = 0; curr_numb < 3; curr_numb++) { 475 /* 476 * Phase 1: Load pattern (using ODPG) 477 * 478 * enter Read Leveling mode 479 * only 27 bits are masked 480 * assuming non multi-CS configuration 481 * write to CS = 0 for the non multi CS configuration, note that 482 * the results shall be read back to the required CS !!! 483 */ 484 485 /* BUS count is 0 shifted 26 */ 486 CHECK_STATUS(ddr3_tip_if_write 487 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 488 ODPG_DATA_CTRL_REG, 0x3, 0x3)); 489 CHECK_STATUS(ddr3_tip_configure_odpg 490 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0, 491 pattern_table[PATTERN_TEST].num_of_phases_tx, 0, 492 pattern_table[PATTERN_TEST].num_of_phases_rx, 0, 493 0, 0, STRESS_NONE, DURATION_SINGLE)); 494 495 /* load pattern to ODPG */ 496 ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST, 497 PARAM_NOT_CARE, PATTERN_TEST, 498 pattern_table[PATTERN_TEST]. 
499 start_addr); 500 501 /* 502 * Phase 2: ODPG to Read Leveling mode 503 */ 504 505 /* General Training Opcode register */ 506 CHECK_STATUS(ddr3_tip_if_write 507 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 508 ODPG_WR_RD_MODE_ENA_REG, 0, 509 MASK_ALL_BITS)); 510 CHECK_STATUS(ddr3_tip_if_write 511 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 512 GENERAL_TRAINING_OPCODE_REG, 0x301b01, 0x3c3fef)); 513 514 /* Object1 opcode register 0 & 1 */ 515 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 516 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 517 speed_bin_index = 518 tm->interface_params[if_id].speed_bin_index; 519 cl_val = 520 cas_latency_table[speed_bin_index].cl_val[freq]; 521 data = (cl_val << 17) | (0x3 << 25); 522 mask = (0xff << 9) | (0x1f << 17) | (0x3 << 25); 523 CHECK_STATUS(ddr3_tip_if_write 524 (dev_num, ACCESS_TYPE_UNICAST, if_id, 525 OPCODE_REG0_REG(1), data, mask)); 526 } 527 528 /* Set iteration count to max value */ 529 CHECK_STATUS(ddr3_tip_if_write 530 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 531 OPCODE_REG1_REG(1), 0xd00, 0xd00)); 532 533 /* 534 * Phase 2: Mask config 535 */ 536 537 ddr3_tip_dynamic_per_bit_read_leveling_seq(dev_num); 538 539 /* 540 * Phase 3: Read Leveling execution 541 */ 542 543 /* temporary jira dunit=14751 */ 544 CHECK_STATUS(ddr3_tip_if_write 545 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 546 TRAINING_DBG_1_REG, 0, (u32)(1 << 31))); 547 /* configure phy reset value */ 548 CHECK_STATUS(ddr3_tip_if_write 549 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 550 TRAINING_DBG_3_REG, (0x7f << 24), 551 (u32)(0xff << 24))); 552 /* data pup rd reset enable */ 553 CHECK_STATUS(ddr3_tip_if_write 554 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 555 SDRAM_CFG_REG, 0, (1 << 30))); 556 /* data pup rd reset disable */ 557 CHECK_STATUS(ddr3_tip_if_write 558 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 559 SDRAM_CFG_REG, (1 << 30), (1 << 30))); 560 /* training SW override & training RL mode */ 561 CHECK_STATUS(ddr3_tip_if_write 562 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 563 TRAINING_SW_2_REG, 0x1, 0x9)); 564 /* training enable */ 565 CHECK_STATUS(ddr3_tip_if_write 566 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 567 TRAINING_REG, (1 << 24) | (1 << 20), 568 (1 << 24) | (1 << 20))); 569 CHECK_STATUS(ddr3_tip_if_write 570 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 571 TRAINING_REG, (u32)(1 << 31), (u32)(1 << 31))); 572 573 /* trigger training */ 574 mv_ddr_training_enable(); 575 576 /* check for training done */ 577 if (mv_ddr_is_training_done(MAX_POLLING_ITERATIONS, &data) != MV_OK) { 578 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("training done failed\n")); 579 return MV_FAIL; 580 } 581 /* check for training pass */ 582 if (data != PASS) 583 DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("training result failed\n")); 584 585 /* disable odpg; switch back to functional mode */ 586 mv_ddr_odpg_disable(); 587 588 if (mv_ddr_is_odpg_done(MAX_POLLING_ITERATIONS) != MV_OK) { 589 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("odpg disable failed\n")); 590 return MV_FAIL; 591 } 592 593 ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 594 ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS); 595 596 /* double loop on bus, pup */ 597 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 598 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 599 /* check training done */ 600 for (bus_num = 0; 601 bus_num < octets_per_if_num; 602 bus_num++) { 603 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num); 604 605 if (per_bit_rl_pup_status[if_id][bus_num] 606 == 0) { 607 
curr_min_delay = 0; 608 for (bit_num = 0; bit_num < 8; 609 bit_num++) { 610 if (ddr3_tip_if_polling 611 (dev_num, 612 ACCESS_TYPE_UNICAST, 613 if_id, (1 << 25), 614 (1 << 25), 615 mask_results_dq_reg_map 616 [bus_num * 8 + bit_num], 617 MAX_POLLING_ITERATIONS) != 618 MV_OK) { 619 DEBUG_LEVELING 620 (DEBUG_LEVEL_ERROR, 621 ("\n_r_l: DDR3 poll failed(2) for bus %d bit %d\n", 622 bus_num, 623 bit_num)); 624 } else { 625 /* read result per pup */ 626 CHECK_STATUS 627 (ddr3_tip_if_read 628 (dev_num, 629 ACCESS_TYPE_UNICAST, 630 if_id, 631 mask_results_dq_reg_map 632 [bus_num * 8 + 633 bit_num], 634 data_read, 635 MASK_ALL_BITS)); 636 data = 637 (data_read 638 [if_id] & 639 0x1f) | 640 ((data_read 641 [if_id] & 642 0xe0) << 1); 643 if (curr_min_delay == 0) 644 curr_min_delay = 645 data; 646 else if (data < 647 curr_min_delay) 648 curr_min_delay = 649 data; 650 if (data > data2_write[if_id][bus_num]) 651 data2_write 652 [if_id] 653 [bus_num] = 654 data; 655 } 656 } 657 658 if (data2_write[if_id][bus_num] <= 659 (curr_min_delay + 660 MAX_DQ_READ_LEVELING_DELAY)) { 661 per_bit_rl_pup_status[if_id] 662 [bus_num] = 1; 663 } 664 } 665 } 666 } 667 668 /* check if there is need to search new phyreg3 value */ 669 if (curr_numb < 2) { 670 /* if there is DLL that is not checked yet */ 671 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; 672 if_id++) { 673 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 674 for (bus_num = 0; 675 bus_num < octets_per_if_num; 676 bus_num++) { 677 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, 678 bus_num); 679 if (per_bit_rl_pup_status[if_id] 680 [bus_num] != 1) { 681 /* go to next ADLL value */ 682 CHECK_STATUS 683 (ddr3_tip_bus_write 684 (dev_num, 685 ACCESS_TYPE_UNICAST, 686 if_id, 687 ACCESS_TYPE_UNICAST, 688 bus_num, DDR_PHY_DATA, 689 CRX_PHY_REG(0), 690 (phyreg3_arr[if_id] 691 [bus_num] + 692 adll_array[curr_numb]))); 693 break_loop = 1; 694 break; 695 } 696 } 697 if (break_loop) 698 break; 699 } 700 } /* if (curr_numb < 2) */ 701 if (!break_loop) 702 break; 703 } /* for ( curr_numb = 0; curr_numb <3; curr_numb++) */ 704 705 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 706 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 707 for (bus_num = 0; bus_num < octets_per_if_num; 708 bus_num++) { 709 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_num); 710 if (per_bit_rl_pup_status[if_id][bus_num] == 1) 711 ddr3_tip_bus_write(dev_num, 712 ACCESS_TYPE_UNICAST, 713 if_id, 714 ACCESS_TYPE_UNICAST, 715 bus_num, DDR_PHY_DATA, 716 RL_PHY_REG(effective_cs), 717 data2_write[if_id] 718 [bus_num]); 719 else 720 is_any_pup_fail = 1; 721 } 722 723 /* TBD flow does not support multi CS */ 724 /* 725 * cs_bitmask = tm->interface_params[if_id]. 726 * as_bus_params[bus_num].cs_bitmask; 727 */ 728 /* divide by 4 is used for retrieving the CS number */ 729 /* 730 * TBD BC2 - what is the PHY address for other 731 * CS ddr3_tip_write_cs_result() ??? 
732 */ 733 /* 734 * find what should be written to PHY 735 * - max delay that is less than threshold 736 */ 737 if (is_any_pup_fail == 1) { 738 training_result[training_stage][if_id] = TEST_FAILED; 739 if (debug_mode == 0) 740 return MV_FAIL; 741 } 742 } 743 DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("RL exit read leveling\n")); 744 745 /* 746 * Phase 3: Exit Read Leveling 747 */ 748 749 CHECK_STATUS(ddr3_tip_if_write 750 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 751 TRAINING_SW_2_REG, (1 << 3), (1 << 3))); 752 CHECK_STATUS(ddr3_tip_if_write 753 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 754 TRAINING_SW_1_REG, (1 << 16), (1 << 16))); 755 /* set ODPG to functional */ 756 CHECK_STATUS(ddr3_tip_if_write 757 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 758 ODPG_DATA_CTRL_REG, 0x0, MASK_ALL_BITS)); 759 /* 760 * Copy the result from the effective CS search to the real 761 * Functional CS 762 */ 763 ddr3_tip_write_cs_result(dev_num, RL_PHY_REG(0)); 764 CHECK_STATUS(ddr3_tip_if_write 765 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 766 ODPG_DATA_CTRL_REG, 0x0, MASK_ALL_BITS)); 767 768 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 769 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 770 /* restore cs enable value */ 771 CHECK_STATUS(ddr3_tip_if_write 772 (dev_num, ACCESS_TYPE_UNICAST, if_id, 773 DUAL_DUNIT_CFG_REG, cs_enable_reg_val[if_id], 774 MASK_ALL_BITS)); 775 if (odt_config != 0) { 776 CHECK_STATUS(ddr3_tip_write_additional_odt_setting 777 (dev_num, if_id)); 778 } 779 } 780 781 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 782 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 783 if (training_result[training_stage][if_id] == TEST_FAILED) 784 return MV_FAIL; 785 } 786 787 return MV_OK; 788 } 789 790 int ddr3_tip_calc_cs_mask(u32 dev_num, u32 if_id, u32 effective_cs, 791 u32 *cs_mask) 792 { 793 u32 all_bus_cs = 0, same_bus_cs; 794 u32 bus_cnt; 795 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 796 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 797 798 *cs_mask = same_bus_cs = CS_BIT_MASK; 799 800 /* 801 * In some of the devices (such as BC2), the CS is per pup and there 802 * for mixed mode is valid on like other devices where CS configuration 803 * is per interface. 804 * In order to know that, we do 'Or' and 'And' operation between all 805 * CS (of the pups). 806 * If they are they are not the same then it's mixed mode so all CS 807 * should be configured (when configuring the MRS) 808 */ 809 for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) { 810 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt); 811 812 all_bus_cs |= tm->interface_params[if_id]. 813 as_bus_params[bus_cnt].cs_bitmask; 814 same_bus_cs &= tm->interface_params[if_id]. 815 as_bus_params[bus_cnt].cs_bitmask; 816 817 /* cs enable is active low */ 818 *cs_mask &= ~tm->interface_params[if_id]. 
819 as_bus_params[bus_cnt].cs_bitmask; 820 } 821 822 if (all_bus_cs == same_bus_cs) 823 *cs_mask = (*cs_mask | (~(1 << effective_cs))) & CS_BIT_MASK; 824 825 return MV_OK; 826 } 827 828 /* 829 * Dynamic write leveling 830 */ 831 int ddr3_tip_dynamic_write_leveling(u32 dev_num, int phase_remove) 832 { 833 u32 reg_data = 0, temp = 0, iter, if_id, bus_cnt; 834 u32 cs_enable_reg_val[MAX_INTERFACE_NUM] = { 0 }; 835 u32 cs_mask[MAX_INTERFACE_NUM]; 836 u32 read_data_sample_delay_vals[MAX_INTERFACE_NUM] = { 0 }; 837 u32 read_data_ready_delay_vals[MAX_INTERFACE_NUM] = { 0 }; 838 /* 0 for failure */ 839 u32 res_values[MAX_INTERFACE_NUM * MAX_BUS_NUM] = { 0 }; 840 u32 test_res = 0; /* 0 - success for all pup */ 841 u32 data_read[MAX_INTERFACE_NUM]; 842 u8 wl_values[NUM_OF_CS][MAX_BUS_NUM][MAX_INTERFACE_NUM]; 843 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map(); 844 u32 cs_mask0[MAX_INTERFACE_NUM] = { 0 }; 845 u32 max_cs = ddr3_tip_max_cs_get(dev_num); 846 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 847 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 848 849 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 850 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 851 852 training_result[training_stage][if_id] = TEST_SUCCESS; 853 854 /* save Read Data Sample Delay */ 855 CHECK_STATUS(ddr3_tip_if_read 856 (dev_num, ACCESS_TYPE_UNICAST, if_id, 857 RD_DATA_SMPL_DLYS_REG, 858 read_data_sample_delay_vals, MASK_ALL_BITS)); 859 /* save Read Data Ready Delay */ 860 CHECK_STATUS(ddr3_tip_if_read 861 (dev_num, ACCESS_TYPE_UNICAST, if_id, 862 RD_DATA_RDY_DLYS_REG, read_data_ready_delay_vals, 863 MASK_ALL_BITS)); 864 /* save current cs reg val */ 865 CHECK_STATUS(ddr3_tip_if_read 866 (dev_num, ACCESS_TYPE_UNICAST, if_id, 867 DUAL_DUNIT_CFG_REG, cs_enable_reg_val, MASK_ALL_BITS)); 868 } 869 870 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) < MV_TIP_REV_3) { 871 /* Enable multi-CS */ 872 CHECK_STATUS(ddr3_tip_if_write 873 (dev_num, ACCESS_TYPE_UNICAST, if_id, 874 DUAL_DUNIT_CFG_REG, 0, (1 << 3))); 875 } 876 877 /* 878 * Phase 1: DRAM 2 Write Leveling mode 879 */ 880 881 /*Assert 10 refresh commands to DRAM to all CS */ 882 for (iter = 0; iter < WL_ITERATION_NUM; iter++) { 883 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 884 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 885 CHECK_STATUS(ddr3_tip_if_write 886 (dev_num, ACCESS_TYPE_UNICAST, 887 if_id, SDRAM_OP_REG, 888 (u32)((~(0xf) << 8) | 0x2), 0xf1f)); 889 } 890 } 891 /* check controller back to normal */ 892 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 893 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 894 if (ddr3_tip_if_polling 895 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1f, 896 SDRAM_OP_REG, MAX_POLLING_ITERATIONS) != MV_OK) { 897 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, 898 ("WL: DDR3 poll failed(3)")); 899 } 900 } 901 902 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 903 /*enable write leveling to all cs - Q off , WL n */ 904 /* calculate interface cs mask */ 905 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MR_CMD1, 906 0x1000, 0x1080)); 907 908 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 909 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 910 /* cs enable is active low */ 911 ddr3_tip_calc_cs_mask(dev_num, if_id, effective_cs, 912 &cs_mask[if_id]); 913 } 914 915 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) { 916 /* Enable Output buffer to relevant CS - Q on , WL on */ 917 
CHECK_STATUS(ddr3_tip_write_mrs_cmd 918 (dev_num, cs_mask, MR_CMD1, 0x80, 0x1080)); 919 920 /*enable odt for relevant CS */ 921 CHECK_STATUS(ddr3_tip_if_write 922 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 923 0x1498, (0x3 << (effective_cs * 2)), 0xf)); 924 } else { 925 /* FIXME: should be the same as _CPU case */ 926 CHECK_STATUS(ddr3_tip_write_mrs_cmd 927 (dev_num, cs_mask, MR_CMD1, 0xc0, 0x12c4)); 928 } 929 930 /* 931 * Phase 2: Set training IP to write leveling mode 932 */ 933 934 CHECK_STATUS(ddr3_tip_dynamic_write_leveling_seq(dev_num)); 935 936 /* phase 3: trigger training */ 937 mv_ddr_training_enable(); 938 939 /* check for training done */ 940 if (mv_ddr_is_training_done(MAX_POLLING_ITERATIONS, data_read) != MV_OK) { 941 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, ("training done failed\n")); 942 } else { /* check for training pass */ 943 reg_data = data_read[0]; 944 #if defined(CONFIG_ARMADA_38X) /* JIRA #1498 for 16 bit with ECC */ 945 if (tm->bus_act_mask == 0xb) /* set to data to 0 to skip the check */ 946 reg_data = 0; 947 #endif 948 if (reg_data != PASS) 949 DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("training result failed\n")); 950 951 /* check for training completion per bus */ 952 for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) { 953 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt); 954 /* training status */ 955 ddr3_tip_if_read(0, ACCESS_TYPE_UNICAST, 0, 956 mask_results_pup_reg_map[bus_cnt], 957 data_read, MASK_ALL_BITS); 958 reg_data = data_read[0]; 959 DEBUG_LEVELING(DEBUG_LEVEL_TRACE, ("WL: IF %d BUS %d reg 0x%x\n", 960 0, bus_cnt, reg_data)); 961 if ((reg_data & (1 << 25)) == 0) 962 res_values[bus_cnt] = 1; 963 ddr3_tip_if_read(0, ACCESS_TYPE_UNICAST, 0, 964 mask_results_pup_reg_map[bus_cnt], 965 data_read, 0xff); 966 /* 967 * Save the read value that should be 968 * write to PHY register 969 */ 970 wl_values[effective_cs][bus_cnt][0] = (u8)data_read[0]; 971 } 972 } 973 974 /* 975 * Phase 3.5: Validate result 976 */ 977 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) { 978 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 979 for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) { 980 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt); 981 /* 982 * Read result control register according to subphy 983 * "16" below is for a half-phase 984 */ 985 reg_data = wl_values[effective_cs][bus_cnt][if_id] + 16; 986 /* 987 * Write to WL register: ADLL [4:0], Phase [8:6], 988 * Centralization ADLL [15:10] + 0x10 989 */ 990 reg_data = (reg_data & 0x1f) | 991 (((reg_data & 0xe0) >> 5) << 6) | 992 (((reg_data & 0x1f) + phy_reg1_val) << 10); 993 /* Search with WL CS0 subphy reg */ 994 ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, 995 ACCESS_TYPE_UNICAST, bus_cnt, 996 DDR_PHY_DATA, WL_PHY_REG(0), reg_data); 997 /* 998 * Check for change in data read from DRAM. 
999 * If changed, fix the result 1000 */ 1001 CHECK_STATUS(ddr3_tip_if_read 1002 (dev_num, 1003 ACCESS_TYPE_UNICAST, 1004 if_id, 1005 TRAINING_WL_REG, 1006 data_read, MASK_ALL_BITS)); 1007 if (((data_read[if_id] & (1 << (bus_cnt + 20))) >> 1008 (bus_cnt + 20)) == 0) { 1009 DEBUG_LEVELING( 1010 DEBUG_LEVEL_ERROR, 1011 ("WLValues was changed from 0x%X", 1012 wl_values[effective_cs] 1013 [bus_cnt][if_id])); 1014 wl_values[effective_cs] 1015 [bus_cnt][if_id] += 32; 1016 DEBUG_LEVELING( 1017 DEBUG_LEVEL_ERROR, 1018 ("to 0x%X", 1019 wl_values[effective_cs] 1020 [bus_cnt][if_id])); 1021 } 1022 } 1023 } 1024 1025 /* 1026 * Phase 4: Exit write leveling mode 1027 */ 1028 1029 /* disable DQs toggling */ 1030 CHECK_STATUS(ddr3_tip_if_write 1031 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1032 WL_DQS_PATTERN_REG, 0x0, 0x1)); 1033 1034 /* Update MRS 1 (WL off) */ 1035 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) { 1036 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MR_CMD1, 1037 0x1000, 0x1080)); 1038 } else { 1039 /* FIXME: should be same as _CPU case */ 1040 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MR_CMD1, 1041 0x1000, 0x12c4)); 1042 } 1043 1044 /* Update MRS 1 (return to functional mode - Q on , WL off) */ 1045 CHECK_STATUS(ddr3_tip_write_mrs_cmd 1046 (dev_num, cs_mask0, MR_CMD1, 0x0, 0x1080)); 1047 1048 /* set phy to normal mode */ 1049 CHECK_STATUS(ddr3_tip_if_write 1050 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1051 TRAINING_SW_2_REG, 0x5, 0x7)); 1052 1053 /* exit sw override mode */ 1054 CHECK_STATUS(ddr3_tip_if_write 1055 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1056 TRAINING_SW_2_REG, 0x4, 0x7)); 1057 } 1058 1059 /* 1060 * Phase 5: Load WL values to each PHY 1061 */ 1062 1063 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 1064 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1065 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1066 test_res = 0; 1067 for (bus_cnt = 0; 1068 bus_cnt < octets_per_if_num; 1069 bus_cnt++) { 1070 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt); 1071 /* check if result == pass */ 1072 if (res_values 1073 [(if_id * 1074 octets_per_if_num) + 1075 bus_cnt] == 0) { 1076 /* 1077 * read result control register 1078 * according to pup 1079 */ 1080 reg_data = 1081 wl_values[effective_cs][bus_cnt] 1082 [if_id]; 1083 /* 1084 * Write into write leveling register 1085 * ([4:0] ADLL, [8:6] Phase, [15:10] 1086 * (centralization) ADLL + 0x10) 1087 */ 1088 reg_data = 1089 (reg_data & 0x1f) | 1090 (((reg_data & 0xe0) >> 5) << 6) | 1091 (((reg_data & 0x1f) + 1092 phy_reg1_val) << 10); 1093 /* 1094 * in case phase remove should be executed 1095 * need to remove more than one phase. 
1096 * this will take place only in low frequency, 1097 * where there could be more than one phase between sub-phys 1098 */ 1099 if (phase_remove == 1) { 1100 temp = (reg_data >> WR_LVL_PH_SEL_OFFS) & WR_LVL_PH_SEL_PHASE1; 1101 reg_data &= ~(WR_LVL_PH_SEL_MASK << WR_LVL_PH_SEL_OFFS); 1102 reg_data |= (temp << WR_LVL_PH_SEL_OFFS); 1103 } 1104 1105 ddr3_tip_bus_write( 1106 dev_num, 1107 ACCESS_TYPE_UNICAST, 1108 if_id, 1109 ACCESS_TYPE_UNICAST, 1110 bus_cnt, 1111 DDR_PHY_DATA, 1112 WL_PHY_REG(effective_cs), 1113 reg_data); 1114 } else { 1115 test_res = 1; 1116 /* 1117 * read result control register 1118 * according to pup 1119 */ 1120 CHECK_STATUS(ddr3_tip_if_read 1121 (dev_num, 1122 ACCESS_TYPE_UNICAST, 1123 if_id, 1124 mask_results_pup_reg_map 1125 [bus_cnt], data_read, 1126 0xff)); 1127 reg_data = data_read[if_id]; 1128 DEBUG_LEVELING( 1129 DEBUG_LEVEL_ERROR, 1130 ("WL: IF %d BUS %d failed, reg 0x%x\n", 1131 if_id, bus_cnt, reg_data)); 1132 } 1133 } 1134 1135 if (test_res != 0) { 1136 training_result[training_stage][if_id] = 1137 TEST_FAILED; 1138 } 1139 } 1140 } 1141 /* Set to 0 after each loop to avoid illegal value may be used */ 1142 effective_cs = 0; 1143 1144 /* 1145 * Copy the result from the effective CS search to the real 1146 * Functional CS 1147 */ 1148 /* ddr3_tip_write_cs_result(dev_num, WL_PHY_REG(0); */ 1149 /* restore saved values */ 1150 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1151 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1152 /* restore Read Data Sample Delay */ 1153 CHECK_STATUS(ddr3_tip_if_write 1154 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1155 RD_DATA_SMPL_DLYS_REG, 1156 read_data_sample_delay_vals[if_id], 1157 MASK_ALL_BITS)); 1158 1159 /* restore Read Data Ready Delay */ 1160 CHECK_STATUS(ddr3_tip_if_write 1161 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1162 RD_DATA_RDY_DLYS_REG, 1163 read_data_ready_delay_vals[if_id], 1164 MASK_ALL_BITS)); 1165 1166 /* enable multi cs */ 1167 CHECK_STATUS(ddr3_tip_if_write 1168 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1169 DUAL_DUNIT_CFG_REG, cs_enable_reg_val[if_id], 1170 MASK_ALL_BITS)); 1171 } 1172 1173 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) { 1174 /* Disable modt0 for CS0 training - need to adjust for multi-CS 1175 * in case of ddr4 set 0xf else 0 1176 */ 1177 if (odt_config != 0) { 1178 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1179 SDRAM_ODT_CTRL_HIGH_REG, 0x0, 0xf)); 1180 } 1181 else { 1182 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1183 SDRAM_ODT_CTRL_HIGH_REG, 0xf, 0xf)); 1184 } 1185 1186 } 1187 1188 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1189 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1190 if (training_result[training_stage][if_id] == TEST_FAILED) 1191 return MV_FAIL; 1192 } 1193 1194 return MV_OK; 1195 } 1196 1197 /* 1198 * Dynamic write leveling supplementary 1199 */ 1200 int ddr3_tip_dynamic_write_leveling_supp(u32 dev_num) 1201 { 1202 int adll_offset; 1203 u32 if_id, bus_id, data, data_tmp; 1204 int is_if_fail = 0; 1205 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 1206 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1207 1208 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1209 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1210 is_if_fail = 0; 1211 1212 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) { 1213 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id); 1214 wr_supp_res[if_id][bus_id].is_pup_fail = 1; 1215 
CHECK_STATUS(ddr3_tip_bus_read 1216 (dev_num, if_id, ACCESS_TYPE_UNICAST, 1217 bus_id, DDR_PHY_DATA, 1218 CTX_PHY_REG(effective_cs), 1219 &data)); 1220 DEBUG_LEVELING( 1221 DEBUG_LEVEL_TRACE, 1222 ("WL Supp: adll_offset=0 data delay = %d\n", 1223 data)); 1224 if (ddr3_tip_wl_supp_align_phase_shift 1225 (dev_num, if_id, bus_id) == MV_OK) { 1226 DEBUG_LEVELING( 1227 DEBUG_LEVEL_TRACE, 1228 ("WL Supp: IF %d bus_id %d adll_offset=0 Success !\n", 1229 if_id, bus_id)); 1230 continue; 1231 } 1232 1233 /* change adll */ 1234 adll_offset = 5; 1235 CHECK_STATUS(ddr3_tip_bus_write 1236 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1237 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA, 1238 CTX_PHY_REG(effective_cs), 1239 data + adll_offset)); 1240 CHECK_STATUS(ddr3_tip_bus_read 1241 (dev_num, if_id, ACCESS_TYPE_UNICAST, 1242 bus_id, DDR_PHY_DATA, 1243 CTX_PHY_REG(effective_cs), 1244 &data_tmp)); 1245 DEBUG_LEVELING( 1246 DEBUG_LEVEL_TRACE, 1247 ("WL Supp: adll_offset= %d data delay = %d\n", 1248 adll_offset, data_tmp)); 1249 1250 if (ddr3_tip_wl_supp_align_phase_shift 1251 (dev_num, if_id, bus_id) == MV_OK) { 1252 DEBUG_LEVELING( 1253 DEBUG_LEVEL_TRACE, 1254 ("WL Supp: IF %d bus_id %d adll_offset= %d Success !\n", 1255 if_id, bus_id, adll_offset)); 1256 continue; 1257 } 1258 1259 /* change adll */ 1260 adll_offset = -5; 1261 CHECK_STATUS(ddr3_tip_bus_write 1262 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1263 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA, 1264 CTX_PHY_REG(effective_cs), 1265 data + adll_offset)); 1266 CHECK_STATUS(ddr3_tip_bus_read 1267 (dev_num, if_id, ACCESS_TYPE_UNICAST, 1268 bus_id, DDR_PHY_DATA, 1269 CTX_PHY_REG(effective_cs), 1270 &data_tmp)); 1271 DEBUG_LEVELING( 1272 DEBUG_LEVEL_TRACE, 1273 ("WL Supp: adll_offset= %d data delay = %d\n", 1274 adll_offset, data_tmp)); 1275 if (ddr3_tip_wl_supp_align_phase_shift 1276 (dev_num, if_id, bus_id) == MV_OK) { 1277 DEBUG_LEVELING( 1278 DEBUG_LEVEL_TRACE, 1279 ("WL Supp: IF %d bus_id %d adll_offset= %d Success !\n", 1280 if_id, bus_id, adll_offset)); 1281 continue; 1282 } else { 1283 DEBUG_LEVELING( 1284 DEBUG_LEVEL_ERROR, 1285 ("WL Supp: IF %d bus_id %d Failed !\n", 1286 if_id, bus_id)); 1287 is_if_fail = 1; 1288 } 1289 } 1290 1291 if (is_if_fail == 1) { 1292 DEBUG_LEVELING(DEBUG_LEVEL_ERROR, 1293 ("WL Supp: CS# %d: IF %d failed\n", 1294 effective_cs, if_id)); 1295 training_result[training_stage][if_id] = TEST_FAILED; 1296 } else { 1297 training_result[training_stage][if_id] = TEST_SUCCESS; 1298 } 1299 } 1300 1301 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1302 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1303 if (training_result[training_stage][if_id] == TEST_FAILED) 1304 return MV_FAIL; 1305 } 1306 1307 return MV_OK; 1308 } 1309 1310 /* 1311 * Phase Shift 1312 */ 1313 static int ddr3_tip_wl_supp_align_phase_shift(u32 dev_num, u32 if_id, 1314 u32 bus_id) 1315 { 1316 u32 original_phase; 1317 u32 data, write_data; 1318 1319 wr_supp_res[if_id][bus_id].stage = PHASE_SHIFT; 1320 if (ddr3_tip_xsb_compare_test 1321 (dev_num, if_id, bus_id, 0) == MV_OK) 1322 return MV_OK; 1323 1324 /* Read current phase */ 1325 CHECK_STATUS(ddr3_tip_bus_read 1326 (dev_num, if_id, ACCESS_TYPE_UNICAST, bus_id, 1327 DDR_PHY_DATA, WL_PHY_REG(effective_cs), &data)); 1328 original_phase = (data >> 6) & 0x7; 1329 1330 /* Set phase (0x0[6-8]) -2 */ 1331 if (original_phase >= 1) { 1332 if (original_phase == 1) 1333 write_data = data & ~0x1df; 1334 else 1335 write_data = (data & ~0x1c0) | 1336 ((original_phase - 2) << 6); 1337 ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, 
if_id, 1338 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA, 1339 WL_PHY_REG(effective_cs), write_data); 1340 if (ddr3_tip_xsb_compare_test 1341 (dev_num, if_id, bus_id, -2) == MV_OK) 1342 return MV_OK; 1343 } 1344 1345 /* Set phase (0x0[6-8]) +2 */ 1346 if (original_phase <= 5) { 1347 write_data = (data & ~0x1c0) | 1348 ((original_phase + 2) << 6); 1349 ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, 1350 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA, 1351 WL_PHY_REG(effective_cs), write_data); 1352 if (ddr3_tip_xsb_compare_test 1353 (dev_num, if_id, bus_id, 2) == MV_OK) 1354 return MV_OK; 1355 } 1356 1357 /* Set phase (0x0[6-8]) +4 */ 1358 if (original_phase <= 3) { 1359 write_data = (data & ~0x1c0) | 1360 ((original_phase + 4) << 6); 1361 ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, 1362 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA, 1363 WL_PHY_REG(effective_cs), write_data); 1364 if (ddr3_tip_xsb_compare_test 1365 (dev_num, if_id, bus_id, 4) == MV_OK) 1366 return MV_OK; 1367 } 1368 1369 /* Set phase (0x0[6-8]) +6 */ 1370 if (original_phase <= 1) { 1371 write_data = (data & ~0x1c0) | 1372 ((original_phase + 6) << 6); 1373 ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, 1374 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA, 1375 WL_PHY_REG(effective_cs), write_data); 1376 if (ddr3_tip_xsb_compare_test 1377 (dev_num, if_id, bus_id, 6) == MV_OK) 1378 return MV_OK; 1379 } 1380 1381 /* Write original WL result back */ 1382 ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, 1383 ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA, 1384 WL_PHY_REG(effective_cs), data); 1385 wr_supp_res[if_id][bus_id].is_pup_fail = 1; 1386 1387 return MV_FAIL; 1388 } 1389 1390 /* 1391 * Compare Test 1392 */ 1393 static int ddr3_tip_xsb_compare_test(u32 dev_num, u32 if_id, u32 bus_id, 1394 u32 edge_offset) 1395 { 1396 u32 num_of_succ_byte_compare, word_in_pattern; 1397 u32 word_offset, i, num_of_word_mult; 1398 u32 read_pattern[TEST_PATTERN_LENGTH * 2]; 1399 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table(); 1400 u32 pattern_test_pattern_table[8]; 1401 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1402 1403 /* 3 below for INTERFACE_BUS_MASK_16BIT */ 1404 num_of_word_mult = (tm->bus_act_mask == 3) ? 
1 : 2; 1405 1406 for (i = 0; i < 8; i++) { 1407 pattern_test_pattern_table[i] = 1408 pattern_table_get_word(dev_num, PATTERN_TEST, (u8)i); 1409 } 1410 1411 /* External write, read and compare */ 1412 CHECK_STATUS(ddr3_tip_load_pattern_to_mem(dev_num, PATTERN_TEST)); 1413 1414 CHECK_STATUS(ddr3_tip_reset_fifo_ptr(dev_num)); 1415 1416 CHECK_STATUS(ddr3_tip_ext_read 1417 (dev_num, if_id, 1418 ((pattern_table[PATTERN_TEST].start_addr << 3) + 1419 ((SDRAM_CS_SIZE + 1) * effective_cs)), 1, read_pattern)); 1420 1421 DEBUG_LEVELING( 1422 DEBUG_LEVEL_TRACE, 1423 ("XSB-compt CS#%d: IF %d bus_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n", 1424 effective_cs, if_id, bus_id, 1425 read_pattern[0], read_pattern[1], 1426 read_pattern[2], read_pattern[3], 1427 read_pattern[4], read_pattern[5], 1428 read_pattern[6], read_pattern[7])); 1429 1430 /* compare byte per pup */ 1431 num_of_succ_byte_compare = 0; 1432 for (word_in_pattern = start_xsb_offset; 1433 word_in_pattern < (TEST_PATTERN_LENGTH * num_of_word_mult); 1434 word_in_pattern++) { 1435 word_offset = word_in_pattern; 1436 if ((word_offset > (TEST_PATTERN_LENGTH * 2 - 1))) 1437 continue; 1438 1439 if ((read_pattern[word_in_pattern] & pup_mask_table[bus_id]) == 1440 (pattern_test_pattern_table[word_offset] & 1441 pup_mask_table[bus_id])) 1442 num_of_succ_byte_compare++; 1443 } 1444 1445 if ((TEST_PATTERN_LENGTH * num_of_word_mult - start_xsb_offset) == 1446 num_of_succ_byte_compare) { 1447 wr_supp_res[if_id][bus_id].stage = edge_offset; 1448 DEBUG_LEVELING(DEBUG_LEVEL_TRACE, 1449 ("supplementary: shift to %d for if %d pup %d success\n", 1450 edge_offset, if_id, bus_id)); 1451 wr_supp_res[if_id][bus_id].is_pup_fail = 0; 1452 1453 return MV_OK; 1454 } else { 1455 DEBUG_LEVELING( 1456 DEBUG_LEVEL_TRACE, 1457 ("XSB-compt CS#%d: IF %d bus_id %d num_of_succ_byte_compare %d - Fail!\n", 1458 effective_cs, if_id, bus_id, num_of_succ_byte_compare)); 1459 1460 DEBUG_LEVELING( 1461 DEBUG_LEVEL_TRACE, 1462 ("XSB-compt: expected 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n", 1463 pattern_test_pattern_table[0], 1464 pattern_test_pattern_table[1], 1465 pattern_test_pattern_table[2], 1466 pattern_test_pattern_table[3], 1467 pattern_test_pattern_table[4], 1468 pattern_test_pattern_table[5], 1469 pattern_test_pattern_table[6], 1470 pattern_test_pattern_table[7])); 1471 DEBUG_LEVELING( 1472 DEBUG_LEVEL_TRACE, 1473 ("XSB-compt: recieved 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n", 1474 read_pattern[0], read_pattern[1], 1475 read_pattern[2], read_pattern[3], 1476 read_pattern[4], read_pattern[5], 1477 read_pattern[6], read_pattern[7])); 1478 1479 return MV_FAIL; 1480 } 1481 } 1482 1483 /* 1484 * Dynamic write leveling sequence 1485 */ 1486 static int ddr3_tip_dynamic_write_leveling_seq(u32 dev_num) 1487 { 1488 u32 bus_id, dq_id; 1489 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map(); 1490 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg(); 1491 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 1492 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1493 1494 CHECK_STATUS(ddr3_tip_if_write 1495 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1496 TRAINING_SW_2_REG, 0x1, 0x5)); 1497 CHECK_STATUS(ddr3_tip_if_write 1498 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1499 TRAINING_WL_REG, 0x50, 0xff)); 1500 CHECK_STATUS(ddr3_tip_if_write 1501 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1502 TRAINING_WL_REG, 0x5c, 0xff)); 1503 CHECK_STATUS(ddr3_tip_if_write 1504 (dev_num, 
ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1505 GENERAL_TRAINING_OPCODE_REG, 0x381b82, 0x3c3faf)); 1506 CHECK_STATUS(ddr3_tip_if_write 1507 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1508 OPCODE_REG0_REG(1), (0x3 << 25), (0x3ffff << 9))); 1509 CHECK_STATUS(ddr3_tip_if_write 1510 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1511 OPCODE_REG1_REG(1), 0x80, 0xffff)); 1512 CHECK_STATUS(ddr3_tip_if_write 1513 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1514 WL_DONE_CNTR_REF_REG, 0x14, 0xff)); 1515 CHECK_STATUS(ddr3_tip_if_write 1516 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1517 TRAINING_WL_REG, 0xff5c, 0xffff)); 1518 1519 /* mask PBS */ 1520 for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) { 1521 CHECK_STATUS(ddr3_tip_if_write 1522 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1523 mask_results_dq_reg_map[dq_id], 0x1 << 24, 1524 0x1 << 24)); 1525 } 1526 1527 /* Mask all results */ 1528 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) { 1529 CHECK_STATUS(ddr3_tip_if_write 1530 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1531 mask_results_pup_reg_map[bus_id], 0x1 << 24, 1532 0x1 << 24)); 1533 } 1534 1535 /* Unmask only wanted */ 1536 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) { 1537 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id); 1538 CHECK_STATUS(ddr3_tip_if_write 1539 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1540 mask_results_pup_reg_map[bus_id], 0, 0x1 << 24)); 1541 } 1542 1543 CHECK_STATUS(ddr3_tip_if_write 1544 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1545 WL_DQS_PATTERN_REG, 0x1, 0x1)); 1546 1547 return MV_OK; 1548 } 1549 1550 /* 1551 * Dynamic read leveling sequence 1552 */ 1553 static int ddr3_tip_dynamic_read_leveling_seq(u32 dev_num) 1554 { 1555 u32 bus_id, dq_id; 1556 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map(); 1557 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg(); 1558 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 1559 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1560 1561 /* mask PBS */ 1562 for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) { 1563 CHECK_STATUS(ddr3_tip_if_write 1564 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1565 mask_results_dq_reg_map[dq_id], 0x1 << 24, 1566 0x1 << 24)); 1567 } 1568 1569 /* Mask all results */ 1570 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) { 1571 CHECK_STATUS(ddr3_tip_if_write 1572 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1573 mask_results_pup_reg_map[bus_id], 0x1 << 24, 1574 0x1 << 24)); 1575 } 1576 1577 /* Unmask only wanted */ 1578 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) { 1579 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id); 1580 CHECK_STATUS(ddr3_tip_if_write 1581 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1582 mask_results_pup_reg_map[bus_id], 0, 0x1 << 24)); 1583 } 1584 1585 return MV_OK; 1586 } 1587 1588 /* 1589 * Dynamic read leveling sequence 1590 */ 1591 static int ddr3_tip_dynamic_per_bit_read_leveling_seq(u32 dev_num) 1592 { 1593 u32 bus_id, dq_id; 1594 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map(); 1595 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg(); 1596 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 1597 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1598 1599 /* mask PBS */ 1600 for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) { 1601 CHECK_STATUS(ddr3_tip_if_write 1602 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1603 
mask_results_dq_reg_map[dq_id], 0x1 << 24, 1604 0x1 << 24)); 1605 } 1606 1607 /* Mask all results */ 1608 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) { 1609 CHECK_STATUS(ddr3_tip_if_write 1610 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1611 mask_results_pup_reg_map[bus_id], 0x1 << 24, 1612 0x1 << 24)); 1613 } 1614 1615 /* Unmask only wanted */ 1616 for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) { 1617 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, dq_id / 8); 1618 CHECK_STATUS(ddr3_tip_if_write 1619 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1620 mask_results_dq_reg_map[dq_id], 0x0 << 24, 1621 0x1 << 24)); 1622 } 1623 1624 return MV_OK; 1625 } 1626 1627 /* 1628 * Print write leveling supplementary results 1629 */ 1630 int ddr3_tip_print_wl_supp_result(u32 dev_num) 1631 { 1632 u32 bus_id = 0, if_id = 0; 1633 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 1634 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1635 1636 DEBUG_LEVELING(DEBUG_LEVEL_INFO, 1637 ("I/F0 PUP0 Result[0 - success, 1-fail] ...\n")); 1638 1639 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1640 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1641 for (bus_id = 0; bus_id < octets_per_if_num; 1642 bus_id++) { 1643 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id); 1644 DEBUG_LEVELING(DEBUG_LEVEL_INFO, 1645 ("%d ,", wr_supp_res[if_id] 1646 [bus_id].is_pup_fail)); 1647 } 1648 } 1649 DEBUG_LEVELING( 1650 DEBUG_LEVEL_INFO, 1651 ("I/F0 PUP0 Stage[0-phase_shift, 1-clock_shift, 2-align_shift] ...\n")); 1652 1653 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1654 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1655 for (bus_id = 0; bus_id < octets_per_if_num; 1656 bus_id++) { 1657 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id); 1658 DEBUG_LEVELING(DEBUG_LEVEL_INFO, 1659 ("%d ,", wr_supp_res[if_id] 1660 [bus_id].stage)); 1661 } 1662 } 1663 1664 return MV_OK; 1665 } 1666 1667 #define RD_FIFO_PTR_LOW_STAT_INDIR_ADDR 0x9a 1668 #define RD_FIFO_PTR_HIGH_STAT_INDIR_ADDR 0x9b 1669 /* position of falling dqs edge in fifo; walking 1 */ 1670 #define RD_FIFO_DQS_FALL_EDGE_POS_0 0x1 1671 #define RD_FIFO_DQS_FALL_EDGE_POS_1 0x2 1672 #define RD_FIFO_DQS_FALL_EDGE_POS_2 0x4 1673 #define RD_FIFO_DQS_FALL_EDGE_POS_3 0x8 1674 #define RD_FIFO_DQS_FALL_EDGE_POS_4 0x10 /* lock */ 1675 /* position of rising dqs edge in fifo; walking 0 */ 1676 #define RD_FIFO_DQS_RISE_EDGE_POS_0 0x1fff 1677 #define RD_FIFO_DQS_RISE_EDGE_POS_1 0x3ffe 1678 #define RD_FIFO_DQS_RISE_EDGE_POS_2 0x3ffd 1679 #define RD_FIFO_DQS_RISE_EDGE_POS_3 0x3ffb 1680 #define RD_FIFO_DQS_RISE_EDGE_POS_4 0x3ff7 /* lock */ 1681 #define TEST_ADDR 0x8 1682 #define TAPS_PER_UI 32 1683 #define UI_PER_RD_SAMPLE 4 1684 #define TAPS_PER_RD_SAMPLE ((UI_PER_RD_SAMPLE) * (TAPS_PER_UI)) 1685 #define MAX_RD_SAMPLES 32 1686 #define MAX_RL_VALUE ((MAX_RD_SAMPLES) * (TAPS_PER_RD_SAMPLE)) 1687 #define RD_FIFO_DLY 8 1688 #define STEP_SIZE 64 1689 #define RL_JITTER_WIDTH_LMT 20 1690 #define ADLL_TAPS_IN_CYCLE 64 1691 1692 enum rl_dqs_burst_state { 1693 RL_AHEAD = 0, 1694 RL_INSIDE, 1695 RL_BEHIND 1696 }; 1697 int mv_ddr_rl_dqs_burst(u32 dev_num, u32 if_id, u32 freq) 1698 { 1699 enum rl_dqs_burst_state rl_state[NUM_OF_CS][MAX_BUS_NUM][MAX_INTERFACE_NUM] = { { {0} } }; 1700 enum hws_ddr_phy subphy_type = DDR_PHY_DATA; 1701 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 1702 int cl_val = tm->interface_params[0].cas_l; 1703 int rl_adll_val, rl_phase_val, sdr_cycle_incr, rd_sample, rd_ready; 1704 int final_rd_sample, 
final_rd_ready; 1705 int i, subphy_id, step; 1706 int pass_lock_num = 0; 1707 int init_pass_lock_num; 1708 int phase_delta; 1709 int min_phase, max_phase; 1710 u32 max_cs = ddr3_tip_max_cs_get(dev_num); 1711 u32 rl_values[NUM_OF_CS][MAX_BUS_NUM][MAX_INTERFACE_NUM] = { { {0} } }; 1712 u32 rl_min_values[NUM_OF_CS][MAX_BUS_NUM][MAX_INTERFACE_NUM] = { { {0} } }; 1713 u32 rl_max_values[NUM_OF_CS][MAX_BUS_NUM][MAX_INTERFACE_NUM] = { { {0} } }; 1714 u32 rl_val, rl_min_val[NUM_OF_CS], rl_max_val[NUM_OF_CS]; 1715 u32 reg_val_low, reg_val_high; 1716 u32 reg_val, reg_mask; 1717 uintptr_t test_addr = TEST_ADDR; 1718 1719 /* initialization */ 1720 if (ddr3_if_ecc_enabled()) { 1721 ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST, if_id, TRAINING_SW_2_REG, 1722 ®_val, MASK_ALL_BITS); 1723 reg_mask = (TRAINING_ECC_MUX_MASK << TRAINING_ECC_MUX_OFFS) | 1724 (TRAINING_SW_OVRD_MASK << TRAINING_SW_OVRD_OFFS); 1725 reg_val &= ~reg_mask; 1726 reg_val |= (TRAINING_ECC_MUX_DIS << TRAINING_ECC_MUX_OFFS) | 1727 (TRAINING_SW_OVRD_ENA << TRAINING_SW_OVRD_OFFS); 1728 ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, TRAINING_SW_2_REG, 1729 reg_val, MASK_ALL_BITS); 1730 ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST, if_id, TRAINING_REG, 1731 ®_val, MASK_ALL_BITS); 1732 reg_mask = (TRN_START_MASK << TRN_START_OFFS); 1733 reg_val &= ~reg_mask; 1734 reg_val |= TRN_START_ENA << TRN_START_OFFS; 1735 ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, TRAINING_REG, 1736 reg_val, MASK_ALL_BITS); 1737 } 1738 1739 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) 1740 for (subphy_id = 0; subphy_id < MAX_BUS_NUM; subphy_id++) 1741 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) 1742 if (IS_BUS_ACTIVE(tm->bus_act_mask, subphy_id) == 0) 1743 pass_lock_num++; /* increment on inactive subphys */ 1744 1745 init_pass_lock_num = pass_lock_num / max_cs; 1746 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 1747 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) { 1748 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1749 training_result[training_stage][if_id] = TEST_SUCCESS; 1750 } 1751 } 1752 1753 /* search for dqs edges per subphy */ 1754 if_id = 0; 1755 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 1756 pass_lock_num = init_pass_lock_num; 1757 ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ODPG_DATA_CTRL_REG, 1758 effective_cs << ODPG_DATA_CS_OFFS, 1759 ODPG_DATA_CS_MASK << ODPG_DATA_CS_OFFS); 1760 rl_min_val[effective_cs] = MAX_RL_VALUE; 1761 rl_max_val[effective_cs] = 0; 1762 step = STEP_SIZE; 1763 for (i = 0; i < MAX_RL_VALUE; i += step) { 1764 rl_val = 0; 1765 sdr_cycle_incr = i / TAPS_PER_RD_SAMPLE; /* sdr cycle increment */ 1766 rd_sample = cl_val + 2 * sdr_cycle_incr; 1767 /* fifo out to in delay in search is constant */ 1768 rd_ready = rd_sample + RD_FIFO_DLY; 1769 1770 ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, RD_DATA_SMPL_DLYS_REG, 1771 rd_sample << RD_SMPL_DLY_CS_OFFS(effective_cs), 1772 RD_SMPL_DLY_CS_MASK << RD_SMPL_DLY_CS_OFFS(effective_cs)); 1773 ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, RD_DATA_RDY_DLYS_REG, 1774 rd_ready << RD_RDY_DLY_CS_OFFS(effective_cs), 1775 RD_RDY_DLY_CS_MASK << RD_RDY_DLY_CS_OFFS(effective_cs)); 1776 1777 /* one sdr (single data rate) cycle incremented on every four phases of ddr clock */ 1778 sdr_cycle_incr = i % TAPS_PER_RD_SAMPLE; 1779 rl_adll_val = sdr_cycle_incr % MAX_RD_SAMPLES; 1780 rl_phase_val = sdr_cycle_incr / MAX_RD_SAMPLES; 1781 rl_val = ((rl_adll_val & RL_REF_DLY_MASK) << RL_REF_DLY_OFFS) | 1782 
			/* one sdr (single data rate) cycle incremented on every four phases of ddr clock */
			sdr_cycle_incr = i % TAPS_PER_RD_SAMPLE;
			rl_adll_val = sdr_cycle_incr % MAX_RD_SAMPLES;
			rl_phase_val = sdr_cycle_incr / MAX_RD_SAMPLES;
			rl_val = ((rl_adll_val & RL_REF_DLY_MASK) << RL_REF_DLY_OFFS) |
				 ((rl_phase_val & RL_PH_SEL_MASK) << RL_PH_SEL_OFFS);

			/* write to all subphys (even to not connected or locked) */
			ddr3_tip_bus_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
					   0, DDR_PHY_DATA, RL_PHY_REG(effective_cs), rl_val);

			/* reset read fifo assertion */
			ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, if_id, SDRAM_CFG_REG,
					  DATA_PUP_RD_RESET_ENA << DATA_PUP_RD_RESET_OFFS,
					  DATA_PUP_RD_RESET_MASK << DATA_PUP_RD_RESET_OFFS);

			/* reset read fifo deassertion */
			ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, if_id, SDRAM_CFG_REG,
					  DATA_PUP_RD_RESET_DIS << DATA_PUP_RD_RESET_OFFS,
					  DATA_PUP_RD_RESET_MASK << DATA_PUP_RD_RESET_OFFS);

			/* perform one read burst */
			if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask))
				readq(test_addr);
			else
				readl(test_addr);

			/* progress read ptr; decide on rl state per byte */
			for (subphy_id = 0; subphy_id < MAX_BUS_NUM; subphy_id++) {
				if (rl_state[effective_cs][subphy_id][if_id] == RL_BEHIND)
					continue;	/* skip locked subphys */
				ddr3_tip_bus_read(dev_num, if_id, ACCESS_TYPE_UNICAST, subphy_id, DDR_PHY_DATA,
						  RD_FIFO_PTR_LOW_STAT_INDIR_ADDR, &reg_val_low);
				ddr3_tip_bus_read(dev_num, if_id, ACCESS_TYPE_UNICAST, subphy_id, DDR_PHY_DATA,
						  RD_FIFO_PTR_HIGH_STAT_INDIR_ADDR, &reg_val_high);
				DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
					       ("%s: cs %d, step %d, subphy %d, state %d, low 0x%04x, high 0x%04x; move to ",
						__func__, effective_cs, i, subphy_id,
						rl_state[effective_cs][subphy_id][if_id],
						reg_val_low, reg_val_high));

				switch (rl_state[effective_cs][subphy_id][if_id]) {
				case RL_AHEAD:
					/* improve search resolution getting closer to the window */
					if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_4 &&
					    reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_4) {
						rl_state[effective_cs][subphy_id][if_id] = RL_INSIDE;
						rl_values[effective_cs][subphy_id][if_id] = i;
						rl_min_values[effective_cs][subphy_id][if_id] = i;
						DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
							       ("new state %d\n",
								rl_state[effective_cs][subphy_id][if_id]));
					} else if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_3 &&
						   reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_3) {
						step = (step < 2) ? step : 2;
					} else if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_2 &&
						   reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_2) {
						step = (step < 16) ? step : 16;
					} else if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_1 &&
						   reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_1) {
						step = (step < 32) ? step : 32;
					} else if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_0 &&
						   reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_0) {
						step = (step < 64) ? step : 64;
					} else {
						/* otherwise, step is unchanged */
					}
					break;
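				/*
				 * RL_INSIDE: the lock pattern was first seen at
				 * rl_values[]. Keep sampling to measure the passing
				 * window: a window wider than ADLL_TAPS_IN_CYCLE taps,
				 * or one that ends after at least RL_JITTER_WIDTH_LMT
				 * taps, is accepted and its center becomes the subphy
				 * solution; a narrower one is treated as jitter and the
				 * search falls back to RL_AHEAD.
				 */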
				case RL_INSIDE:
					if (reg_val_low == RD_FIFO_DQS_FALL_EDGE_POS_4 &&
					    reg_val_high == RD_FIFO_DQS_RISE_EDGE_POS_4) {
						rl_max_values[effective_cs][subphy_id][if_id] = i;
						if ((rl_max_values[effective_cs][subphy_id][if_id] -
						     rl_min_values[effective_cs][subphy_id][if_id]) >
						    ADLL_TAPS_IN_CYCLE) {
							rl_state[effective_cs][subphy_id][if_id] = RL_BEHIND;
							rl_values[effective_cs][subphy_id][if_id] =
								(i + rl_values[effective_cs][subphy_id][if_id]) / 2;
							pass_lock_num++;
							DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
								       ("new lock %d\n", pass_lock_num));
							if (rl_min_val[effective_cs] >
							    rl_values[effective_cs][subphy_id][if_id])
								rl_min_val[effective_cs] =
									rl_values[effective_cs][subphy_id][if_id];
							if (rl_max_val[effective_cs] <
							    rl_values[effective_cs][subphy_id][if_id])
								rl_max_val[effective_cs] =
									rl_values[effective_cs][subphy_id][if_id];
							step = 2;
						}
					}
					if (reg_val_low != RD_FIFO_DQS_FALL_EDGE_POS_4 ||
					    reg_val_high != RD_FIFO_DQS_RISE_EDGE_POS_4) {
						if ((i - rl_values[effective_cs][subphy_id][if_id]) <
						    RL_JITTER_WIDTH_LMT) {
							/* inside the jitter; not a valid segment */
							rl_state[effective_cs][subphy_id][if_id] = RL_AHEAD;
							DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
								       ("new state %d; jitter on mask\n",
									rl_state[effective_cs][subphy_id][if_id]));
						} else {	/* finished a valid segment */
							rl_state[effective_cs][subphy_id][if_id] = RL_BEHIND;
							rl_values[effective_cs][subphy_id][if_id] =
								(i + rl_values[effective_cs][subphy_id][if_id]) / 2;
							DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
								       ("new state %d, solution %d\n",
									rl_state[effective_cs][subphy_id][if_id],
									rl_values[effective_cs][subphy_id][if_id]));
							pass_lock_num++;
							DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
								       ("new lock %d\n", pass_lock_num));
							if (rl_min_val[effective_cs] >
							    rl_values[effective_cs][subphy_id][if_id])
								rl_min_val[effective_cs] =
									rl_values[effective_cs][subphy_id][if_id];
							if (rl_max_val[effective_cs] <
							    rl_values[effective_cs][subphy_id][if_id])
								rl_max_val[effective_cs] =
									rl_values[effective_cs][subphy_id][if_id];
							step = 2;
						}
					}
					break;
				case RL_BEHIND:	/* do nothing */
					break;
				}
				DEBUG_LEVELING(DEBUG_LEVEL_TRACE, ("\n"));
			}
			DEBUG_LEVELING(DEBUG_LEVEL_TRACE, ("pass_lock_num %d\n", pass_lock_num));
			/* exit condition */
			if (pass_lock_num == MAX_BUS_NUM)
				break;
		} /* for-loop on i */

		if (pass_lock_num != MAX_BUS_NUM) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("%s: cs %d, pass_lock_num %d, max_bus_num %d, init_pass_lock_num %d\n",
					__func__, effective_cs, pass_lock_num, MAX_BUS_NUM, init_pass_lock_num));
			for (subphy_id = 0; subphy_id < MAX_BUS_NUM; subphy_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, subphy_id);
				DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
					       ("%s: subphy %d %s\n",
						__func__, subphy_id,
						(rl_state[effective_cs][subphy_id][if_id] == RL_BEHIND) ?
						"locked" : "not locked"));
			}
		}
	} /* for-loop on effective_cs */

	/* post-processing read leveling results */
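	/*
	 * The smallest solution found across the subphys determines the common
	 * read-sample and read-ready delays programmed per chip-select; each
	 * subphy's residual (its solution minus the common sdr-cycle part) is
	 * then split into the ADLL tap and phase-select fields of its RL PHY
	 * register.
	 */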
	if_id = 0;
	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		phase_delta = 0;
		i = rl_min_val[effective_cs];
		sdr_cycle_incr = i / TAPS_PER_RD_SAMPLE;	/* sdr cycle increment */
		rd_sample = cl_val + 2 * sdr_cycle_incr;
		rd_ready = rd_sample + RD_FIFO_DLY;
		min_phase = (rl_min_val[effective_cs] - (sdr_cycle_incr * TAPS_PER_RD_SAMPLE)) % MAX_RD_SAMPLES;
		max_phase = (rl_max_val[effective_cs] - (sdr_cycle_incr * TAPS_PER_RD_SAMPLE)) % MAX_RD_SAMPLES;
		final_rd_sample = rd_sample;
		final_rd_ready = rd_ready;

		ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, RD_DATA_SMPL_DLYS_REG,
				  rd_sample << RD_SMPL_DLY_CS_OFFS(effective_cs),
				  RD_SMPL_DLY_CS_MASK << RD_SMPL_DLY_CS_OFFS(effective_cs));
		ddr3_tip_if_write(0, ACCESS_TYPE_UNICAST, 0, RD_DATA_RDY_DLYS_REG,
				  rd_ready << RD_RDY_DLY_CS_OFFS(effective_cs),
				  RD_RDY_DLY_CS_MASK << RD_RDY_DLY_CS_OFFS(effective_cs));
		DEBUG_LEVELING(DEBUG_LEVEL_INFO,
			       ("%s: cs %d, min phase %d, max phase %d, read sample %d\n",
				__func__, effective_cs, min_phase, max_phase, rd_sample));

		for (subphy_id = 0; subphy_id < MAX_BUS_NUM; subphy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, subphy_id);
			/* reduce sdr cycle per cs; extract rl adll and phase values */
			i = rl_values[effective_cs][subphy_id][if_id] - (sdr_cycle_incr * TAPS_PER_RD_SAMPLE);
			rl_adll_val = i % MAX_RD_SAMPLES;
			rl_phase_val = i / MAX_RD_SAMPLES;
			rl_phase_val -= phase_delta;
			DEBUG_LEVELING(DEBUG_LEVEL_INFO,
				       ("%s: final results: cs %d, subphy %d, read sample %d, read ready %d, rl_phase_val %d, rl_adll_val %d\n",
					__func__, effective_cs, subphy_id, final_rd_sample,
					final_rd_ready, rl_phase_val, rl_adll_val));

			rl_val = ((rl_adll_val & RL_REF_DLY_MASK) << RL_REF_DLY_OFFS) |
				 ((rl_phase_val & RL_PH_SEL_MASK) << RL_PH_SEL_OFFS);
			ddr3_tip_bus_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ACCESS_TYPE_UNICAST,
					   subphy_id, subphy_type, RL_PHY_REG(effective_cs), rl_val);
		}
	} /* for-loop on effective_cs */

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		if (odt_config != 0)
			CHECK_STATUS(ddr3_tip_write_additional_odt_setting(dev_num, if_id));
	}

	/* reset read fifo assertion */
	ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, if_id, SDRAM_CFG_REG,
			  DATA_PUP_RD_RESET_ENA << DATA_PUP_RD_RESET_OFFS,
			  DATA_PUP_RD_RESET_MASK << DATA_PUP_RD_RESET_OFFS);

	/* reset read fifo deassertion */
	ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, if_id, SDRAM_CFG_REG,
			  DATA_PUP_RD_RESET_DIS << DATA_PUP_RD_RESET_OFFS,
			  DATA_PUP_RD_RESET_MASK << DATA_PUP_RD_RESET_OFFS);

	return MV_OK;
}