// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include <common.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

#define TYPICAL_PBS_VALUE	12

u32 nominal_adll[MAX_INTERFACE_NUM * MAX_BUS_NUM];
enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
u8 result_mat[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
u8 result_mat_rx_dqs[MAX_INTERFACE_NUM][MAX_BUS_NUM][MAX_CS_NUM];
/* 4-EEWA, 3-EWA, 2-SWA, 1-Fail, 0-Pass */
u8 result_all_bit[MAX_BUS_NUM * BUS_WIDTH_IN_BITS * MAX_INTERFACE_NUM];
u8 max_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 min_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 max_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 min_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 pbsdelay_per_pup[NUM_OF_PBS_MODES][MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 adll_shift_lock[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 adll_shift_val[MAX_INTERFACE_NUM][MAX_BUS_NUM];
enum hws_pattern pbs_pattern = PATTERN_VREF;
static u8 pup_state[MAX_INTERFACE_NUM][MAX_BUS_NUM];

/*
 * Name:	ddr3_tip_pbs
 * Desc:	Per-bit skew (PBS) training
 * Args:	TBD
 * Notes:
 * Returns:	OK if success, other error code if fail.
 */
int ddr3_tip_pbs(u32 dev_num, enum pbs_dir pbs_mode)
{
	u32 res0[MAX_INTERFACE_NUM];
	int adll_tap = MEGA / freq_val[medium_freq] / 64;
	int pad_num = 0;
	enum hws_search_dir search_dir =
		(pbs_mode == PBS_RX_MODE) ? HWS_HIGH2LOW : HWS_LOW2HIGH;
	enum hws_dir dir = (pbs_mode == PBS_RX_MODE) ? OPER_READ : OPER_WRITE;
	int iterations = (pbs_mode == PBS_RX_MODE) ? 31 : 63;
	u32 res_valid_mask = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
	int init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
	enum hws_edge_compare search_edge = EDGE_FP;
	u32 pup = 0, bit = 0, if_id = 0, all_lock = 0, cs_num = 0;
	int reg_addr = 0;
	u32 validation_val = 0;
	u32 cs_enable_reg_val[MAX_INTERFACE_NUM];
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u8 temp = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/* save current cs enable reg val */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);

		/* save current cs enable reg val */
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      CS_ENABLE_REG, cs_enable_reg_val, MASK_ALL_BITS));

		/* enable single cs */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      CS_ENABLE_REG, (1 << 3), (1 << 3)));
	}

	reg_addr = (pbs_mode == PBS_RX_MODE) ?
		(READ_CENTRALIZATION_PHY_REG +
		 (effective_cs * CS_REGISTER_ADDR_OFFSET)) :
		(WRITE_CENTRALIZATION_PHY_REG +
		 (effective_cs * CS_REGISTER_ADDR_OFFSET));
	read_adll_value(nominal_adll, reg_addr, MASK_ALL_BITS);

	/* stage 1: shift ADLL */
	ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
			     PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
			     PARAM_NOT_CARE, RESULT_PER_BIT,
			     HWS_CONTROL_ELEMENT_ADLL, search_dir, dir,
			     tm->if_act_mask, init_val, iterations,
			     pbs_pattern, search_edge, CS_SINGLE, cs_num,
			     train_status);
	validation_val = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0;
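	/*
	 * pup_state tracking, as inferred from the state transitions in this
	 * function (not from a spec):
	 *   3 - EBA search in progress
	 *   4 - moved to EEBA (EBA failed; retried with a widened DQS window)
	 *   2 - moved to SBA
	 *   1 - training machine failed for this pup
	 * Bit 25 (0x2000000) of the per-bit result register appears to flag a
	 * valid/locked training result and is treated as such throughout.
	 */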
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			min_adll_per_pup[if_id][pup] =
				(pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
			pup_state[if_id][pup] = 0x3;
			adll_shift_lock[if_id][pup] = 1;
			max_adll_per_pup[if_id][pup] = 0x0;
		}
	}

	/* EBA */
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_MULTICAST,
				      PARAM_NOT_CARE,
				      mask_results_dq_reg_map[bit + pup * BUS_WIDTH_IN_BITS],
				      res0, MASK_ALL_BITS));
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
						 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
						  if_id, bit, pup, res0[if_id]));
				if (pup_state[if_id][pup] != 3)
					continue;
				/* if not in EBA state, move to the next pup */

				if ((res0[if_id] & 0x2000000) == 0) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- Fail Training IP\n"));
					/* training machine failed */
					pup_state[if_id][pup] = 1;
					adll_shift_lock[if_id][pup] = 0;
					continue;
				} else if ((res0[if_id] & res_valid_mask) ==
					   validation_val) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- FAIL EBA %d %d %d %d\n",
							  if_id, bit, pup, res0[if_id]));
					pup_state[if_id][pup] = 4;
					/* this pup moves to EEBA */
					adll_shift_lock[if_id][pup] = 0;
					continue;
				} else {
					/*
					 * the search ended in Pass; we need
					 * Fail
					 */
					res0[if_id] = (pbs_mode == PBS_RX_MODE) ?
						((res0[if_id] & res_valid_mask) + 1) :
						((res0[if_id] & res_valid_mask) - 1);
					max_adll_per_pup[if_id][pup] =
						(max_adll_per_pup[if_id][pup] <
						 res0[if_id]) ?
						(u8)res0[if_id] :
						max_adll_per_pup[if_id][pup];
					min_adll_per_pup[if_id][pup] =
						(res0[if_id] >
						 min_adll_per_pup[if_id][pup]) ?
						min_adll_per_pup[if_id][pup] :
						(u8)res0[if_id];
					/*
					 * unlike Rx, for Tx we search for the
					 * smallest DQ-shift value at which all
					 * buses fail
					 */
					adll_shift_val[if_id][pup] =
						(pbs_mode == PBS_RX_MODE) ?
						max_adll_per_pup[if_id][pup] :
						min_adll_per_pup[if_id][pup];
				}
			}
		}
	}

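	/*
	 * EEBA stage (intent inferred from the writes below): pups that
	 * failed the EBA search get their DQS PBS delay registers
	 * (0x54/0x55 for Rx, 0x14/0x15 for Tx, per effective_cs) set to the
	 * maximum 0x1f, and the ADLL shift search is then repeated.
	 */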
	/* EEBA */
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);

			if (pup_state[if_id][pup] != 4)
				continue;
			/*
			 * if the pup state differs from EEBA, move to the
			 * next pup
			 */
			reg_addr = (pbs_mode == PBS_RX_MODE) ?
				(0x54 + effective_cs * 0x10) :
				(0x14 + effective_cs * 0x10);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
				      reg_addr, 0x1f));
			reg_addr = (pbs_mode == PBS_RX_MODE) ?
				(0x55 + effective_cs * 0x10) :
				(0x15 + effective_cs * 0x10);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
				      reg_addr, 0x1f));
			/* initialize the Edge2 Max. */
			adll_shift_val[if_id][pup] = 0;
			min_adll_per_pup[if_id][pup] =
				(pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
			max_adll_per_pup[if_id][pup] = 0x0;

			ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
					     PARAM_NOT_CARE,
					     ACCESS_TYPE_MULTICAST,
					     PARAM_NOT_CARE, RESULT_PER_BIT,
					     HWS_CONTROL_ELEMENT_ADLL,
					     search_dir, dir,
					     tm->if_act_mask, init_val,
					     iterations, pbs_pattern,
					     search_edge, CS_SINGLE, cs_num,
					     train_status);
			DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
					 ("ADLL shift results:\n"));

			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_MULTICAST,
					      PARAM_NOT_CARE,
					      mask_results_dq_reg_map[bit + pup * BUS_WIDTH_IN_BITS],
					      res0, MASK_ALL_BITS));
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
						 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
						  if_id, bit, pup, res0[if_id]));

				if ((res0[if_id] & 0x2000000) == 0) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 (" -- EEBA Fail\n"));
					bit = BUS_WIDTH_IN_BITS;
					/* exit the bit loop */
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- EEBA Fail Training IP\n"));
					/*
					 * the training machine failed, but it
					 * passed before in the EBA, so maybe
					 * the DQS shift changed the
					 * environment
					 */
					pup_state[if_id][pup] = 2;
					adll_shift_lock[if_id][pup] = 0;
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						(0x54 + effective_cs * 0x10) :
						(0x14 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num, ACCESS_TYPE_UNICAST,
						      if_id, ACCESS_TYPE_UNICAST, pup,
						      DDR_PHY_DATA, reg_addr, 0x0));
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						(0x55 + effective_cs * 0x10) :
						(0x15 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num, ACCESS_TYPE_UNICAST,
						      if_id, ACCESS_TYPE_UNICAST, pup,
						      DDR_PHY_DATA, reg_addr, 0x0));
					continue;
				} else if ((res0[if_id] & res_valid_mask) ==
					   validation_val) {
					/* exit the bit loop */
					bit = BUS_WIDTH_IN_BITS;
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- FAIL EEBA\n"));
					/* this pup moves to SBA */
					pup_state[if_id][pup] = 2;
					adll_shift_lock[if_id][pup] = 0;
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						(0x54 + effective_cs * 0x10) :
						(0x14 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num, ACCESS_TYPE_UNICAST,
						      if_id, ACCESS_TYPE_UNICAST, pup,
						      DDR_PHY_DATA, reg_addr, 0x0));
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						(0x55 + effective_cs * 0x10) :
						(0x15 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num, ACCESS_TYPE_UNICAST,
						      if_id, ACCESS_TYPE_UNICAST, pup,
						      DDR_PHY_DATA, reg_addr, 0x0));
					continue;
				} else {
					adll_shift_lock[if_id][pup] = 1;
					/*
					 * the search ended in Pass; we need
					 * Fail
					 */
					res0[if_id] = (pbs_mode == PBS_RX_MODE) ?
						((res0[if_id] & res_valid_mask) + 1) :
						((res0[if_id] & res_valid_mask) - 1);
					max_adll_per_pup[if_id][pup] =
						(max_adll_per_pup[if_id][pup] <
						 res0[if_id]) ?
						(u8)res0[if_id] :
						max_adll_per_pup[if_id][pup];
					min_adll_per_pup[if_id][pup] =
						(res0[if_id] >
						 min_adll_per_pup[if_id][pup]) ?
						min_adll_per_pup[if_id][pup] :
						(u8)res0[if_id];
					/*
					 * unlike Rx, for Tx we search for the
					 * smallest DQ-shift value at which all
					 * buses fail
					 */
					adll_shift_val[if_id][pup] =
						(pbs_mode == PBS_RX_MODE) ?
						max_adll_per_pup[if_id][pup] :
						min_adll_per_pup[if_id][pup];
				}
			}
		}
	}

	/* Print stage result */
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
					 ("FP I/F %d, ADLL Shift for EBA: pup[%d] Lock status = %d Lock Val = %d,%d\n",
					  if_id, pup,
					  adll_shift_lock[if_id][pup],
					  max_adll_per_pup[if_id][pup],
					  min_adll_per_pup[if_id][pup]));
		}
	}
	DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
			 ("Update ADLL Shift of all pups:\n"));

	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			if (adll_shift_lock[if_id][pup] != 1)
				continue;
			/* if the pup is not locked, continue to the next pup */

			reg_addr = (pbs_mode == PBS_RX_MODE) ?
				(0x3 + effective_cs * 4) :
				(0x1 + effective_cs * 4);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
				      reg_addr, adll_shift_val[if_id][pup]));
			DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
					 ("FP I/F %d, Pup[%d] = %d\n", if_id,
					  pup, adll_shift_val[if_id][pup]));
		}
	}

	/* PBS EEBA&EBA */
	/* Start the per-bit skew search */
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			max_pbs_per_pup[if_id][pup] = 0x0;
			min_pbs_per_pup[if_id][pup] = 0x1f;
			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				/* reset result for PBS */
				result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
					       if_id * MAX_BUS_NUM *
					       BUS_WIDTH_IN_BITS] = 0;
			}
		}
	}

	iterations = 31;
	search_dir = HWS_LOW2HIGH;
	/* init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations; */
	init_val = 0;
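	/*
	 * Per-bit skew search over the DQ PBS delay. The 31 iterations
	 * presumably cover the 5-bit PBS field (0x00-0x1f); per-bit results
	 * are accumulated in result_all_bit and in the min/max values per
	 * pup below.
	 */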
	ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			     ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			     RESULT_PER_BIT, HWS_CONTROL_ELEMENT_DQ_SKEW,
			     search_dir, dir, tm->if_act_mask, init_val,
			     iterations, pbs_pattern, search_edge,
			     CS_SINGLE, cs_num, train_status);

	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			if (adll_shift_lock[if_id][pup] != 1) {
				/* if the pup is not locked, continue to the next pup */
				continue;
			}

			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_MULTICAST,
					      PARAM_NOT_CARE,
					      mask_results_dq_reg_map[bit + pup * BUS_WIDTH_IN_BITS],
					      res0, MASK_ALL_BITS));
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
						 ("Per Bit Skew search, FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
						  if_id, bit, pup, res0[if_id]));
				if ((res0[if_id] & 0x2000000) == 0) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
							 ("--EBA PBS Fail - Training IP machine\n"));
					/* exit the bit loop */
					bit = BUS_WIDTH_IN_BITS;
					/*
					 * the ADLL is no longer locked; a new
					 * search is needed
					 */
					adll_shift_lock[if_id][pup] = 0;
					/* move to SBA */
					pup_state[if_id][pup] = 2;
					max_pbs_per_pup[if_id][pup] = 0x0;
					min_pbs_per_pup[if_id][pup] = 0x1f;
					continue;
				} else {
					temp = (u8)(res0[if_id] & res_valid_mask);
					max_pbs_per_pup[if_id][pup] =
						(temp > max_pbs_per_pup[if_id][pup]) ?
						temp : max_pbs_per_pup[if_id][pup];
					min_pbs_per_pup[if_id][pup] =
						(temp < min_pbs_per_pup[if_id][pup]) ?
						temp : min_pbs_per_pup[if_id][pup];
					result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
						       if_id * MAX_BUS_NUM *
						       BUS_WIDTH_IN_BITS] = temp;
				}
			}
		}
	}

	/* Check that all pups are locked */
	all_lock = 1;
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			all_lock = all_lock * adll_shift_lock[if_id][pup];
		}
	}

	/* Only if not all pups are locked */
	if (all_lock == 0) {
		DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
				 ("##########ADLL shift for SBA###########\n"));

		/* ADLL shift for SBA */
		search_dir = (pbs_mode == PBS_RX_MODE) ? HWS_LOW2HIGH :
			HWS_HIGH2LOW;
		init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
		for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, pup);
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				if (adll_shift_lock[if_id][pup] == 1) {
					/* if the pup is locked, continue to the next pup */
					continue;
				}
				/* re-initialize the variables as before */
				adll_shift_lock[if_id][pup] = 0;
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					(0x54 + effective_cs * 0x10) :
					(0x14 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, 0));
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					(0x55 + effective_cs * 0x10) :
					(0x15 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, 0));
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					(0x5f + effective_cs * 0x10) :
					(0x1f + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr, 0));
				/* initialize the Edge2 Max. */
				adll_shift_val[if_id][pup] = 0;
				min_adll_per_pup[if_id][pup] = 0x1f;
				max_adll_per_pup[if_id][pup] = 0x0;

				ddr3_tip_ip_training(dev_num,
						     ACCESS_TYPE_MULTICAST,
						     PARAM_NOT_CARE,
						     ACCESS_TYPE_MULTICAST,
						     PARAM_NOT_CARE,
						     RESULT_PER_BIT,
						     HWS_CONTROL_ELEMENT_ADLL,
						     search_dir, dir,
						     tm->if_act_mask,
						     init_val, iterations,
						     pbs_pattern,
						     search_edge, CS_SINGLE,
						     cs_num, train_status);

				for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_MULTICAST,
						      PARAM_NOT_CARE,
						      mask_results_dq_reg_map[bit + pup * BUS_WIDTH_IN_BITS],
						      res0, MASK_ALL_BITS));
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
							 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
							  if_id, bit, pup, res0[if_id]));
					if ((res0[if_id] & 0x2000000) == 0) {
						/* exit the bit loop */
						bit = BUS_WIDTH_IN_BITS;
						/* Fail SBA --> Fail PBS */
						pup_state[if_id][pup] = 1;
						DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
								 (" SBA Fail\n"));
						continue;
					} else {
						/*
						 * increment to require all
						 * 8 bits to lock
						 */
						adll_shift_lock[if_id][pup]++;
						/*
						 * the search ended in Pass;
						 * we need Fail
						 */
						res0[if_id] =
							(pbs_mode == PBS_RX_MODE) ?
							((res0[if_id] & res_valid_mask) + 1) :
							((res0[if_id] & res_valid_mask) - 1);
						max_adll_per_pup[if_id][pup] =
							(max_adll_per_pup[if_id][pup] <
							 res0[if_id]) ?
							(u8)res0[if_id] :
							max_adll_per_pup[if_id][pup];
						min_adll_per_pup[if_id][pup] =
							(res0[if_id] >
							 min_adll_per_pup[if_id][pup]) ?
							min_adll_per_pup[if_id][pup] :
							(u8)res0[if_id];
						/*
						 * unlike Rx, for Tx we search
						 * for the smallest DQ-shift
						 * value at which all buses
						 * fail
						 */
						adll_shift_val[if_id][pup] =
							(pbs_mode == PBS_RX_MODE) ?
							max_adll_per_pup[if_id][pup] :
							min_adll_per_pup[if_id][pup];
					}
				}
				/* 1 means locked */
				adll_shift_lock[if_id][pup] =
					(adll_shift_lock[if_id][pup] == 8) ?
					1 : 0;
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					(0x3 + effective_cs * 4) :
					(0x1 + effective_cs * 4);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      adll_shift_val[if_id][pup]));
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
						 ("adll_shift_lock[%x][%x] = %x\n",
						  if_id, pup,
						  adll_shift_lock[if_id][pup]));
			}
		}

		/* End ADLL shift for SBA */
		/* Start the per-bit skew search */
		/* The ADLL shift finished with a Pass */
		search_edge = (pbs_mode == PBS_RX_MODE) ? EDGE_PF : EDGE_FP;
		search_dir = (pbs_mode == PBS_RX_MODE) ?
			HWS_LOW2HIGH : HWS_HIGH2LOW;
		iterations = 0x1f;
		/* the initial value differs between Rx and Tx mode */
		init_val = (pbs_mode == PBS_RX_MODE) ? 0 : iterations;
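		/*
		 * Re-run the per-bit skew search for the pups that went
		 * through SBA; only pups with pup_state == 2 are picked up
		 * in the result scan below. For Rx the edge compare and
		 * search direction are inverted (EDGE_PF, LOW2HIGH)
		 * relative to Tx.
		 */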
		ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
				     PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
				     PARAM_NOT_CARE, RESULT_PER_BIT,
				     HWS_CONTROL_ELEMENT_DQ_SKEW,
				     search_dir, dir, tm->if_act_mask,
				     init_val, iterations, pbs_pattern,
				     search_edge, CS_SINGLE, cs_num,
				     train_status);

		for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, pup);
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_MULTICAST,
						      PARAM_NOT_CARE,
						      mask_results_dq_reg_map[bit + pup * BUS_WIDTH_IN_BITS],
						      res0, MASK_ALL_BITS));
					if (pup_state[if_id][pup] != 2) {
						/*
						 * if the pup is not in SBA,
						 * continue to the next pup
						 */
						bit = BUS_WIDTH_IN_BITS;
						continue;
					}
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
							 ("Per Bit Skew search, PF I/F %d, bit:%d, pup:%d res0 0x%x\n",
							  if_id, bit, pup, res0[if_id]));
					if ((res0[if_id] & 0x2000000) == 0) {
						DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
								 ("SBA Fail\n"));

						max_pbs_per_pup[if_id][pup] = 0x1f;
						result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
							       if_id * MAX_BUS_NUM *
							       BUS_WIDTH_IN_BITS] = 0x1f;
					} else {
						temp = (u8)(res0[if_id] &
							    res_valid_mask);
						max_pbs_per_pup[if_id][pup] =
							(temp > max_pbs_per_pup[if_id][pup]) ?
							temp : max_pbs_per_pup[if_id][pup];
						min_pbs_per_pup[if_id][pup] =
							(temp < min_pbs_per_pup[if_id][pup]) ?
							temp : min_pbs_per_pup[if_id][pup];
						result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
							       if_id * MAX_BUS_NUM *
							       BUS_WIDTH_IN_BITS] = temp;
						adll_shift_lock[if_id][pup] = 1;
					}
				}
			}
		}

		/* Check all pup states */
		all_lock = 1;
		for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
			/*
			 * DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
			 * ("pup_state[%d][%d] = %d\n", if_id, pup,
			 *  pup_state[if_id][pup]));
			 */
		}
	}

	/* END OF SBA */
	/* Norm */
	for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup);
		for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				/* if the pup is not locked, continue to the next pup */
				if (adll_shift_lock[if_id][pup] != 1) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_ERROR,
							 ("PBS failed for IF #%d\n",
							  if_id));
					training_result[training_stage][if_id] =
						TEST_FAILED;

					result_mat[if_id][pup][bit] = 0;
					max_pbs_per_pup[if_id][pup] = 0;
					min_pbs_per_pup[if_id][pup] = 0;
				} else {
					training_result[training_stage][if_id] =
						(training_result[training_stage][if_id] ==
						 TEST_FAILED) ?
						TEST_FAILED : TEST_SUCCESS;
					result_mat[if_id][pup][bit] =
						result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
							       if_id * MAX_BUS_NUM *
							       BUS_WIDTH_IN_BITS] -
						min_pbs_per_pup[if_id][pup];
				}
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
						 ("The abs min_pbs[%d][%d] = %d\n",
						  if_id, pup,
						  min_pbs_per_pup[if_id][pup]));
			}
		}
	}

	/* Clean all results */
	ddr3_tip_clean_pbs_result(dev_num, pbs_mode);

	/* DQ PBS register update with the final result */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup < tm->num_of_bus_per_interface; pup++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, pup);

			DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
					 ("Final Results: if_id %d, pup %d, Pup State: %d\n",
					  if_id, pup, pup_state[if_id][pup]));
			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				if (dq_map_table == NULL) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_ERROR,
							 ("dq_map_table not initialized\n"));
					return MV_FAIL;
				}
				pad_num = dq_map_table[bit + pup * BUS_WIDTH_IN_BITS +
						       if_id * BUS_WIDTH_IN_BITS *
						       tm->num_of_bus_per_interface];
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
						 ("result_mat: %d ",
						  result_mat[if_id][pup][bit]));
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					(PBS_RX_PHY_REG + effective_cs * 0x10) :
					(PBS_TX_PHY_REG + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr + pad_num,
					      result_mat[if_id][pup][bit]));
			}
			pbsdelay_per_pup[pbs_mode][if_id][pup] =
				(max_pbs_per_pup[if_id][pup] ==
				 min_pbs_per_pup[if_id][pup]) ?
				TYPICAL_PBS_VALUE :
				((max_adll_per_pup[if_id][pup] -
				  min_adll_per_pup[if_id][pup]) * adll_tap /
				 (max_pbs_per_pup[if_id][pup] -
				  min_pbs_per_pup[if_id][pup]));
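			/*
			 * Rough meaning of the calculation above: adll_tap is
			 * one ADLL step in picoseconds (clock period / 64),
			 * so the PBS step size [ps] is estimated as the
			 * measured ADLL window divided by the measured PBS
			 * window. TYPICAL_PBS_VALUE is used when the PBS
			 * window is zero, which also avoids a divide-by-zero.
			 */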
			/* when in TX mode, the stored RX results are also written */
			if (pbs_mode == PBS_TX_MODE) {
				/* Write TX results */
				reg_addr = (0x14 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      (max_pbs_per_pup[if_id][pup] -
					       min_pbs_per_pup[if_id][pup]) / 2));
				reg_addr = (0x15 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      (max_pbs_per_pup[if_id][pup] -
					       min_pbs_per_pup[if_id][pup]) / 2));

				/* Write previously stored RX results */
				reg_addr = (0x54 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      result_mat_rx_dqs[if_id][pup][effective_cs]));
				reg_addr = (0x55 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr,
					      result_mat_rx_dqs[if_id][pup][effective_cs]));
			} else {
				/*
				 * RX results may affect RL result
				 * correctness, so just store the results
				 * here; they are written in the TX stage
				 */
				result_mat_rx_dqs[if_id][pup][effective_cs] =
					(max_pbs_per_pup[if_id][pup] -
					 min_pbs_per_pup[if_id][pup]) / 2;
			}
			DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
					 (", PBS tap=%d [psec] ==> skew observed = %d\n",
					  pbsdelay_per_pup[pbs_mode][if_id][pup],
					  ((max_pbs_per_pup[if_id][pup] -
					    min_pbs_per_pup[if_id][pup]) *
					   pbsdelay_per_pup[pbs_mode][if_id][pup])));
		}
	}

	/* Write the default values back to the PHY */
	reg_addr = (pbs_mode == PBS_RX_MODE) ?
		(READ_CENTRALIZATION_PHY_REG + effective_cs * 4) :
		(WRITE_CENTRALIZATION_PHY_REG + effective_cs * 4);
	write_adll_value(nominal_adll, reg_addr);

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		reg_addr = (pbs_mode == PBS_RX_MODE) ?
			(0x5a + effective_cs * 0x10) :
			(0x1a + effective_cs * 0x10);
		CHECK_STATUS(ddr3_tip_bus_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
			      reg_addr, 0));

		/* restore cs enable value */
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      CS_ENABLE_REG, cs_enable_reg_val[if_id],
			      MASK_ALL_BITS));
	}

	/* exit test mode */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_WRITE_READ_MODE_ENABLE_REG, 0xffff, MASK_ALL_BITS));
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		/*
		 * meaning that no VW exists at all (no lock at the EBA ADLL
		 * shift stage)
		 */
		if (pup_state[if_id][pup] == 1)
			return MV_FAIL;
	}

	return MV_OK;
}

/*
 * Name:	ddr3_tip_pbs_rx
 * Desc:	PBS RX
 * Args:	TBD
 * Notes:
 * Returns:	OK if success, other error code if fail.
 */
int ddr3_tip_pbs_rx(u32 uidev_num)
{
	return ddr3_tip_pbs(uidev_num, PBS_RX_MODE);
}

/*
 * Name:	ddr3_tip_pbs_tx
 * Desc:	PBS TX
 * Args:	TBD
 * Notes:
 * Returns:	OK if success, other error code if fail.
 */
int ddr3_tip_pbs_tx(u32 uidev_num)
{
	return ddr3_tip_pbs(uidev_num, PBS_TX_MODE);
}

#ifndef EXCLUDE_SWITCH_DEBUG
/*
 * Print all PBS results
 */
int ddr3_tip_print_all_pbs_result(u32 dev_num)
{
	u32 curr_cs;
	u32 max_cs = hws_ddr3_tip_max_cs_get();

	for (curr_cs = 0; curr_cs < max_cs; curr_cs++) {
		ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_RX_MODE);
		ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_TX_MODE);
	}

	return MV_OK;
}

/*
 * Print PBS result
 */
int ddr3_tip_print_pbs_result(u32 dev_num, u32 cs_num, enum pbs_dir pbs_mode)
{
	u32 data_value = 0, bit = 0, if_id = 0, pup = 0;
	u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
		(PBS_RX_PHY_REG + cs_num * 0x10) :
		(PBS_TX_PHY_REG + cs_num * 0x10);
	struct hws_topology_map *tm = ddr3_get_topology_map();

	printf("CS%d, %s ,PBS\n", cs_num,
	       (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");

	for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
		printf("%s, DQ", (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			printf("%d ,PBS,,, ", bit);
			for (pup = 0; pup <= tm->num_of_bus_per_interface;
			     pup++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, pup);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr + bit,
					      &data_value));
				printf("%d , ", data_value);
			}
		}
		printf("\n");
	}
	printf("\n");

	return MV_OK;
}
#endif

/*
 * Clean PBS results
 */
int ddr3_tip_clean_pbs_result(u32 dev_num, enum pbs_dir pbs_mode)
{
	u32 if_id, pup, bit;
	u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
		(PBS_RX_PHY_REG + effective_cs * 0x10) :
		(PBS_TX_PHY_REG + effective_cs * 0x10);
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup <= tm->num_of_bus_per_interface; pup++) {
			for (bit = 0; bit <= BUS_WIDTH_IN_BITS + 3; bit++) {
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST, pup,
					      DDR_PHY_DATA, reg_addr + bit, 0));
			}
		}
	}

	return MV_OK;
}