// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include "ddr3_init.h"
#include "mv_ddr_training_db.h"
#include "mv_ddr_common.h"
#include "mv_ddr_regs.h"

#define TYPICAL_PBS_VALUE	12

u32 nominal_adll[MAX_INTERFACE_NUM * MAX_BUS_NUM];
enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
u8 result_mat[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
u8 result_mat_rx_dqs[MAX_INTERFACE_NUM][MAX_BUS_NUM][MAX_CS_NUM];
/* 4 - EEBA, 3 - EBA, 2 - SBA, 1 - Fail, 0 - Pass */
u8 result_all_bit[MAX_BUS_NUM * BUS_WIDTH_IN_BITS * MAX_INTERFACE_NUM];
u8 max_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 min_pbs_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 max_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 min_adll_per_pup[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 pbsdelay_per_pup[NUM_OF_PBS_MODES][MAX_INTERFACE_NUM][MAX_BUS_NUM][MAX_CS_NUM];
u8 adll_shift_lock[MAX_INTERFACE_NUM][MAX_BUS_NUM];
u8 adll_shift_val[MAX_INTERFACE_NUM][MAX_BUS_NUM];
enum hws_pattern pbs_pattern = PATTERN_VREF;
static u8 pup_state[MAX_INTERFACE_NUM][MAX_BUS_NUM];

/*
 * Name: ddr3_tip_pbs
 * Desc: Per-bit skew (PBS) training
 * Args: dev_num - device number
 *       pbs_mode - PBS_RX_MODE or PBS_TX_MODE
 * Notes:
 * Returns: MV_OK if success, other error code if fail.
 */
int ddr3_tip_pbs(u32 dev_num, enum pbs_dir pbs_mode)
{
	u32 res0[MAX_INTERFACE_NUM];
	int adll_tap = MEGA / mv_ddr_freq_get(medium_freq) / 64;
	int pad_num = 0;
	enum hws_search_dir search_dir =
		(pbs_mode == PBS_RX_MODE) ? HWS_HIGH2LOW : HWS_LOW2HIGH;
	enum hws_dir dir = (pbs_mode == PBS_RX_MODE) ? OPER_READ : OPER_WRITE;
	int iterations = (pbs_mode == PBS_RX_MODE) ? 31 : 63;
	u32 res_valid_mask = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
	int init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
	enum hws_edge_compare search_edge = EDGE_FP;
	u32 pup = 0, bit = 0, if_id = 0, all_lock = 0, cs_num = 0;
	u32 reg_addr = 0;
	u32 validation_val = 0;
	u32 cs_enable_reg_val[MAX_INTERFACE_NUM];
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u8 temp = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		/* save current cs enable reg val */
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, cs_enable_reg_val, MASK_ALL_BITS));

		/* enable single cs */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, (1 << 3), (1 << 3)));
	}

	reg_addr = (pbs_mode == PBS_RX_MODE) ?
		   CRX_PHY_REG(effective_cs) : CTX_PHY_REG(effective_cs);
	ddr3_tip_read_adll_value(dev_num, nominal_adll, reg_addr, MASK_ALL_BITS);

	/* stage 1: shift ADLL */
	ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			     ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			     RESULT_PER_BIT, HWS_CONTROL_ELEMENT_ADLL,
			     search_dir, dir, tm->if_act_mask, init_val,
			     iterations, pbs_pattern, search_edge, CS_SINGLE,
			     cs_num, train_status);
	validation_val = (pbs_mode == PBS_RX_MODE) ? 0x1f : 0;
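	/*
	 * Initialize the per-pup search state before the first stage.
	 * pup_state encoding used below: 3 - EBA, 4 - EEBA, 2 - SBA,
	 * 1 - training failed (no valid window found).
	 */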
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			min_adll_per_pup[if_id][pup] =
				(pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
			pup_state[if_id][pup] = 0x3;
			adll_shift_lock[if_id][pup] = 1;
			max_adll_per_pup[if_id][pup] = 0x0;
		}
	}

	/* EBA */
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
				      mask_results_dq_reg_map[bit + pup * BUS_WIDTH_IN_BITS],
				      res0, MASK_ALL_BITS));
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
						 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
						  if_id, bit, pup, res0[if_id]));
				/* if not in EBA state, move to the next pup */
				if (pup_state[if_id][pup] != 3)
					continue;

				if ((res0[if_id] & 0x2000000) == 0) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- Fail Training IP\n"));
					/* training machine failed */
					pup_state[if_id][pup] = 1;
					adll_shift_lock[if_id][pup] = 0;
					continue;
				} else if ((res0[if_id] & res_valid_mask) == validation_val) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- FAIL EBA %d %d %d %d\n",
							  if_id, bit, pup, res0[if_id]));
					/* move this pup to EEBA */
					pup_state[if_id][pup] = 4;
					adll_shift_lock[if_id][pup] = 0;
					continue;
				} else {
					/*
					 * The search ended with a Pass; step one
					 * tap further to get the Fail point.
					 */
					res0[if_id] = (pbs_mode == PBS_RX_MODE) ?
						      ((res0[if_id] & res_valid_mask) + 1) :
						      ((res0[if_id] & res_valid_mask) - 1);
					max_adll_per_pup[if_id][pup] =
						(max_adll_per_pup[if_id][pup] < res0[if_id]) ?
						(u8)res0[if_id] : max_adll_per_pup[if_id][pup];
					min_adll_per_pup[if_id][pup] =
						(res0[if_id] > min_adll_per_pup[if_id][pup]) ?
						min_adll_per_pup[if_id][pup] : (u8)res0[if_id];
					/*
					 * For Rx take the largest ADLL shift; for Tx,
					 * unlike Rx, take the smallest DQ shift value
					 * at which the whole bus fails.
					 */
					adll_shift_val[if_id][pup] =
						(pbs_mode == PBS_RX_MODE) ?
						max_adll_per_pup[if_id][pup] :
						min_adll_per_pup[if_id][pup];
				}
			}
		}
	}

	/* EEBA */
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

			/* if the pup is not in EEBA state, move to the next pup */
			if (pup_state[if_id][pup] != 4)
				continue;

			reg_addr = (pbs_mode == PBS_RX_MODE) ?
				   (0x54 + effective_cs * 0x10) :
				   (0x14 + effective_cs * 0x10);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
				      reg_addr, 0x1f));
			reg_addr = (pbs_mode == PBS_RX_MODE) ?
				   (0x55 + effective_cs * 0x10) :
				   (0x15 + effective_cs * 0x10);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
				      reg_addr, 0x1f));
			/* initialize the Edge2 max */
			adll_shift_val[if_id][pup] = 0;
			min_adll_per_pup[if_id][pup] =
				(pbs_mode == PBS_RX_MODE) ? 0x1f : 0x3f;
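			/*
			 * With the per-pup delay registers above (presumably
			 * the DQS pads' PBS delay) pushed to their maximum
			 * (0x1f), re-run the ADLL shift search below so that
			 * a fail edge can be found for this pup (EEBA retry).
			 */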
			max_adll_per_pup[if_id][pup] = 0x0;

			ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
					     PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
					     PARAM_NOT_CARE, RESULT_PER_BIT,
					     HWS_CONTROL_ELEMENT_ADLL,
					     search_dir, dir, tm->if_act_mask,
					     init_val, iterations, pbs_pattern,
					     search_edge, CS_SINGLE, cs_num,
					     train_status);
			DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO, ("ADLL shift results:\n"));

			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
					      mask_results_dq_reg_map[bit + pup * BUS_WIDTH_IN_BITS],
					      res0, MASK_ALL_BITS));
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
						 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
						  if_id, bit, pup, res0[if_id]));

				if ((res0[if_id] & 0x2000000) == 0) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 (" -- EEBA Fail\n"));
					/* exit the bit loop */
					bit = BUS_WIDTH_IN_BITS;
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- EEBA Fail Training IP\n"));
					/*
					 * The training machine failed here although it
					 * passed earlier in EBA, so the DQS shift may
					 * have changed the environment.
					 */
					pup_state[if_id][pup] = 2;
					adll_shift_lock[if_id][pup] = 0;
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						   (0x54 + effective_cs * 0x10) :
						   (0x14 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num, ACCESS_TYPE_UNICAST, if_id,
						      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
						      reg_addr, 0x0));
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						   (0x55 + effective_cs * 0x10) :
						   (0x15 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num, ACCESS_TYPE_UNICAST, if_id,
						      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
						      reg_addr, 0x0));
					continue;
				} else if ((res0[if_id] & res_valid_mask) == validation_val) {
					/* exit the bit loop */
					bit = BUS_WIDTH_IN_BITS;
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
							 ("-- FAIL EEBA\n"));
					/* move this pup to SBA */
					pup_state[if_id][pup] = 2;
					adll_shift_lock[if_id][pup] = 0;
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						   (0x54 + effective_cs * 0x10) :
						   (0x14 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num, ACCESS_TYPE_UNICAST, if_id,
						      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
						      reg_addr, 0x0));
					reg_addr = (pbs_mode == PBS_RX_MODE) ?
						   (0x55 + effective_cs * 0x10) :
						   (0x15 + effective_cs * 0x10);
					CHECK_STATUS(ddr3_tip_bus_write
						     (dev_num, ACCESS_TYPE_UNICAST, if_id,
						      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
						      reg_addr, 0x0));
					continue;
				} else {
					adll_shift_lock[if_id][pup] = 1;
					/*
					 * The search ended with a Pass; step one
					 * tap further to get the Fail point.
					 */
					res0[if_id] = (pbs_mode == PBS_RX_MODE) ?
						      ((res0[if_id] & res_valid_mask) + 1) :
						      ((res0[if_id] & res_valid_mask) - 1);
					max_adll_per_pup[if_id][pup] =
						(max_adll_per_pup[if_id][pup] < res0[if_id]) ?
						(u8)res0[if_id] : max_adll_per_pup[if_id][pup];
					min_adll_per_pup[if_id][pup] =
						(res0[if_id] > min_adll_per_pup[if_id][pup]) ?
						min_adll_per_pup[if_id][pup] : (u8)res0[if_id];
					/*
					 * For Rx take the largest ADLL shift; for Tx,
					 * unlike Rx, take the smallest DQ shift value
					 * at which the whole bus fails.
					 */
					adll_shift_val[if_id][pup] =
						(pbs_mode == PBS_RX_MODE) ?
						max_adll_per_pup[if_id][pup] :
						min_adll_per_pup[if_id][pup];
				}
			}
		}
	}

	/* print stage results */
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
					 ("FP I/F %d, ADLL Shift for EBA: pup[%d] Lock status = %d Lock Val = %d,%d\n",
					  if_id, pup,
					  adll_shift_lock[if_id][pup],
					  max_adll_per_pup[if_id][pup],
					  min_adll_per_pup[if_id][pup]));
		}
	}
	DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO, ("Update ADLL Shift of all pups:\n"));

	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			/* if the pup is not locked, continue to the next pup */
			if (adll_shift_lock[if_id][pup] != 1)
				continue;

			reg_addr = (pbs_mode == PBS_RX_MODE) ?
				   (0x3 + effective_cs * 4) :
				   (0x1 + effective_cs * 4);
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
				      reg_addr, adll_shift_val[if_id][pup]));
			DEBUG_PBS_ENGINE(DEBUG_LEVEL_TRACE,
					 ("FP I/F %d, Pup[%d] = %d\n", if_id,
					  pup, adll_shift_val[if_id][pup]));
		}
	}

	/* PBS for EEBA and EBA */
	/* start the per-bit skew search */
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			max_pbs_per_pup[if_id][pup] = 0x0;
			min_pbs_per_pup[if_id][pup] = 0x1f;
			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				/* reset result for PBS */
				result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
					       if_id * MAX_BUS_NUM * BUS_WIDTH_IN_BITS] = 0;
			}
		}
	}

	iterations = 31;
	search_dir = HWS_LOW2HIGH;
	/* init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations; */
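	/*
	 * Stage 2: per-bit skew (PBS) search. The per-bit delay range is
	 * 0..0x1f (see the min_pbs_per_pup initialization above), hence
	 * 31 iterations starting from 0.
	 */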
	init_val = 0;

	ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			     ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			     RESULT_PER_BIT, HWS_CONTROL_ELEMENT_DQ_SKEW,
			     search_dir, dir, tm->if_act_mask, init_val,
			     iterations, pbs_pattern, search_edge,
			     CS_SINGLE, cs_num, train_status);

	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			/* if the pup is not locked, continue to the next pup */
			if (adll_shift_lock[if_id][pup] != 1)
				continue;

			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
					      mask_results_dq_reg_map[bit + pup * BUS_WIDTH_IN_BITS],
					      res0, MASK_ALL_BITS));
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
						 ("Per Bit Skew search, FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
						  if_id, bit, pup, res0[if_id]));
				if ((res0[if_id] & 0x2000000) == 0) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
							 ("--EBA PBS Fail - Training IP machine\n"));
					/* exit the bit loop */
					bit = BUS_WIDTH_IN_BITS;
					/* ADLL is no longer locked; a new search is needed */
					adll_shift_lock[if_id][pup] = 0;
					/* move to SBA */
					pup_state[if_id][pup] = 2;
					max_pbs_per_pup[if_id][pup] = 0x0;
					min_pbs_per_pup[if_id][pup] = 0x1f;
					continue;
				} else {
					temp = (u8)(res0[if_id] & res_valid_mask);
					max_pbs_per_pup[if_id][pup] =
						(temp > max_pbs_per_pup[if_id][pup]) ?
						temp : max_pbs_per_pup[if_id][pup];
					min_pbs_per_pup[if_id][pup] =
						(temp < min_pbs_per_pup[if_id][pup]) ?
						temp : min_pbs_per_pup[if_id][pup];
					result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
						       if_id * MAX_BUS_NUM * BUS_WIDTH_IN_BITS] = temp;
				}
			}
		}
	}

	/* check that all pups are locked */
	all_lock = 1;
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			all_lock = all_lock * adll_shift_lock[if_id][pup];
		}
	}

	/* only if not all pups are locked */
	if (all_lock == 0) {
		DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
				 ("##########ADLL shift for SBA###########\n"));

		/* ADLL shift for SBA */
		search_dir = (pbs_mode == PBS_RX_MODE) ? HWS_LOW2HIGH : HWS_HIGH2LOW;
		init_val = (search_dir == HWS_LOW2HIGH) ? 0 : iterations;
		for (pup = 0; pup < octets_per_if_num; pup++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				/* if the pup is locked, continue to the next pup */
				if (adll_shift_lock[if_id][pup] == 1)
					continue;

				/* re-initialize the variables initialized above */
				adll_shift_lock[if_id][pup] = 0;
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					   (0x54 + effective_cs * 0x10) :
					   (0x14 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST, if_id,
					      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
					      reg_addr, 0));
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					   (0x55 + effective_cs * 0x10) :
					   (0x15 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST, if_id,
					      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
					      reg_addr, 0));
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					   (0x5f + effective_cs * 0x10) :
					   (0x1f + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST, if_id,
					      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
					      reg_addr, 0));
				/* initialize the Edge2 max */
				adll_shift_val[if_id][pup] = 0;
				min_adll_per_pup[if_id][pup] = 0x1f;
				max_adll_per_pup[if_id][pup] = 0x0;

				ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST,
						     PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
						     PARAM_NOT_CARE, RESULT_PER_BIT,
						     HWS_CONTROL_ELEMENT_ADLL,
						     search_dir, dir, tm->if_act_mask,
						     init_val, iterations, pbs_pattern,
						     search_edge, CS_SINGLE, cs_num,
						     train_status);

				for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
						      mask_results_dq_reg_map[bit + pup * BUS_WIDTH_IN_BITS],
						      res0, MASK_ALL_BITS));
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
							 ("FP I/F %d, bit:%d, pup:%d res0 0x%x\n",
							  if_id, bit, pup, res0[if_id]));
					if ((res0[if_id] & 0x2000000) == 0) {
						/* exit the bit loop */
						bit = BUS_WIDTH_IN_BITS;
						/* Fail SBA --> Fail PBS */
						pup_state[if_id][pup] = 1;
						DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
								 (" SBA Fail\n"));
						continue;
					} else {
						/* increment; lock requires all 8 bits to pass */
						adll_shift_lock[if_id][pup]++;
						/*
						 * The search ended with a Pass; step one
						 * tap further to get the Fail point.
						 */
						res0[if_id] = (pbs_mode == PBS_RX_MODE) ?
							      ((res0[if_id] & res_valid_mask) + 1) :
							      ((res0[if_id] & res_valid_mask) - 1);
						max_adll_per_pup[if_id][pup] =
							(max_adll_per_pup[if_id][pup] < res0[if_id]) ?
							(u8)res0[if_id] : max_adll_per_pup[if_id][pup];
						min_adll_per_pup[if_id][pup] =
							(res0[if_id] > min_adll_per_pup[if_id][pup]) ?
							min_adll_per_pup[if_id][pup] : (u8)res0[if_id];
						/*
						 * For Rx take the largest ADLL shift; for Tx,
						 * unlike Rx, take the smallest DQ shift value
						 * at which the whole bus fails.
						 */
						adll_shift_val[if_id][pup] =
							(pbs_mode == PBS_RX_MODE) ?
							max_adll_per_pup[if_id][pup] :
							min_adll_per_pup[if_id][pup];
					}
				}
				/* 1 means locked */
				adll_shift_lock[if_id][pup] =
					(adll_shift_lock[if_id][pup] == 8) ? 1 : 0;
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					   (0x3 + effective_cs * 4) :
					   (0x1 + effective_cs * 4);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST, if_id,
					      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
					      reg_addr, adll_shift_val[if_id][pup]));
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
						 ("adll_shift_lock[%x][%x] = %x\n",
						  if_id, pup, adll_shift_lock[if_id][pup]));
			}
		}

		/* end of ADLL shift for SBA */
		/* start the per-bit skew search */
		/* the ADLL shift finished with a Pass */
		search_edge = (pbs_mode == PBS_RX_MODE) ? EDGE_PF : EDGE_FP;
		search_dir = (pbs_mode == PBS_RX_MODE) ? HWS_LOW2HIGH : HWS_HIGH2LOW;
		iterations = 0x1f;
		/* the initial value differs between Rx and Tx mode */
		init_val = (pbs_mode == PBS_RX_MODE) ? 0 : iterations;
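		/*
		 * Per-bit skew search for the pups that fell back to SBA
		 * (pup_state == 2); other pups are skipped in the result
		 * loop below.
		 */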
		ddr3_tip_ip_training(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
				     ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
				     RESULT_PER_BIT, HWS_CONTROL_ELEMENT_DQ_SKEW,
				     search_dir, dir, tm->if_act_mask, init_val,
				     iterations, pbs_pattern, search_edge,
				     CS_SINGLE, cs_num, train_status);

		for (pup = 0; pup < octets_per_if_num; pup++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
						      mask_results_dq_reg_map[bit + pup * BUS_WIDTH_IN_BITS],
						      res0, MASK_ALL_BITS));
					if (pup_state[if_id][pup] != 2) {
						/* if the pup is not in SBA state, continue to the next pup */
						bit = BUS_WIDTH_IN_BITS;
						continue;
					}
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
							 ("Per Bit Skew search, PF I/F %d, bit:%d, pup:%d res0 0x%x\n",
							  if_id, bit, pup, res0[if_id]));
					if ((res0[if_id] & 0x2000000) == 0) {
						DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
								 ("SBA Fail\n"));

						max_pbs_per_pup[if_id][pup] = 0x1f;
						result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
							       if_id * MAX_BUS_NUM * BUS_WIDTH_IN_BITS] = 0x1f;
					} else {
						temp = (u8)(res0[if_id] & res_valid_mask);
						max_pbs_per_pup[if_id][pup] =
							(temp > max_pbs_per_pup[if_id][pup]) ?
							temp : max_pbs_per_pup[if_id][pup];
						min_pbs_per_pup[if_id][pup] =
							(temp < min_pbs_per_pup[if_id][pup]) ?
							temp : min_pbs_per_pup[if_id][pup];
						result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
							       if_id * MAX_BUS_NUM * BUS_WIDTH_IN_BITS] = temp;
						adll_shift_lock[if_id][pup] = 1;
					}
				}
			}
		}

		/* check all pups state */
		all_lock = 1;
		for (pup = 0; pup < octets_per_if_num; pup++) {
			/*
			 * DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
			 * ("pup_state[%d][%d] = %d\n", if_id, pup,
			 *  pup_state[if_id][pup]));
			 */
		}
	}

	/* end of SBA */

	/*
	 * Normalization: store each bit's skew relative to the pup's
	 * minimum, so the programmed PBS value is the per-bit offset.
	 */
	for (pup = 0; pup < octets_per_if_num; pup++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
		for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				/* if the pup is not locked, continue to the next pup */
				if (adll_shift_lock[if_id][pup] != 1) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_ERROR,
							 ("PBS failed for IF #%d\n", if_id));
					training_result[training_stage][if_id] = TEST_FAILED;

					result_mat[if_id][pup][bit] = 0;
					max_pbs_per_pup[if_id][pup] = 0;
					min_pbs_per_pup[if_id][pup] = 0;
				} else {
					training_result[training_stage][if_id] =
						(training_result[training_stage][if_id] == TEST_FAILED) ?
						TEST_FAILED : TEST_SUCCESS;
					result_mat[if_id][pup][bit] =
						result_all_bit[bit + pup * BUS_WIDTH_IN_BITS +
							       if_id * MAX_BUS_NUM * BUS_WIDTH_IN_BITS] -
						min_pbs_per_pup[if_id][pup];
				}
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
						 ("The abs min_pbs[%d][%d] = %d\n",
						  if_id, pup, min_pbs_per_pup[if_id][pup]));
			}
		}
	}

	/* clean all results */
	ddr3_tip_clean_pbs_result(dev_num, pbs_mode);

	/* update the DQ PBS registers with the final results */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup < octets_per_if_num; pup++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);

			DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
					 ("Final Results: if_id %d, pup %d, Pup State: %d\n",
					  if_id, pup, pup_state[if_id][pup]));
			for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
				if (dq_map_table == NULL) {
					DEBUG_PBS_ENGINE(DEBUG_LEVEL_ERROR,
							 ("dq_map_table not initialized\n"));
					return MV_FAIL;
				}
				pad_num = dq_map_table[bit + pup * BUS_WIDTH_IN_BITS +
						       if_id * BUS_WIDTH_IN_BITS * MAX_BUS_NUM];
				DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
						 ("result_mat: %d ",
						  result_mat[if_id][pup][bit]));
				reg_addr = (pbs_mode == PBS_RX_MODE) ?
					   PBS_RX_PHY_REG(effective_cs, 0) :
					   PBS_TX_PHY_REG(effective_cs, 0);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST, if_id,
					      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA,
					      reg_addr + pad_num,
					      result_mat[if_id][pup][bit]));
			}

			/* PBS tap size in psec: ADLL span [psec] divided by PBS span [taps] */
			if (max_pbs_per_pup[if_id][pup] == min_pbs_per_pup[if_id][pup]) {
				temp = TYPICAL_PBS_VALUE;
			} else {
				temp = ((max_adll_per_pup[if_id][pup] -
					 min_adll_per_pup[if_id][pup]) * adll_tap /
					(max_pbs_per_pup[if_id][pup] -
					 min_pbs_per_pup[if_id][pup]));
			}
			pbsdelay_per_pup[pbs_mode][if_id][pup][effective_cs] = temp;

			/* RX results are ready; write RX as well */
			if (pbs_mode == PBS_TX_MODE) {
				/* write TX results */
				reg_addr = (0x14 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST, if_id,
					      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA, reg_addr,
					      (max_pbs_per_pup[if_id][pup] -
					       min_pbs_per_pup[if_id][pup]) / 2));
				reg_addr = (0x15 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST, if_id,
					      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA, reg_addr,
					      (max_pbs_per_pup[if_id][pup] -
					       min_pbs_per_pup[if_id][pup]) / 2));

				/* write the previously stored RX results */
				reg_addr = (0x54 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST, if_id,
					      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA, reg_addr,
					      result_mat_rx_dqs[if_id][pup][effective_cs]));
				reg_addr = (0x55 + effective_cs * 0x10);
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST, if_id,
					      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA, reg_addr,
					      result_mat_rx_dqs[if_id][pup][effective_cs]));
			} else {
				/*
				 * RX results may affect the RL results' correctness,
				 * so just store the results here; they will be
				 * written in the TX stage.
				 */
				result_mat_rx_dqs[if_id][pup][effective_cs] =
					(max_pbs_per_pup[if_id][pup] -
					 min_pbs_per_pup[if_id][pup]) / 2;
			}
			DEBUG_PBS_ENGINE(DEBUG_LEVEL_INFO,
					 (", PBS tap=%d [psec] ==> skew observed = %d\n",
					  temp,
					  ((max_pbs_per_pup[if_id][pup] -
					    min_pbs_per_pup[if_id][pup]) * temp)));
		}
	}

	/* write back the default values to the PHY */
	reg_addr = (pbs_mode == PBS_RX_MODE) ?
		   CRX_PHY_REG(effective_cs) : CTX_PHY_REG(effective_cs);
	ddr3_tip_write_adll_value(dev_num, nominal_adll, reg_addr);

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		reg_addr = (pbs_mode == PBS_RX_MODE) ?
			   (0x5a + effective_cs * 0x10) :
			   (0x1a + effective_cs * 0x10);
		CHECK_STATUS(ddr3_tip_bus_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      ACCESS_TYPE_UNICAST, pup, DDR_PHY_DATA, reg_addr, 0));

		/* restore cs enable value */
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, cs_enable_reg_val[if_id],
			      MASK_ALL_BITS));
	}

	/* exit test mode */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_WR_RD_MODE_ENA_REG, 0xffff, MASK_ALL_BITS));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup < octets_per_if_num; pup++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
			/*
			 * no valid window found
			 * (no lock at the EBA ADLL shift or at SBA)
			 */
			if (pup_state[if_id][pup] == 1)
				return MV_FAIL;
		}
	}

	return MV_OK;
}

/*
 * Name: ddr3_tip_pbs_rx
 * Desc: PBS RX
 * Args: uidev_num - device number
 * Notes:
 * Returns: MV_OK if success, other error code if fail.
 */
int ddr3_tip_pbs_rx(u32 uidev_num)
{
	return ddr3_tip_pbs(uidev_num, PBS_RX_MODE);
}

/*
 * Name: ddr3_tip_pbs_tx
 * Desc: PBS TX
 * Args: uidev_num - device number
 * Notes:
 * Returns: MV_OK if success, other error code if fail.
 */
int ddr3_tip_pbs_tx(u32 uidev_num)
{
	return ddr3_tip_pbs(uidev_num, PBS_TX_MODE);
}

#ifdef DDR_VIEWER_TOOL
/*
 * Print PBS results for all chip selects
 */
int ddr3_tip_print_all_pbs_result(u32 dev_num)
{
	u32 curr_cs;
	unsigned int max_cs = mv_ddr_cs_num_get();

	for (curr_cs = 0; curr_cs < max_cs; curr_cs++) {
		ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_RX_MODE);
		ddr3_tip_print_pbs_result(dev_num, curr_cs, PBS_TX_MODE);
	}

	return MV_OK;
}

/*
 * Print PBS result
 */
int ddr3_tip_print_pbs_result(u32 dev_num, u32 cs_num, enum pbs_dir pbs_mode)
{
	u32 data_value = 0, bit = 0, if_id = 0, pup = 0;
	u32 reg_addr = (pbs_mode == PBS_RX_MODE) ?
		       PBS_RX_PHY_REG(cs_num, 0) :
		       PBS_TX_PHY_REG(cs_num, 0);
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	printf("%s,CS%d,PBS,ADLLRATIO,,,",
	       (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx", cs_num);

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (pup = 0; pup < octets_per_if_num; pup++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
			printf("%d,", pbsdelay_per_pup[pbs_mode][if_id][pup][cs_num]);
		}
	}
	printf("CS%d, %s ,PBS\n", cs_num,
	       (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");

	for (bit = 0; bit < BUS_WIDTH_IN_BITS; bit++) {
		printf("%s, DQ", (pbs_mode == PBS_RX_MODE) ? "Rx" : "Tx");
"Rx" : "Tx"); 965 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 966 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 967 printf("%d ,PBS,,, ", bit); 968 for (pup = 0; pup <= octets_per_if_num; 969 pup++) { 970 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup); 971 CHECK_STATUS(ddr3_tip_bus_read 972 (dev_num, if_id, 973 ACCESS_TYPE_UNICAST, pup, 974 DDR_PHY_DATA, reg_addr + bit, 975 &data_value)); 976 printf("%d , ", data_value); 977 } 978 } 979 printf("\n"); 980 } 981 printf("\n"); 982 983 return MV_OK; 984 } 985 #endif /* DDR_VIEWER_TOOL */ 986 987 /* 988 * Fixup PBS Result 989 */ 990 int ddr3_tip_clean_pbs_result(u32 dev_num, enum pbs_dir pbs_mode) 991 { 992 u32 if_id, pup, bit; 993 u32 reg_addr = (pbs_mode == PBS_RX_MODE) ? 994 PBS_RX_PHY_REG(effective_cs, 0) : 995 PBS_TX_PHY_REG(effective_cs, 0); 996 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE); 997 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get(); 998 999 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1000 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id); 1001 for (pup = 0; pup <= octets_per_if_num; pup++) { 1002 for (bit = 0; bit <= BUS_WIDTH_IN_BITS + 3; bit++) { 1003 CHECK_STATUS(ddr3_tip_bus_write 1004 (dev_num, ACCESS_TYPE_UNICAST, 1005 if_id, ACCESS_TYPE_UNICAST, pup, 1006 DDR_PHY_DATA, reg_addr + bit, 0)); 1007 } 1008 } 1009 } 1010 1011 return MV_OK; 1012 } 1013