/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier:	GPL-2.0
 */

#include <common.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

#define GET_MAX_VALUE(x, y)			\
	((x) > (y)) ? (x) : (y)
#define CEIL_DIVIDE(x, y)					\
	((x - (x / y) * y) == 0) ? ((x / y) - 1) : (x / y)

#define TIME_2_CLOCK_CYCLES	CEIL_DIVIDE

#define GET_CS_FROM_MASK(mask)	(cs_mask2_num[mask])
#define CS_CBE_VALUE(cs_num)	(cs_cbe_reg[cs_num])

u32 window_mem_addr = 0;
u32 phy_reg0_val = 0;
u32 phy_reg1_val = 8;
u32 phy_reg2_val = 0;
u32 phy_reg3_val = 0xa;
enum hws_ddr_freq init_freq = DDR_FREQ_667;
enum hws_ddr_freq low_freq = DDR_FREQ_LOW_FREQ;
enum hws_ddr_freq medium_freq;
u32 debug_dunit = 0;
u32 odt_additional = 1;
u32 *dq_map_table = NULL;
u32 odt_config = 1;

#if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ALLEYCAT3) || \
	defined(CONFIG_ARMADA_39X)
u32 is_pll_before_init = 0, is_adll_calib_before_init = 0, is_dfs_in_init = 0;
u32 dfs_low_freq = 130;
#else
u32 is_pll_before_init = 0, is_adll_calib_before_init = 1, is_dfs_in_init = 0;
u32 dfs_low_freq = 100;
#endif
u32 g_rtt_nom_c_s0, g_rtt_nom_c_s1;
u8 calibration_update_control;	/* 2 external only, 1 is internal only */

enum hws_result training_result[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM];
enum auto_tune_stage training_stage = INIT_CONTROLLER;
u32 finger_test = 0, p_finger_start = 11, p_finger_end = 64,
	n_finger_start = 11, n_finger_end = 64,
	p_finger_step = 3, n_finger_step = 3;
u32 clamp_tbl[] = { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 };

/* Initialized to 0xff; this variable is defined by the user in debug mode */
u32 mode2_t = 0xff;
u32 xsb_validate_type = 0;
u32 xsb_validation_base_address = 0xf000;
u32 first_active_if = 0;
u32 dfs_low_phy1 = 0x1f;
u32 multicast_id = 0;
int use_broadcast = 0;
struct hws_tip_freq_config_info *freq_info_table = NULL;
u8 is_cbe_required = 0;
u32 debug_mode = 0;
u32 delay_enable = 0;
int rl_mid_freq_wa = 0;

u32 effective_cs = 0;

u32 mask_tune_func = (SET_MEDIUM_FREQ_MASK_BIT |
		      WRITE_LEVELING_MASK_BIT |
		      LOAD_PATTERN_2_MASK_BIT |
		      READ_LEVELING_MASK_BIT |
		      SET_TARGET_FREQ_MASK_BIT | WRITE_LEVELING_TF_MASK_BIT |
		      READ_LEVELING_TF_MASK_BIT |
		      CENTRALIZATION_RX_MASK_BIT | CENTRALIZATION_TX_MASK_BIT);

void ddr3_print_version(void)
{
	printf(DDR3_TIP_VERSION_STRING);
}

static int ddr3_tip_ddr3_training_main_flow(u32 dev_num);
static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type,
			      u32 if_id, u32 cl_value, u32 cwl_value);
static int ddr3_tip_ddr3_auto_tune(u32 dev_num);
static int is_bus_access_done(u32 dev_num, u32 if_id,
			      u32 dunit_reg_adrr, u32 bit);
#ifdef ODT_TEST_SUPPORT
static int odt_test(u32 dev_num, enum hws_algo_type algo_type);
#endif

int adll_calibration(u32 dev_num, enum hws_access_type access_type,
		     u32 if_id, enum hws_ddr_freq frequency);
static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type,
			       u32 if_id, enum hws_ddr_freq frequency);

static struct page_element page_param[] = {
	/*
	 * 8bits	16 bits
	 * page-size(K)	page-size(K)	mask
	 */
	{ 1,	2,	2},
	/* 512M */
	{ 1,	2,	3},
	/* 1G */
	{ 1,	2,	0},
	/* 2G */
	{ 1,	2,	4},
	/* 4G */
	{ 2,	2,	5}
	/* 8G */
};

static u8
mem_size_config[MEM_SIZE_LAST] = { 118 0x2, /* 512Mbit */ 119 0x3, /* 1Gbit */ 120 0x0, /* 2Gbit */ 121 0x4, /* 4Gbit */ 122 0x5 /* 8Gbit */ 123 }; 124 125 static u8 cs_mask2_num[] = { 0, 0, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3 }; 126 127 static struct reg_data odpg_default_value[] = { 128 {0x1034, 0x38000, MASK_ALL_BITS}, 129 {0x1038, 0x0, MASK_ALL_BITS}, 130 {0x10b0, 0x0, MASK_ALL_BITS}, 131 {0x10b8, 0x0, MASK_ALL_BITS}, 132 {0x10c0, 0x0, MASK_ALL_BITS}, 133 {0x10f0, 0x0, MASK_ALL_BITS}, 134 {0x10f4, 0x0, MASK_ALL_BITS}, 135 {0x10f8, 0xff, MASK_ALL_BITS}, 136 {0x10fc, 0xffff, MASK_ALL_BITS}, 137 {0x1130, 0x0, MASK_ALL_BITS}, 138 {0x1830, 0x2000000, MASK_ALL_BITS}, 139 {0x14d0, 0x0, MASK_ALL_BITS}, 140 {0x14d4, 0x0, MASK_ALL_BITS}, 141 {0x14d8, 0x0, MASK_ALL_BITS}, 142 {0x14dc, 0x0, MASK_ALL_BITS}, 143 {0x1454, 0x0, MASK_ALL_BITS}, 144 {0x1594, 0x0, MASK_ALL_BITS}, 145 {0x1598, 0x0, MASK_ALL_BITS}, 146 {0x159c, 0x0, MASK_ALL_BITS}, 147 {0x15a0, 0x0, MASK_ALL_BITS}, 148 {0x15a4, 0x0, MASK_ALL_BITS}, 149 {0x15a8, 0x0, MASK_ALL_BITS}, 150 {0x15ac, 0x0, MASK_ALL_BITS}, 151 {0x1604, 0x0, MASK_ALL_BITS}, 152 {0x1608, 0x0, MASK_ALL_BITS}, 153 {0x160c, 0x0, MASK_ALL_BITS}, 154 {0x1610, 0x0, MASK_ALL_BITS}, 155 {0x1614, 0x0, MASK_ALL_BITS}, 156 {0x1618, 0x0, MASK_ALL_BITS}, 157 {0x1624, 0x0, MASK_ALL_BITS}, 158 {0x1690, 0x0, MASK_ALL_BITS}, 159 {0x1694, 0x0, MASK_ALL_BITS}, 160 {0x1698, 0x0, MASK_ALL_BITS}, 161 {0x169c, 0x0, MASK_ALL_BITS}, 162 {0x14b8, 0x6f67, MASK_ALL_BITS}, 163 {0x1630, 0x0, MASK_ALL_BITS}, 164 {0x1634, 0x0, MASK_ALL_BITS}, 165 {0x1638, 0x0, MASK_ALL_BITS}, 166 {0x163c, 0x0, MASK_ALL_BITS}, 167 {0x16b0, 0x0, MASK_ALL_BITS}, 168 {0x16b4, 0x0, MASK_ALL_BITS}, 169 {0x16b8, 0x0, MASK_ALL_BITS}, 170 {0x16bc, 0x0, MASK_ALL_BITS}, 171 {0x16c0, 0x0, MASK_ALL_BITS}, 172 {0x16c4, 0x0, MASK_ALL_BITS}, 173 {0x16c8, 0x0, MASK_ALL_BITS}, 174 {0x16cc, 0x1, MASK_ALL_BITS}, 175 {0x16f0, 0x1, MASK_ALL_BITS}, 176 {0x16f4, 0x0, MASK_ALL_BITS}, 177 {0x16f8, 0x0, MASK_ALL_BITS}, 178 {0x16fc, 0x0, MASK_ALL_BITS} 179 }; 180 181 static int ddr3_tip_bus_access(u32 dev_num, enum hws_access_type interface_access, 182 u32 if_id, enum hws_access_type phy_access, 183 u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr, 184 u32 data_value, enum hws_operation oper_type); 185 static int ddr3_tip_pad_inv(u32 dev_num, u32 if_id); 186 static int ddr3_tip_rank_control(u32 dev_num, u32 if_id); 187 188 /* 189 * Update global training parameters by data from user 190 */ 191 int ddr3_tip_tune_training_params(u32 dev_num, 192 struct tune_train_params *params) 193 { 194 if (params->ck_delay != -1) 195 ck_delay = params->ck_delay; 196 if (params->ck_delay_16 != -1) 197 ck_delay_16 = params->ck_delay_16; 198 if (params->phy_reg3_val != -1) 199 phy_reg3_val = params->phy_reg3_val; 200 201 return MV_OK; 202 } 203 204 /* 205 * Configure CS 206 */ 207 int ddr3_tip_configure_cs(u32 dev_num, u32 if_id, u32 cs_num, u32 enable) 208 { 209 u32 data, addr_hi, data_high; 210 u32 mem_index; 211 struct hws_topology_map *tm = ddr3_get_topology_map(); 212 213 if (enable == 1) { 214 data = (tm->interface_params[if_id].bus_width == 215 BUS_WIDTH_8) ? 
0 : 1; 216 CHECK_STATUS(ddr3_tip_if_write 217 (dev_num, ACCESS_TYPE_UNICAST, if_id, 218 SDRAM_ACCESS_CONTROL_REG, (data << (cs_num * 4)), 219 0x3 << (cs_num * 4))); 220 mem_index = tm->interface_params[if_id].memory_size; 221 222 addr_hi = mem_size_config[mem_index] & 0x3; 223 CHECK_STATUS(ddr3_tip_if_write 224 (dev_num, ACCESS_TYPE_UNICAST, if_id, 225 SDRAM_ACCESS_CONTROL_REG, 226 (addr_hi << (2 + cs_num * 4)), 227 0x3 << (2 + cs_num * 4))); 228 229 data_high = (mem_size_config[mem_index] & 0x4) >> 2; 230 CHECK_STATUS(ddr3_tip_if_write 231 (dev_num, ACCESS_TYPE_UNICAST, if_id, 232 SDRAM_ACCESS_CONTROL_REG, 233 data_high << (20 + cs_num), 1 << (20 + cs_num))); 234 235 /* Enable Address Select Mode */ 236 CHECK_STATUS(ddr3_tip_if_write 237 (dev_num, ACCESS_TYPE_UNICAST, if_id, 238 SDRAM_ACCESS_CONTROL_REG, 1 << (16 + cs_num), 239 1 << (16 + cs_num))); 240 } 241 switch (cs_num) { 242 case 0: 243 case 1: 244 case 2: 245 CHECK_STATUS(ddr3_tip_if_write 246 (dev_num, ACCESS_TYPE_UNICAST, if_id, 247 DDR_CONTROL_LOW_REG, (enable << (cs_num + 11)), 248 1 << (cs_num + 11))); 249 break; 250 case 3: 251 CHECK_STATUS(ddr3_tip_if_write 252 (dev_num, ACCESS_TYPE_UNICAST, if_id, 253 DDR_CONTROL_LOW_REG, (enable << 15), 1 << 15)); 254 break; 255 } 256 257 return MV_OK; 258 } 259 260 /* 261 * Calculate number of CS 262 */ 263 static int calc_cs_num(u32 dev_num, u32 if_id, u32 *cs_num) 264 { 265 u32 cs; 266 u32 bus_cnt; 267 u32 cs_count; 268 u32 cs_bitmask; 269 u32 curr_cs_num = 0; 270 struct hws_topology_map *tm = ddr3_get_topology_map(); 271 272 for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) { 273 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt); 274 cs_count = 0; 275 cs_bitmask = tm->interface_params[if_id]. 276 as_bus_params[bus_cnt].cs_bitmask; 277 for (cs = 0; cs < MAX_CS_NUM; cs++) { 278 if ((cs_bitmask >> cs) & 1) 279 cs_count++; 280 } 281 282 if (curr_cs_num == 0) { 283 curr_cs_num = cs_count; 284 } else if (cs_count != curr_cs_num) { 285 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 286 ("CS number is different per bus (IF %d BUS %d cs_num %d curr_cs_num %d)\n", 287 if_id, bus_cnt, cs_count, 288 curr_cs_num)); 289 return MV_NOT_SUPPORTED; 290 } 291 } 292 *cs_num = curr_cs_num; 293 294 return MV_OK; 295 } 296 297 /* 298 * Init Controller Flow 299 */ 300 int hws_ddr3_tip_init_controller(u32 dev_num, struct init_cntr_param *init_cntr_prm) 301 { 302 u32 if_id; 303 u32 cs_num; 304 u32 t_refi = 0, t_hclk = 0, t_ckclk = 0, t_faw = 0, t_pd = 0, 305 t_wr = 0, t2t = 0, txpdll = 0; 306 u32 data_value = 0, bus_width = 0, page_size = 0, cs_cnt = 0, 307 mem_mask = 0, bus_index = 0; 308 enum hws_speed_bin speed_bin_index = SPEED_BIN_DDR_2133N; 309 enum hws_mem_size memory_size = MEM_2G; 310 enum hws_ddr_freq freq = init_freq; 311 enum hws_timing timing; 312 u32 cs_mask = 0; 313 u32 cl_value = 0, cwl_val = 0; 314 u32 refresh_interval_cnt = 0, bus_cnt = 0, adll_tap = 0; 315 enum hws_access_type access_type = ACCESS_TYPE_UNICAST; 316 u32 data_read[MAX_INTERFACE_NUM]; 317 struct hws_topology_map *tm = ddr3_get_topology_map(); 318 319 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 320 ("Init_controller, do_mrs_phy=%d, is_ctrl64_bit=%d\n", 321 init_cntr_prm->do_mrs_phy, 322 init_cntr_prm->is_ctrl64_bit)); 323 324 if (init_cntr_prm->init_phy == 1) { 325 CHECK_STATUS(ddr3_tip_configure_phy(dev_num)); 326 } 327 328 if (generic_init_controller == 1) { 329 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 330 VALIDATE_ACTIVE(tm->if_act_mask, if_id); 331 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 332 ("active IF %d\n", 
if_id)); 333 mem_mask = 0; 334 for (bus_index = 0; 335 bus_index < GET_TOPOLOGY_NUM_OF_BUSES(); 336 bus_index++) { 337 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index); 338 mem_mask |= 339 tm->interface_params[if_id]. 340 as_bus_params[bus_index].mirror_enable_bitmask; 341 } 342 343 if (mem_mask != 0) { 344 CHECK_STATUS(ddr3_tip_if_write 345 (dev_num, ACCESS_TYPE_MULTICAST, 346 if_id, CS_ENABLE_REG, 0, 347 0x8)); 348 } 349 350 memory_size = 351 tm->interface_params[if_id]. 352 memory_size; 353 speed_bin_index = 354 tm->interface_params[if_id]. 355 speed_bin_index; 356 freq = init_freq; 357 t_refi = 358 (tm->interface_params[if_id]. 359 interface_temp == 360 HWS_TEMP_HIGH) ? TREFI_HIGH : TREFI_LOW; 361 t_refi *= 1000; /* psec */ 362 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 363 ("memy_size %d speed_bin_ind %d freq %d t_refi %d\n", 364 memory_size, speed_bin_index, freq, 365 t_refi)); 366 /* HCLK & CK CLK in 2:1[ps] */ 367 /* t_ckclk is external clock */ 368 t_ckclk = (MEGA / freq_val[freq]); 369 /* t_hclk is internal clock */ 370 t_hclk = 2 * t_ckclk; 371 refresh_interval_cnt = t_refi / t_hclk; /* no units */ 372 bus_width = 373 (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) 374 == 1) ? (16) : (32); 375 376 if (init_cntr_prm->is_ctrl64_bit) 377 bus_width = 64; 378 379 data_value = 380 (refresh_interval_cnt | 0x4000 | 381 ((bus_width == 382 32) ? 0x8000 : 0) | 0x1000000) & ~(1 << 26); 383 384 /* Interface Bus Width */ 385 /* SRMode */ 386 CHECK_STATUS(ddr3_tip_if_write 387 (dev_num, access_type, if_id, 388 SDRAM_CONFIGURATION_REG, data_value, 389 0x100ffff)); 390 391 /* Interleave first command pre-charge enable (TBD) */ 392 CHECK_STATUS(ddr3_tip_if_write 393 (dev_num, access_type, if_id, 394 SDRAM_OPEN_PAGE_CONTROL_REG, (1 << 10), 395 (1 << 10))); 396 397 /* PHY configuration */ 398 /* 399 * Postamble Length = 1.5cc, Addresscntl to clk skew 400 * \BD, Preamble length normal, parralal ADLL enable 401 */ 402 CHECK_STATUS(ddr3_tip_if_write 403 (dev_num, access_type, if_id, 404 DRAM_PHY_CONFIGURATION, 0x28, 0x3e)); 405 if (init_cntr_prm->is_ctrl64_bit) { 406 /* positive edge */ 407 CHECK_STATUS(ddr3_tip_if_write 408 (dev_num, access_type, if_id, 409 DRAM_PHY_CONFIGURATION, 0x0, 410 0xff80)); 411 } 412 413 /* calibration block disable */ 414 /* Xbar Read buffer select (for Internal access) */ 415 CHECK_STATUS(ddr3_tip_if_write 416 (dev_num, access_type, if_id, 417 CALIB_MACHINE_CTRL_REG, 0x1200c, 418 0x7dffe01c)); 419 CHECK_STATUS(ddr3_tip_if_write 420 (dev_num, access_type, if_id, 421 CALIB_MACHINE_CTRL_REG, 422 calibration_update_control << 3, 0x3 << 3)); 423 424 /* Pad calibration control - enable */ 425 CHECK_STATUS(ddr3_tip_if_write 426 (dev_num, access_type, if_id, 427 CALIB_MACHINE_CTRL_REG, 0x1, 0x1)); 428 429 cs_mask = 0; 430 data_value = 0x7; 431 /* 432 * Address ctrl \96 Part of the Generic code 433 * The next configuration is done: 434 * 1) Memory Size 435 * 2) Bus_width 436 * 3) CS# 437 * 4) Page Number 438 * 5) t_faw 439 * Per Dunit get from the Map_topology the parameters: 440 * Bus_width 441 * t_faw is per Dunit not per CS 442 */ 443 page_size = 444 (tm->interface_params[if_id]. 445 bus_width == 446 BUS_WIDTH_8) ? page_param[memory_size]. 447 page_size_8bit : page_param[memory_size]. 448 page_size_16bit; 449 450 t_faw = 451 (page_size == 1) ? 
speed_bin_table(speed_bin_index, 452 SPEED_BIN_TFAW1K) 453 : speed_bin_table(speed_bin_index, 454 SPEED_BIN_TFAW2K); 455 456 data_value = TIME_2_CLOCK_CYCLES(t_faw, t_ckclk); 457 data_value = data_value << 24; 458 CHECK_STATUS(ddr3_tip_if_write 459 (dev_num, access_type, if_id, 460 SDRAM_ACCESS_CONTROL_REG, data_value, 461 0x7f000000)); 462 463 data_value = 464 (tm->interface_params[if_id]. 465 bus_width == BUS_WIDTH_8) ? 0 : 1; 466 467 /* create merge cs mask for all cs available in dunit */ 468 for (bus_cnt = 0; 469 bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); 470 bus_cnt++) { 471 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt); 472 cs_mask |= 473 tm->interface_params[if_id]. 474 as_bus_params[bus_cnt].cs_bitmask; 475 } 476 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 477 ("Init_controller IF %d cs_mask %d\n", 478 if_id, cs_mask)); 479 /* 480 * Configure the next upon the Map Topology \96 If the 481 * Dunit is CS0 Configure CS0 if it is multi CS 482 * configure them both: The Bust_width it\92s the 483 * Memory Bus width \96 x8 or x16 484 */ 485 for (cs_cnt = 0; cs_cnt < NUM_OF_CS; cs_cnt++) { 486 ddr3_tip_configure_cs(dev_num, if_id, cs_cnt, 487 ((cs_mask & (1 << cs_cnt)) ? 1 488 : 0)); 489 } 490 491 if (init_cntr_prm->do_mrs_phy) { 492 /* 493 * MR0 \96 Part of the Generic code 494 * The next configuration is done: 495 * 1) Burst Length 496 * 2) CAS Latency 497 * get for each dunit what is it Speed_bin & 498 * Target Frequency. From those both parameters 499 * get the appropriate Cas_l from the CL table 500 */ 501 cl_value = 502 tm->interface_params[if_id]. 503 cas_l; 504 cwl_val = 505 tm->interface_params[if_id]. 506 cas_wl; 507 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 508 ("cl_value 0x%x cwl_val 0x%x\n", 509 cl_value, cwl_val)); 510 511 data_value = 512 ((cl_mask_table[cl_value] & 0x1) << 2) | 513 ((cl_mask_table[cl_value] & 0xe) << 3); 514 CHECK_STATUS(ddr3_tip_if_write 515 (dev_num, access_type, if_id, 516 MR0_REG, data_value, 517 (0x7 << 4) | (1 << 2))); 518 CHECK_STATUS(ddr3_tip_if_write 519 (dev_num, access_type, if_id, 520 MR0_REG, twr_mask_table[t_wr + 1], 521 0xe00)); 522 523 /* 524 * MR1: Set RTT and DIC Design GL values 525 * configured by user 526 */ 527 CHECK_STATUS(ddr3_tip_if_write 528 (dev_num, ACCESS_TYPE_MULTICAST, 529 PARAM_NOT_CARE, MR1_REG, 530 g_dic | g_rtt_nom, 0x266)); 531 532 /* MR2 - Part of the Generic code */ 533 /* 534 * The next configuration is done: 535 * 1) SRT 536 * 2) CAS Write Latency 537 */ 538 data_value = (cwl_mask_table[cwl_val] << 3); 539 data_value |= 540 ((tm->interface_params[if_id]. 541 interface_temp == 542 HWS_TEMP_HIGH) ? 
(1 << 7) : 0); 543 CHECK_STATUS(ddr3_tip_if_write 544 (dev_num, access_type, if_id, 545 MR2_REG, data_value, 546 (0x7 << 3) | (0x1 << 7) | (0x3 << 547 9))); 548 } 549 550 ddr3_tip_write_odt(dev_num, access_type, if_id, 551 cl_value, cwl_val); 552 ddr3_tip_set_timing(dev_num, access_type, if_id, freq); 553 554 CHECK_STATUS(ddr3_tip_if_write 555 (dev_num, access_type, if_id, 556 DUNIT_CONTROL_HIGH_REG, 0x177, 557 0x1000177)); 558 559 if (init_cntr_prm->is_ctrl64_bit) { 560 /* disable 0.25 cc delay */ 561 CHECK_STATUS(ddr3_tip_if_write 562 (dev_num, access_type, if_id, 563 DUNIT_CONTROL_HIGH_REG, 0x0, 564 0x800)); 565 } 566 567 /* reset bit 7 */ 568 CHECK_STATUS(ddr3_tip_if_write 569 (dev_num, access_type, if_id, 570 DUNIT_CONTROL_HIGH_REG, 571 (init_cntr_prm->msys_init << 7), (1 << 7))); 572 573 timing = tm->interface_params[if_id].timing; 574 575 if (mode2_t != 0xff) { 576 t2t = mode2_t; 577 } else if (timing != HWS_TIM_DEFAULT) { 578 /* Board topology map is forcing timing */ 579 t2t = (timing == HWS_TIM_2T) ? 1 : 0; 580 } else { 581 /* calculate number of CS (per interface) */ 582 CHECK_STATUS(calc_cs_num 583 (dev_num, if_id, &cs_num)); 584 t2t = (cs_num == 1) ? 0 : 1; 585 } 586 587 CHECK_STATUS(ddr3_tip_if_write 588 (dev_num, access_type, if_id, 589 DDR_CONTROL_LOW_REG, t2t << 3, 590 0x3 << 3)); 591 /* move the block to ddr3_tip_set_timing - start */ 592 t_pd = GET_MAX_VALUE(t_ckclk * 3, 593 speed_bin_table(speed_bin_index, 594 SPEED_BIN_TPD)); 595 t_pd = TIME_2_CLOCK_CYCLES(t_pd, t_ckclk); 596 txpdll = GET_MAX_VALUE(t_ckclk * 10, 24); 597 txpdll = CEIL_DIVIDE((txpdll - 1), t_ckclk); 598 CHECK_STATUS(ddr3_tip_if_write 599 (dev_num, access_type, if_id, 600 DDR_TIMING_REG, txpdll << 4, 601 0x1f << 4)); 602 CHECK_STATUS(ddr3_tip_if_write 603 (dev_num, access_type, if_id, 604 DDR_TIMING_REG, 0x28 << 9, 0x3f << 9)); 605 CHECK_STATUS(ddr3_tip_if_write 606 (dev_num, access_type, if_id, 607 DDR_TIMING_REG, 0xa << 21, 0xff << 21)); 608 609 /* move the block to ddr3_tip_set_timing - end */ 610 /* AUTO_ZQC_TIMING */ 611 CHECK_STATUS(ddr3_tip_if_write 612 (dev_num, access_type, if_id, 613 TIMING_REG, (AUTO_ZQC_TIMING | (2 << 20)), 614 0x3fffff)); 615 CHECK_STATUS(ddr3_tip_if_read 616 (dev_num, access_type, if_id, 617 DRAM_PHY_CONFIGURATION, data_read, 0x30)); 618 data_value = 619 (data_read[if_id] == 0) ? 
(1 << 11) : 0; 620 CHECK_STATUS(ddr3_tip_if_write 621 (dev_num, access_type, if_id, 622 DUNIT_CONTROL_HIGH_REG, data_value, 623 (1 << 11))); 624 625 /* Set Active control for ODT write transactions */ 626 CHECK_STATUS(ddr3_tip_if_write 627 (dev_num, ACCESS_TYPE_MULTICAST, 628 PARAM_NOT_CARE, 0x1494, g_odt_config, 629 MASK_ALL_BITS)); 630 } 631 } else { 632 #ifdef STATIC_ALGO_SUPPORT 633 CHECK_STATUS(ddr3_tip_static_init_controller(dev_num)); 634 #if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ARMADA_39X) 635 CHECK_STATUS(ddr3_tip_static_phy_init_controller(dev_num)); 636 #endif 637 #endif /* STATIC_ALGO_SUPPORT */ 638 } 639 640 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 641 VALIDATE_ACTIVE(tm->if_act_mask, if_id); 642 CHECK_STATUS(ddr3_tip_rank_control(dev_num, if_id)); 643 644 if (init_cntr_prm->do_mrs_phy) { 645 CHECK_STATUS(ddr3_tip_pad_inv(dev_num, if_id)); 646 } 647 648 /* Pad calibration control - disable */ 649 CHECK_STATUS(ddr3_tip_if_write 650 (dev_num, access_type, if_id, 651 CALIB_MACHINE_CTRL_REG, 0x0, 0x1)); 652 CHECK_STATUS(ddr3_tip_if_write 653 (dev_num, access_type, if_id, 654 CALIB_MACHINE_CTRL_REG, 655 calibration_update_control << 3, 0x3 << 3)); 656 } 657 658 CHECK_STATUS(ddr3_tip_enable_init_sequence(dev_num)); 659 660 if (delay_enable != 0) { 661 adll_tap = MEGA / (freq_val[freq] * 64); 662 ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap); 663 } 664 665 return MV_OK; 666 } 667 668 /* 669 * Load Topology map 670 */ 671 int hws_ddr3_tip_load_topology_map(u32 dev_num, struct hws_topology_map *tm) 672 { 673 enum hws_speed_bin speed_bin_index; 674 enum hws_ddr_freq freq = DDR_FREQ_LIMIT; 675 u32 if_id; 676 677 freq_val[DDR_FREQ_LOW_FREQ] = dfs_low_freq; 678 tm = ddr3_get_topology_map(); 679 CHECK_STATUS(ddr3_tip_get_first_active_if 680 ((u8)dev_num, tm->if_act_mask, 681 &first_active_if)); 682 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 683 ("board IF_Mask=0x%x num_of_bus_per_interface=0x%x\n", 684 tm->if_act_mask, 685 tm->num_of_bus_per_interface)); 686 687 /* 688 * if CL, CWL values are missing in topology map, then fill them 689 * according to speedbin tables 690 */ 691 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 692 VALIDATE_ACTIVE(tm->if_act_mask, if_id); 693 speed_bin_index = 694 tm->interface_params[if_id].speed_bin_index; 695 /* TBD memory frequency of interface 0 only is used ! */ 696 freq = tm->interface_params[first_active_if].memory_freq; 697 698 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 699 ("speed_bin_index =%d freq=%d cl=%d cwl=%d\n", 700 speed_bin_index, freq_val[freq], 701 tm->interface_params[if_id]. 702 cas_l, 703 tm->interface_params[if_id]. 704 cas_wl)); 705 706 if (tm->interface_params[if_id].cas_l == 0) { 707 tm->interface_params[if_id].cas_l = 708 cas_latency_table[speed_bin_index].cl_val[freq]; 709 } 710 711 if (tm->interface_params[if_id].cas_wl == 0) { 712 tm->interface_params[if_id].cas_wl = 713 cas_write_latency_table[speed_bin_index].cl_val[freq]; 714 } 715 } 716 717 return MV_OK; 718 } 719 720 /* 721 * RANK Control Flow 722 */ 723 static int ddr3_tip_rank_control(u32 dev_num, u32 if_id) 724 { 725 u32 data_value = 0, bus_cnt; 726 struct hws_topology_map *tm = ddr3_get_topology_map(); 727 728 for (bus_cnt = 1; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) { 729 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt); 730 if ((tm->interface_params[if_id]. 731 as_bus_params[0].cs_bitmask != 732 tm->interface_params[if_id]. 733 as_bus_params[bus_cnt].cs_bitmask) || 734 (tm->interface_params[if_id]. 
735 as_bus_params[0].mirror_enable_bitmask != 736 tm->interface_params[if_id]. 737 as_bus_params[bus_cnt].mirror_enable_bitmask)) 738 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 739 ("WARNING:Wrong configuration for pup #%d CS mask and CS mirroring for all pups should be the same\n", 740 bus_cnt)); 741 } 742 743 data_value |= tm->interface_params[if_id]. 744 as_bus_params[0].cs_bitmask; 745 data_value |= tm->interface_params[if_id]. 746 as_bus_params[0].mirror_enable_bitmask << 4; 747 748 CHECK_STATUS(ddr3_tip_if_write 749 (dev_num, ACCESS_TYPE_UNICAST, if_id, RANK_CTRL_REG, 750 data_value, 0xff)); 751 752 return MV_OK; 753 } 754 755 /* 756 * PAD Inverse Flow 757 */ 758 static int ddr3_tip_pad_inv(u32 dev_num, u32 if_id) 759 { 760 u32 bus_cnt, data_value, ck_swap_pup_ctrl; 761 struct hws_topology_map *tm = ddr3_get_topology_map(); 762 763 for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) { 764 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt); 765 if (tm->interface_params[if_id]. 766 as_bus_params[bus_cnt].is_dqs_swap == 1) { 767 /* dqs swap */ 768 ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST, 769 if_id, bus_cnt, 770 DDR_PHY_DATA, 771 PHY_CONTROL_PHY_REG, 0xc0, 772 0xc0); 773 } 774 775 if (tm->interface_params[if_id]. 776 as_bus_params[bus_cnt].is_ck_swap == 1) { 777 if (bus_cnt <= 1) 778 data_value = 0x5 << 2; 779 else 780 data_value = 0xa << 2; 781 782 /* mask equals data */ 783 /* ck swap pup is only control pup #0 ! */ 784 ck_swap_pup_ctrl = 0; 785 ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST, 786 if_id, ck_swap_pup_ctrl, 787 DDR_PHY_CONTROL, 788 PHY_CONTROL_PHY_REG, 789 data_value, data_value); 790 } 791 } 792 793 return MV_OK; 794 } 795 796 /* 797 * Run Training Flow 798 */ 799 int hws_ddr3_tip_run_alg(u32 dev_num, enum hws_algo_type algo_type) 800 { 801 int ret = MV_OK, ret_tune = MV_OK; 802 803 #ifdef ODT_TEST_SUPPORT 804 if (finger_test == 1) 805 return odt_test(dev_num, algo_type); 806 #endif 807 808 if (algo_type == ALGO_TYPE_DYNAMIC) { 809 ret = ddr3_tip_ddr3_auto_tune(dev_num); 810 } else { 811 #ifdef STATIC_ALGO_SUPPORT 812 { 813 enum hws_ddr_freq freq; 814 freq = init_freq; 815 816 /* add to mask */ 817 if (is_adll_calib_before_init != 0) { 818 printf("with adll calib before init\n"); 819 adll_calibration(dev_num, ACCESS_TYPE_MULTICAST, 820 0, freq); 821 } 822 /* 823 * Frequency per interface is not relevant, 824 * only interface 0 825 */ 826 ret = ddr3_tip_run_static_alg(dev_num, 827 freq); 828 } 829 #endif 830 } 831 832 if (ret != MV_OK) { 833 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 834 ("Run_alg: tuning failed %d\n", ret_tune)); 835 } 836 837 return ret; 838 } 839 840 #ifdef ODT_TEST_SUPPORT 841 /* 842 * ODT Test 843 */ 844 static int odt_test(u32 dev_num, enum hws_algo_type algo_type) 845 { 846 int ret = MV_OK, ret_tune = MV_OK; 847 int pfinger_val = 0, nfinger_val; 848 849 for (pfinger_val = p_finger_start; pfinger_val <= p_finger_end; 850 pfinger_val += p_finger_step) { 851 for (nfinger_val = n_finger_start; nfinger_val <= n_finger_end; 852 nfinger_val += n_finger_step) { 853 if (finger_test != 0) { 854 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 855 ("pfinger_val %d nfinger_val %d\n", 856 pfinger_val, nfinger_val)); 857 p_finger = pfinger_val; 858 n_finger = nfinger_val; 859 } 860 861 if (algo_type == ALGO_TYPE_DYNAMIC) { 862 ret = ddr3_tip_ddr3_auto_tune(dev_num); 863 } else { 864 /* 865 * Frequency per interface is not relevant, 866 * only interface 0 867 */ 868 ret = ddr3_tip_run_static_alg(dev_num, 869 init_freq); 870 } 871 } 872 } 873 874 
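	/*
	 * With the default finger ranges defined at the top of this file
	 * (start 11, end 64, step 3 for both the P and N fingers), each
	 * finger walks the values 11, 14, ..., 62, i.e. 18 steps per
	 * finger, so the nested sweep above re-runs the full training
	 * flow 18 * 18 = 324 times; ret holds the result of the last
	 * iteration only.
	 */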
if (ret_tune != MV_OK) { 875 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 876 ("Run_alg: tuning failed %d\n", ret_tune)); 877 ret = (ret == MV_OK) ? ret_tune : ret; 878 } 879 880 return ret; 881 } 882 #endif 883 884 /* 885 * Select Controller 886 */ 887 int hws_ddr3_tip_select_ddr_controller(u32 dev_num, int enable) 888 { 889 if (config_func_info[dev_num].tip_dunit_mux_select_func != NULL) { 890 return config_func_info[dev_num]. 891 tip_dunit_mux_select_func((u8)dev_num, enable); 892 } 893 894 return MV_FAIL; 895 } 896 897 /* 898 * Dunit Register Write 899 */ 900 int ddr3_tip_if_write(u32 dev_num, enum hws_access_type interface_access, 901 u32 if_id, u32 reg_addr, u32 data_value, u32 mask) 902 { 903 if (config_func_info[dev_num].tip_dunit_write_func != NULL) { 904 return config_func_info[dev_num]. 905 tip_dunit_write_func((u8)dev_num, interface_access, 906 if_id, reg_addr, 907 data_value, mask); 908 } 909 910 return MV_FAIL; 911 } 912 913 /* 914 * Dunit Register Read 915 */ 916 int ddr3_tip_if_read(u32 dev_num, enum hws_access_type interface_access, 917 u32 if_id, u32 reg_addr, u32 *data, u32 mask) 918 { 919 if (config_func_info[dev_num].tip_dunit_read_func != NULL) { 920 return config_func_info[dev_num]. 921 tip_dunit_read_func((u8)dev_num, interface_access, 922 if_id, reg_addr, 923 data, mask); 924 } 925 926 return MV_FAIL; 927 } 928 929 /* 930 * Dunit Register Polling 931 */ 932 int ddr3_tip_if_polling(u32 dev_num, enum hws_access_type access_type, 933 u32 if_id, u32 exp_value, u32 mask, u32 offset, 934 u32 poll_tries) 935 { 936 u32 poll_cnt = 0, interface_num = 0, start_if, end_if; 937 u32 read_data[MAX_INTERFACE_NUM]; 938 int ret; 939 int is_fail = 0, is_if_fail; 940 struct hws_topology_map *tm = ddr3_get_topology_map(); 941 942 if (access_type == ACCESS_TYPE_MULTICAST) { 943 start_if = 0; 944 end_if = MAX_INTERFACE_NUM - 1; 945 } else { 946 start_if = if_id; 947 end_if = if_id; 948 } 949 950 for (interface_num = start_if; interface_num <= end_if; interface_num++) { 951 /* polling bit 3 for n times */ 952 VALIDATE_ACTIVE(tm->if_act_mask, interface_num); 953 954 is_if_fail = 0; 955 for (poll_cnt = 0; poll_cnt < poll_tries; poll_cnt++) { 956 ret = 957 ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST, 958 interface_num, offset, read_data, 959 mask); 960 if (ret != MV_OK) 961 return ret; 962 963 if (read_data[interface_num] == exp_value) 964 break; 965 } 966 967 if (poll_cnt >= poll_tries) { 968 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 969 ("max poll IF #%d\n", interface_num)); 970 is_fail = 1; 971 is_if_fail = 1; 972 } 973 974 training_result[training_stage][interface_num] = 975 (is_if_fail == 1) ? TEST_FAILED : TEST_SUCCESS; 976 } 977 978 return (is_fail == 0) ? 
MV_OK : MV_FAIL; 979 } 980 981 /* 982 * Bus read access 983 */ 984 int ddr3_tip_bus_read(u32 dev_num, u32 if_id, 985 enum hws_access_type phy_access, u32 phy_id, 986 enum hws_ddr_phy phy_type, u32 reg_addr, u32 *data) 987 { 988 u32 bus_index = 0; 989 u32 data_read[MAX_INTERFACE_NUM]; 990 struct hws_topology_map *tm = ddr3_get_topology_map(); 991 992 if (phy_access == ACCESS_TYPE_MULTICAST) { 993 for (bus_index = 0; bus_index < GET_TOPOLOGY_NUM_OF_BUSES(); 994 bus_index++) { 995 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index); 996 CHECK_STATUS(ddr3_tip_bus_access 997 (dev_num, ACCESS_TYPE_UNICAST, 998 if_id, ACCESS_TYPE_UNICAST, 999 bus_index, phy_type, reg_addr, 0, 1000 OPERATION_READ)); 1001 CHECK_STATUS(ddr3_tip_if_read 1002 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1003 PHY_REG_FILE_ACCESS, data_read, 1004 MASK_ALL_BITS)); 1005 data[bus_index] = (data_read[if_id] & 0xffff); 1006 } 1007 } else { 1008 CHECK_STATUS(ddr3_tip_bus_access 1009 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1010 phy_access, phy_id, phy_type, reg_addr, 0, 1011 OPERATION_READ)); 1012 CHECK_STATUS(ddr3_tip_if_read 1013 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1014 PHY_REG_FILE_ACCESS, data_read, MASK_ALL_BITS)); 1015 1016 /* 1017 * only 16 lsb bit are valid in Phy (each register is different, 1018 * some can actually be less than 16 bits) 1019 */ 1020 *data = (data_read[if_id] & 0xffff); 1021 } 1022 1023 return MV_OK; 1024 } 1025 1026 /* 1027 * Bus write access 1028 */ 1029 int ddr3_tip_bus_write(u32 dev_num, enum hws_access_type interface_access, 1030 u32 if_id, enum hws_access_type phy_access, 1031 u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr, 1032 u32 data_value) 1033 { 1034 CHECK_STATUS(ddr3_tip_bus_access 1035 (dev_num, interface_access, if_id, phy_access, 1036 phy_id, phy_type, reg_addr, data_value, OPERATION_WRITE)); 1037 1038 return MV_OK; 1039 } 1040 1041 /* 1042 * Bus access routine (relevant for both read & write) 1043 */ 1044 static int ddr3_tip_bus_access(u32 dev_num, enum hws_access_type interface_access, 1045 u32 if_id, enum hws_access_type phy_access, 1046 u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr, 1047 u32 data_value, enum hws_operation oper_type) 1048 { 1049 u32 addr_low = 0x3f & reg_addr; 1050 u32 addr_hi = ((0xc0 & reg_addr) >> 6); 1051 u32 data_p1 = 1052 (oper_type << 30) + (addr_hi << 28) + (phy_access << 27) + 1053 (phy_type << 26) + (phy_id << 22) + (addr_low << 16) + 1054 (data_value & 0xffff); 1055 u32 data_p2 = data_p1 + (1 << 31); 1056 u32 start_if, end_if; 1057 struct hws_topology_map *tm = ddr3_get_topology_map(); 1058 1059 CHECK_STATUS(ddr3_tip_if_write 1060 (dev_num, interface_access, if_id, PHY_REG_FILE_ACCESS, 1061 data_p1, MASK_ALL_BITS)); 1062 CHECK_STATUS(ddr3_tip_if_write 1063 (dev_num, interface_access, if_id, PHY_REG_FILE_ACCESS, 1064 data_p2, MASK_ALL_BITS)); 1065 1066 if (interface_access == ACCESS_TYPE_UNICAST) { 1067 start_if = if_id; 1068 end_if = if_id; 1069 } else { 1070 start_if = 0; 1071 end_if = MAX_INTERFACE_NUM - 1; 1072 } 1073 1074 /* polling for read/write execution done */ 1075 for (if_id = start_if; if_id <= end_if; if_id++) { 1076 VALIDATE_ACTIVE(tm->if_act_mask, if_id); 1077 CHECK_STATUS(is_bus_access_done 1078 (dev_num, if_id, PHY_REG_FILE_ACCESS, 31)); 1079 } 1080 1081 return MV_OK; 1082 } 1083 1084 /* 1085 * Check bus access done 1086 */ 1087 static int is_bus_access_done(u32 dev_num, u32 if_id, u32 dunit_reg_adrr, 1088 u32 bit) 1089 { 1090 u32 rd_data = 1; 1091 u32 cnt = 0; 1092 u32 data_read[MAX_INTERFACE_NUM]; 1093 1094 CHECK_STATUS(ddr3_tip_if_read 
1095 (dev_num, ACCESS_TYPE_UNICAST, if_id, dunit_reg_adrr, 1096 data_read, MASK_ALL_BITS)); 1097 rd_data = data_read[if_id]; 1098 rd_data &= (1 << bit); 1099 1100 while (rd_data != 0) { 1101 if (cnt++ >= MAX_POLLING_ITERATIONS) 1102 break; 1103 1104 CHECK_STATUS(ddr3_tip_if_read 1105 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1106 dunit_reg_adrr, data_read, MASK_ALL_BITS)); 1107 rd_data = data_read[if_id]; 1108 rd_data &= (1 << bit); 1109 } 1110 1111 if (cnt < MAX_POLLING_ITERATIONS) 1112 return MV_OK; 1113 else 1114 return MV_FAIL; 1115 } 1116 1117 /* 1118 * Phy read-modify-write 1119 */ 1120 int ddr3_tip_bus_read_modify_write(u32 dev_num, enum hws_access_type access_type, 1121 u32 interface_id, u32 phy_id, 1122 enum hws_ddr_phy phy_type, u32 reg_addr, 1123 u32 data_value, u32 reg_mask) 1124 { 1125 u32 data_val = 0, if_id, start_if, end_if; 1126 struct hws_topology_map *tm = ddr3_get_topology_map(); 1127 1128 if (access_type == ACCESS_TYPE_MULTICAST) { 1129 start_if = 0; 1130 end_if = MAX_INTERFACE_NUM - 1; 1131 } else { 1132 start_if = interface_id; 1133 end_if = interface_id; 1134 } 1135 1136 for (if_id = start_if; if_id <= end_if; if_id++) { 1137 VALIDATE_ACTIVE(tm->if_act_mask, if_id); 1138 CHECK_STATUS(ddr3_tip_bus_read 1139 (dev_num, if_id, ACCESS_TYPE_UNICAST, phy_id, 1140 phy_type, reg_addr, &data_val)); 1141 data_value = (data_val & (~reg_mask)) | (data_value & reg_mask); 1142 CHECK_STATUS(ddr3_tip_bus_write 1143 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1144 ACCESS_TYPE_UNICAST, phy_id, phy_type, reg_addr, 1145 data_value)); 1146 } 1147 1148 return MV_OK; 1149 } 1150 1151 /* 1152 * ADLL Calibration 1153 */ 1154 int adll_calibration(u32 dev_num, enum hws_access_type access_type, 1155 u32 if_id, enum hws_ddr_freq frequency) 1156 { 1157 struct hws_tip_freq_config_info freq_config_info; 1158 u32 bus_cnt = 0; 1159 struct hws_topology_map *tm = ddr3_get_topology_map(); 1160 1161 /* Reset Diver_b assert -> de-assert */ 1162 CHECK_STATUS(ddr3_tip_if_write 1163 (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG, 1164 0, 0x10000000)); 1165 mdelay(10); 1166 CHECK_STATUS(ddr3_tip_if_write 1167 (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG, 1168 0x10000000, 0x10000000)); 1169 1170 if (config_func_info[dev_num].tip_get_freq_config_info_func != NULL) { 1171 CHECK_STATUS(config_func_info[dev_num]. 
1172 tip_get_freq_config_info_func((u8)dev_num, frequency, 1173 &freq_config_info)); 1174 } else { 1175 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 1176 ("tip_get_freq_config_info_func is NULL")); 1177 return MV_NOT_INITIALIZED; 1178 } 1179 1180 for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) { 1181 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt); 1182 CHECK_STATUS(ddr3_tip_bus_read_modify_write 1183 (dev_num, access_type, if_id, bus_cnt, 1184 DDR_PHY_DATA, BW_PHY_REG, 1185 freq_config_info.bw_per_freq << 8, 0x700)); 1186 CHECK_STATUS(ddr3_tip_bus_read_modify_write 1187 (dev_num, access_type, if_id, bus_cnt, 1188 DDR_PHY_DATA, RATE_PHY_REG, 1189 freq_config_info.rate_per_freq, 0x7)); 1190 } 1191 1192 /* DUnit to Phy drive post edge, ADLL reset assert de-assert */ 1193 CHECK_STATUS(ddr3_tip_if_write 1194 (dev_num, access_type, if_id, DRAM_PHY_CONFIGURATION, 1195 0, (0x80000000 | 0x40000000))); 1196 mdelay(100 / (freq_val[frequency] / freq_val[DDR_FREQ_LOW_FREQ])); 1197 CHECK_STATUS(ddr3_tip_if_write 1198 (dev_num, access_type, if_id, DRAM_PHY_CONFIGURATION, 1199 (0x80000000 | 0x40000000), (0x80000000 | 0x40000000))); 1200 1201 /* polling for ADLL Done */ 1202 if (ddr3_tip_if_polling(dev_num, access_type, if_id, 1203 0x3ff03ff, 0x3ff03ff, PHY_LOCK_STATUS_REG, 1204 MAX_POLLING_ITERATIONS) != MV_OK) { 1205 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 1206 ("Freq_set: DDR3 poll failed(1)")); 1207 } 1208 1209 /* pup data_pup reset assert-> deassert */ 1210 CHECK_STATUS(ddr3_tip_if_write 1211 (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG, 1212 0, 0x60000000)); 1213 mdelay(10); 1214 CHECK_STATUS(ddr3_tip_if_write 1215 (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG, 1216 0x60000000, 0x60000000)); 1217 1218 return MV_OK; 1219 } 1220 1221 int ddr3_tip_freq_set(u32 dev_num, enum hws_access_type access_type, 1222 u32 if_id, enum hws_ddr_freq frequency) 1223 { 1224 u32 cl_value = 0, cwl_value = 0, mem_mask = 0, val = 0, 1225 bus_cnt = 0, t_hclk = 0, t_wr = 0, 1226 refresh_interval_cnt = 0, cnt_id; 1227 u32 t_refi = 0, end_if, start_if; 1228 u32 bus_index = 0; 1229 int is_dll_off = 0; 1230 enum hws_speed_bin speed_bin_index = 0; 1231 struct hws_tip_freq_config_info freq_config_info; 1232 enum hws_result *flow_result = training_result[training_stage]; 1233 u32 adll_tap = 0; 1234 u32 cs_mask[MAX_INTERFACE_NUM]; 1235 struct hws_topology_map *tm = ddr3_get_topology_map(); 1236 1237 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 1238 ("dev %d access %d IF %d freq %d\n", dev_num, 1239 access_type, if_id, frequency)); 1240 1241 if (frequency == DDR_FREQ_LOW_FREQ) 1242 is_dll_off = 1; 1243 if (access_type == ACCESS_TYPE_MULTICAST) { 1244 start_if = 0; 1245 end_if = MAX_INTERFACE_NUM - 1; 1246 } else { 1247 start_if = if_id; 1248 end_if = if_id; 1249 } 1250 1251 /* calculate interface cs mask - Oferb 4/11 */ 1252 /* speed bin can be different for each interface */ 1253 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1254 /* cs enable is active low */ 1255 VALIDATE_ACTIVE(tm->if_act_mask, if_id); 1256 cs_mask[if_id] = CS_BIT_MASK; 1257 training_result[training_stage][if_id] = TEST_SUCCESS; 1258 ddr3_tip_calc_cs_mask(dev_num, if_id, effective_cs, 1259 &cs_mask[if_id]); 1260 } 1261 1262 /* speed bin can be different for each interface */ 1263 /* 1264 * moti b - need to remove the loop for multicas access functions 1265 * and loop the unicast access functions 1266 */ 1267 for (if_id = start_if; if_id <= end_if; if_id++) { 1268 if (IS_ACTIVE(tm->if_act_mask, if_id) == 0) 1269 continue; 1270 1271 
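		/*
		 * Summary of the per-interface frequency change performed
		 * below: pick CL/CWL for the target frequency (from the
		 * topology map if it already matches, otherwise from the
		 * speed-bin tables), block new transactions and enter
		 * self-refresh, reprogram the PLL divider, rescale the
		 * refresh counter to the new clock, restore CL/CWL/tWR and
		 * the RTT/ODT settings, re-run ADLL calibration, exit
		 * self-refresh, issue a refresh command and rewrite MR0/MR2.
		 *
		 * Refresh counter example (illustrative numbers, assuming
		 * MEGA is 10^6 and freq_val[] is in MHz): at 667 MHz,
		 * t_hclk = MEGA / (667 / 2) = 3003 ps, so a 7.8 us t_refi
		 * (7800000 ps) gives refresh_interval_cnt =
		 * 7800000 / 3003 = ~2597.
		 */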
flow_result[if_id] = TEST_SUCCESS; 1272 speed_bin_index = 1273 tm->interface_params[if_id].speed_bin_index; 1274 if (tm->interface_params[if_id].memory_freq == 1275 frequency) { 1276 cl_value = 1277 tm->interface_params[if_id].cas_l; 1278 cwl_value = 1279 tm->interface_params[if_id].cas_wl; 1280 } else { 1281 cl_value = 1282 cas_latency_table[speed_bin_index].cl_val[frequency]; 1283 cwl_value = 1284 cas_write_latency_table[speed_bin_index]. 1285 cl_val[frequency]; 1286 } 1287 1288 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 1289 ("Freq_set dev 0x%x access 0x%x if 0x%x freq 0x%x speed %d:\n\t", 1290 dev_num, access_type, if_id, 1291 frequency, speed_bin_index)); 1292 1293 for (cnt_id = 0; cnt_id < DDR_FREQ_LIMIT; cnt_id++) { 1294 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, 1295 ("%d ", 1296 cas_latency_table[speed_bin_index]. 1297 cl_val[cnt_id])); 1298 } 1299 1300 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, ("\n")); 1301 mem_mask = 0; 1302 for (bus_index = 0; bus_index < GET_TOPOLOGY_NUM_OF_BUSES(); 1303 bus_index++) { 1304 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index); 1305 mem_mask |= 1306 tm->interface_params[if_id]. 1307 as_bus_params[bus_index].mirror_enable_bitmask; 1308 } 1309 1310 if (mem_mask != 0) { 1311 /* motib redundant in KW28 */ 1312 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, 1313 if_id, 1314 CS_ENABLE_REG, 0, 0x8)); 1315 } 1316 1317 /* dll state after exiting SR */ 1318 if (is_dll_off == 1) { 1319 CHECK_STATUS(ddr3_tip_if_write 1320 (dev_num, access_type, if_id, 1321 DFS_REG, 0x1, 0x1)); 1322 } else { 1323 CHECK_STATUS(ddr3_tip_if_write 1324 (dev_num, access_type, if_id, 1325 DFS_REG, 0, 0x1)); 1326 } 1327 1328 CHECK_STATUS(ddr3_tip_if_write 1329 (dev_num, access_type, if_id, 1330 DUNIT_MMASK_REG, 0, 0x1)); 1331 /* DFS - block transactions */ 1332 CHECK_STATUS(ddr3_tip_if_write 1333 (dev_num, access_type, if_id, 1334 DFS_REG, 0x2, 0x2)); 1335 1336 /* disable ODT in case of dll off */ 1337 if (is_dll_off == 1) { 1338 CHECK_STATUS(ddr3_tip_if_write 1339 (dev_num, access_type, if_id, 1340 0x1874, 0, 0x244)); 1341 CHECK_STATUS(ddr3_tip_if_write 1342 (dev_num, access_type, if_id, 1343 0x1884, 0, 0x244)); 1344 CHECK_STATUS(ddr3_tip_if_write 1345 (dev_num, access_type, if_id, 1346 0x1894, 0, 0x244)); 1347 CHECK_STATUS(ddr3_tip_if_write 1348 (dev_num, access_type, if_id, 1349 0x18a4, 0, 0x244)); 1350 } 1351 1352 /* DFS - Enter Self-Refresh */ 1353 CHECK_STATUS(ddr3_tip_if_write 1354 (dev_num, access_type, if_id, DFS_REG, 0x4, 1355 0x4)); 1356 /* polling on self refresh entry */ 1357 if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, 1358 if_id, 0x8, 0x8, DFS_REG, 1359 MAX_POLLING_ITERATIONS) != MV_OK) { 1360 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 1361 ("Freq_set: DDR3 poll failed on SR entry\n")); 1362 } 1363 1364 /* PLL configuration */ 1365 if (config_func_info[dev_num].tip_set_freq_divider_func != NULL) { 1366 config_func_info[dev_num]. 1367 tip_set_freq_divider_func(dev_num, if_id, 1368 frequency); 1369 } 1370 1371 /* PLL configuration End */ 1372 1373 /* adjust t_refi to new frequency */ 1374 t_refi = (tm->interface_params[if_id].interface_temp == 1375 HWS_TEMP_HIGH) ? 
TREFI_LOW : TREFI_HIGH; 1376 t_refi *= 1000; /*psec */ 1377 1378 /* HCLK in[ps] */ 1379 t_hclk = MEGA / (freq_val[frequency] / 2); 1380 refresh_interval_cnt = t_refi / t_hclk; /* no units */ 1381 val = 0x4000 | refresh_interval_cnt; 1382 CHECK_STATUS(ddr3_tip_if_write 1383 (dev_num, access_type, if_id, 1384 SDRAM_CONFIGURATION_REG, val, 0x7fff)); 1385 1386 /* DFS - CL/CWL/WR parameters after exiting SR */ 1387 CHECK_STATUS(ddr3_tip_if_write 1388 (dev_num, access_type, if_id, DFS_REG, 1389 (cl_mask_table[cl_value] << 8), 0xf00)); 1390 CHECK_STATUS(ddr3_tip_if_write 1391 (dev_num, access_type, if_id, DFS_REG, 1392 (cwl_mask_table[cwl_value] << 12), 0x7000)); 1393 t_wr = speed_bin_table(speed_bin_index, SPEED_BIN_TWR); 1394 t_wr = (t_wr / 1000); 1395 CHECK_STATUS(ddr3_tip_if_write 1396 (dev_num, access_type, if_id, DFS_REG, 1397 (twr_mask_table[t_wr + 1] << 16), 0x70000)); 1398 1399 /* Restore original RTT values if returning from DLL OFF mode */ 1400 if (is_dll_off == 1) { 1401 CHECK_STATUS(ddr3_tip_if_write 1402 (dev_num, access_type, if_id, 0x1874, 1403 g_dic | g_rtt_nom, 0x266)); 1404 CHECK_STATUS(ddr3_tip_if_write 1405 (dev_num, access_type, if_id, 0x1884, 1406 g_dic | g_rtt_nom, 0x266)); 1407 CHECK_STATUS(ddr3_tip_if_write 1408 (dev_num, access_type, if_id, 0x1894, 1409 g_dic | g_rtt_nom, 0x266)); 1410 CHECK_STATUS(ddr3_tip_if_write 1411 (dev_num, access_type, if_id, 0x18a4, 1412 g_dic | g_rtt_nom, 0x266)); 1413 } 1414 1415 /* Reset Diver_b assert -> de-assert */ 1416 CHECK_STATUS(ddr3_tip_if_write 1417 (dev_num, access_type, if_id, 1418 SDRAM_CONFIGURATION_REG, 0, 0x10000000)); 1419 mdelay(10); 1420 CHECK_STATUS(ddr3_tip_if_write 1421 (dev_num, access_type, if_id, 1422 SDRAM_CONFIGURATION_REG, 0x10000000, 0x10000000)); 1423 1424 /* Adll configuration function of process and Frequency */ 1425 if (config_func_info[dev_num].tip_get_freq_config_info_func != NULL) { 1426 CHECK_STATUS(config_func_info[dev_num]. 1427 tip_get_freq_config_info_func(dev_num, frequency, 1428 &freq_config_info)); 1429 } 1430 /* TBD check milo5 using device ID ? */ 1431 for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); 1432 bus_cnt++) { 1433 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt); 1434 CHECK_STATUS(ddr3_tip_bus_read_modify_write 1435 (dev_num, ACCESS_TYPE_UNICAST, 1436 if_id, bus_cnt, DDR_PHY_DATA, 1437 0x92, 1438 freq_config_info. 
1439 bw_per_freq << 8 1440 /*freq_mask[dev_num][frequency] << 8 */ 1441 , 0x700)); 1442 CHECK_STATUS(ddr3_tip_bus_read_modify_write 1443 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1444 bus_cnt, DDR_PHY_DATA, 0x94, 1445 freq_config_info.rate_per_freq, 0x7)); 1446 } 1447 1448 /* DUnit to Phy drive post edge, ADLL reset assert de-assert */ 1449 CHECK_STATUS(ddr3_tip_if_write 1450 (dev_num, access_type, if_id, 1451 DRAM_PHY_CONFIGURATION, 0, 1452 (0x80000000 | 0x40000000))); 1453 mdelay(100 / (freq_val[frequency] / freq_val[DDR_FREQ_LOW_FREQ])); 1454 CHECK_STATUS(ddr3_tip_if_write 1455 (dev_num, access_type, if_id, 1456 DRAM_PHY_CONFIGURATION, (0x80000000 | 0x40000000), 1457 (0x80000000 | 0x40000000))); 1458 1459 /* polling for ADLL Done */ 1460 if (ddr3_tip_if_polling 1461 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x3ff03ff, 1462 0x3ff03ff, PHY_LOCK_STATUS_REG, 1463 MAX_POLLING_ITERATIONS) != MV_OK) { 1464 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 1465 ("Freq_set: DDR3 poll failed(1)\n")); 1466 } 1467 1468 /* pup data_pup reset assert-> deassert */ 1469 CHECK_STATUS(ddr3_tip_if_write 1470 (dev_num, access_type, if_id, 1471 SDRAM_CONFIGURATION_REG, 0, 0x60000000)); 1472 mdelay(10); 1473 CHECK_STATUS(ddr3_tip_if_write 1474 (dev_num, access_type, if_id, 1475 SDRAM_CONFIGURATION_REG, 0x60000000, 0x60000000)); 1476 1477 /* Set proper timing params before existing Self-Refresh */ 1478 ddr3_tip_set_timing(dev_num, access_type, if_id, frequency); 1479 if (delay_enable != 0) { 1480 adll_tap = MEGA / (freq_val[frequency] * 64); 1481 ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap); 1482 } 1483 1484 /* Exit SR */ 1485 CHECK_STATUS(ddr3_tip_if_write 1486 (dev_num, access_type, if_id, DFS_REG, 0, 1487 0x4)); 1488 if (ddr3_tip_if_polling 1489 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x8, DFS_REG, 1490 MAX_POLLING_ITERATIONS) != MV_OK) { 1491 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 1492 ("Freq_set: DDR3 poll failed(2)")); 1493 } 1494 1495 /* Refresh Command */ 1496 CHECK_STATUS(ddr3_tip_if_write 1497 (dev_num, access_type, if_id, 1498 SDRAM_OPERATION_REG, 0x2, 0xf1f)); 1499 if (ddr3_tip_if_polling 1500 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1f, 1501 SDRAM_OPERATION_REG, MAX_POLLING_ITERATIONS) != MV_OK) { 1502 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 1503 ("Freq_set: DDR3 poll failed(3)")); 1504 } 1505 1506 /* Release DFS Block */ 1507 CHECK_STATUS(ddr3_tip_if_write 1508 (dev_num, access_type, if_id, DFS_REG, 0, 1509 0x2)); 1510 /* Controller to MBUS Retry - normal */ 1511 CHECK_STATUS(ddr3_tip_if_write 1512 (dev_num, access_type, if_id, DUNIT_MMASK_REG, 1513 0x1, 0x1)); 1514 1515 /* MRO: Burst Length 8, CL , Auto_precharge 0x16cc */ 1516 val = 1517 ((cl_mask_table[cl_value] & 0x1) << 2) | 1518 ((cl_mask_table[cl_value] & 0xe) << 3); 1519 CHECK_STATUS(ddr3_tip_if_write 1520 (dev_num, access_type, if_id, MR0_REG, 1521 val, (0x7 << 4) | (1 << 2))); 1522 /* MR2: CWL = 10 , Auto Self-Refresh - disable */ 1523 val = (cwl_mask_table[cwl_value] << 3); 1524 /* 1525 * nklein 24.10.13 - should not be here - leave value as set in 1526 * the init configuration val |= (1 << 9); 1527 * val |= ((tm->interface_params[if_id]. 1528 * interface_temp == HWS_TEMP_HIGH) ? 
(1 << 7) : 0); 1529 */ 1530 /* nklein 24.10.13 - see above comment */ 1531 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, 1532 if_id, MR2_REG, 1533 val, (0x7 << 3))); 1534 1535 /* ODT TIMING */ 1536 val = ((cl_value - cwl_value + 1) << 4) | 1537 ((cl_value - cwl_value + 6) << 8) | 1538 ((cl_value - 1) << 12) | ((cl_value + 6) << 16); 1539 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, 1540 if_id, ODT_TIMING_LOW, 1541 val, 0xffff0)); 1542 val = 0x71 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12); 1543 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, 1544 if_id, ODT_TIMING_HI_REG, 1545 val, 0xffff)); 1546 1547 /* ODT Active */ 1548 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, 1549 if_id, 1550 DUNIT_ODT_CONTROL_REG, 1551 0xf, 0xf)); 1552 1553 /* re-write CL */ 1554 val = ((cl_mask_table[cl_value] & 0x1) << 2) | 1555 ((cl_mask_table[cl_value] & 0xe) << 3); 1556 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1557 0, MR0_REG, val, 1558 (0x7 << 4) | (1 << 2))); 1559 1560 /* re-write CWL */ 1561 val = (cwl_mask_table[cwl_value] << 3); 1562 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask, MRS2_CMD, 1563 val, (0x7 << 3))); 1564 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1565 0, MR2_REG, val, (0x7 << 3))); 1566 1567 if (mem_mask != 0) { 1568 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, 1569 if_id, 1570 CS_ENABLE_REG, 1571 1 << 3, 0x8)); 1572 } 1573 } 1574 1575 return MV_OK; 1576 } 1577 1578 /* 1579 * Set ODT values 1580 */ 1581 static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type, 1582 u32 if_id, u32 cl_value, u32 cwl_value) 1583 { 1584 /* ODT TIMING */ 1585 u32 val = (cl_value - cwl_value + 6); 1586 1587 val = ((cl_value - cwl_value + 1) << 4) | ((val & 0xf) << 8) | 1588 (((cl_value - 1) & 0xf) << 12) | 1589 (((cl_value + 6) & 0xf) << 16) | (((val & 0x10) >> 4) << 21); 1590 val |= (((cl_value - 1) >> 4) << 22) | (((cl_value + 6) >> 4) << 23); 1591 1592 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1593 ODT_TIMING_LOW, val, 0xffff0)); 1594 val = 0x71 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12); 1595 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1596 ODT_TIMING_HI_REG, val, 0xffff)); 1597 if (odt_additional == 1) { 1598 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, 1599 if_id, 1600 SDRAM_ODT_CONTROL_HIGH_REG, 1601 0xf, 0xf)); 1602 } 1603 1604 /* ODT Active */ 1605 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1606 DUNIT_ODT_CONTROL_REG, 0xf, 0xf)); 1607 1608 return MV_OK; 1609 } 1610 1611 /* 1612 * Set Timing values for training 1613 */ 1614 static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type, 1615 u32 if_id, enum hws_ddr_freq frequency) 1616 { 1617 u32 t_ckclk = 0, t_ras = 0; 1618 u32 t_rcd = 0, t_rp = 0, t_wr = 0, t_wtr = 0, t_rrd = 0, t_rtp = 0, 1619 t_rfc = 0, t_mod = 0; 1620 u32 val = 0, page_size = 0; 1621 enum hws_speed_bin speed_bin_index; 1622 enum hws_mem_size memory_size = MEM_2G; 1623 struct hws_topology_map *tm = ddr3_get_topology_map(); 1624 1625 speed_bin_index = tm->interface_params[if_id].speed_bin_index; 1626 memory_size = tm->interface_params[if_id].memory_size; 1627 page_size = 1628 (tm->interface_params[if_id].bus_width == 1629 BUS_WIDTH_8) ? page_param[memory_size]. 1630 page_size_8bit : page_param[memory_size].page_size_16bit; 1631 t_ckclk = (MEGA / freq_val[frequency]); 1632 t_rrd = (page_size == 1) ? 
speed_bin_table(speed_bin_index, 1633 SPEED_BIN_TRRD1K) : 1634 speed_bin_table(speed_bin_index, SPEED_BIN_TRRD2K); 1635 t_rrd = GET_MAX_VALUE(t_ckclk * 4, t_rrd); 1636 t_rtp = GET_MAX_VALUE(t_ckclk * 4, speed_bin_table(speed_bin_index, 1637 SPEED_BIN_TRTP)); 1638 t_wtr = GET_MAX_VALUE(t_ckclk * 4, speed_bin_table(speed_bin_index, 1639 SPEED_BIN_TWTR)); 1640 t_ras = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index, 1641 SPEED_BIN_TRAS), 1642 t_ckclk); 1643 t_rcd = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index, 1644 SPEED_BIN_TRCD), 1645 t_ckclk); 1646 t_rp = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index, 1647 SPEED_BIN_TRP), 1648 t_ckclk); 1649 t_wr = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index, 1650 SPEED_BIN_TWR), 1651 t_ckclk); 1652 t_wtr = TIME_2_CLOCK_CYCLES(t_wtr, t_ckclk); 1653 t_rrd = TIME_2_CLOCK_CYCLES(t_rrd, t_ckclk); 1654 t_rtp = TIME_2_CLOCK_CYCLES(t_rtp, t_ckclk); 1655 t_rfc = TIME_2_CLOCK_CYCLES(rfc_table[memory_size] * 1000, t_ckclk); 1656 t_mod = GET_MAX_VALUE(t_ckclk * 24, 15000); 1657 t_mod = TIME_2_CLOCK_CYCLES(t_mod, t_ckclk); 1658 1659 /* SDRAM Timing Low */ 1660 val = (t_ras & 0xf) | (t_rcd << 4) | (t_rp << 8) | (t_wr << 12) | 1661 (t_wtr << 16) | (((t_ras & 0x30) >> 4) << 20) | (t_rrd << 24) | 1662 (t_rtp << 28); 1663 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1664 SDRAM_TIMING_LOW_REG, val, 0xff3fffff)); 1665 1666 /* SDRAM Timing High */ 1667 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1668 SDRAM_TIMING_HIGH_REG, 1669 t_rfc & 0x7f, 0x7f)); 1670 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1671 SDRAM_TIMING_HIGH_REG, 1672 0x180, 0x180)); 1673 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1674 SDRAM_TIMING_HIGH_REG, 1675 0x600, 0x600)); 1676 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1677 SDRAM_TIMING_HIGH_REG, 1678 0x1800, 0xf800)); 1679 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1680 SDRAM_TIMING_HIGH_REG, 1681 ((t_rfc & 0x380) >> 7) << 16, 0x70000)); 1682 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1683 SDRAM_TIMING_HIGH_REG, 0, 1684 0x380000)); 1685 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1686 SDRAM_TIMING_HIGH_REG, 1687 (t_mod & 0xf) << 25, 0x1e00000)); 1688 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1689 SDRAM_TIMING_HIGH_REG, 1690 (t_mod >> 4) << 30, 0xc0000000)); 1691 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1692 SDRAM_TIMING_HIGH_REG, 1693 0x16000000, 0x1e000000)); 1694 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id, 1695 SDRAM_TIMING_HIGH_REG, 1696 0x40000000, 0xc0000000)); 1697 1698 return MV_OK; 1699 } 1700 1701 /* 1702 * Mode Read 1703 */ 1704 int hws_ddr3_tip_mode_read(u32 dev_num, struct mode_info *mode_info) 1705 { 1706 u32 ret; 1707 1708 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1709 MR0_REG, mode_info->reg_mr0, MASK_ALL_BITS); 1710 if (ret != MV_OK) 1711 return ret; 1712 1713 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1714 MR1_REG, mode_info->reg_mr1, MASK_ALL_BITS); 1715 if (ret != MV_OK) 1716 return ret; 1717 1718 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1719 MR2_REG, mode_info->reg_mr2, MASK_ALL_BITS); 1720 if (ret != MV_OK) 1721 return ret; 1722 1723 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1724 MR3_REG, mode_info->reg_mr2, MASK_ALL_BITS); 1725 if (ret != MV_OK) 1726 return ret; 1727 1728 ret = ddr3_tip_if_read(dev_num, 
ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1729 READ_DATA_SAMPLE_DELAY, mode_info->read_data_sample, 1730 MASK_ALL_BITS); 1731 if (ret != MV_OK) 1732 return ret; 1733 1734 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 1735 READ_DATA_READY_DELAY, mode_info->read_data_ready, 1736 MASK_ALL_BITS); 1737 if (ret != MV_OK) 1738 return ret; 1739 1740 return MV_OK; 1741 } 1742 1743 /* 1744 * Get first active IF 1745 */ 1746 int ddr3_tip_get_first_active_if(u8 dev_num, u32 interface_mask, 1747 u32 *interface_id) 1748 { 1749 u32 if_id; 1750 struct hws_topology_map *tm = ddr3_get_topology_map(); 1751 1752 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1753 VALIDATE_ACTIVE(tm->if_act_mask, if_id); 1754 if (interface_mask & (1 << if_id)) { 1755 *interface_id = if_id; 1756 break; 1757 } 1758 } 1759 1760 return MV_OK; 1761 } 1762 1763 /* 1764 * Write CS Result 1765 */ 1766 int ddr3_tip_write_cs_result(u32 dev_num, u32 offset) 1767 { 1768 u32 if_id, bus_num, cs_bitmask, data_val, cs_num; 1769 struct hws_topology_map *tm = ddr3_get_topology_map(); 1770 1771 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1772 VALIDATE_ACTIVE(tm->if_act_mask, if_id); 1773 for (bus_num = 0; bus_num < tm->num_of_bus_per_interface; 1774 bus_num++) { 1775 VALIDATE_ACTIVE(tm->bus_act_mask, bus_num); 1776 cs_bitmask = 1777 tm->interface_params[if_id]. 1778 as_bus_params[bus_num].cs_bitmask; 1779 if (cs_bitmask != effective_cs) { 1780 cs_num = GET_CS_FROM_MASK(cs_bitmask); 1781 ddr3_tip_bus_read(dev_num, if_id, 1782 ACCESS_TYPE_UNICAST, bus_num, 1783 DDR_PHY_DATA, 1784 offset + 1785 CS_REG_VALUE(effective_cs), 1786 &data_val); 1787 ddr3_tip_bus_write(dev_num, 1788 ACCESS_TYPE_UNICAST, 1789 if_id, 1790 ACCESS_TYPE_UNICAST, 1791 bus_num, DDR_PHY_DATA, 1792 offset + 1793 CS_REG_VALUE(cs_num), 1794 data_val); 1795 } 1796 } 1797 } 1798 1799 return MV_OK; 1800 } 1801 1802 /* 1803 * Write MRS 1804 */ 1805 int ddr3_tip_write_mrs_cmd(u32 dev_num, u32 *cs_mask_arr, u32 cmd, 1806 u32 data, u32 mask) 1807 { 1808 u32 if_id, reg; 1809 struct hws_topology_map *tm = ddr3_get_topology_map(); 1810 1811 reg = (cmd == MRS1_CMD) ? 
MR1_REG : MR2_REG; 1812 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1813 PARAM_NOT_CARE, reg, data, mask)); 1814 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1815 VALIDATE_ACTIVE(tm->if_act_mask, if_id); 1816 CHECK_STATUS(ddr3_tip_if_write 1817 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1818 SDRAM_OPERATION_REG, 1819 (cs_mask_arr[if_id] << 8) | cmd, 0xf1f)); 1820 } 1821 1822 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1823 VALIDATE_ACTIVE(tm->if_act_mask, if_id); 1824 if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 1825 0x1f, SDRAM_OPERATION_REG, 1826 MAX_POLLING_ITERATIONS) != MV_OK) { 1827 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 1828 ("write_mrs_cmd: Poll cmd fail")); 1829 } 1830 } 1831 1832 return MV_OK; 1833 } 1834 1835 /* 1836 * Reset XSB Read FIFO 1837 */ 1838 int ddr3_tip_reset_fifo_ptr(u32 dev_num) 1839 { 1840 u32 if_id = 0; 1841 1842 /* Configure PHY reset value to 0 in order to "clean" the FIFO */ 1843 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1844 if_id, 0x15c8, 0, 0xff000000)); 1845 /* 1846 * Move PHY to RL mode (only in RL mode the PHY overrides FIFO values 1847 * during FIFO reset) 1848 */ 1849 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1850 if_id, TRAINING_SW_2_REG, 1851 0x1, 0x9)); 1852 /* In order that above configuration will influence the PHY */ 1853 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1854 if_id, 0x15b0, 1855 0x80000000, 0x80000000)); 1856 /* Reset read fifo assertion */ 1857 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1858 if_id, 0x1400, 0, 0x40000000)); 1859 /* Reset read fifo deassertion */ 1860 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1861 if_id, 0x1400, 1862 0x40000000, 0x40000000)); 1863 /* Move PHY back to functional mode */ 1864 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1865 if_id, TRAINING_SW_2_REG, 1866 0x8, 0x9)); 1867 /* Stop training machine */ 1868 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 1869 if_id, 0x15b4, 0x10000, 0x10000)); 1870 1871 return MV_OK; 1872 } 1873 1874 /* 1875 * Reset Phy registers 1876 */ 1877 int ddr3_tip_ddr3_reset_phy_regs(u32 dev_num) 1878 { 1879 u32 if_id, phy_id, cs; 1880 struct hws_topology_map *tm = ddr3_get_topology_map(); 1881 1882 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) { 1883 VALIDATE_ACTIVE(tm->if_act_mask, if_id); 1884 for (phy_id = 0; phy_id < tm->num_of_bus_per_interface; 1885 phy_id++) { 1886 VALIDATE_ACTIVE(tm->bus_act_mask, phy_id); 1887 CHECK_STATUS(ddr3_tip_bus_write 1888 (dev_num, ACCESS_TYPE_UNICAST, 1889 if_id, ACCESS_TYPE_UNICAST, 1890 phy_id, DDR_PHY_DATA, 1891 WL_PHY_REG + 1892 CS_REG_VALUE(effective_cs), 1893 phy_reg0_val)); 1894 CHECK_STATUS(ddr3_tip_bus_write 1895 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1896 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA, 1897 RL_PHY_REG + CS_REG_VALUE(effective_cs), 1898 phy_reg2_val)); 1899 CHECK_STATUS(ddr3_tip_bus_write 1900 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1901 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA, 1902 READ_CENTRALIZATION_PHY_REG + 1903 CS_REG_VALUE(effective_cs), phy_reg3_val)); 1904 CHECK_STATUS(ddr3_tip_bus_write 1905 (dev_num, ACCESS_TYPE_UNICAST, if_id, 1906 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA, 1907 WRITE_CENTRALIZATION_PHY_REG + 1908 CS_REG_VALUE(effective_cs), phy_reg3_val)); 1909 } 1910 } 1911 1912 /* Set Receiver Calibration value */ 1913 for (cs = 0; cs < MAX_CS_NUM; cs++) { 1914 /* PHY register 0xdb bits[5:0] - configure to 63 */ 
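		/*
		 * 63 (0x3f) is the largest value the 6-bit field can hold;
		 * the multicast/multicast access writes it to the CSn IOB
		 * VREF register of every interface and every data PHY, so
		 * all receivers start from the same known calibration
		 * value for this chip select.
		 */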
		CHECK_STATUS(ddr3_tip_bus_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      DDR_PHY_DATA, CSN_IOB_VREF_REG(cs), 63));
	}

	return MV_OK;
}

/*
 * Restore Dunit registers
 */
int ddr3_tip_restore_dunit_regs(u32 dev_num)
{
	u32 index_cnt;

	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       PARAM_NOT_CARE, CALIB_MACHINE_CTRL_REG,
				       0x1, 0x1));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       PARAM_NOT_CARE, CALIB_MACHINE_CTRL_REG,
				       calibration_update_control << 3,
				       0x3 << 3));
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
				       PARAM_NOT_CARE,
				       ODPG_WRITE_READ_MODE_ENABLE_REG,
				       0xffff, MASK_ALL_BITS));

	for (index_cnt = 0; index_cnt < ARRAY_SIZE(odpg_default_value);
	     index_cnt++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      odpg_default_value[index_cnt].reg_addr,
			      odpg_default_value[index_cnt].reg_data,
			      odpg_default_value[index_cnt].reg_mask));
	}

	return MV_OK;
}

/*
 * Auto tune main flow
 */
static int ddr3_tip_ddr3_training_main_flow(u32 dev_num)
{
	enum hws_ddr_freq freq = init_freq;
	struct init_cntr_param init_cntr_prm;
	int ret = MV_OK;
	u32 if_id;
	u32 max_cs = hws_ddr3_tip_max_cs_get();
	struct hws_topology_map *tm = ddr3_get_topology_map();

#ifndef EXCLUDE_SWITCH_DEBUG
	if (debug_training == DEBUG_LEVEL_TRACE) {
		CHECK_STATUS(print_device_info((u8)dev_num));
	}
#endif

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		CHECK_STATUS(ddr3_tip_ddr3_reset_phy_regs(dev_num));
	}
	/* Reset to 0 after each loop so a stale value is not used later */
	effective_cs = 0;

	freq = init_freq;
	if (is_pll_before_init != 0) {
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			config_func_info[dev_num].tip_set_freq_divider_func(
				(u8)dev_num, if_id, freq);
		}
	}

	if (is_adll_calib_before_init != 0) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("with adll calib before init\n"));
		adll_calibration(dev_num, ACCESS_TYPE_MULTICAST, 0, freq);
	}

	if (is_reg_dump != 0) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("Dump before init controller\n"));
		ddr3_tip_reg_dump(dev_num);
	}

	if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
		training_stage = INIT_CONTROLLER;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("INIT_CONTROLLER_MASK_BIT\n"));
		init_cntr_prm.do_mrs_phy = 1;
		init_cntr_prm.is_ctrl64_bit = 0;
		init_cntr_prm.init_phy = 1;
		init_cntr_prm.msys_init = 0;
		ret = hws_ddr3_tip_init_controller(dev_num, &init_cntr_prm);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("hws_ddr3_tip_init_controller failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

#ifdef STATIC_ALGO_SUPPORT
	if (mask_tune_func & STATIC_LEVELING_MASK_BIT) {
		training_stage = STATIC_LEVELING;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("STATIC_LEVELING_MASK_BIT\n"));
		ret = ddr3_tip_run_static_alg(dev_num, freq);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
("ddr3_tip_run_static_alg failure\n")); 2030 if (debug_mode == 0) 2031 return MV_FAIL; 2032 } 2033 } 2034 #endif 2035 2036 if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) { 2037 training_stage = SET_LOW_FREQ; 2038 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2039 ("SET_LOW_FREQ_MASK_BIT %d\n", 2040 freq_val[low_freq])); 2041 ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST, 2042 PARAM_NOT_CARE, low_freq); 2043 if (is_reg_dump != 0) 2044 ddr3_tip_reg_dump(dev_num); 2045 if (ret != MV_OK) { 2046 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2047 ("ddr3_tip_freq_set failure\n")); 2048 if (debug_mode == 0) 2049 return MV_FAIL; 2050 } 2051 } 2052 2053 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 2054 if (mask_tune_func & LOAD_PATTERN_MASK_BIT) { 2055 training_stage = LOAD_PATTERN; 2056 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2057 ("LOAD_PATTERN_MASK_BIT #%d\n", 2058 effective_cs)); 2059 ret = ddr3_tip_load_all_pattern_to_mem(dev_num); 2060 if (is_reg_dump != 0) 2061 ddr3_tip_reg_dump(dev_num); 2062 if (ret != MV_OK) { 2063 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2064 ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n", 2065 effective_cs)); 2066 if (debug_mode == 0) 2067 return MV_FAIL; 2068 } 2069 } 2070 } 2071 /* Set to 0 after each loop to avoid illegal value may be used */ 2072 effective_cs = 0; 2073 2074 if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) { 2075 training_stage = SET_MEDIUM_FREQ; 2076 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2077 ("SET_MEDIUM_FREQ_MASK_BIT %d\n", 2078 freq_val[medium_freq])); 2079 ret = 2080 ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST, 2081 PARAM_NOT_CARE, medium_freq); 2082 if (is_reg_dump != 0) 2083 ddr3_tip_reg_dump(dev_num); 2084 if (ret != MV_OK) { 2085 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2086 ("ddr3_tip_freq_set failure\n")); 2087 if (debug_mode == 0) 2088 return MV_FAIL; 2089 } 2090 } 2091 2092 if (mask_tune_func & WRITE_LEVELING_MASK_BIT) { 2093 training_stage = WRITE_LEVELING; 2094 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2095 ("WRITE_LEVELING_MASK_BIT\n")); 2096 if ((rl_mid_freq_wa == 0) || (freq_val[medium_freq] == 533)) { 2097 ret = ddr3_tip_dynamic_write_leveling(dev_num); 2098 } else { 2099 /* Use old WL */ 2100 ret = ddr3_tip_legacy_dynamic_write_leveling(dev_num); 2101 } 2102 2103 if (is_reg_dump != 0) 2104 ddr3_tip_reg_dump(dev_num); 2105 if (ret != MV_OK) { 2106 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2107 ("ddr3_tip_dynamic_write_leveling failure\n")); 2108 if (debug_mode == 0) 2109 return MV_FAIL; 2110 } 2111 } 2112 2113 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) { 2114 if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) { 2115 training_stage = LOAD_PATTERN_2; 2116 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2117 ("LOAD_PATTERN_2_MASK_BIT CS #%d\n", 2118 effective_cs)); 2119 ret = ddr3_tip_load_all_pattern_to_mem(dev_num); 2120 if (is_reg_dump != 0) 2121 ddr3_tip_reg_dump(dev_num); 2122 if (ret != MV_OK) { 2123 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, 2124 ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n", 2125 effective_cs)); 2126 if (debug_mode == 0) 2127 return MV_FAIL; 2128 } 2129 } 2130 } 2131 /* Set to 0 after each loop to avoid illegal value may be used */ 2132 effective_cs = 0; 2133 2134 if (mask_tune_func & READ_LEVELING_MASK_BIT) { 2135 training_stage = READ_LEVELING; 2136 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, 2137 ("READ_LEVELING_MASK_BIT\n")); 2138 if ((rl_mid_freq_wa == 0) || (freq_val[medium_freq] == 533)) { 2139 ret = ddr3_tip_dynamic_read_leveling(dev_num, medium_freq); 2140 } else { 2141 /* Use old RL */ 2142 ret = 
			    ddr3_tip_legacy_dynamic_read_leveling(dev_num);
		}

		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_read_leveling failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
		training_stage = WRITE_LEVELING_SUPP;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("WRITE_LEVELING_SUPP_MASK_BIT\n"));
		ret = ddr3_tip_dynamic_write_leveling_supp(dev_num);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_write_leveling_supp failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & PBS_RX_MASK_BIT) {
			training_stage = PBS_RX;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("PBS_RX_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_pbs_rx(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_pbs_rx failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & PBS_TX_MASK_BIT) {
			training_stage = PBS_TX;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("PBS_TX_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_pbs_tx(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_pbs_tx failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Reset to 0 after each loop so a stale value is not used later */
	effective_cs = 0;

	if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
		training_stage = SET_TARGET_FREQ;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("SET_TARGET_FREQ_MASK_BIT %d\n",
				   freq_val[tm->
					    interface_params[first_active_if].
					    memory_freq]));
		ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
					PARAM_NOT_CARE,
					tm->interface_params[first_active_if].
					memory_freq);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_freq_set failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
		training_stage = WRITE_LEVELING_TF;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("WRITE_LEVELING_TF_MASK_BIT\n"));
		ret = ddr3_tip_dynamic_write_leveling(dev_num);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_write_leveling TF failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & LOAD_PATTERN_HIGH_MASK_BIT) {
		training_stage = LOAD_PATTERN_HIGH;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("LOAD_PATTERN_HIGH\n"));
		ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_load_all_pattern_to_mem failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
		training_stage = READ_LEVELING_TF;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("READ_LEVELING_TF_MASK_BIT\n"));
		ret = ddr3_tip_dynamic_read_leveling(dev_num, tm->
						     interface_params[first_active_if].
						     memory_freq);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (ret != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("ddr3_tip_dynamic_read_leveling TF failure\n"));
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}

	if (mask_tune_func & DM_PBS_TX_MASK_BIT) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("DM_PBS_TX_MASK_BIT\n"));
	}

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
			training_stage = VREF_CALIBRATION;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("VREF\n"));
			ret = ddr3_tip_vref(dev_num);
			if (is_reg_dump != 0) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("VREF Dump\n"));
				ddr3_tip_reg_dump(dev_num);
			}
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_vref failure\n"));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Reset to 0 after each loop so a stale value is not used later */
	effective_cs = 0;

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
			training_stage = CENTRALIZATION_RX;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("CENTRALIZATION_RX_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_centralization_rx(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_centralization_rx failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Reset to 0 after each loop so a stale value is not used later */
	effective_cs = 0;

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
			training_stage = WRITE_LEVELING_SUPP_TF;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("WRITE_LEVELING_SUPP_TF_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_dynamic_write_leveling_supp(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_dynamic_write_leveling_supp TF failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Reset to 0 after each loop so a stale value is not used later */
	effective_cs = 0;

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
			training_stage = CENTRALIZATION_TX;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("CENTRALIZATION_TX_MASK_BIT CS #%d\n",
					   effective_cs));
			ret = ddr3_tip_centralization_tx(dev_num);
			if (is_reg_dump != 0)
				ddr3_tip_reg_dump(dev_num);
			if (ret != MV_OK) {
				DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
						  ("ddr3_tip_centralization_tx failure CS #%d\n",
						   effective_cs));
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
	}
	/* Reset to 0 after each loop so a stale value is not used later */
	effective_cs = 0;

	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("restore registers to default\n"));
	/* restore register values */
	CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));

	if (is_reg_dump != 0)
		ddr3_tip_reg_dump(dev_num);

	return MV_OK;
}

/*
 * DDR3 Dynamic training flow
 */
static int ddr3_tip_ddr3_auto_tune(u32 dev_num)
{
	u32 if_id, stage, ret;
	int is_if_fail = 0, is_auto_tune_fail = 0;

	training_stage = INIT_CONTROLLER;

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		for (stage = 0; stage < MAX_STAGE_LIMIT; stage++)
			training_result[stage][if_id] = NO_TEST_DONE;
	}

	ret = ddr3_tip_ddr3_training_main_flow(dev_num);

	/* activate XSB test */
	if (xsb_validate_type != 0) {
		run_xsb_test(dev_num, xsb_validation_base_address, 1, 1,
			     0x1024);
	}

	if (is_reg_dump != 0)
		ddr3_tip_reg_dump(dev_num);

	/* print log */
	CHECK_STATUS(ddr3_tip_print_log(dev_num, window_mem_addr));

	if (ret != MV_OK) {
		CHECK_STATUS(ddr3_tip_print_stability_log(dev_num));
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		is_if_fail = 0;
		for (stage = 0; stage < MAX_STAGE_LIMIT; stage++) {
			if (training_result[stage][if_id] == TEST_FAILED)
				is_if_fail = 1;
		}
		if (is_if_fail == 1) {
			is_auto_tune_fail = 1;
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("Auto Tune failed for IF %d\n",
					   if_id));
		}
	}

	if ((ret == MV_FAIL) || (is_auto_tune_fail == 1))
		return MV_FAIL;
	else
		return MV_OK;
}

/*
 * Enable init sequence
 */
int ddr3_tip_enable_init_sequence(u32 dev_num)
{
	int is_fail = 0;
	u32 if_id = 0, mem_mask = 0, bus_index = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/* Enable init sequence */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 0,
				       SDRAM_INIT_CONTROL_REG, 0x1, 0x1));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);

		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1,
		     SDRAM_INIT_CONTROL_REG,
		     MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("polling failed IF %d\n",
					   if_id));
			is_fail = 1;
			continue;
		}

		mem_mask = 0;
		for (bus_index = 0;
		     bus_index < GET_TOPOLOGY_NUM_OF_BUSES();
		     bus_index++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
			mem_mask |=
				tm->interface_params[if_id].
				as_bus_params[bus_index].mirror_enable_bitmask;
		}

		if (mem_mask != 0) {
			/* Disable Multi CS */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_MULTICAST,
				      if_id, CS_ENABLE_REG, 1 << 3,
				      1 << 3));
		}
	}

	return (is_fail == 0) ? MV_OK : MV_FAIL;
}

int ddr3_tip_register_dq_table(u32 dev_num, u32 *table)
{
	dq_map_table = table;

	return MV_OK;
}

/*
 * Check if pup search is locked
 */
int ddr3_tip_is_pup_lock(u32 *pup_buf, enum hws_training_result read_mode)
{
	u32 bit_start = 0, bit_end = 0, bit_id;

	if (read_mode == RESULT_PER_BIT) {
		bit_start = 0;
		bit_end = BUS_WIDTH_IN_BITS - 1;
	} else {
		bit_start = 0;
		bit_end = 0;
	}

	for (bit_id = bit_start; bit_id <= bit_end; bit_id++) {
		if (GET_LOCK_RESULT(pup_buf[bit_id]) == 0)
			return 0;
	}

	return 1;
}

/*
 * Get minimum buffer value
 */
u8 ddr3_tip_get_buf_min(u8 *buf_ptr)
{
	u8 min_val = 0xff;
	u8 cnt = 0;

	for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) {
		if (buf_ptr[cnt] < min_val)
			min_val = buf_ptr[cnt];
	}

	return min_val;
}

/*
 * Get maximum buffer value
 */
u8 ddr3_tip_get_buf_max(u8 *buf_ptr)
{
	u8 max_val = 0;
	u8 cnt = 0;

	for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) {
		if (buf_ptr[cnt] > max_val)
			max_val = buf_ptr[cnt];
	}

	return max_val;
}

/*
 * The following functions return memory parameters:
 * bus and device width, device size
 */

u32 hws_ddr3_get_bus_width(void)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

	return (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) == 1) ? 16 : 32;
}

u32 hws_ddr3_get_device_width(u32 if_id)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

	return (tm->interface_params[if_id].bus_width == BUS_WIDTH_8) ?
		8 : 16;
}

u32 hws_ddr3_get_device_size(u32 if_id)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (tm->interface_params[if_id].memory_size >= MEM_SIZE_LAST) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("Error: Wrong device size of Cs: %d",
				   tm->interface_params[if_id].memory_size));
		return 0;
	} else {
		return 1 << tm->interface_params[if_id].memory_size;
	}
}

int hws_ddr3_calc_mem_cs_size(u32 if_id, u32 cs, u32 *cs_size)
{
	u32 cs_mem_size, dev_size;

	dev_size = hws_ddr3_get_device_size(if_id);
	if (dev_size != 0) {
		cs_mem_size = ((hws_ddr3_get_bus_width() /
				hws_ddr3_get_device_width(if_id)) * dev_size);

		/*
		 * The result is expressed in GByte x 16 (multiples of
		 * 1/16 GByte) to avoid floating point arithmetic.
		 */
		if (cs_mem_size == 2) {
			*cs_size = _128M;
		} else if (cs_mem_size == 4) {
			*cs_size = _256M;
		} else if (cs_mem_size == 8) {
			*cs_size = _512M;
		} else if (cs_mem_size == 16) {
			*cs_size = _1G;
		} else if (cs_mem_size == 32) {
			*cs_size = _2G;
		} else {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("Error: Wrong Memory size of Cs: %d", cs));
			return MV_FAIL;
		}
		return MV_OK;
	} else {
		return MV_FAIL;
	}
}

int hws_ddr3_cs_base_adr_calc(u32 if_id, u32 cs, u32 *cs_base_addr)
{
	u32 cs_mem_size = 0;
#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
	u32 physical_mem_size;
	u32 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE;
#endif

	if (hws_ddr3_calc_mem_cs_size(if_id, cs, &cs_mem_size) != MV_OK)
		return MV_FAIL;

#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
	struct hws_topology_map *tm = ddr3_get_topology_map();
	/*
	 * If the number of address pins does not allow using the maximum
	 * memory size defined in the topology, the memory size is capped
	 * at DEVICE_MAX_DRAM_ADDRESS_SIZE.
	 */
	physical_mem_size =
		mv_hwsmem_size[tm->interface_params[0].memory_size];

	if (hws_ddr3_get_device_width(cs) == 16) {
		/*
		 * A 16-bit memory device can be twice as large, since one
		 * less address pin is needed.
		 */
		max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE * 2;
	}

	if (physical_mem_size > max_mem_size) {
		cs_mem_size = max_mem_size *
			(hws_ddr3_get_bus_width() /
			 hws_ddr3_get_device_width(if_id));
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("Updated Physical Mem size is from 0x%x to %x\n",
				   physical_mem_size,
				   DEVICE_MAX_DRAM_ADDRESS_SIZE));
	}
#endif

	/* calculate CS base addr */
	*cs_base_addr = ((cs_mem_size) * cs) & 0xffff0000;

	return MV_OK;
}
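
/*
 * Illustrative example of the CS size/base calculation above (values are
 * assumed, not taken from a specific board): a 32-bit bus built from 8-bit
 * devices uses four devices per chip-select; with 4Gbit devices,
 * hws_ddr3_get_device_size() returns 8 (units of 512Mbit), so
 * cs_mem_size = (32 / 8) * 8 = 32, i.e. 32 / 16 = 2 GByte, which
 * hws_ddr3_calc_mem_cs_size() maps to _2G. hws_ddr3_cs_base_adr_calc()
 * then places chip-select n at n * _2G, with the 0xffff0000 mask keeping
 * the base address 64KB-aligned.
 */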